Mirror of https://github.com/XRPLF/rippled.git (synced 2025-11-19 10:35:50 +00:00)

Compare commits: 0.25.0 ... 0.29.0-hf1 (1276 commits)
[Commit table not reproduced: 1276 commits between 0.25.0 and 0.29.0-hf1, running from 6a55f99ede through 74653e57e6. Only the SHA1 column was captured by the mirror; the Author and Date cells are empty.]
.gitignore (vendored, 7 changes)

@@ -19,6 +19,7 @@
*.o
build
tags
TAGS
bin/rippled
Debug/*.*
Release/*.*

@@ -30,6 +31,7 @@ Release/*.*
tmp

# Ignore database directory.
db/
db/*.db
db/*.db-*

@@ -74,4 +76,7 @@ My Amplifier XE Results - RippleD

/out.txt

# Build Log
rippled-build.log

# Profiling data
gmon.out
.travis.yml (38 changes)

@@ -2,16 +2,21 @@ language: cpp
compiler:
  - clang
  - gcc
env:
  - TARGET=debug
  - TARGET=debug.nounity
  # We can specify any combination of builds here, for example, to
  # include release builds, too, uncomment the following lines.
  #- TARGET=release
  #- TARGET=release.nounity
before_install:
  - sudo apt-get update -qq
  - sudo apt-get install -qq python-software-properties
  - sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
  - sudo add-apt-repository -y ppa:boost-latest/ppa
  - sudo add-apt-repository -y ppa:afrank/boost
  - sudo apt-get update -qq
  - sudo apt-get install -qq g++-4.8
  - sudo apt-get install -qq libboost1.55-all-dev
  # We want debug symbols for boost as we install gdb later
  - sudo apt-get install -qq libboost1.55-dbg
  - sudo apt-get install -qq libboost1.57-all-dev
  - sudo apt-get install -qq mlocate
  - sudo updatedb
  - sudo locate libboost | grep /lib | grep -e ".a$"

@@ -27,23 +32,38 @@ before_install:
  # What versions are we ACTUALLY running?
  - g++ -v
  - clang -v
  # Avoid `spurious errors` caused by ~/.npm permission issues
  # Does it already exist? Who owns? What permissions?
  - ls -lah ~/.npm || mkdir ~/.npm
  # Make sure we own it
  - sudo chown -R $USER ~/.npm

script:
  # Set so any failing command will abort the build
  - set -e
  # If only we could do -j12 ;)
  - scons
  # Make sure vcxproj is up to date
  - scons vcxproj
  - git diff --exit-code
  # $CC will be either `clang` or `gcc` (If only we could do -j12 ;)
  - scons $CC.$TARGET
  # We can be sure we're using the build/$CC.$TARGET variant (-f so never err)
  - rm -f build/rippled
  - export RIPPLED_PATH="$PWD/build/$CC.$TARGET/rippled"
  # See what we've actually built
  - ldd ./build/rippled
  - ldd $RIPPLED_PATH
  # Run unittests (under gdb)
  - | # create gdb script
    echo "set env MALLOC_CHECK_=3" > script.gdb
    echo "run" >> script.gdb
    echo "backtrace full" >> script.gdb
    # gdb --help
  - cat script.gdb | gdb --ex 'set print thread-events off' --return-child-result --args ./build/rippled --unittest
  # Run integration tests
  - cat script.gdb | gdb --ex 'set print thread-events off' --return-child-result --args $RIPPLED_PATH --unittest
  - npm install
  # Use build/(gcc|clang).$TARGET/rippled
  - |
    echo "exports.default_server_config = {\"rippled_path\" : \"$RIPPLED_PATH\"};" > test/config.js

  # Run integration tests
  - npm test
notifications:
  email:
PKGBUILD (Arch Linux package script; file header not captured in the mirror)

@@ -1,4 +1,4 @@
# Maintainer: Roberto Catini <roberto.catini@gmail.com>

pkgname=rippled
pkgrel=1

@@ -21,7 +21,7 @@ pkgver() {

build() {
  cd "$srcdir/$pkgname"
  scons build/rippled
  scons
}

check() {
Builds/Docker/Dockerfile (new file, 30 lines)

@@ -0,0 +1,30 @@
# rippled

# use the ubuntu base image
FROM ubuntu
MAINTAINER Roberto Catini roberto.catini@gmail.com

# make sure the package repository is up to date
RUN apt-get update
RUN apt-get -y upgrade

# install the dependencies
RUN apt-get -y install git scons pkg-config protobuf-compiler libprotobuf-dev libssl-dev libboost1.55-all-dev

# download source code from official repository
RUN git clone https://github.com/ripple/rippled.git src; cd src/; git checkout master

# compile
RUN cd src/; scons build/rippled

# move to root directory and strip
RUN cp src/build/rippled rippled; strip rippled

# copy default config
RUN cp src/doc/rippled-example.cfg rippled.cfg

# clean source
RUN rm -r src

# launch rippled when launching the container
ENTRYPOINT ./rippled
Builds/Docker/Dockerfile-testnet (new file, 23 lines)

@@ -0,0 +1,23 @@
FROM ubuntu
MAINTAINER Torrie Fischer <torrie@ripple.com>

RUN apt-get update -qq &&\
    apt-get install -qq software-properties-common &&\
    apt-add-repository -y ppa:ubuntu-toolchain-r/test &&\
    apt-add-repository -y ppa:afrank/boost &&\
    apt-get update -qq

RUN apt-get purge -qq libboost1.48-dev &&\
    apt-get install -qq libprotobuf8 libboost1.57-all-dev

RUN mkdir -p /srv/rippled/data

VOLUME /srv/rippled/data/

ENTRYPOINT ["/srv/rippled/bin/rippled"]
CMD ["--conf", "/srv/rippled/data/rippled.cfg"]
EXPOSE 51235/udp
EXPOSE 5005/tcp

ADD ./rippled.cfg /srv/rippled/data/rippled.cfg
ADD ./rippled /srv/rippled/bin/
Builds/Docker/build-ci.sh (new executable file, 13 lines)

@@ -0,0 +1,13 @@
set -e

mkdir -p build/docker/
cp doc/rippled-example.cfg build/clang.debug/rippled build/docker/
cp Builds/Docker/Dockerfile-testnet build/docker/Dockerfile
mv build/docker/rippled-example.cfg build/docker/rippled.cfg
strip build/docker/rippled
docker build -t ripple/rippled:$CIRCLE_SHA1 build/docker/
docker tag ripple/rippled:$CIRCLE_SHA1 ripple/rippled:latest

if [ -z "$CIRCLE_PR_NUMBER" ]; then
  docker tag ripple/rippled:$CIRCLE_SHA1 ripple/rippled:$CIRCLE_BRANCH
fi
Builds/Docker/push-to-hub.sh (new executable file, 16 lines)

@@ -0,0 +1,16 @@
set -e

if [ -z "$DOCKER_EMAIL" -o -z "$DOCKER_USERNAME" -o -z "$DOCKER_PASSWORD" ]; then
  echo "Docker credentials are not set. Can't login to docker, no containers will be pushed."
  exit 0
fi

if [ -n "$CIRCLE_PR_NUMBER" ]; then
  echo "Not pushing results of a pull request build."
  exit 0
fi

docker login -e $DOCKER_EMAIL -u $DOCKER_USERNAME -p $DOCKER_PASSWORD
docker push ripple/rippled:$CIRCLE_SHA1
docker push ripple/rippled:$CIRCLE_BRANCH
docker push ripple/rippled:latest
Builds/Eclipse/README.md (new file, 31 lines)

@@ -0,0 +1,31 @@
**Requirements**

1. Java Runtime Environment (JRE)
2. Eclipse with CDT (tested on Luna):
   http://www.eclipse.org/downloads/packages/eclipse-ide-cc-developers/lunasr2
3. Eclipse SCons plugin: http://sconsolidator.com/

**WARNING**: by default the SCons plugin uses 16 threads. Go to
*Window->Preferences->SCons->Build Settings* in Eclipse and make it use only
4-8 jobs (threads), or whatever you feel comfortable with. It will positively
freeze your system if you run with 16 threads/jobs.



**Getting Started**

After setting up Eclipse, just do File->New->Other...
Select: C/C++ / New SCons project from existing source.
Point the importer to the folder where the SConstruct resides (normally the
root folder of your git workspace).

**Build**

Just hit Project->Build All in Eclipse to get started. And remember not to
let it run 16 threads!

**Debug**

Start a new Eclipse debug configuration and set the binary to run to
build/rippled (assuming you have built it).


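The thread-count warning in the README above can also be enforced from the build scripts themselves instead of the Eclipse preference dialog. A minimal sketch, assuming a plain SConstruct; SetOption and GetOption are standard SCons globals, and this fragment is illustrative rather than part of the rippled build:

    # SConstruct fragment: cap parallel build jobs so an IDE-triggered build
    # cannot fan out to 16 jobs by default.
    SetOption('num_jobs', 4)
    print('building with -j%d' % GetOption('num_jobs'))

A value set with SetOption acts as a default; an explicit -j on the scons command line still takes precedence.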
BIN  Builds/Eclipse/debug.png (new binary file, 18 KiB; not shown)
BIN  Builds/Eclipse/scons.png (new binary file, 17 KiB; not shown)
qmake project file (.pro; file header not captured in the mirror)

@@ -62,42 +62,42 @@ UI_HEADERS_DIR += ../../src/ripple_basics
# New style
#
SOURCES += \
    ../../src/ripple/beast/ripple_beast.cpp \
    ../../src/ripple/beast/ripple_beast.unity.cpp \
    ../../src/ripple/beast/ripple_beastc.c \
    ../../src/ripple/common/ripple_common.cpp \
    ../../src/ripple/http/ripple_http.cpp \
    ../../src/ripple/json/ripple_json.cpp \
    ../../src/ripple/peerfinder/ripple_peerfinder.cpp \
    ../../src/ripple/radmap/ripple_radmap.cpp \
    ../../src/ripple/resource/ripple_resource.cpp \
    ../../src/ripple/sitefiles/ripple_sitefiles.cpp \
    ../../src/ripple/sslutil/ripple_sslutil.cpp \
    ../../src/ripple/testoverlay/ripple_testoverlay.cpp \
    ../../src/ripple/types/ripple_types.cpp \
    ../../src/ripple/validators/ripple_validators.cpp
    ../../src/ripple/common/ripple_common.unity.cpp \
    ../../src/ripple/http/ripple_http.unity.cpp \
    ../../src/ripple/json/ripple_json.unity.cpp \
    ../../src/ripple/peerfinder/ripple_peerfinder.unity.cpp \
    ../../src/ripple/radmap/ripple_radmap.unity.cpp \
    ../../src/ripple/resource/ripple_resource.unity.cpp \
    ../../src/ripple/sitefiles/ripple_sitefiles.unity.cpp \
    ../../src/ripple/sslutil/ripple_sslutil.unity.cpp \
    ../../src/ripple/testoverlay/ripple_testoverlay.unity.cpp \
    ../../src/ripple/types/ripple_types.unity.cpp \
    ../../src/ripple/validators/ripple_validators.unity.cpp

# ---------
# Old style
#
SOURCES += \
    ../../src/ripple_app/ripple_app.cpp \
    ../../src/ripple_app/ripple_app_pt1.cpp \
    ../../src/ripple_app/ripple_app_pt2.cpp \
    ../../src/ripple_app/ripple_app_pt3.cpp \
    ../../src/ripple_app/ripple_app_pt4.cpp \
    ../../src/ripple_app/ripple_app_pt5.cpp \
    ../../src/ripple_app/ripple_app_pt6.cpp \
    ../../src/ripple_app/ripple_app_pt7.cpp \
    ../../src/ripple_app/ripple_app_pt8.cpp \
    ../../src/ripple_basics/ripple_basics.cpp \
    ../../src/ripple_core/ripple_core.cpp \
    ../../src/ripple_data/ripple_data.cpp \
    ../../src/ripple_hyperleveldb/ripple_hyperleveldb.cpp \
    ../../src/ripple_leveldb/ripple_leveldb.cpp \
    ../../src/ripple_net/ripple_net.cpp \
    ../../src/ripple_overlay/ripple_overlay.cpp \
    ../../src/ripple_rpc/ripple_rpc.cpp \
    ../../src/ripple_websocket/ripple_websocket.cpp
    ../../src/ripple_app/ripple_app.unity.cpp \
    ../../src/ripple_app/ripple_app_pt1.unity.cpp \
    ../../src/ripple_app/ripple_app_pt2.unity.cpp \
    ../../src/ripple_app/ripple_app_pt3.unity.cpp \
    ../../src/ripple_app/ripple_app_pt4.unity.cpp \
    ../../src/ripple_app/ripple_app_pt5.unity.cpp \
    ../../src/ripple_app/ripple_app_pt6.unity.cpp \
    ../../src/ripple_app/ripple_app_pt7.unity.cpp \
    ../../src/ripple_app/ripple_app_pt8.unity.cpp \
    ../../src/ripple_basics/ripple_basics.unity.cpp \
    ../../src/ripple_core/ripple_core.unity.cpp \
    ../../src/ripple_data/ripple_data.unity.cpp \
    ../../src/ripple_hyperleveldb/ripple_hyperleveldb.unity.cpp \
    ../../src/ripple_leveldb/ripple_leveldb.unity.cpp \
    ../../src/ripple_net/ripple_net.unity.cpp \
    ../../src/ripple_overlay/ripple_overlay.unity.cpp \
    ../../src/ripple_rpc/ripple_rpc.unity.cpp \
    ../../src/ripple_websocket/ripple_websocket.unity.cpp

LIBS += \
    -lboost_date_time-mt\
Builds/Test.py (new executable file, 180 lines)

@@ -0,0 +1,180 @@
#!/usr/bin/env python

# This file is part of rippled: https://github.com/ripple/rippled
# Copyright (c) 2012 - 2015 Ripple Labs Inc.
#
# Permission to use, copy, modify, and/or distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.

"""
Invocation:

  ./Builds/Test.py - builds and tests all configurations

#
# The build must succeed without shell aliases for this to work.
#
# Common problems:
# 1) Boost not found. Solution: export BOOST_ROOT=[path to boost folder]
# 2) OpenSSL not found. Solution: export OPENSSL_ROOT=[path to OpenSSL folder]
# 3) scons is an alias. Solution: Create a script named "scons" somewhere in
#    your $PATH (eg. ~/bin/scons will often work).
#       #!/bin/sh
#       python /C/Python27/Scripts/scons.py "${@}"
"""
from __future__ import absolute_import, division, print_function, unicode_literals

import argparse
import itertools
import os
import platform
import re
import subprocess
import sys

IS_WINDOWS = platform.system().lower() == 'windows'

if IS_WINDOWS:
    BINARY_RE = re.compile(r'build\\([^\\]+)\\rippled.exe')
else:
    BINARY_RE = re.compile(r'build/([^/]+)/rippled')

ALL_TARGETS = ['debug', 'release']

parser = argparse.ArgumentParser(
    description='Test.py - run ripple tests'
)

parser.add_argument(
    '--all', '-a',
    action='store_true',
    help='Build all configurations.',
)

parser.add_argument(
    '--keep_going', '-k',
    action='store_true',
    help='Keep going after one configuration has failed.',
)

parser.add_argument(
    '--silent', '-s',
    action='store_true',
    help='Silence all messages except errors',
)

parser.add_argument(
    '--verbose', '-v',
    action='store_true',
    help=('Report more information about which commands are executed and the '
          'results.'),
)

parser.add_argument(
    '--test', '-t',
    default='',
    help='Add a prefix for unit tests',
)

parser.add_argument(
    'scons_args',
    default=(),
    nargs='*'
)

ARGS = parser.parse_args()

def shell(*cmd, **kwds):
    "Execute a shell command and return the output."
    silent = kwds.pop('silent', ARGS.silent)
    verbose = not silent and kwds.pop('verbose', ARGS.verbose)
    if verbose:
        print('$', ' '.join(cmd))
    kwds['shell'] = IS_WINDOWS

    process = subprocess.Popen(
        cmd,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=subprocess.STDOUT,
        **kwds)
    lines = []
    count = 0
    for line in process.stdout:
        lines.append(line)
        if verbose:
            print(line, end='')
        elif not silent:
            count += 1
            if count >= 80:
                print()
                count = 0
            else:
                print('.', end='')

    if not verbose and count:
        print()
    process.wait()
    return process.returncode, lines

if __name__ == '__main__':
    args = list(ARGS.scons_args)
    if ARGS.all:
        for a in ALL_TARGETS:
            if a not in args:
                args.append(a)
    print('Building:', *(args or ['(default)']))

    # Build everything.
    resultcode, lines = shell('scons', *args)
    if resultcode:
        print('Build FAILED:')
        if not ARGS.verbose:
            print(*lines, sep='')
        exit(1)

    # Now extract the executable names and corresponding targets.
    failed = []
    _, lines = shell('scons', '-n', '--tree=derived', *args, silent=True)
    for line in lines:
        match = BINARY_RE.search(line)
        if match:
            executable, target = match.group(0, 1)

            print('Unit tests for', target)
            testflag = '--unittest'
            if ARGS.test:
                testflag += ('=' + ARGS.test)

            resultcode, lines = shell(executable, testflag)
            if resultcode:
                print('ERROR:', *lines, sep='')
                failed.append([target, 'unittest'])
                if not ARGS.keep_going:
                    break
            ARGS.verbose and print(*lines, sep='')

            print('npm tests for', target)
            resultcode, lines = shell('npm', 'test', '--rippled=' + executable)
            if resultcode:
                print('ERROR:\n', *lines, sep='')
                failed.append([target, 'npm'])
                if not ARGS.keep_going:
                    break
            else:
                ARGS.verbose and print(*lines, sep='')

    if failed:
        print('FAILED:', *(':'.join(f) for f in failed))
        exit(1)
    else:
        print('Success')
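For orientation on Test.py above: it discovers which binaries scons produced by scanning `scons -n --tree=derived` output with BINARY_RE. A minimal sketch of that step; the tree-output line here is hypothetical:

    import re

    BINARY_RE = re.compile(r'build/([^/]+)/rippled')   # the non-Windows pattern above
    line = '  +-build/clang.debug/rippled'             # hypothetical tree-output line

    match = BINARY_RE.search(line)
    if match:
        # group(0) is the whole match (the executable path);
        # group(1) is the configuration directory (the target name).
        executable, target = match.group(0, 1)
        print(executable, target)   # build/clang.debug/rippled clang.debug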
Builds/VisualStudio2013/.gitattributes (vendored, new file, 4 lines)

@@ -0,0 +1,4 @@
RippleD.vcxproj -text
RippleD.vcxproj.filters -text
Deleted file (an MSBuild property sheet, 33 lines; name not captured in the mirror)

@@ -1,33 +0,0 @@
<?xml version="1.0" encoding="utf-8"?>
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
  <ImportGroup Label="PropertySheets" />
  <PropertyGroup Label="UserMacros">
    <RepoDir>..\..</RepoDir>
  </PropertyGroup>
  <PropertyGroup>
    <OutDir>$(RepoDir)\build\VisualStudio2013\$(Configuration).$(Platform)\</OutDir>
    <IntDir>$(RepoDir)\build\obj\VisualStudio2013\$(Configuration).$(Platform)\</IntDir>
    <TargetName>rippled</TargetName>
  </PropertyGroup>
  <ItemDefinitionGroup>
    <ClCompile>
      <PreprocessorDefinitions>_VARIADIC_MAX=10;_WIN32_WINNT=0x0600;_SCL_SECURE_NO_WARNINGS;_CRT_SECURE_NO_WARNINGS;WIN32;%(PreprocessorDefinitions)</PreprocessorDefinitions>
      <MultiProcessorCompilation>true</MultiProcessorCompilation>
      <WarningLevel>Level3</WarningLevel>
      <AdditionalIncludeDirectories>$(RepoDir)\src\protobuf\src;$(RepoDir)\src\protobuf\vsprojects;$(RepoDir)\src\leveldb;$(RepoDir)\src\leveldb\include;$(RepoDir)\src\snappy\snappy;$(RepoDir)\src\snappy\config;$(RepoDir)\build\proto;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
      <AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
      <ExceptionHandling>Async</ExceptionHandling>
      <DisableSpecificWarnings>4018;4244</DisableSpecificWarnings>
    </ClCompile>
    <Link>
      <AdditionalDependencies>Shlwapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
      <SpecifySectionAttributes>
      </SpecifySectionAttributes>
    </Link>
  </ItemDefinitionGroup>
  <ItemGroup>
    <BuildMacro Include="RepoDir">
      <Value>$(RepoDir)</Value>
    </BuildMacro>
  </ItemGroup>
</Project>
(Two file diffs suppressed because they are too large.)
@@ -1,8 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ImportGroup Label="PropertySheets" />
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup />
|
||||
<ItemDefinitionGroup />
|
||||
<ItemGroup />
|
||||
</Project>
|
||||
Visual Studio solution file (name not captured in the mirror)

@@ -1,35 +1,26 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio Express 2013 for Windows Desktop
VisualStudioVersion = 12.0.30110.0
VisualStudioVersion = 12.0.31101.0
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "beast", "..\..\src\beast\Builds\VisualStudio2013\beast.vcxproj", "{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RippleD", "RippleD.vcxproj", "{B7F39ECD-473C-484D-BC34-31F8362506A5}"
Project("{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}") = "RippleD", "RippleD.vcxproj", "{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}"
EndProject
Global
    GlobalSection(SolutionConfigurationPlatforms) = preSolution
        Debug|Win32 = Debug|Win32
        Debug|x64 = Debug|x64
        Release|Win32 = Release|Win32
        Release|x64 = Release|x64
        debug.classic|x64 = debug.classic|x64
        debug|x64 = debug|x64
        release.classic|x64 = release.classic|x64
        release|x64 = release|x64
    EndGlobalSection
    GlobalSection(ProjectConfigurationPlatforms) = postSolution
        {73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Debug|Win32.ActiveCfg = Debug|Win32
        {73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Debug|Win32.Build.0 = Debug|Win32
        {73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Debug|x64.ActiveCfg = Debug|x64
        {73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Debug|x64.Build.0 = Debug|x64
        {73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Release|Win32.ActiveCfg = Release|Win32
        {73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Release|Win32.Build.0 = Release|Win32
        {73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Release|x64.ActiveCfg = Release|x64
        {73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Release|x64.Build.0 = Release|x64
        {B7F39ECD-473C-484D-BC34-31F8362506A5}.Debug|Win32.ActiveCfg = Debug|x64
        {B7F39ECD-473C-484D-BC34-31F8362506A5}.Debug|Win32.Build.0 = Debug|x64
        {B7F39ECD-473C-484D-BC34-31F8362506A5}.Debug|x64.ActiveCfg = Debug|x64
        {B7F39ECD-473C-484D-BC34-31F8362506A5}.Debug|x64.Build.0 = Debug|x64
        {B7F39ECD-473C-484D-BC34-31F8362506A5}.Release|Win32.ActiveCfg = Release|x64
        {B7F39ECD-473C-484D-BC34-31F8362506A5}.Release|x64.ActiveCfg = Release|x64
        {B7F39ECD-473C-484D-BC34-31F8362506A5}.Release|x64.Build.0 = Release|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x64.ActiveCfg = debug.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug.classic|x64.Build.0 = debug.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x64.ActiveCfg = debug|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.debug|x64.Build.0 = debug|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x64.ActiveCfg = release.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release.classic|x64.Build.0 = release.classic|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x64.ActiveCfg = release|x64
        {26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.release|x64.Build.0 = release|x64
    EndGlobalSection
    GlobalSection(SolutionProperties) = preSolution
        HideSolutionNode = FALSE
@@ -1,12 +0,0 @@
|
||||
<?xml version="1.0" encoding="utf-8"?>
|
||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
||||
<ImportGroup Label="PropertySheets" />
|
||||
<PropertyGroup Label="UserMacros" />
|
||||
<PropertyGroup />
|
||||
<ItemDefinitionGroup>
|
||||
<ClCompile>
|
||||
<DisableSpecificWarnings>4018;4244;4267</DisableSpecificWarnings>
|
||||
</ClCompile>
|
||||
</ItemDefinitionGroup>
|
||||
<ItemGroup />
|
||||
</Project>
|
||||
RPM spec file (file header not captured in the mirror)

@@ -1,5 +1,5 @@
Name: rippled
Version: 0.24.0
Version: 0.29.0-hf1
Release: 1%{?dist}
Summary: Ripple peer-to-peer network daemon

@@ -50,4 +50,3 @@ rm -rf %{buildroot}
/usr/bin/rippled
/usr/share/rippled/LICENSE
/etc/rippled/rippled-example.cfg
Builds/travis/clang.boost.patch (new file, 13 lines)

@@ -0,0 +1,13 @@
--- /usr/include/boost/config/compiler/clang.hpp	2013-07-20 13:17:10.000000000 -0400
+++ /usr/include/boost/config/compiler/clang.rippled.hpp	2014-03-11 16:40:51.000000000 -0400
@@ -39,6 +39,10 @@
 // Clang supports "long long" in all compilation modes.
 #define BOOST_HAS_LONG_LONG

+#if defined(__SIZEOF_INT128__)
+#  define BOOST_HAS_INT128
+#endif
+
 //
 // Dynamic shared object (DSO) and dynamic-link library (DLL) support
 //
Builds/travis/static_error.boost.patch (new file, 10 lines)

@@ -0,0 +1,10 @@
--- /usr/include/boost/bimap/detail/debug/static_error.hpp	2008-03-22 17:45:55.000000000 -0400
+++ /usr/include/boost/bimap/detail/debug/static_error.rippled.hpp	2014-03-12 19:40:05.000000000 -0400
@@ -25,7 +25,6 @@
 // a static error.
 /*===========================================================================*/
 #define BOOST_BIMAP_STATIC_ERROR(MESSAGE,VARIABLES) \
-    struct BOOST_PP_CAT(BIMAP_STATIC_ERROR__,MESSAGE) {}; \
     BOOST_MPL_ASSERT_MSG(false, \
     BOOST_PP_CAT(BIMAP_STATIC_ERROR__,MESSAGE), \
     VARIABLES)
README.md (92 changes)

@@ -1,3 +1,77 @@


#The World’s Fastest and Most Secure Payment System

**What is Ripple?**

Ripple is the open-source, distributed payment protocol that enables instant
payments with low fees, no chargebacks, and currency flexibility (for example
dollars, yen, euros, bitcoins, or even loyalty points). Businesses of any size
can easily build payment solutions such as banking or remittance apps, and
accelerate the movement of money. Ripple enables the world to move value the
way it moves information on the Internet.



**What is a Gateway?**

Ripple works with gateways: independent businesses which hold customer
deposits in various currencies such as U.S. dollars (USD) or Euros (EUR),
in exchange for providing cryptographically-signed issuances that users can
send and trade with one another in seconds on the Ripple network. Within the
protocol, exchanges between multiple currencies can occur atomically without
any central authority to monitor them. Later, customers can withdraw their
Ripple balances from the gateways that created those issuances.

**How do Ripple payments work?**

A sender specifies the amount and currency the recipient should receive and
Ripple automatically converts the sender’s available currencies using the
distributed order books integrated into the Ripple protocol. Independent third
parties acting as market makers provide liquidity in these order books.

Ripple uses a pathfinding algorithm that considers currency pairs when
converting from the source to the destination currency. This algorithm searches
for a series of currency swaps that gives the user the lowest cost. Since
anyone can participate as a market maker, market forces drive fees to the
lowest practical level.

**What can you do with Ripple?**

The protocol is entirely open-source and the network’s shared ledger is public
information, so no central authority prevents anyone from participating. Anyone
can become a market maker, create a wallet or a gateway, or monitor network
behavior. Competition drives down spreads and fees, making the network useful
to everyone.


###Key Protocol Features
1. XRP is Ripple’s native [cryptocurrency](http://en.wikipedia.org/wiki/Cryptocurrency) with a fixed supply that
decreases slowly over time, with no mining. XRP acts as a bridge currency, and
pays for transaction fees that protect the network against spam.


2. Pathfinding discovers cheap and efficient payment paths through multiple
[order books](https://www.ripplecharts.com) allowing anyone to [trade](https://www.rippletrade.com) anything. When two accounts aren’t linked by relationships of trust, the Ripple pathfinding engine considers intermediate links and order books to produce a set of possible paths the transaction can take. When the payment is processed, the liquidity along these paths is iteratively consumed in best-first order.


3. [Consensus](https://www.youtube.com/watch?v=pj1QVb1vlC0) confirms
transactions in an atomic fashion, without mining, ensuring efficient use of
resources.

[transact]: https://ripple.com/files/ripple-FIs.pdf
[build]: https://ripple.com/build/

[transact.png]: /images/transact.png
[build.png]: /images/build.png
[contribute.png]: /images/contribute.png

###Join The Ripple Community
|![Transact][transact.png]|![Build][build.png]|![Contribute][contribute.png]|
|:-----------------------:|:-----------------:|:---------------------------:|
|[Transact on the fastest payment infrastructure][transact]|[Build Imaginative Apps][build]|Contribute to the Ripple Protocol Implementation|

#rippled - Ripple P2P server

##[](https://travis-ci.org/ripple/rippled)

@@ -10,6 +84,9 @@ This is the repository for Ripple's `rippled`, reference P2P server.
###Setup instructions:
* https://ripple.com/wiki/Rippled_setup_instructions

###Issues
* https://ripplelabs.atlassian.net/browse/RIPD

### Repository Contents

#### ./bin

@@ -37,5 +114,16 @@ Ripple is open source and permissively licensed under the ISC license. See the
LICENSE file for more details.

###For more information:
* https://ripple.com
* https://ripple.com/wiki
* Ripple Wiki - https://ripple.com/wiki/
* Ripple Primer - https://ripple.com/ripple_primer.pdf
* Ripple Primer (Market Making) - https://ripple.com/ripple-mm.pdf
* Ripple Gateway Primer - https://ripple.com/ripple-gateways.pdf
* Consensus - https://wiki.ripple.com/Consensus

- - -

Copyright © 2015, Ripple Labs. All rights reserved.

Portions of this document, including but not limited to the Ripple logo, images
and image templates are the property of Ripple Labs and cannot be copied or
used without permission.
SConstruct (1479 changes; file diff suppressed because it is too large)
77
appveyor.yml
Normal file
77
appveyor.yml
Normal file
@@ -0,0 +1,77 @@
# Set environment variables.
environment:
  PYTHON: C:/Python27-x64

  # We bundle up protoc.exe and only the parts of boost and openssl we need so
  # that it's a small download. We also use appveyor's free cache, avoiding fees
  # downloading from S3 each time.
  # TODO: script to create this package.
  RIPPLED_DEPS_URL: https://s3-ap-northeast-1.amazonaws.com/history-replay/rippled_deps.zip

  # Other dependencies we just download each time.
  PIP_URL: https://bootstrap.pypa.io/get-pip.py
  PYWIN32_URL: https://downloads.sourceforge.net/project/pywin32/pywin32/Build%20219/pywin32-219.win-amd64-py2.7.exe

  # Scons honours these environment variables, setting the include/lib paths.
  BOOST_ROOT: C:/rippled_deps/boost
  OPENSSL_ROOT: C:/rippled_deps/openssl

# At the end of each successful build we cache this directory. It must be less
# than 100MB total compressed.
cache:
  - "C:\\rippled_deps"

# This means we'll download a zip of the branch we want, rather than the full
# history.
shallow_clone: true

install:
  # We want easy_install, python and protoc.exe on PATH.
  - SET PATH=%PYTHON%;%PYTHON%/Scripts;C:/rippled_deps;%PATH%

  # `ps` prefix means the command is executed by powershell.
  - ps: Start-FileDownload $env:PIP_URL
  - ps: Start-FileDownload $env:PYWIN32_URL

  # Installing pip will install setuptools/easy_install.
  - python get-pip.py

  # Pip has some problems installing scons on windows so we use easy install.
  - easy_install scons

  # Scons has problems with parallel builds on windows without pywin32.
  - easy_install pywin32-219.win-amd64-py2.7.exe
  # (easy_install can do headless installs of .exe wizards)

  # Download dependencies if appveyor didn't restore them from the cache.
  # Use 7zip to unzip.
  - ps: |
      if (-not(Test-Path 'C:/rippled_deps')) {
        Start-FileDownload "$env:RIPPLED_DEPS_URL"
        7z x rippled_deps.zip -oC:\ -y > $null
      }

# TODO: This is giving me grief
# artifacts:
#   # Save rippled.exe in the cloud after each build.
#   - path: "build\\rippled.exe"

build_script:
  # We set the environment variables needed to put compilers on the PATH.
  - '"%VS120COMNTOOLS%../../VC/vcvarsall.bat" x86_amd64'
  # Show which version of the compiler we are using.
  - cl
  - scons msvc.debug -j%NUMBER_OF_PROCESSORS%

after_build:
  # Put our executable in a place where npm test can find it.
  - ps: cp build/msvc.debug/rippled.exe build
  - ps: ls build

test_script:
  # Run the unit tests
  - build\\rippled --unittest

  # Run the integration tests
  - npm install
  - npm test
1 bin/manifest (Symbolic link)
@@ -0,0 +1 @@
python/Manifest.py
24 bin/python/LedgerTool.py (Executable file)
@@ -0,0 +1,24 @@
#!/usr/bin/env python

from __future__ import absolute_import, division, print_function, unicode_literals

import sys
import traceback

from ripple.ledger import Server
from ripple.ledger.commands import Cache, Info, Print
from ripple.ledger.Args import ARGS
from ripple.util import Log
from ripple.util.CommandList import CommandList

_COMMANDS = CommandList(Cache, Info, Print)

if __name__ == '__main__':
    try:
        server = Server.Server()
        args = list(ARGS.command)
        _COMMANDS.run_safe(args.pop(0), server, *args)
    except Exception as e:
        if ARGS.verbose:
            # Write the full traceback to stderr (the original passed
            # sys.stderr as a positional argument rather than as file=).
            print(traceback.format_exc(), file=sys.stderr)
        Log.error(e)
7 bin/python/Manifest.py (Executable file)
@@ -0,0 +1,7 @@
#!/usr/bin/env python

import sys
from ripple.util import Sign

result = Sign.run_command(sys.argv[1:])
exit(0 if result else -1)
8 bin/python/README.md (Normal file)
@@ -0,0 +1,8 @@
Unit Tests
==========

To run the Python unit tests, execute:

    python -m unittest discover

from this directory.
251 bin/python/decorator.py (Normal file)
@@ -0,0 +1,251 @@
##########################     LICENCE     ###############################

# Copyright (c) 2005-2012, Michele Simionato
# All rights reserved.

# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:

#   Redistributions of source code must retain the above copyright
#   notice, this list of conditions and the following disclaimer.
#   Redistributions in bytecode form must reproduce the above copyright
#   notice, this list of conditions and the following disclaimer in
#   the documentation and/or other materials provided with the
#   distribution.

# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
# DAMAGE.

"""
Decorator module, see http://pypi.python.org/pypi/decorator
for the documentation.
"""

__version__ = '3.4.0'

__all__ = ["decorator", "FunctionMaker", "contextmanager"]

import sys, re, inspect

if sys.version >= '3':
    from inspect import getfullargspec
    def get_init(cls):
        return cls.__init__
else:
    class getfullargspec(object):
        "A quick and dirty replacement for getfullargspec for Python 2.X"
        def __init__(self, f):
            self.args, self.varargs, self.varkw, self.defaults = \
                inspect.getargspec(f)
            self.kwonlyargs = []
            self.kwonlydefaults = None
        def __iter__(self):
            yield self.args
            yield self.varargs
            yield self.varkw
            yield self.defaults
    def get_init(cls):
        return cls.__init__.im_func

DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(')

# basic functionality
class FunctionMaker(object):
    """
    An object with the ability to create functions with a given signature.
    It has attributes name, doc, module, signature, defaults, dict and
    methods update and make.
    """
    def __init__(self, func=None, name=None, signature=None,
                 defaults=None, doc=None, module=None, funcdict=None):
        self.shortsignature = signature
        if func:
            # func can be a class or a callable, but not an instance method
            self.name = func.__name__
            if self.name == '<lambda>':  # small hack for lambda functions
                self.name = '_lambda_'
            self.doc = func.__doc__
            self.module = func.__module__
            if inspect.isfunction(func):
                argspec = getfullargspec(func)
                self.annotations = getattr(func, '__annotations__', {})
                for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
                          'kwonlydefaults'):
                    setattr(self, a, getattr(argspec, a))
                for i, arg in enumerate(self.args):
                    setattr(self, 'arg%d' % i, arg)
                if sys.version < '3':  # easy way
                    self.shortsignature = self.signature = \
                        inspect.formatargspec(
                            formatvalue=lambda val: "", *argspec)[1:-1]
                else:  # Python 3 way
                    allargs = list(self.args)
                    allshortargs = list(self.args)
                    if self.varargs:
                        allargs.append('*' + self.varargs)
                        allshortargs.append('*' + self.varargs)
                    elif self.kwonlyargs:
                        allargs.append('*')  # single star syntax
                    for a in self.kwonlyargs:
                        allargs.append('%s=None' % a)
                        allshortargs.append('%s=%s' % (a, a))
                    if self.varkw:
                        allargs.append('**' + self.varkw)
                        allshortargs.append('**' + self.varkw)
                    self.signature = ', '.join(allargs)
                    self.shortsignature = ', '.join(allshortargs)
                self.dict = func.__dict__.copy()
        # func=None happens when decorating a caller
        if name:
            self.name = name
        if signature is not None:
            self.signature = signature
        if defaults:
            self.defaults = defaults
        if doc:
            self.doc = doc
        if module:
            self.module = module
        if funcdict:
            self.dict = funcdict
        # check existence required attributes
        assert hasattr(self, 'name')
        if not hasattr(self, 'signature'):
            raise TypeError('You are decorating a non function: %s' % func)

    def update(self, func, **kw):
        "Update the signature of func with the data in self"
        func.__name__ = self.name
        func.__doc__ = getattr(self, 'doc', None)
        func.__dict__ = getattr(self, 'dict', {})
        func.func_defaults = getattr(self, 'defaults', ())
        func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
        func.__annotations__ = getattr(self, 'annotations', None)
        callermodule = sys._getframe(3).f_globals.get('__name__', '?')
        func.__module__ = getattr(self, 'module', callermodule)
        func.__dict__.update(kw)

    def make(self, src_templ, evaldict=None, addsource=False, **attrs):
        "Make a new function from a given template and update the signature"
        src = src_templ % vars(self)  # expand name and signature
        evaldict = evaldict or {}
        mo = DEF.match(src)
        if mo is None:
            raise SyntaxError('not a valid function template\n%s' % src)
        name = mo.group(1)  # extract the function name
        names = set([name] + [arg.strip(' *') for arg in
                              self.shortsignature.split(',')])
        for n in names:
            if n in ('_func_', '_call_'):
                raise NameError('%s is overridden in\n%s' % (n, src))
        if not src.endswith('\n'):  # add a newline just for safety
            src += '\n'  # this is needed in old versions of Python
        try:
            code = compile(src, '<string>', 'single')
            # print >> sys.stderr, 'Compiling %s' % src
            exec code in evaldict
        except:
            print >> sys.stderr, 'Error in generated code:'
            print >> sys.stderr, src
            raise
        func = evaldict[name]
        if addsource:
            attrs['__source__'] = src
        self.update(func, **attrs)
        return func

    @classmethod
    def create(cls, obj, body, evaldict, defaults=None,
               doc=None, module=None, addsource=True, **attrs):
        """
        Create a function from the strings name, signature and body.
        evaldict is the evaluation dictionary. If addsource is true an attribute
        __source__ is added to the result. The attributes attrs are added,
        if any.
        """
        if isinstance(obj, str):  # "name(signature)"
            name, rest = obj.strip().split('(', 1)
            signature = rest[:-1]  # strip a right parens
            func = None
        else:  # a function
            name = None
            signature = None
            func = obj
        self = cls(func, name, signature, defaults, doc, module)
        ibody = '\n'.join('    ' + line for line in body.splitlines())
        return self.make('def %(name)s(%(signature)s):\n' + ibody,
                         evaldict, addsource, **attrs)

def decorator(caller, func=None):
    """
    decorator(caller) converts a caller function into a decorator;
    decorator(caller, func) decorates a function using a caller.
    """
    if func is not None:  # returns a decorated function
        evaldict = func.func_globals.copy()
        evaldict['_call_'] = caller
        evaldict['_func_'] = func
        return FunctionMaker.create(
            func, "return _call_(_func_, %(shortsignature)s)",
            evaldict, undecorated=func, __wrapped__=func)
    else:  # returns a decorator
        if inspect.isclass(caller):
            name = caller.__name__.lower()
            callerfunc = get_init(caller)
            doc = 'decorator(%s) converts functions/generators into ' \
                'factories of %s objects' % (caller.__name__, caller.__name__)
            fun = getfullargspec(callerfunc).args[1]  # second arg
        elif inspect.isfunction(caller):
            name = '_lambda_' if caller.__name__ == '<lambda>' \
                else caller.__name__
            callerfunc = caller
            doc = caller.__doc__
            fun = getfullargspec(callerfunc).args[0]  # first arg
        else:  # assume caller is an object with a __call__ method
            name = caller.__class__.__name__.lower()
            callerfunc = caller.__call__.im_func
            doc = caller.__call__.__doc__
            fun = getfullargspec(callerfunc).args[1]  # second arg
        evaldict = callerfunc.func_globals.copy()
        evaldict['_call_'] = caller
        evaldict['decorator'] = decorator
        return FunctionMaker.create(
            '%s(%s)' % (name, fun),
            'return decorator(_call_, %s)' % fun,
            evaldict, undecorated=caller, __wrapped__=caller,
            doc=doc, module=caller.__module__)

######################### contextmanager ########################

def __call__(self, func):
    'Context manager decorator'
    return FunctionMaker.create(
        func, "with _self_: return _func_(%(shortsignature)s)",
        dict(_self_=self, _func_=func), __wrapped__=func)

try:  # Python >= 3.2
    from contextlib import _GeneratorContextManager
    ContextManager = type(
        'ContextManager', (_GeneratorContextManager,), dict(__call__=__call__))
except ImportError:  # Python >= 2.5
    from contextlib import GeneratorContextManager
    def __init__(self, f, *a, **k):
        return GeneratorContextManager.__init__(self, f(*a, **k))
    ContextManager = type(
        'ContextManager', (GeneratorContextManager,),
        dict(__call__=__call__, __init__=__init__))

contextmanager = decorator(ContextManager)
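For context, a typical use of this vendored module (per its PyPI documentation)
turns a caller function into a signature-preserving decorator. A minimal
sketch, with a hypothetical `trace` caller, is:

from decorator import decorator

@decorator
def trace(f, *args, **kw):
    # The caller runs around every call to the decorated function.
    print("calling %s with args %s, %s" % (f.__name__, args, kw))
    return f(*args, **kw)

@trace
def add(x, y):
    return x + y

add(1, 2)  # prints the call, then returns 3; add() keeps its signature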
14 bin/python/ecdsa/__init__.py (Normal file)
@@ -0,0 +1,14 @@
__all__ = ["curves", "der", "ecdsa", "ellipticcurve", "keys", "numbertheory",
           "test_pyecdsa", "util", "six"]
from .keys import SigningKey, VerifyingKey, BadSignatureError, BadDigestError
from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1

_hush_pyflakes = [SigningKey, VerifyingKey, BadSignatureError, BadDigestError,
                  NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]
del _hush_pyflakes

# This code comes from http://github.com/warner/python-ecdsa

from ._version import get_versions
__version__ = get_versions()['version']
del get_versions
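As a usage note, these top-level exports follow the upstream python-ecdsa API;
a brief sketch of the sign/verify round trip:

from ecdsa import SigningKey, NIST192p, BadSignatureError

sk = SigningKey.generate(curve=NIST192p)   # private key
vk = sk.get_verifying_key()                # matching public key
signature = sk.sign(b"message")
assert vk.verify(signature, b"message")    # True for an intact message
try:
    vk.verify(signature, b"tampered")
except BadSignatureError:
    pass  # altered data is rejected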
183 bin/python/ecdsa/_version.py (Normal file)
@@ -0,0 +1,183 @@

# This file helps to compute a version number in source trees obtained from
# git-archive tarball (such as those provided by githubs download-from-tag
# feature). Distribution tarballs (built by setup.py sdist) and build
# directories (produced by setup.py build) will contain a much shorter file
# that just contains the computed version number.

# This file is released into the public domain. Generated by
# versioneer-0.12 (https://github.com/warner/python-versioneer)

# these strings will be replaced by git during git-archive
git_refnames = " (HEAD, master)"
git_full = "e7a6daff51221b8edd888cff404596ef90432869"

# these strings are filled in when 'setup.py versioneer' creates _version.py
tag_prefix = "python-ecdsa-"
parentdir_prefix = "ecdsa-"
versionfile_source = "ecdsa/_version.py"

import os, sys, re, subprocess, errno

def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False):
    assert isinstance(commands, list)
    p = None
    for c in commands:
        try:
            # remember shell=False, so use git.cmd on windows, not just git
            p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE,
                                 stderr=(subprocess.PIPE if hide_stderr
                                         else None))
            break
        except EnvironmentError:
            e = sys.exc_info()[1]
            if e.errno == errno.ENOENT:
                continue
            if verbose:
                print("unable to run %s" % args[0])
                print(e)
            return None
    else:
        if verbose:
            print("unable to find command, tried %s" % (commands,))
        return None
    stdout = p.communicate()[0].strip()
    if sys.version >= '3':
        stdout = stdout.decode()
    if p.returncode != 0:
        if verbose:
            print("unable to run %s (error)" % args[0])
        return None
    return stdout


def versions_from_parentdir(parentdir_prefix, root, verbose=False):
    # Source tarballs conventionally unpack into a directory that includes
    # both the project name and a version string.
    dirname = os.path.basename(root)
    if not dirname.startswith(parentdir_prefix):
        if verbose:
            print("guessing rootdir is '%s', but '%s' doesn't start with prefix '%s'" %
                  (root, dirname, parentdir_prefix))
        return None
    return {"version": dirname[len(parentdir_prefix):], "full": ""}

def git_get_keywords(versionfile_abs):
    # the code embedded in _version.py can just fetch the value of these
    # keywords. When used from setup.py, we don't want to import _version.py,
    # so we do it with a regexp instead. This function is not used from
    # _version.py.
    keywords = {}
    try:
        f = open(versionfile_abs, "r")
        for line in f.readlines():
            if line.strip().startswith("git_refnames ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["refnames"] = mo.group(1)
            if line.strip().startswith("git_full ="):
                mo = re.search(r'=\s*"(.*)"', line)
                if mo:
                    keywords["full"] = mo.group(1)
        f.close()
    except EnvironmentError:
        pass
    return keywords

def git_versions_from_keywords(keywords, tag_prefix, verbose=False):
    if not keywords:
        return {}  # keyword-finding function failed to find keywords
    refnames = keywords["refnames"].strip()
    if refnames.startswith("$Format"):
        if verbose:
            print("keywords are unexpanded, not using")
        return {}  # unexpanded, so not in an unpacked git-archive tarball
    refs = set([r.strip() for r in refnames.strip("()").split(",")])
    # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of
    # just "foo-1.0". If we see a "tag: " prefix, prefer those.
    TAG = "tag: "
    tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)])
    if not tags:
        # Either we're using git < 1.8.3, or there really are no tags. We use
        # a heuristic: assume all version tags have a digit. The old git %d
        # expansion behaves like git log --decorate=short and strips out the
        # refs/heads/ and refs/tags/ prefixes that would let us distinguish
        # between branches and tags. By ignoring refnames without digits, we
        # filter out many common branch names like "release" and
        # "stabilization", as well as "HEAD" and "master".
        tags = set([r for r in refs if re.search(r'\d', r)])
        if verbose:
            print("discarding '%s', no digits" % ",".join(refs - tags))
    if verbose:
        print("likely tags: %s" % ",".join(sorted(tags)))
    for ref in sorted(tags):
        # sorting will prefer e.g. "2.0" over "2.0rc1"
        if ref.startswith(tag_prefix):
            r = ref[len(tag_prefix):]
            if verbose:
                print("picking %s" % r)
            return {"version": r,
                    "full": keywords["full"].strip()}
    # no suitable tags, so we use the full revision id
    if verbose:
        print("no suitable tags, using full revision id")
    return {"version": keywords["full"].strip(),
            "full": keywords["full"].strip()}


def git_versions_from_vcs(tag_prefix, root, verbose=False):
    # this runs 'git' from the root of the source tree. This only gets called
    # if the git-archive 'subst' keywords were *not* expanded, and
    # _version.py hasn't already been rewritten with a short version string,
    # meaning we're inside a checked out source tree.

    if not os.path.exists(os.path.join(root, ".git")):
        if verbose:
            print("no .git in %s" % root)
        return {}

    GITS = ["git"]
    if sys.platform == "win32":
        GITS = ["git.cmd", "git.exe"]
    stdout = run_command(GITS, ["describe", "--tags", "--dirty", "--always"],
                         cwd=root)
    if stdout is None:
        return {}
    if not stdout.startswith(tag_prefix):
        if verbose:
            print("tag '%s' doesn't start with prefix '%s'" % (stdout, tag_prefix))
        return {}
    tag = stdout[len(tag_prefix):]
    stdout = run_command(GITS, ["rev-parse", "HEAD"], cwd=root)
    if stdout is None:
        return {}
    full = stdout.strip()
    if tag.endswith("-dirty"):
        full += "-dirty"
    return {"version": tag, "full": full}


def get_versions(default={"version": "unknown", "full": ""}, verbose=False):
    # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have
    # __file__, we can work backwards from there to the root. Some
    # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which
    # case we can only use expanded keywords.

    keywords = {"refnames": git_refnames, "full": git_full}
    ver = git_versions_from_keywords(keywords, tag_prefix, verbose)
    if ver:
        return ver

    try:
        root = os.path.abspath(__file__)
        # versionfile_source is the relative path from the top of the source
        # tree (where the .git directory might live) to this file. Invert
        # this to find the root from __file__.
        for i in range(len(versionfile_source.split(os.sep))):
            root = os.path.dirname(root)
    except NameError:
        return default

    return (git_versions_from_vcs(tag_prefix, root, verbose)
            or versions_from_parentdir(parentdir_prefix, root, verbose)
            or default)
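To illustrate the keyword path above: once git-archive has expanded the
refnames, git_versions_from_keywords() strips the tag prefix and reports the
tag as the version. A hypothetical input/output pair:

from ecdsa._version import git_versions_from_keywords

keywords = {"refnames": " (tag: python-ecdsa-0.11, master)",
            "full": "e7a6daff51221b8edd888cff404596ef90432869"}
git_versions_from_keywords(keywords, tag_prefix="python-ecdsa-")
# -> {"version": "0.11",
#     "full": "e7a6daff51221b8edd888cff404596ef90432869"}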
53 bin/python/ecdsa/curves.py (Normal file)
@@ -0,0 +1,53 @@
from __future__ import division

from . import der, ecdsa

class UnknownCurveError(Exception):
    pass

def orderlen(order):
    return (1+len("%x" % order))//2  # bytes

# the NIST curves
class Curve:
    def __init__(self, name, openssl_name,
                 curve, generator, oid):
        self.name = name
        self.openssl_name = openssl_name  # maybe None
        self.curve = curve
        self.generator = generator
        self.order = generator.order()
        self.baselen = orderlen(self.order)
        self.verifying_key_length = 2*self.baselen
        self.signature_length = 2*self.baselen
        self.oid = oid
        self.encoded_oid = der.encode_oid(*oid)

NIST192p = Curve("NIST192p", "prime192v1",
                 ecdsa.curve_192, ecdsa.generator_192,
                 (1, 2, 840, 10045, 3, 1, 1))
NIST224p = Curve("NIST224p", "secp224r1",
                 ecdsa.curve_224, ecdsa.generator_224,
                 (1, 3, 132, 0, 33))
NIST256p = Curve("NIST256p", "prime256v1",
                 ecdsa.curve_256, ecdsa.generator_256,
                 (1, 2, 840, 10045, 3, 1, 7))
NIST384p = Curve("NIST384p", "secp384r1",
                 ecdsa.curve_384, ecdsa.generator_384,
                 (1, 3, 132, 0, 34))
NIST521p = Curve("NIST521p", "secp521r1",
                 ecdsa.curve_521, ecdsa.generator_521,
                 (1, 3, 132, 0, 35))
SECP256k1 = Curve("SECP256k1", "secp256k1",
                  ecdsa.curve_secp256k1, ecdsa.generator_secp256k1,
                  (1, 3, 132, 0, 10))

curves = [NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1]

def find_curve(oid_curve):
    for c in curves:
        if c.oid == oid_curve:
            return c
    raise UnknownCurveError("I don't know about the curve with oid %s."
                            "I only know about these: %s" %
                            (oid_curve, [c.name for c in curves]))
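A quick worked example of the sizes derived above: the NIST P-192 group order
is a 192-bit number (48 hex digits), so orderlen() gives 24 bytes, and keys and
signatures are twice that.

from ecdsa.curves import NIST192p, orderlen, find_curve

order = NIST192p.order                       # just under 2**192, 48 hex digits
assert orderlen(order) == 24                 # (1 + 48) // 2
assert NIST192p.baselen == 24
assert NIST192p.verifying_key_length == 48   # two 24-byte coordinates
assert NIST192p.signature_length == 48       # r and s, 24 bytes each
assert find_curve((1, 2, 840, 10045, 3, 1, 1)) is NIST192p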
199 bin/python/ecdsa/der.py (Normal file)
@@ -0,0 +1,199 @@
from __future__ import division

import binascii
import base64
from .six import int2byte, b, integer_types, text_type

class UnexpectedDER(Exception):
    pass

def encode_constructed(tag, value):
    return int2byte(0xa0+tag) + encode_length(len(value)) + value

def encode_integer(r):
    assert r >= 0  # can't support negative numbers yet
    h = ("%x" % r).encode()
    if len(h) % 2:
        h = b("0") + h
    s = binascii.unhexlify(h)
    num = s[0] if isinstance(s[0], integer_types) else ord(s[0])
    if num <= 0x7f:
        return b("\x02") + int2byte(len(s)) + s
    else:
        # DER integers are two's complement, so if the first byte is
        # 0x80-0xff then we need an extra 0x00 byte to prevent it from
        # looking negative.
        return b("\x02") + int2byte(len(s)+1) + b("\x00") + s

def encode_bitstring(s):
    return b("\x03") + encode_length(len(s)) + s

def encode_octet_string(s):
    return b("\x04") + encode_length(len(s)) + s

def encode_oid(first, second, *pieces):
    assert first <= 2
    assert second <= 39
    encoded_pieces = [int2byte(40*first+second)] + [encode_number(p)
                                                    for p in pieces]
    body = b('').join(encoded_pieces)
    return b('\x06') + encode_length(len(body)) + body

def encode_sequence(*encoded_pieces):
    total_len = sum([len(p) for p in encoded_pieces])
    return b('\x30') + encode_length(total_len) + b('').join(encoded_pieces)

def encode_number(n):
    b128_digits = []
    while n:
        b128_digits.insert(0, (n & 0x7f) | 0x80)
        n = n >> 7
    if not b128_digits:
        b128_digits.append(0)
    b128_digits[-1] &= 0x7f
    return b('').join([int2byte(d) for d in b128_digits])

def remove_constructed(string):
    s0 = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if (s0 & 0xe0) != 0xa0:
        raise UnexpectedDER("wanted constructed tag (0xa0-0xbf), got 0x%02x"
                            % s0)
    tag = s0 & 0x1f
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return tag, body, rest

def remove_sequence(string):
    if not string.startswith(b("\x30")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted sequence (0x30), got 0x%02x" % n)
    length, lengthlength = read_length(string[1:])
    endseq = 1+lengthlength+length
    return string[1+lengthlength:endseq], string[endseq:]

def remove_octet_string(string):
    if not string.startswith(b("\x04")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted octetstring (0x04), got 0x%02x" % n)
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return body, rest

def remove_object(string):
    if not string.startswith(b("\x06")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted object (0x06), got 0x%02x" % n)
    length, lengthlength = read_length(string[1:])
    body = string[1+lengthlength:1+lengthlength+length]
    rest = string[1+lengthlength+length:]
    numbers = []
    while body:
        n, ll = read_number(body)
        numbers.append(n)
        body = body[ll:]
    n0 = numbers.pop(0)
    first = n0//40
    second = n0-(40*first)
    numbers.insert(0, first)
    numbers.insert(1, second)
    return tuple(numbers), rest

def remove_integer(string):
    if not string.startswith(b("\x02")):
        n = string[0] if isinstance(string[0], integer_types) else ord(string[0])
        raise UnexpectedDER("wanted integer (0x02), got 0x%02x" % n)
    length, llen = read_length(string[1:])
    numberbytes = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    nbytes = numberbytes[0] if isinstance(numberbytes[0], integer_types) else ord(numberbytes[0])
    assert nbytes < 0x80  # can't support negative numbers yet
    return int(binascii.hexlify(numberbytes), 16), rest

def read_number(string):
    number = 0
    llen = 0
    # base-128 big endian, with b7 set in all but the last byte
    while True:
        if llen > len(string):
            raise UnexpectedDER("ran out of length bytes")
        number = number << 7
        d = string[llen] if isinstance(string[llen], integer_types) else ord(string[llen])
        number += (d & 0x7f)
        llen += 1
        if not d & 0x80:
            break
    return number, llen

def encode_length(l):
    assert l >= 0
    if l < 0x80:
        return int2byte(l)
    s = ("%x" % l).encode()
    if len(s) % 2:
        s = b("0")+s
    s = binascii.unhexlify(s)
    llen = len(s)
    return int2byte(0x80|llen) + s

def read_length(string):
    num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if not (num & 0x80):
        # short form
        return (num & 0x7f), 1
    # else long-form: b0&0x7f is number of additional base256 length bytes,
    # big-endian
    llen = num & 0x7f
    if llen > len(string)-1:
        raise UnexpectedDER("ran out of length bytes")
    return int(binascii.hexlify(string[1:1+llen]), 16), 1+llen

def remove_bitstring(string):
    num = string[0] if isinstance(string[0], integer_types) else ord(string[0])
    if not string.startswith(b("\x03")):
        raise UnexpectedDER("wanted bitstring (0x03), got 0x%02x" % num)
    length, llen = read_length(string[1:])
    body = string[1+llen:1+llen+length]
    rest = string[1+llen+length:]
    return body, rest

# SEQUENCE([1, STRING(secexp), cont[0], OBJECT(curvename), cont[1], BINTSTRING)

# signatures: (from RFC3279)
#   ansi-X9-62  OBJECT IDENTIFIER ::= {
#        iso(1) member-body(2) us(840) 10045 }
#
#   id-ecSigType OBJECT IDENTIFIER ::= {
#        ansi-X9-62 signatures(4) }
#   ecdsa-with-SHA1 OBJECT IDENTIFIER ::= {
#        id-ecSigType 1 }
## so 1,2,840,10045,4,1
## so 0x42, .. ..

#   Ecdsa-Sig-Value ::= SEQUENCE {
#        r INTEGER,
#        s INTEGER }

#   id-public-key-type OBJECT IDENTIFIER ::= { ansi-X9.62 2 }
#
#   id-ecPublicKey OBJECT IDENTIFIER ::= { id-publicKeyType 1 }

# I think the secp224r1 identifier is (t=06,l=05,v=2b81040021)
#   secp224r1 OBJECT IDENTIFIER ::= {
#     iso(1) identified-organization(3) certicom(132) curve(0) 33 }
# and the secp384r1 is (t=06,l=05,v=2b81040022)
#   secp384r1 OBJECT IDENTIFIER ::= {
#     iso(1) identified-organization(3) certicom(132) curve(0) 34 }

def unpem(pem):
    if isinstance(pem, text_type):
        pem = pem.encode()

    d = b("").join([l.strip() for l in pem.split(b("\n"))
                    if l and not l.startswith(b("-----"))])
    return base64.b64decode(d)

def topem(der, name):
    b64 = base64.b64encode(der)
    lines = [("-----BEGIN %s-----\n" % name).encode()]
    lines.extend([b64[start:start+64]+b("\n")
                  for start in range(0, len(b64), 64)])
    lines.append(("-----END %s-----\n" % name).encode())
    return b("").join(lines)
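These primitives compose directly; for instance, an Ecdsa-Sig-Value (the
SEQUENCE of two INTEGERs shown in the comments above) can be built and parsed
with the module's own helpers. A small round-trip sketch:

from ecdsa.der import (encode_sequence, encode_integer,
                       remove_sequence, remove_integer)

r, s = 1234567890, 987654321
sig_der = encode_sequence(encode_integer(r), encode_integer(s))

body, rest = remove_sequence(sig_der)
assert rest == b''               # nothing after the SEQUENCE
r2, tail = remove_integer(body)
s2, tail = remove_integer(tail)
assert (r2, s2) == (r, s) and tail == b''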
576
bin/python/ecdsa/ecdsa.py
Normal file
576
bin/python/ecdsa/ecdsa.py
Normal file
@@ -0,0 +1,576 @@
|
||||
#! /usr/bin/env python
|
||||
|
||||
"""
|
||||
Implementation of Elliptic-Curve Digital Signatures.
|
||||
|
||||
Classes and methods for elliptic-curve signatures:
|
||||
private keys, public keys, signatures,
|
||||
NIST prime-modulus curves with modulus lengths of
|
||||
192, 224, 256, 384, and 521 bits.
|
||||
|
||||
Example:
|
||||
|
||||
# (In real-life applications, you would probably want to
|
||||
# protect against defects in SystemRandom.)
|
||||
from random import SystemRandom
|
||||
randrange = SystemRandom().randrange
|
||||
|
||||
# Generate a public/private key pair using the NIST Curve P-192:
|
||||
|
||||
g = generator_192
|
||||
n = g.order()
|
||||
secret = randrange( 1, n )
|
||||
pubkey = Public_key( g, g * secret )
|
||||
privkey = Private_key( pubkey, secret )
|
||||
|
||||
# Signing a hash value:
|
||||
|
||||
hash = randrange( 1, n )
|
||||
signature = privkey.sign( hash, randrange( 1, n ) )
|
||||
|
||||
# Verifying a signature for a hash value:
|
||||
|
||||
if pubkey.verifies( hash, signature ):
|
||||
print_("Demo verification succeeded.")
|
||||
else:
|
||||
print_("*** Demo verification failed.")
|
||||
|
||||
# Verification fails if the hash value is modified:
|
||||
|
||||
if pubkey.verifies( hash-1, signature ):
|
||||
print_("**** Demo verification failed to reject tampered hash.")
|
||||
else:
|
||||
print_("Demo verification correctly rejected tampered hash.")
|
||||
|
||||
Version of 2009.05.16.
|
||||
|
||||
Revision history:
|
||||
2005.12.31 - Initial version.
|
||||
2008.11.25 - Substantial revisions introducing new classes.
|
||||
2009.05.16 - Warn against using random.randrange in real applications.
|
||||
2009.05.17 - Use random.SystemRandom by default.
|
||||
|
||||
Written in 2005 by Peter Pearson and placed in the public domain.
|
||||
"""
|
||||
|
||||
from .six import int2byte, b, print_
|
||||
from . import ellipticcurve
|
||||
from . import numbertheory
|
||||
import random
|
||||
|
||||
|
||||
|
||||
class Signature( object ):
|
||||
"""ECDSA signature.
|
||||
"""
|
||||
def __init__( self, r, s ):
|
||||
self.r = r
|
||||
self.s = s
|
||||
|
||||
|
||||
|
||||
class Public_key( object ):
|
||||
"""Public key for ECDSA.
|
||||
"""
|
||||
|
||||
def __init__( self, generator, point ):
|
||||
"""generator is the Point that generates the group,
|
||||
point is the Point that defines the public key.
|
||||
"""
|
||||
|
||||
self.curve = generator.curve()
|
||||
self.generator = generator
|
||||
self.point = point
|
||||
n = generator.order()
|
||||
if not n:
|
||||
raise RuntimeError("Generator point must have order.")
|
||||
if not n * point == ellipticcurve.INFINITY:
|
||||
raise RuntimeError("Generator point order is bad.")
|
||||
if point.x() < 0 or n <= point.x() or point.y() < 0 or n <= point.y():
|
||||
raise RuntimeError("Generator point has x or y out of range.")
|
||||
|
||||
|
||||
def verifies( self, hash, signature ):
|
||||
"""Verify that signature is a valid signature of hash.
|
||||
Return True if the signature is valid.
|
||||
"""
|
||||
|
||||
# From X9.62 J.3.1.
|
||||
|
||||
G = self.generator
|
||||
n = G.order()
|
||||
r = signature.r
|
||||
s = signature.s
|
||||
if r < 1 or r > n-1: return False
|
||||
if s < 1 or s > n-1: return False
|
||||
c = numbertheory.inverse_mod( s, n )
|
||||
u1 = ( hash * c ) % n
|
||||
u2 = ( r * c ) % n
|
||||
xy = u1 * G + u2 * self.point
|
||||
v = xy.x() % n
|
||||
return v == r
|
||||
|
||||
|
||||
|
||||
class Private_key( object ):
|
||||
"""Private key for ECDSA.
|
||||
"""
|
||||
|
||||
def __init__( self, public_key, secret_multiplier ):
|
||||
"""public_key is of class Public_key;
|
||||
secret_multiplier is a large integer.
|
||||
"""
|
||||
|
||||
self.public_key = public_key
|
||||
self.secret_multiplier = secret_multiplier
|
||||
|
||||
def sign( self, hash, random_k ):
|
||||
"""Return a signature for the provided hash, using the provided
|
||||
random nonce. It is absolutely vital that random_k be an unpredictable
|
||||
number in the range [1, self.public_key.point.order()-1]. If
|
||||
an attacker can guess random_k, he can compute our private key from a
|
||||
single signature. Also, if an attacker knows a few high-order
|
||||
bits (or a few low-order bits) of random_k, he can compute our private
|
||||
key from many signatures. The generation of nonces with adequate
|
||||
cryptographic strength is very difficult and far beyond the scope
|
||||
of this comment.
|
||||
|
||||
May raise RuntimeError, in which case retrying with a new
|
||||
random value k is in order.
|
||||
"""
|
||||
|
||||
G = self.public_key.generator
|
||||
n = G.order()
|
||||
k = random_k % n
|
||||
p1 = k * G
|
||||
r = p1.x()
|
||||
if r == 0: raise RuntimeError("amazingly unlucky random number r")
|
||||
s = ( numbertheory.inverse_mod( k, n ) * \
|
||||
( hash + ( self.secret_multiplier * r ) % n ) ) % n
|
||||
if s == 0: raise RuntimeError("amazingly unlucky random number s")
|
||||
return Signature( r, s )
|
||||
|
||||
|
||||
|
||||
def int_to_string( x ):
|
||||
"""Convert integer x into a string of bytes, as per X9.62."""
|
||||
assert x >= 0
|
||||
if x == 0: return b('\0')
|
||||
result = []
|
||||
while x:
|
||||
ordinal = x & 0xFF
|
||||
result.append(int2byte(ordinal))
|
||||
x >>= 8
|
||||
|
||||
result.reverse()
|
||||
return b('').join(result)
|
||||
|
||||
|
||||
def string_to_int( s ):
|
||||
"""Convert a string of bytes into an integer, as per X9.62."""
|
||||
result = 0
|
||||
for c in s:
|
||||
if not isinstance(c, int): c = ord( c )
|
||||
result = 256 * result + c
|
||||
return result
|
||||
|
||||
|
||||
def digest_integer( m ):
|
||||
"""Convert an integer into a string of bytes, compute
|
||||
its SHA-1 hash, and convert the result to an integer."""
|
||||
#
|
||||
# I don't expect this function to be used much. I wrote
|
||||
# it in order to be able to duplicate the examples
|
||||
# in ECDSAVS.
|
||||
#
|
||||
from hashlib import sha1
|
||||
return string_to_int( sha1( int_to_string( m ) ).digest() )
|
||||
|
||||
|
||||
def point_is_valid( generator, x, y ):
|
||||
"""Is (x,y) a valid public key based on the specified generator?"""
|
||||
|
||||
# These are the tests specified in X9.62.
|
||||
|
||||
n = generator.order()
|
||||
curve = generator.curve()
|
||||
if x < 0 or n <= x or y < 0 or n <= y:
|
||||
return False
|
||||
if not curve.contains_point( x, y ):
|
||||
return False
|
||||
if not n*ellipticcurve.Point( curve, x, y ) == \
|
||||
ellipticcurve.INFINITY:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
|
||||
# NIST Curve P-192:
|
||||
_p = 6277101735386680763835789423207666416083908700390324961279
|
||||
_r = 6277101735386680763835789423176059013767194773182842284081
|
||||
# s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
|
||||
# c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65L
|
||||
_b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
|
||||
_Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
|
||||
_Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811
|
||||
|
||||
curve_192 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_192 = ellipticcurve.Point( curve_192, _Gx, _Gy, _r )
|
||||
|
||||
|
||||
# NIST Curve P-224:
|
||||
_p = 26959946667150639794667015087019630673557916260026308143510066298881
|
||||
_r = 26959946667150639794667015087019625940457807714424391721682722368061
|
||||
# s = 0xbd71344799d5c7fcdc45b59fa3b9ab8f6a948bc5L
|
||||
# c = 0x5b056c7e11dd68f40469ee7f3c7a7d74f7d121116506d031218291fbL
|
||||
_b = 0xb4050a850c04b3abf54132565044b0b7d7bfd8ba270b39432355ffb4
|
||||
_Gx =0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21
|
||||
_Gy = 0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34
|
||||
|
||||
curve_224 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_224 = ellipticcurve.Point( curve_224, _Gx, _Gy, _r )
|
||||
|
||||
# NIST Curve P-256:
|
||||
_p = 115792089210356248762697446949407573530086143415290314195533631308867097853951
|
||||
_r = 115792089210356248762697446949407573529996955224135760342422259061068512044369
|
||||
# s = 0xc49d360886e704936a6678e1139d26b7819f7e90L
|
||||
# c = 0x7efba1662985be9403cb055c75d4f7e0ce8d84a9c5114abcaf3177680104fa0dL
|
||||
_b = 0x5ac635d8aa3a93e7b3ebbd55769886bc651d06b0cc53b0f63bce3c3e27d2604b
|
||||
_Gx = 0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296
|
||||
_Gy = 0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5
|
||||
|
||||
curve_256 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_256 = ellipticcurve.Point( curve_256, _Gx, _Gy, _r )
|
||||
|
||||
# NIST Curve P-384:
|
||||
_p = 39402006196394479212279040100143613805079739270465446667948293404245721771496870329047266088258938001861606973112319
|
||||
_r = 39402006196394479212279040100143613805079739270465446667946905279627659399113263569398956308152294913554433653942643
|
||||
# s = 0xa335926aa319a27a1d00896a6773a4827acdac73L
|
||||
# c = 0x79d1e655f868f02fff48dcdee14151ddb80643c1406d0ca10dfe6fc52009540a495e8042ea5f744f6e184667cc722483L
|
||||
_b = 0xb3312fa7e23ee7e4988e056be3f82d19181d9c6efe8141120314088f5013875ac656398d8a2ed19d2a85c8edd3ec2aef
|
||||
_Gx = 0xaa87ca22be8b05378eb1c71ef320ad746e1d3b628ba79b9859f741e082542a385502f25dbf55296c3a545e3872760ab7
|
||||
_Gy = 0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c00a60b1ce1d7e819d7a431d7c90ea0e5f
|
||||
|
||||
curve_384 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_384 = ellipticcurve.Point( curve_384, _Gx, _Gy, _r )
|
||||
|
||||
# NIST Curve P-521:
|
||||
_p = 6864797660130609714981900799081393217269435300143305409394463459185543183397656052122559640661454554977296311391480858037121987999716643812574028291115057151
|
||||
_r = 6864797660130609714981900799081393217269435300143305409394463459185543183397655394245057746333217197532963996371363321113864768612440380340372808892707005449
|
||||
# s = 0xd09e8800291cb85396cc6717393284aaa0da64baL
|
||||
# c = 0x0b48bfa5f420a34949539d2bdfc264eeeeb077688e44fbf0ad8f6d0edb37bd6b533281000518e19f1b9ffbe0fe9ed8a3c2200b8f875e523868c70c1e5bf55bad637L
|
||||
_b = 0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef109e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00
|
||||
_Gx = 0xc6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66
|
||||
_Gy = 0x11839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650
|
||||
|
||||
curve_521 = ellipticcurve.CurveFp( _p, -3, _b )
|
||||
generator_521 = ellipticcurve.Point( curve_521, _Gx, _Gy, _r )
|
||||
|
||||
# Certicom secp256-k1
|
||||
_a = 0x0000000000000000000000000000000000000000000000000000000000000000
|
||||
_b = 0x0000000000000000000000000000000000000000000000000000000000000007
|
||||
_p = 0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffefffffc2f
|
||||
_Gx = 0x79be667ef9dcbbac55a06295ce870b07029bfcdb2dce28d959f2815b16f81798
|
||||
_Gy = 0x483ada7726a3c4655da4fbfc0e1108a8fd17b448a68554199c47d08ffb10d4b8
|
||||
_r = 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
|
||||
|
||||
curve_secp256k1 = ellipticcurve.CurveFp( _p, _a, _b)
|
||||
generator_secp256k1 = ellipticcurve.Point( curve_secp256k1, _Gx, _Gy, _r)
|
||||
|
||||
|
||||
|
||||
def __main__():
|
||||
class TestFailure(Exception): pass
|
||||
|
||||
def test_point_validity( generator, x, y, expected ):
|
||||
"""generator defines the curve; is (x,y) a point on
|
||||
this curve? "expected" is True if the right answer is Yes."""
|
||||
if point_is_valid( generator, x, y ) == expected:
|
||||
print_("Point validity tested as expected.")
|
||||
else:
|
||||
raise TestFailure("*** Point validity test gave wrong result.")
|
||||
|
||||
def test_signature_validity( Msg, Qx, Qy, R, S, expected ):
|
||||
"""Msg = message, Qx and Qy represent the base point on
|
||||
elliptic curve c192, R and S are the signature, and
|
||||
"expected" is True iff the signature is expected to be valid."""
|
||||
pubk = Public_key( generator_192,
|
||||
ellipticcurve.Point( curve_192, Qx, Qy ) )
|
||||
got = pubk.verifies( digest_integer( Msg ), Signature( R, S ) )
|
||||
if got == expected:
|
||||
print_("Signature tested as expected: got %s, expected %s." % \
|
||||
( got, expected ))
|
||||
else:
|
||||
raise TestFailure("*** Signature test failed: got %s, expected %s." % \
|
||||
( got, expected ))
|
||||
|
||||
print_("NIST Curve P-192:")
|
||||
|
||||
p192 = generator_192
|
||||
|
||||
# From X9.62:
|
||||
|
||||
d = 651056770906015076056810763456358567190100156695615665659
|
||||
Q = d * p192
|
||||
if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
|
||||
raise TestFailure("*** p192 * d came out wrong.")
|
||||
else:
|
||||
print_("p192 * d came out right.")
|
||||
|
||||
k = 6140507067065001063065065565667405560006161556565665656654
|
||||
R = k * p192
|
||||
if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
|
||||
or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
|
||||
raise TestFailure("*** k * p192 came out wrong.")
|
||||
else:
|
||||
print_("k * p192 came out right.")
|
||||
|
||||
u1 = 2563697409189434185194736134579731015366492496392189760599
|
||||
u2 = 6266643813348617967186477710235785849136406323338782220568
|
||||
temp = u1 * p192 + u2 * Q
|
||||
if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
|
||||
or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
|
||||
raise TestFailure("*** u1 * p192 + u2 * Q came out wrong.")
|
||||
else:
|
||||
print_("u1 * p192 + u2 * Q came out right.")
|
||||
|
||||
e = 968236873715988614170569073515315707566766479517
|
||||
pubk = Public_key( generator_192, generator_192 * d )
|
||||
privk = Private_key( pubk, d )
|
||||
sig = privk.sign( e, k )
|
||||
r, s = sig.r, sig.s
|
||||
if r != 3342403536405981729393488334694600415596881826869351677613 \
|
||||
or s != 5735822328888155254683894997897571951568553642892029982342:
|
||||
raise TestFailure("*** r or s came out wrong.")
|
||||
else:
|
||||
print_("r and s came out right.")
|
||||
|
||||
valid = pubk.verifies( e, sig )
|
||||
if valid: print_("Signature verified OK.")
|
||||
else: raise TestFailure("*** Signature failed verification.")
|
||||
|
||||
valid = pubk.verifies( e-1, sig )
|
||||
if not valid: print_("Forgery was correctly rejected.")
|
||||
else: raise TestFailure("*** Forgery was erroneously accepted.")
|
||||
|
||||
print_("Testing point validity, as per ECDSAVS.pdf B.2.2:")
|
||||
|
||||
test_point_validity( \
|
||||
p192, \
|
||||
0xcd6d0f029a023e9aaca429615b8f577abee685d8257cc83a, \
|
||||
0x00019c410987680e9fb6c0b6ecc01d9a2647c8bae27721bacdfc, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x00017f2fce203639e9eaf9fb50b81fc32776b30e3b02af16c73b, \
|
||||
0x95da95c5e72dd48e229d4748d4eee658a9a54111b23b2adb, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x4f77f8bc7fccbadd5760f4938746d5f253ee2168c1cf2792, \
|
||||
0x000147156ff824d131629739817edb197717c41aab5c2a70f0f6, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0xc58d61f88d905293bcd4cd0080bcb1b7f811f2ffa41979f6, \
|
||||
0x8804dc7a7c4c7f8b5d437f5156f3312ca7d6de8a0e11867f, \
|
||||
True )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0xcdf56c1aa3d8afc53c521adf3ffb96734a6a630a4a5b5a70, \
|
||||
0x97c1c44a5fb229007b5ec5d25f7413d170068ffd023caa4e, \
|
||||
True )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x89009c0dc361c81e99280c8e91df578df88cdf4b0cdedced, \
|
||||
0x27be44a529b7513e727251f128b34262a0fd4d8ec82377b9, \
|
||||
True )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x6a223d00bd22c52833409a163e057e5b5da1def2a197dd15, \
|
||||
0x7b482604199367f1f303f9ef627f922f97023e90eae08abf, \
|
||||
True )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x6dccbde75c0948c98dab32ea0bc59fe125cf0fb1a3798eda, \
|
||||
0x0001171a3e0fa60cf3096f4e116b556198de430e1fbd330c8835, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0xd266b39e1f491fc4acbbbc7d098430931cfa66d55015af12, \
|
||||
0x193782eb909e391a3148b7764e6b234aa94e48d30a16dbb2, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x9d6ddbcd439baa0c6b80a654091680e462a7d1d3f1ffeb43, \
|
||||
0x6ad8efc4d133ccf167c44eb4691c80abffb9f82b932b8caa, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0x146479d944e6bda87e5b35818aa666a4c998a71f4e95edbc, \
|
||||
0xa86d6fe62bc8fbd88139693f842635f687f132255858e7f6, \
|
||||
False )
|
||||
|
||||
test_point_validity(
|
||||
p192, \
|
||||
0xe594d4a598046f3598243f50fd2c7bd7d380edb055802253, \
|
||||
0x509014c0c4d6b536e3ca750ec09066af39b4c8616a53a923, \
|
||||
False )
|
||||
|
||||
print_("Trying signature-verification tests from ECDSAVS.pdf B.2.4:")
|
||||
print_("P-192:")
|
||||
Msg = 0x84ce72aa8699df436059f052ac51b6398d2511e49631bcb7e71f89c499b9ee425dfbc13a5f6d408471b054f2655617cbbaf7937b7c80cd8865cf02c8487d30d2b0fbd8b2c4e102e16d828374bbc47b93852f212d5043c3ea720f086178ff798cc4f63f787b9c2e419efa033e7644ea7936f54462dc21a6c4580725f7f0e7d158
|
||||
Qx = 0xd9dbfb332aa8e5ff091e8ce535857c37c73f6250ffb2e7ac
|
||||
Qy = 0x282102e364feded3ad15ddf968f88d8321aa268dd483ebc4
|
||||
R = 0x64dca58a20787c488d11d6dd96313f1b766f2d8efe122916
|
||||
S = 0x1ecba28141e84ab4ecad92f56720e2cc83eb3d22dec72479
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, True )
|
||||
|
||||
Msg = 0x94bb5bacd5f8ea765810024db87f4224ad71362a3c28284b2b9f39fab86db12e8beb94aae899768229be8fdb6c4f12f28912bb604703a79ccff769c1607f5a91450f30ba0460d359d9126cbd6296be6d9c4bb96c0ee74cbb44197c207f6db326ab6f5a659113a9034e54be7b041ced9dcf6458d7fb9cbfb2744d999f7dfd63f4
|
||||
Qx = 0x3e53ef8d3112af3285c0e74842090712cd324832d4277ae7
|
||||
Qy = 0xcc75f8952d30aec2cbb719fc6aa9934590b5d0ff5a83adb7
|
||||
R = 0x8285261607283ba18f335026130bab31840dcfd9c3e555af
|
||||
S = 0x356d89e1b04541afc9704a45e9c535ce4a50929e33d7e06c
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, True )
|
||||
|
||||
Msg = 0xf6227a8eeb34afed1621dcc89a91d72ea212cb2f476839d9b4243c66877911b37b4ad6f4448792a7bbba76c63bdd63414b6facab7dc71c3396a73bd7ee14cdd41a659c61c99b779cecf07bc51ab391aa3252386242b9853ea7da67fd768d303f1b9b513d401565b6f1eb722dfdb96b519fe4f9bd5de67ae131e64b40e78c42dd
|
||||
Qx = 0x16335dbe95f8e8254a4e04575d736befb258b8657f773cb7
|
||||
Qy = 0x421b13379c59bc9dce38a1099ca79bbd06d647c7f6242336
|
||||
R = 0x4141bd5d64ea36c5b0bd21ef28c02da216ed9d04522b1e91
|
||||
S = 0x159a6aa852bcc579e821b7bb0994c0861fb08280c38daa09
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x16b5f93afd0d02246f662761ed8e0dd9504681ed02a253006eb36736b563097ba39f81c8e1bce7a16c1339e345efabbc6baa3efb0612948ae51103382a8ee8bc448e3ef71e9f6f7a9676694831d7f5dd0db5446f179bcb737d4a526367a447bfe2c857521c7f40b6d7d7e01a180d92431fb0bbd29c04a0c420a57b3ed26ccd8a
|
||||
Qx = 0xfd14cdf1607f5efb7b1793037b15bdf4baa6f7c16341ab0b
|
||||
Qy = 0x83fa0795cc6c4795b9016dac928fd6bac32f3229a96312c4
|
||||
R = 0x8dfdb832951e0167c5d762a473c0416c5c15bc1195667dc1
|
||||
S = 0x1720288a2dc13fa1ec78f763f8fe2ff7354a7e6fdde44520
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x08a2024b61b79d260e3bb43ef15659aec89e5b560199bc82cf7c65c77d39192e03b9a895d766655105edd9188242b91fbde4167f7862d4ddd61e5d4ab55196683d4f13ceb90d87aea6e07eb50a874e33086c4a7cb0273a8e1c4408f4b846bceae1ebaac1b2b2ea851a9b09de322efe34cebe601653efd6ddc876ce8c2f2072fb
|
||||
Qx = 0x674f941dc1a1f8b763c9334d726172d527b90ca324db8828
|
||||
Qy = 0x65adfa32e8b236cb33a3e84cf59bfb9417ae7e8ede57a7ff
|
||||
R = 0x9508b9fdd7daf0d8126f9e2bc5a35e4c6d800b5b804d7796
|
||||
S = 0x36f2bf6b21b987c77b53bb801b3435a577e3d493744bfab0
|
||||
test_signature_validity( Msg, Qx, Qy, R, S, False )
|
||||
|
||||
Msg = 0x1843aba74b0789d4ac6b0b8923848023a644a7b70afa23b1191829bbe4397ce15b629bf21a8838298653ed0c19222b95fa4f7390d1b4c844d96e645537e0aae98afb5c0ac3bd0e4c37f8daaff25556c64e98c319c52687c904c4de7240a1cc55cd9756b7edaef184e6e23b385726e9ffcba8001b8f574987c1a3fedaaa83ca6d
|
||||
  Qx = 0x10ecca1aad7220b56a62008b35170bfd5e35885c4014a19f
  Qy = 0x04eb61984c6c12ade3bc47f3c629ece7aa0a033b9948d686
  R = 0x82bfa4e82c0dfe9274169b86694e76ce993fd83b5c60f325
  S = 0xa97685676c59a65dbde002fe9d613431fb183e8006d05633
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x5a478f4084ddd1a7fea038aa9732a822106385797d02311aeef4d0264f824f698df7a48cfb6b578cf3da416bc0799425bb491be5b5ecc37995b85b03420a98f2c4dc5c31a69a379e9e322fbe706bbcaf0f77175e05cbb4fa162e0da82010a278461e3e974d137bc746d1880d6eb02aa95216014b37480d84b87f717bb13f76e1
  Qx = 0x6636653cb5b894ca65c448277b29da3ad101c4c2300f7c04
  Qy = 0xfdf1cbb3fc3fd6a4f890b59e554544175fa77dbdbeb656c1
  R = 0xeac2ddecddfb79931a9c3d49c08de0645c783a24cb365e1c
  S = 0x3549fee3cfa7e5f93bc47d92d8ba100e881a2a93c22f8d50
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0xc598774259a058fa65212ac57eaa4f52240e629ef4c310722088292d1d4af6c39b49ce06ba77e4247b20637174d0bd67c9723feb57b5ead232b47ea452d5d7a089f17c00b8b6767e434a5e16c231ba0efa718a340bf41d67ea2d295812ff1b9277daacb8bc27b50ea5e6443bcf95ef4e9f5468fe78485236313d53d1c68f6ba2
  Qx = 0xa82bd718d01d354001148cd5f69b9ebf38ff6f21898f8aaa
  Qy = 0xe67ceede07fc2ebfafd62462a51e4b6c6b3d5b537b7caf3e
  R = 0x4d292486c620c3de20856e57d3bb72fcde4a73ad26376955
  S = 0xa85289591a6081d5728825520e62ff1c64f94235c04c7f95
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0xca98ed9db081a07b7557f24ced6c7b9891269a95d2026747add9e9eb80638a961cf9c71a1b9f2c29744180bd4c3d3db60f2243c5c0b7cc8a8d40a3f9a7fc910250f2187136ee6413ffc67f1a25e1c4c204fa9635312252ac0e0481d89b6d53808f0c496ba87631803f6c572c1f61fa049737fdacce4adff757afed4f05beb658
  Qx = 0x7d3b016b57758b160c4fca73d48df07ae3b6b30225126c2f
  Qy = 0x4af3790d9775742bde46f8da876711be1b65244b2b39e7ec
  R = 0x95f778f5f656511a5ab49a5d69ddd0929563c29cbc3a9e62
  S = 0x75c87fc358c251b4c83d2dd979faad496b539f9f2ee7a289
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x31dd9a54c8338bea06b87eca813d555ad1850fac9742ef0bbe40dad400e10288acc9c11ea7dac79eb16378ebea9490e09536099f1b993e2653cd50240014c90a9c987f64545abc6a536b9bd2435eb5e911fdfde2f13be96ea36ad38df4ae9ea387b29cced599af777338af2794820c9cce43b51d2112380a35802ab7e396c97a
  Qx = 0x9362f28c4ef96453d8a2f849f21e881cd7566887da8beb4a
  Qy = 0xe64d26d8d74c48a024ae85d982ee74cd16046f4ee5333905
  R = 0xf3923476a296c88287e8de914b0b324ad5a963319a4fe73b
  S = 0xf0baeed7624ed00d15244d8ba2aede085517dbdec8ac65f5
  test_signature_validity( Msg, Qx, Qy, R, S, True )

  Msg = 0xb2b94e4432267c92f9fdb9dc6040c95ffa477652761290d3c7de312283f6450d89cc4aabe748554dfb6056b2d8e99c7aeaad9cdddebdee9dbc099839562d9064e68e7bb5f3a6bba0749ca9a538181fc785553a4000785d73cc207922f63e8ce1112768cb1de7b673aed83a1e4a74592f1268d8e2a4e9e63d414b5d442bd0456d
  Qx = 0xcc6fc032a846aaac25533eb033522824f94e670fa997ecef
  Qy = 0xe25463ef77a029eccda8b294fd63dd694e38d223d30862f1
  R = 0x066b1d07f3a40e679b620eda7f550842a35c18b80c5ebe06
  S = 0xa0b0fb201e8f2df65e2c4508ef303bdc90d934016f16b2dc
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x4366fcadf10d30d086911de30143da6f579527036937007b337f7282460eae5678b15cccda853193ea5fc4bc0a6b9d7a31128f27e1214988592827520b214eed5052f7775b750b0c6b15f145453ba3fee24a085d65287e10509eb5d5f602c440341376b95c24e5c4727d4b859bfe1483d20538acdd92c7997fa9c614f0f839d7
  Qx = 0x955c908fe900a996f7e2089bee2f6376830f76a19135e753
  Qy = 0xba0c42a91d3847de4a592a46dc3fdaf45a7cc709b90de520
  R = 0x1f58ad77fc04c782815a1405b0925e72095d906cbf52a668
  S = 0xf2e93758b3af75edf784f05a6761c9b9a6043c66b845b599
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x543f8af57d750e33aa8565e0cae92bfa7a1ff78833093421c2942cadf9986670a5ff3244c02a8225e790fbf30ea84c74720abf99cfd10d02d34377c3d3b41269bea763384f372bb786b5846f58932defa68023136cd571863b304886e95e52e7877f445b9364b3f06f3c28da12707673fecb4b8071de06b6e0a3c87da160cef3
  Qx = 0x31f7fa05576d78a949b24812d4383107a9a45bb5fccdd835
  Qy = 0x8dc0eb65994a90f02b5e19bd18b32d61150746c09107e76b
  R = 0xbe26d59e4e883dde7c286614a767b31e49ad88789d3a78ff
  S = 0x8762ca831c1ce42df77893c9b03119428e7a9b819b619068
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0xd2e8454143ce281e609a9d748014dcebb9d0bc53adb02443a6aac2ffe6cb009f387c346ecb051791404f79e902ee333ad65e5c8cb38dc0d1d39a8dc90add5023572720e5b94b190d43dd0d7873397504c0c7aef2727e628eb6a74411f2e400c65670716cb4a815dc91cbbfeb7cfe8c929e93184c938af2c078584da045e8f8d1
  Qx = 0x66aa8edbbdb5cf8e28ceb51b5bda891cae2df84819fe25c0
  Qy = 0x0c6bc2f69030a7ce58d4a00e3b3349844784a13b8936f8da
  R = 0xa4661e69b1734f4a71b788410a464b71e7ffe42334484f23
  S = 0x738421cf5e049159d69c57a915143e226cac8355e149afe9
  test_signature_validity( Msg, Qx, Qy, R, S, False )

  Msg = 0x6660717144040f3e2f95a4e25b08a7079c702a8b29babad5a19a87654bc5c5afa261512a11b998a4fb36b5d8fe8bd942792ff0324b108120de86d63f65855e5461184fc96a0a8ffd2ce6d5dfb0230cbbdd98f8543e361b3205f5da3d500fdc8bac6db377d75ebef3cb8f4d1ff738071ad0938917889250b41dd1d98896ca06fb
  Qx = 0xbcfacf45139b6f5f690a4c35a5fffa498794136a2353fc77
  Qy = 0x6f4a6c906316a6afc6d98fe1f0399d056f128fe0270b0f22
  R = 0x9db679a3dafe48f7ccad122933acfe9da0970b71c94c21c1
  S = 0x984c2db99827576c0a41a5da41e07d8cc768bc82f18c9da9
  test_signature_validity( Msg, Qx, Qy, R, S, False )


  print_("Testing the example code:")

  # Building a public/private key pair from the NIST Curve P-192:

  g = generator_192
  n = g.order()

  # (random.SystemRandom is supposed to provide
  # crypto-quality random numbers, but as Debian recently
  # illustrated, a systems programmer can accidentally
  # demolish this security, so in serious applications
  # further precautions are appropriate.)

  randrange = random.SystemRandom().randrange

  secret = randrange( 1, n )
  pubkey = Public_key( g, g * secret )
  privkey = Private_key( pubkey, secret )

  # Signing a hash value:

  hash = randrange( 1, n )
  signature = privkey.sign( hash, randrange( 1, n ) )

  # Verifying a signature for a hash value:

  if pubkey.verifies( hash, signature ):
    print_("Demo verification succeeded.")
  else:
    raise TestFailure("*** Demo verification failed.")

  if pubkey.verifies( hash-1, signature ):
    raise TestFailure( "**** Demo verification failed to reject tampered hash.")
  else:
    print_("Demo verification correctly rejected tampered hash.")

if __name__ == "__main__":
  __main__()
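The example above draws the signing nonce from random.SystemRandom. The rfc6979 module added later in this changeset derives the nonce deterministically from the key and the message digest instead. A minimal hedged sketch of combining the two (not part of the diff; it assumes the package is importable as `ecdsa`, and the secret scalar below is purely illustrative):

from hashlib import sha1
from binascii import hexlify
from ecdsa import ecdsa, rfc6979

g = ecdsa.generator_192
n = g.order()
secret = 0x6fab034934e4c0fc9ae67f5b5659a9d7d1fefd187ee09fd4  # illustrative only
pubkey = ecdsa.Public_key(g, g * secret)
privkey = ecdsa.Private_key(pubkey, secret)

digest = sha1(b"sample").digest()
h = int(hexlify(digest), 16)                     # the digest as an integer
k = rfc6979.generate_k(n, secret, sha1, digest)  # deterministic nonce
sig = privkey.sign(h, k)
assert pubkey.verifies(h, sig)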
293  bin/python/ecdsa/ellipticcurve.py  Normal file
@@ -0,0 +1,293 @@
#! /usr/bin/env python
#
# Implementation of elliptic curves, for cryptographic applications.
#
# This module doesn't provide any way to choose a random elliptic
# curve, nor to verify that an elliptic curve was chosen randomly,
# because one can simply use NIST's standard curves.
#
# Notes from X9.62-1998 (draft):
#   Nomenclature:
#     - Q is a public key.
#     The "Elliptic Curve Domain Parameters" include:
#     - q is the "field size", which in our case equals p.
#     - p is a big prime.
#     - G is a point of prime order (5.1.1.1).
#     - n is the order of G (5.1.1.1).
#   Public-key validation (5.2.2):
#     - Verify that Q is not the point at infinity.
#     - Verify that X_Q and Y_Q are in [0,p-1].
#     - Verify that Q is on the curve.
#     - Verify that nQ is the point at infinity.
#   Signature generation (5.3):
#     - Pick random k from [1,n-1].
#   Signature checking (5.4.2):
#     - Verify that r and s are in [1,n-1].
#
# Version of 2008.11.25.
#
# Revision history:
#    2005.12.31 - Initial version.
#    2008.11.25 - Change CurveFp.is_on to contains_point.
#
# Written in 2005 by Peter Pearson and placed in the public domain.

from __future__ import division

from .six import print_
from . import numbertheory

class CurveFp( object ):
  """Elliptic Curve over the field of integers modulo a prime."""
  def __init__( self, p, a, b ):
    """The curve of points satisfying y^2 = x^3 + a*x + b (mod p)."""
    self.__p = p
    self.__a = a
    self.__b = b

  def p( self ):
    return self.__p

  def a( self ):
    return self.__a

  def b( self ):
    return self.__b

  def contains_point( self, x, y ):
    """Is the point (x,y) on this curve?"""
    return ( y * y - ( x * x * x + self.__a * x + self.__b ) ) % self.__p == 0



class Point( object ):
  """A point on an elliptic curve. Altering x and y is forbidden,
     but they can be read by the x() and y() methods."""
  def __init__( self, curve, x, y, order = None ):
    """curve, x, y, order; order (optional) is the order of this point."""
    self.__curve = curve
    self.__x = x
    self.__y = y
    self.__order = order
    # self.curve is allowed to be None only for INFINITY:
    if self.__curve: assert self.__curve.contains_point( x, y )
    if order: assert self * order == INFINITY

  def __eq__( self, other ):
    """Return True if the points are identical, False otherwise."""
    if self.__curve == other.__curve \
       and self.__x == other.__x \
       and self.__y == other.__y:
      return True
    else:
      return False

  def __add__( self, other ):
    """Add one point to another point."""

    # X9.62 B.3:

    if other == INFINITY: return self
    if self == INFINITY: return other
    assert self.__curve == other.__curve
    if self.__x == other.__x:
      if ( self.__y + other.__y ) % self.__curve.p() == 0:
        return INFINITY
      else:
        return self.double()

    p = self.__curve.p()

    l = ( ( other.__y - self.__y ) * \
          numbertheory.inverse_mod( other.__x - self.__x, p ) ) % p

    x3 = ( l * l - self.__x - other.__x ) % p
    y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

    return Point( self.__curve, x3, y3 )

  def __mul__( self, other ):
    """Multiply a point by an integer."""

    def leftmost_bit( x ):
      assert x > 0
      result = 1
      while result <= x: result = 2 * result
      return result // 2

    e = other
    if self.__order: e = e % self.__order
    if e == 0: return INFINITY
    if self == INFINITY: return INFINITY
    assert e > 0

    # From X9.62 D.3.2:

    e3 = 3 * e
    negative_self = Point( self.__curve, self.__x, -self.__y, self.__order )
    i = leftmost_bit( e3 ) // 2
    result = self
    # print_("Multiplying %s by %d (e3 = %d):" % ( self, other, e3 ))
    while i > 1:
      result = result.double()
      if ( e3 & i ) != 0 and ( e & i ) == 0: result = result + self
      if ( e3 & i ) == 0 and ( e & i ) != 0: result = result + negative_self
      # print_(". . . i = %d, result = %s" % ( i, result ))
      i = i // 2

    return result

  def __rmul__( self, other ):
    """Multiply a point by an integer."""

    return self * other

  def __str__( self ):
    if self == INFINITY: return "infinity"
    return "(%d,%d)" % ( self.__x, self.__y )

  def double( self ):
    """Return a new point that is twice the old."""

    if self == INFINITY:
      return INFINITY

    # X9.62 B.3:

    p = self.__curve.p()
    a = self.__curve.a()

    l = ( ( 3 * self.__x * self.__x + a ) * \
          numbertheory.inverse_mod( 2 * self.__y, p ) ) % p

    x3 = ( l * l - 2 * self.__x ) % p
    y3 = ( l * ( self.__x - x3 ) - self.__y ) % p

    return Point( self.__curve, x3, y3 )

  def x( self ):
    return self.__x

  def y( self ):
    return self.__y

  def curve( self ):
    return self.__curve

  def order( self ):
    return self.__order


# This one point is the Point At Infinity for all purposes:
INFINITY = Point( None, None, None )

def __main__():

  class FailedTest(Exception): pass
  def test_add( c, x1, y1, x2, y2, x3, y3 ):
    """We expect that on curve c, (x1,y1) + (x2, y2 ) = (x3, y3)."""
    p1 = Point( c, x1, y1 )
    p2 = Point( c, x2, y2 )
    p3 = p1 + p2
    print_("%s + %s = %s" % ( p1, p2, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")

  def test_double( c, x1, y1, x3, y3 ):
    """We expect that on curve c, 2*(x1,y1) = (x3, y3)."""
    p1 = Point( c, x1, y1 )
    p3 = p1.double()
    print_("%s doubled = %s" % ( p1, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")

  def test_double_infinity( c ):
    """We expect that on curve c, 2*INFINITY = INFINITY."""
    p1 = INFINITY
    p3 = p1.double()
    print_("%s doubled = %s" % ( p1, p3 ), end=' ')
    if p3.x() != INFINITY.x() or p3.y() != INFINITY.y():
      raise FailedTest("Failure: should give (%d,%d)." % ( INFINITY.x(), INFINITY.y() ))
    else:
      print_(" Good.")

  def test_multiply( c, x1, y1, m, x3, y3 ):
    """We expect that on curve c, m*(x1,y1) = (x3,y3)."""
    p1 = Point( c, x1, y1 )
    p3 = p1 * m
    print_("%s * %d = %s" % ( p1, m, p3 ), end=' ')
    if p3.x() != x3 or p3.y() != y3:
      raise FailedTest("Failure: should give (%d,%d)." % ( x3, y3 ))
    else:
      print_(" Good.")


  # A few tests from X9.62 B.3:

  c = CurveFp( 23, 1, 1 )
  test_add( c, 3, 10, 9, 7, 17, 20 )
  test_double( c, 3, 10, 7, 12 )
  test_add( c, 3, 10, 3, 10, 7, 12 ) # (Should just invoke double.)
  test_multiply( c, 3, 10, 2, 7, 12 )

  test_double_infinity(c)

  # From X9.62 I.1 (p. 96):

  g = Point( c, 13, 7, 7 )

  check = INFINITY
  for i in range( 7 + 1 ):
    p = ( i % 7 ) * g
    print_("%s * %d = %s, expected %s . . ." % ( g, i, p, check ), end=' ')
    if p == check:
      print_(" Good.")
    else:
      raise FailedTest("Bad.")
    check = check + g

  # NIST Curve P-192:
  p = 6277101735386680763835789423207666416083908700390324961279
  r = 6277101735386680763835789423176059013767194773182842284081
  #s = 0x3045ae6fc8422f64ed579528d38120eae12196d5L
  c = 0x3099d2bbbfcb2538542dcd5fb078b6ef5f3d6fe2c745de65
  b = 0x64210519e59c80e70fa7e9ab72243049feb8deecc146b9b1
  Gx = 0x188da80eb03090f67cbf20eb43a18800f4ff0afd82ff1012
  Gy = 0x07192b95ffc8da78631011ed6b24cdd573f977a11e794811

  c192 = CurveFp( p, -3, b )
  p192 = Point( c192, Gx, Gy, r )

  # Checking against some sample computations presented
  # in X9.62:

  d = 651056770906015076056810763456358567190100156695615665659
  Q = d * p192
  if Q.x() != 0x62B12D60690CDCF330BABAB6E69763B471F994DD702D16A5:
    raise FailedTest("p192 * d came out wrong.")
  else:
    print_("p192 * d came out right.")

  k = 6140507067065001063065065565667405560006161556565665656654
  R = k * p192
  if R.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or R.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise FailedTest("k * p192 came out wrong.")
  else:
    print_("k * p192 came out right.")

  u1 = 2563697409189434185194736134579731015366492496392189760599
  u2 = 6266643813348617967186477710235785849136406323338782220568
  temp = u1 * p192 + u2 * Q
  if temp.x() != 0x885052380FF147B734C330C43D39B2C4A89F29B0F749FEAD \
     or temp.y() != 0x9CF9FA1CBEFEFB917747A3BB29C072B9289C2547884FD835:
    raise FailedTest("u1 * p192 + u2 * Q came out wrong.")
  else:
    print_("u1 * p192 + u2 * Q came out right.")

if __name__ == "__main__":
  __main__()
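The Point class above implements the X9.62 group law with order-aware scalar multiplication. A small hedged sketch (not part of the diff) of how it behaves on the toy curve used in the self-test; it assumes the package is importable as `ecdsa`:

from ecdsa.ellipticcurve import CurveFp, Point, INFINITY

c = CurveFp(23, 1, 1)             # y^2 = x^3 + x + 1 over GF(23), from X9.62 B.3
g = Point(c, 13, 7, 7)            # a point of order 7

assert g + g == g.double()        # adding a point to itself dispatches to double()
assert 3 * g + 4 * g == INFINITY  # 3g and 4g are inverses, since 3 + 4 = 7 = order
assert 7 * g == INFINITY          # scalars wrap modulo the point's order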
283  bin/python/ecdsa/keys.py  Normal file
@@ -0,0 +1,283 @@
import binascii

from . import ecdsa
from . import der
from . import rfc6979
from .curves import NIST192p, find_curve
from .util import string_to_number, number_to_string, randrange
from .util import sigencode_string, sigdecode_string
from .util import oid_ecPublicKey, encoded_oid_ecPublicKey
from .six import PY3, b
from hashlib import sha1

class BadSignatureError(Exception):
    pass
class BadDigestError(Exception):
    pass

class VerifyingKey:
    def __init__(self, _error__please_use_generate=None):
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def from_public_point(klass, point, curve=NIST192p, hashfunc=sha1):
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.pubkey = ecdsa.Public_key(curve.generator, point)
        self.pubkey.order = curve.order
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1,
                    validate_point=True):
        order = curve.order
        assert len(string) == curve.verifying_key_length, \
               (len(string), curve.verifying_key_length)
        xs = string[:curve.baselen]
        ys = string[curve.baselen:]
        assert len(xs) == curve.baselen, (len(xs), curve.baselen)
        assert len(ys) == curve.baselen, (len(ys), curve.baselen)
        x = string_to_number(xs)
        y = string_to_number(ys)
        if validate_point:
            assert ecdsa.point_is_valid(curve.generator, x, y)
        from . import ellipticcurve
        point = ellipticcurve.Point(curve.curve, x, y, order)
        return klass.from_public_point(point, curve, hashfunc)

    @classmethod
    def from_pem(klass, string):
        return klass.from_der(der.unpem(string))

    @classmethod
    def from_der(klass, string):
        # [[oid_ecPublicKey,oid_curve], point_str_bitstring]
        s1, empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey: %s" %
                                    binascii.hexlify(empty))
        s2, point_str_bitstring = der.remove_sequence(s1)
        # s2 = oid_ecPublicKey,oid_curve
        oid_pk, rest = der.remove_object(s2)
        oid_curve, empty = der.remove_object(rest)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER pubkey objects: %s" %
                                    binascii.hexlify(empty))
        assert oid_pk == oid_ecPublicKey, (oid_pk, oid_ecPublicKey)
        curve = find_curve(oid_curve)
        point_str, empty = der.remove_bitstring(point_str_bitstring)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after pubkey pointstring: %s" %
                                    binascii.hexlify(empty))
        assert point_str.startswith(b("\x00\x04"))
        return klass.from_string(point_str[2:], curve)

    def to_string(self):
        # VerifyingKey.from_string(vk.to_string()) == vk as long as the
        # curves are the same: the curve itself is not included in the
        # serialized form
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        return x_str + y_str

    def to_pem(self):
        return der.topem(self.to_der(), "PUBLIC KEY")

    def to_der(self):
        order = self.pubkey.order
        x_str = number_to_string(self.pubkey.point.x(), order)
        y_str = number_to_string(self.pubkey.point.y(), order)
        point_str = b("\x00\x04") + x_str + y_str
        return der.encode_sequence(der.encode_sequence(encoded_oid_ecPublicKey,
                                                       self.curve.encoded_oid),
                                   der.encode_bitstring(point_str))

    def verify(self, signature, data, hashfunc=None, sigdecode=sigdecode_string):
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()
        return self.verify_digest(signature, digest, sigdecode)

    def verify_digest(self, signature, digest, sigdecode=sigdecode_string):
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        r, s = sigdecode(signature, self.pubkey.order)
        sig = ecdsa.Signature(r, s)
        if self.pubkey.verifies(number, sig):
            return True
        raise BadSignatureError

class SigningKey:
    def __init__(self, _error__please_use_generate=None):
        if not _error__please_use_generate:
            raise TypeError("Please use SigningKey.generate() to construct me")

    @classmethod
    def generate(klass, curve=NIST192p, entropy=None, hashfunc=sha1):
        secexp = randrange(curve.order, entropy)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    # to create a signing key from a short (arbitrary-length) seed, convert
    # that seed into an integer with something like
    # secexp=util.randrange_from_seed__X(seed, curve.order), and then pass
    # that integer into SigningKey.from_secret_exponent(secexp, curve)

    @classmethod
    def from_secret_exponent(klass, secexp, curve=NIST192p, hashfunc=sha1):
        self = klass(_error__please_use_generate=True)
        self.curve = curve
        self.default_hashfunc = hashfunc
        self.baselen = curve.baselen
        n = curve.order
        assert 1 <= secexp < n
        pubkey_point = curve.generator*secexp
        pubkey = ecdsa.Public_key(curve.generator, pubkey_point)
        pubkey.order = n
        self.verifying_key = VerifyingKey.from_public_point(pubkey_point, curve,
                                                            hashfunc)
        self.privkey = ecdsa.Private_key(pubkey, secexp)
        self.privkey.order = n
        return self

    @classmethod
    def from_string(klass, string, curve=NIST192p, hashfunc=sha1):
        assert len(string) == curve.baselen, (len(string), curve.baselen)
        secexp = string_to_number(string)
        return klass.from_secret_exponent(secexp, curve, hashfunc)

    @classmethod
    def from_pem(klass, string, hashfunc=sha1):
        # the privkey pem file has two sections: "EC PARAMETERS" and "EC
        # PRIVATE KEY". The first is redundant.
        if PY3 and isinstance(string, str):
            string = string.encode()
        privkey_pem = string[string.index(b("-----BEGIN EC PRIVATE KEY-----")):]
        return klass.from_der(der.unpem(privkey_pem), hashfunc)

    @classmethod
    def from_der(klass, string, hashfunc=sha1):
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        s, empty = der.remove_sequence(string)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey: %s" %
                                    binascii.hexlify(empty))
        one, s = der.remove_integer(s)
        if one != 1:
            raise der.UnexpectedDER("expected '1' at start of DER privkey,"
                                    " got %d" % one)
        privkey_str, s = der.remove_octet_string(s)
        tag, curve_oid_str, s = der.remove_constructed(s)
        if tag != 0:
            raise der.UnexpectedDER("expected tag 0 in DER privkey,"
                                    " got %d" % tag)
        curve_oid, empty = der.remove_object(curve_oid_str)
        if empty != b(""):
            raise der.UnexpectedDER("trailing junk after DER privkey "
                                    "curve_oid: %s" % binascii.hexlify(empty))
        curve = find_curve(curve_oid)

        # we don't actually care about the following fields
        #
        #tag, pubkey_bitstring, s = der.remove_constructed(s)
        #if tag != 1:
        #    raise der.UnexpectedDER("expected tag 1 in DER privkey, got %d"
        #                            % tag)
        #pubkey_str = der.remove_bitstring(pubkey_bitstring)
        #if empty != "":
        #    raise der.UnexpectedDER("trailing junk after DER privkey "
        #                            "pubkeystr: %s" % binascii.hexlify(empty))

        # our from_string method likes fixed-length privkey strings
        if len(privkey_str) < curve.baselen:
            privkey_str = b("\x00")*(curve.baselen-len(privkey_str)) + privkey_str
        return klass.from_string(privkey_str, curve, hashfunc)

    def to_string(self):
        secexp = self.privkey.secret_multiplier
        s = number_to_string(secexp, self.privkey.order)
        return s

    def to_pem(self):
        # TODO: "BEGIN ECPARAMETERS"
        return der.topem(self.to_der(), "EC PRIVATE KEY")

    def to_der(self):
        # SEQ([int(1), octetstring(privkey),cont[0], oid(secp224r1),
        #      cont[1],bitstring])
        encoded_vk = b("\x00\x04") + self.get_verifying_key().to_string()
        return der.encode_sequence(der.encode_integer(1),
                                   der.encode_octet_string(self.to_string()),
                                   der.encode_constructed(0, self.curve.encoded_oid),
                                   der.encode_constructed(1, der.encode_bitstring(encoded_vk)),
                                   )

    def get_verifying_key(self):
        return self.verifying_key

    def sign_deterministic(self, data, hashfunc=None, sigencode=sigencode_string):
        hashfunc = hashfunc or self.default_hashfunc
        digest = hashfunc(data).digest()

        return self.sign_digest_deterministic(digest, hashfunc=hashfunc, sigencode=sigencode)

    def sign_digest_deterministic(self, digest, hashfunc=None, sigencode=sigencode_string):
        """
        Calculates 'k' from the data itself, removing the need for a strong
        random generator and producing deterministic (reproducible) signatures.
        See RFC 6979 for more details.
        """
        secexp = self.privkey.secret_multiplier
        k = rfc6979.generate_k(
            self.curve.generator.order(), secexp, hashfunc, digest)

        return self.sign_digest(digest, sigencode=sigencode, k=k)

    def sign(self, data, entropy=None, hashfunc=None, sigencode=sigencode_string, k=None):
        """
        hashfunc= should behave like hashlib.sha1 . The output length of the
        hash (in bytes) must not be longer than the length of the curve order
        (rounded up to the nearest byte), so using SHA256 with NIST256p is
        ok, but SHA256 with NIST192p is not. (In the 2**-96ish unlikely event
        of a hash output larger than the curve order, the hash will
        effectively be wrapped mod n).

        Use hashfunc=hashlib.sha1 to match openssl's -ecdsa-with-SHA1 mode,
        or hashfunc=hashlib.sha256 for openssl-1.0.0's -ecdsa-with-SHA256.
        """

        hashfunc = hashfunc or self.default_hashfunc
        h = hashfunc(data).digest()
        return self.sign_digest(h, entropy, sigencode, k)

    def sign_digest(self, digest, entropy=None, sigencode=sigencode_string, k=None):
        if len(digest) > self.curve.baselen:
            raise BadDigestError("this curve (%s) is too short "
                                 "for your digest (%d)" % (self.curve.name,
                                                           8*len(digest)))
        number = string_to_number(digest)
        r, s = self.sign_number(number, entropy, k)
        return sigencode(r, s, self.privkey.order)

    def sign_number(self, number, entropy=None, k=None):
        # returns a pair of numbers
        order = self.privkey.order
        # privkey.sign() may raise RuntimeError in the amazingly unlikely
        # (2**-192) event that r=0 or s=0, because that would leak the key.
        # We could re-try with a different 'k', but we couldn't test that
        # code, so I choose to allow the signature to fail instead.

        # If k is set, it is used directly. In other cases
        # it is generated using the entropy function.
        if k is not None:
            _k = k
        else:
            _k = randrange(order, entropy)

        assert 1 <= _k < order
        sig = self.privkey.sign(number, _k)
        return sig.r, sig.s
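keys.py wraps the low-level primitives in a two-class API: SigningKey holds the secret exponent, VerifyingKey the public point, with string/DER/PEM serializers on both. A hedged usage sketch (illustrative, not part of the diff; assumes the package is importable as `ecdsa`):

from ecdsa.keys import SigningKey, BadSignatureError
from ecdsa.curves import NIST192p

sk = SigningKey.generate(curve=NIST192p)  # secret exponent drawn via util.randrange
vk = sk.get_verifying_key()

sig = sk.sign(b"message")                 # hashes with SHA-1 by default
assert vk.verify(sig, b"message")         # True, or raises BadSignatureError

sig2 = sk.sign_deterministic(b"message")  # RFC 6979 nonce: reproducible signature
assert sig2 == sk.sign_deterministic(b"message")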
613  bin/python/ecdsa/numbertheory.py  Normal file
@@ -0,0 +1,613 @@
#! /usr/bin/env python
#
# Provide some simple capabilities from number theory.
#
# Version of 2008.11.14.
#
# Written in 2005 and 2006 by Peter Pearson and placed in the public domain.
# Revision history:
#   2008.11.14: Use pow( base, exponent, modulus ) for modular_exp.
#               Make gcd and lcm accept arbitrarily many arguments.

from __future__ import division

from .six import print_, integer_types
from .six.moves import reduce

import math


class Error( Exception ):
  """Base class for exceptions in this module."""
  pass

class SquareRootError( Error ):
  pass

class NegativeExponentError( Error ):
  pass


def modular_exp( base, exponent, modulus ):
  "Raise base to exponent, reducing by modulus"
  if exponent < 0:
    raise NegativeExponentError( "Negative exponents (%d) not allowed" \
                                 % exponent )
  return pow( base, exponent, modulus )
#   result = 1L
#   x = exponent
#   b = base + 0L
#   while x > 0:
#     if x % 2 > 0: result = (result * b) % modulus
#     x = x // 2
#     b = ( b * b ) % modulus
#   return result


def polynomial_reduce_mod( poly, polymod, p ):
  """Reduce poly by polymod, integer arithmetic modulo p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  # Just to make this easy, require a monic polynomial:
  assert polymod[-1] == 1

  assert len( polymod ) > 1

  while len( poly ) >= len( polymod ):
    if poly[-1] != 0:
      for i in range( 2, len( polymod ) + 1 ):
        poly[-i] = ( poly[-i] - poly[-1] * polymod[-i] ) % p
    poly = poly[0:-1]

  return poly



def polynomial_multiply_mod( m1, m2, polymod, p ):
  """Polynomial multiplication modulo a polynomial over ints mod p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # This is just a seat-of-the-pants implementation.

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  # Initialize the product to zero:

  prod = ( len( m1 ) + len( m2 ) - 1 ) * [0]

  # Add together all the cross-terms:

  for i in range( len( m1 ) ):
    for j in range( len( m2 ) ):
      prod[i+j] = ( prod[i+j] + m1[i] * m2[j] ) % p

  return polynomial_reduce_mod( prod, polymod, p )


def polynomial_exp_mod( base, exponent, polymod, p ):
  """Polynomial exponentiation modulo a polynomial over ints mod p.

  Polynomials are represented as lists of coefficients
  of increasing powers of x."""

  # Based on the Handbook of Applied Cryptography, algorithm 2.227.

  # This module has been tested only by extensive use
  # in calculating modular square roots.

  assert exponent < p

  if exponent == 0: return [ 1 ]

  G = base
  k = exponent
  if k%2 == 1: s = G
  else: s = [ 1 ]

  while k > 1:
    k = k // 2
    G = polynomial_multiply_mod( G, G, polymod, p )
    if k%2 == 1: s = polynomial_multiply_mod( G, s, polymod, p )

  return s



def jacobi( a, n ):
  """Jacobi symbol"""

  # Based on the Handbook of Applied Cryptography (HAC), algorithm 2.149.

  # This function has been tested by comparison with a small
  # table printed in HAC, and by extensive use in calculating
  # modular square roots.

  assert n >= 3
  assert n%2 == 1
  a = a % n
  if a == 0: return 0
  if a == 1: return 1
  a1, e = a, 0
  while a1%2 == 0:
    a1, e = a1//2, e+1
  if e%2 == 0 or n%8 == 1 or n%8 == 7: s = 1
  else: s = -1
  if a1 == 1: return s
  if n%4 == 3 and a1%4 == 3: s = -s
  return s * jacobi( n % a1, a1 )



def square_root_mod_prime( a, p ):
  """Modular square root of a, mod p, p prime."""

  # Based on the Handbook of Applied Cryptography, algorithms 3.34 to 3.39.

  # This module has been tested for all values in [0,p-1] for
  # every prime p from 3 to 1229.

  assert 0 <= a < p
  assert 1 < p

  if a == 0: return 0
  if p == 2: return a

  jac = jacobi( a, p )
  if jac == -1: raise SquareRootError( "%d has no square root modulo %d" \
                                       % ( a, p ) )

  if p % 4 == 3: return modular_exp( a, (p+1)//4, p )

  if p % 8 == 5:
    d = modular_exp( a, (p-1)//4, p )
    if d == 1: return modular_exp( a, (p+3)//8, p )
    if d == p-1: return ( 2 * a * modular_exp( 4*a, (p-5)//8, p ) ) % p
    raise RuntimeError("Shouldn't get here.")

  for b in range( 2, p ):
    if jacobi( b*b-4*a, p ) == -1:
      f = ( a, -b, 1 )
      ff = polynomial_exp_mod( ( 0, 1 ), (p+1)//2, f, p )
      assert ff[1] == 0
      return ff[0]
  raise RuntimeError("No b found.")



def inverse_mod( a, m ):
  """Inverse of a mod m."""

  if a < 0 or m <= a: a = a % m

  # From Ferguson and Schneier, roughly:

  c, d = a, m
  uc, vc, ud, vd = 1, 0, 0, 1
  while c != 0:
    q, c, d = divmod( d, c ) + ( c, )
    uc, vc, ud, vd = ud - q*uc, vd - q*vc, uc, vc

  # At this point, d is the GCD, and ud*a+vd*m = d.
  # If d == 1, this means that ud is an inverse.

  assert d == 1
  if ud > 0: return ud
  else: return ud + m


def gcd2(a, b):
  """Greatest common divisor using Euclid's algorithm."""
  while a:
    a, b = b%a, a
  return b


def gcd( *a ):
  """Greatest common divisor.

  Usage: gcd( [ 2, 4, 6 ] )
  or:    gcd( 2, 4, 6 )
  """

  if len( a ) > 1: return reduce( gcd2, a )
  if hasattr( a[0], "__iter__" ): return reduce( gcd2, a[0] )
  return a[0]


def lcm2(a,b):
  """Least common multiple of two integers."""

  return (a*b)//gcd(a,b)


def lcm( *a ):
  """Least common multiple.

  Usage: lcm( [ 3, 4, 5 ] )
  or:    lcm( 3, 4, 5 )
  """

  if len( a ) > 1: return reduce( lcm2, a )
  if hasattr( a[0], "__iter__" ): return reduce( lcm2, a[0] )
  return a[0]



def factorization( n ):
  """Decompose n into a list of (prime,exponent) pairs."""

  assert isinstance( n, integer_types )

  if n < 2: return []

  result = []
  d = 2

  # Test the small primes:

  for d in smallprimes:
    if d > n: break
    q, r = divmod( n, d )
    if r == 0:
      count = 1
      while d <= n:
        n = q
        q, r = divmod( n, d )
        if r != 0: break
        count = count + 1
      result.append( ( d, count ) )

  # If n is still greater than the last of our small primes,
  # it may require further work:

  if n > smallprimes[-1]:
    if is_prime( n ):           # If what's left is prime, it's easy:
      result.append( ( n, 1 ) )
    else:                       # Ugh. Search stupidly for a divisor:
      d = smallprimes[-1]
      while 1:
        d = d + 2               # Try the next divisor.
        q, r = divmod( n, d )
        if q < d: break         # n < d*d means we're done, n = 1 or prime.
        if r == 0:              # d divides n. How many times?
          count = 1
          n = q
          while d <= n:                 # As long as d might still divide n,
            q, r = divmod( n, d )       # see if it does.
            if r != 0: break
            n = q                       # It does. Reduce n, increase count.
            count = count + 1
          result.append( ( d, count ) )
      if n > 1: result.append( ( n, 1 ) )

  return result



def phi( n ):
  """Return the Euler totient function of n."""

  assert isinstance( n, integer_types )

  if n < 3: return 1

  result = 1
  ff = factorization( n )
  for f in ff:
    e = f[1]
    if e > 1:
      result = result * f[0] ** (e-1) * ( f[0] - 1 )
    else:
      result = result * ( f[0] - 1 )
  return result


def carmichael( n ):
  """Return the Carmichael function of n.

  Carmichael(n) is the smallest integer x such that
  m**x = 1 mod n for all m relatively prime to n.
  """

  return carmichael_of_factorized( factorization( n ) )


def carmichael_of_factorized( f_list ):
  """Return the Carmichael function of a number that is
  represented as a list of (prime,exponent) pairs.
  """

  if len( f_list ) < 1: return 1

  result = carmichael_of_ppower( f_list[0] )
  for i in range( 1, len( f_list ) ):
    result = lcm( result, carmichael_of_ppower( f_list[i] ) )

  return result

def carmichael_of_ppower( pp ):
  """Carmichael function of the given power of the given prime.
  """

  p, a = pp
  if p == 2 and a > 2: return 2**(a-2)
  else: return (p-1) * p**(a-1)



def order_mod( x, m ):
  """Return the order of x in the multiplicative group mod m.
  """

  # Warning: this implementation is not very clever, and will
  # take a long time if m is very large.

  if m <= 1: return 0

  assert gcd( x, m ) == 1

  z = x
  result = 1
  while z != 1:
    z = ( z * x ) % m
    result = result + 1
  return result


def largest_factor_relatively_prime( a, b ):
  """Return the largest factor of a relatively prime to b.
  """

  while 1:
    d = gcd( a, b )
    if d <= 1: break
    b = d
    while 1:
      q, r = divmod( a, d )
      if r > 0:
        break
      a = q
  return a


def kinda_order_mod( x, m ):
  """Return the order of x in the multiplicative group mod m',
  where m' is the largest factor of m relatively prime to x.
  """

  return order_mod( x, largest_factor_relatively_prime( m, x ) )


def is_prime( n ):
  """Return True if n is prime, False otherwise.

  We use the Miller-Rabin test, as given in Menezes et al. p. 138.
  This test is not exact: there are composite values n for which
  it returns True.

  In testing the odd numbers from 10000001 to 19999999,
  about 66 composites got past the first test,
  5 got past the second test, and none got past the third.
  Since factors of 2, 3, 5, 7, and 11 were detected during
  preliminary screening, the number of numbers tested by
  Miller-Rabin was (19999999 - 10000001)*(2/3)*(4/5)*(6/7)
  = 4.57 million.
  """

  # (This is used to study the risk of false positives:)
  global miller_rabin_test_count

  miller_rabin_test_count = 0

  if n <= smallprimes[-1]:
    if n in smallprimes: return True
    else: return False

  if gcd( n, 2*3*5*7*11 ) != 1: return False

  # Choose a number of iterations sufficient to reduce the
  # probability of accepting a composite below 2**-80
  # (from Menezes et al. Table 4.4):

  t = 40
  n_bits = 1 + int( math.log( n, 2 ) )
  for k, tt in ( ( 100, 27 ),
                 ( 150, 18 ),
                 ( 200, 15 ),
                 ( 250, 12 ),
                 ( 300,  9 ),
                 ( 350,  8 ),
                 ( 400,  7 ),
                 ( 450,  6 ),
                 ( 550,  5 ),
                 ( 650,  4 ),
                 ( 850,  3 ),
                 ( 1300, 2 ),
                 ):
    if n_bits < k: break
    t = tt

  # Run the test t times:

  s = 0
  r = n - 1
  while ( r % 2 ) == 0:
    s = s + 1
    r = r // 2
  for i in range( t ):
    a = smallprimes[ i ]
    y = modular_exp( a, r, n )
    if y != 1 and y != n-1:
      j = 1
      while j <= s - 1 and y != n - 1:
        y = modular_exp( y, 2, n )
        if y == 1:
          miller_rabin_test_count = i + 1
          return False
        j = j + 1
      if y != n-1:
        miller_rabin_test_count = i + 1
        return False
  return True


def next_prime( starting_value ):
  "Return the smallest prime larger than the starting value."

  if starting_value < 2: return 2
  result = ( starting_value + 1 ) | 1
  while not is_prime( result ): result = result + 2
  return result


smallprimes = [2, 3, 5, 7, 11, 13, 17, 19, 23, 29, 31, 37, 41,
               43, 47, 53, 59, 61, 67, 71, 73, 79, 83, 89, 97,
               101, 103, 107, 109, 113, 127, 131, 137, 139, 149,
               151, 157, 163, 167, 173, 179, 181, 191, 193, 197,
               199, 211, 223, 227, 229, 233, 239, 241, 251, 257,
               263, 269, 271, 277, 281, 283, 293, 307, 311, 313,
               317, 331, 337, 347, 349, 353, 359, 367, 373, 379,
               383, 389, 397, 401, 409, 419, 421, 431, 433, 439,
               443, 449, 457, 461, 463, 467, 479, 487, 491, 499,
               503, 509, 521, 523, 541, 547, 557, 563, 569, 571,
               577, 587, 593, 599, 601, 607, 613, 617, 619, 631,
               641, 643, 647, 653, 659, 661, 673, 677, 683, 691,
               701, 709, 719, 727, 733, 739, 743, 751, 757, 761,
               769, 773, 787, 797, 809, 811, 821, 823, 827, 829,
               839, 853, 857, 859, 863, 877, 881, 883, 887, 907,
               911, 919, 929, 937, 941, 947, 953, 967, 971, 977,
               983, 991, 997, 1009, 1013, 1019, 1021, 1031, 1033,
               1039, 1049, 1051, 1061, 1063, 1069, 1087, 1091, 1093,
               1097, 1103, 1109, 1117, 1123, 1129, 1151, 1153, 1163,
               1171, 1181, 1187, 1193, 1201, 1213, 1217, 1223, 1229]

miller_rabin_test_count = 0

def __main__():

  # Making sure locally defined exceptions work:
  # p = modular_exp( 2, -2, 3 )
  # p = square_root_mod_prime( 2, 3 )


  print_("Testing gcd...")
  assert gcd( 3*5*7, 3*5*11, 3*5*13 )     == 3*5
  assert gcd( [ 3*5*7, 3*5*11, 3*5*13 ] ) == 3*5
  assert gcd( 3 ) == 3

  print_("Testing lcm...")
  assert lcm( 3, 5*3, 7*3 )     == 3*5*7
  assert lcm( [ 3, 5*3, 7*3 ] ) == 3*5*7
  assert lcm( 3 ) == 3

  print_("Testing next_prime...")
  bigprimes = ( 999671,
                999683,
                999721,
                999727,
                999749,
                999763,
                999769,
                999773,
                999809,
                999853,
                999863,
                999883,
                999907,
                999917,
                999931,
                999953,
                999959,
                999961,
                999979,
                999983 )

  for i in range( len( bigprimes ) - 1 ):
    assert next_prime( bigprimes[i] ) == bigprimes[ i+1 ]

  error_tally = 0

  # Test the square_root_mod_prime function:

  for p in smallprimes:
    print_("Testing square_root_mod_prime for modulus p = %d." % p)
    squares = []

    for root in range( 0, 1+p//2 ):
      sq = ( root * root ) % p
      squares.append( sq )
      calculated = square_root_mod_prime( sq, p )
      if ( calculated * calculated ) % p != sq:
        error_tally = error_tally + 1
        print_("Failed to find %d as sqrt( %d ) mod %d. Said %d." % \
               ( root, sq, p, calculated ))

    for nonsquare in range( 0, p ):
      if nonsquare not in squares:
        try:
          calculated = square_root_mod_prime( nonsquare, p )
        except SquareRootError:
          pass
        else:
          error_tally = error_tally + 1
          print_("Failed to report no root for sqrt( %d ) mod %d." % \
                 ( nonsquare, p ))

  # Test the jacobi function:
  for m in range( 3, 400, 2 ):
    print_("Testing jacobi for modulus m = %d." % m)
    if is_prime( m ):
      squares = []
      for root in range( 1, m ):
        if jacobi( root * root, m ) != 1:
          error_tally = error_tally + 1
          print_("jacobi( %d * %d, %d ) != 1" % ( root, root, m ))
        squares.append( root * root % m )
      for i in range( 1, m ):
        if not i in squares:
          if jacobi( i, m ) != -1:
            error_tally = error_tally + 1
            print_("jacobi( %d, %d ) != -1" % ( i, m ))
    else:       # m is not prime.
      f = factorization( m )
      for a in range( 1, m ):
        c = 1
        for i in f:
          c = c * jacobi( a, i[0] ) ** i[1]
        if c != jacobi( a, m ):
          error_tally = error_tally + 1
          print_("%d != jacobi( %d, %d )" % ( c, a, m ))


  # Test the inverse_mod function:
  print_("Testing inverse_mod . . .")
  import random
  n_tests = 0
  for i in range( 100 ):
    m = random.randint( 20, 10000 )
    for j in range( 100 ):
      a = random.randint( 1, m-1 )
      if gcd( a, m ) == 1:
        n_tests = n_tests + 1
        inv = inverse_mod( a, m )
        if inv <= 0 or inv >= m or ( a * inv ) % m != 1:
          error_tally = error_tally + 1
          print_("%d = inverse_mod( %d, %d ) is wrong." % ( inv, a, m ))
  assert n_tests > 1000
  print_(n_tests, " tests of inverse_mod completed.")

  class FailedTest(Exception): pass
  print_(error_tally, "errors detected.")
  if error_tally != 0:
    raise FailedTest("%d errors detected" % error_tally)

if __name__ == '__main__':
  __main__()
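numbertheory.py supplies the modular arithmetic used by the curve code, notably inverse_mod (extended Euclid, per Ferguson and Schneier) and square_root_mod_prime (HAC algorithms 3.34 to 3.39). A brief hedged sketch (not part of the diff; assumes the package is importable as `ecdsa`):

from ecdsa.numbertheory import inverse_mod, square_root_mod_prime, SquareRootError

p = 23
assert (7 * inverse_mod(7, p)) % p == 1  # 7 * 10 = 70 = 1 (mod 23)

r = square_root_mod_prime(3, p)          # 3 is a quadratic residue: 7^2 = 49 = 3 (mod 23)
assert (r * r) % p == 3
try:
    square_root_mod_prime(5, p)          # 5 is a non-residue mod 23
except SquareRootError:
    pass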
103  bin/python/ecdsa/rfc6979.py  Normal file
@@ -0,0 +1,103 @@
'''
RFC 6979:
    Deterministic Usage of the Digital Signature Algorithm (DSA) and
    Elliptic Curve Digital Signature Algorithm (ECDSA)

    http://tools.ietf.org/html/rfc6979

Many thanks to Coda Hale for his implementation in Go language:
    https://github.com/codahale/rfc6979
'''

import hmac
from binascii import hexlify
from .util import number_to_string, number_to_string_crop
from .six import b

try:
    bin(0)
except NameError:
    binmap = {"0": "0000", "1": "0001", "2": "0010", "3": "0011",
              "4": "0100", "5": "0101", "6": "0110", "7": "0111",
              "8": "1000", "9": "1001", "a": "1010", "b": "1011",
              "c": "1100", "d": "1101", "e": "1110", "f": "1111"}
    def bin(value): # for python2.5
        v = "".join(binmap[x] for x in "%x"%abs(value)).lstrip("0")
        if value < 0:
            return "-0b" + v
        return "0b" + v

def bit_length(num):
    # http://docs.python.org/dev/library/stdtypes.html#int.bit_length
    s = bin(num)        # binary representation:  bin(-37) --> '-0b100101'
    s = s.lstrip('-0b') # remove leading zeros and minus sign
    return len(s)       # len('100101') --> 6

def bits2int(data, qlen):
    x = int(hexlify(data), 16)
    l = len(data) * 8

    if l > qlen:
        return x >> (l-qlen)
    return x

def bits2octets(data, order):
    z1 = bits2int(data, bit_length(order))
    z2 = z1 - order

    if z2 < 0:
        z2 = z1

    return number_to_string_crop(z2, order)

# https://tools.ietf.org/html/rfc6979#section-3.2
def generate_k(order, secexp, hash_func, data):
    '''
        order - order of the DSA generator used in the signature
        secexp - secure exponent (private key) in numeric form
        hash_func - reference to the same hash function used for generating hash
        data - hash in binary form of the signing data
    '''

    qlen = bit_length(order)
    holen = hash_func().digest_size
    rolen = (qlen + 7) // 8  # integer division; a plain "/" would yield a float on Python 3
    bx = number_to_string(secexp, order) + bits2octets(data, order)

    # Step B
    v = b('\x01') * holen

    # Step C
    k = b('\x00') * holen

    # Step D
    k = hmac.new(k, v+b('\x00')+bx, hash_func).digest()

    # Step E
    v = hmac.new(k, v, hash_func).digest()

    # Step F
    k = hmac.new(k, v+b('\x01')+bx, hash_func).digest()

    # Step G
    v = hmac.new(k, v, hash_func).digest()

    # Step H
    while True:
        # Step H1
        t = b('')

        # Step H2
        while len(t) < rolen:
            v = hmac.new(k, v, hash_func).digest()
            t += v

        # Step H3
        secret = bits2int(t, qlen)

        if secret >= 1 and secret < order:
            return secret

        k = hmac.new(k, v+b('\x00'), hash_func).digest()
        v = hmac.new(k, v, hash_func).digest()
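The loop above (steps B through H) is the HMAC-DRBG-style construction from RFC 6979 section 3.2: the same private key and digest always yield the same nonce. A hedged sketch of that property (not part of the diff; assumes the package is importable as `ecdsa`, with an illustrative secret exponent):

from hashlib import sha256
from ecdsa.rfc6979 import generate_k
from ecdsa.curves import NIST256p

digest = sha256(b"sample").digest()
secexp = 0x1d2e3f                        # illustrative private scalar
k1 = generate_k(NIST256p.order, secexp, sha256, digest)
k2 = generate_k(NIST256p.order, secexp, sha256, digest)
assert k1 == k2                          # same key + digest => same k
assert 1 <= k1 < NIST256p.order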
394  bin/python/ecdsa/six.py  Normal file
@@ -0,0 +1,394 @@
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
# Copyright (c) 2010-2012 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
# this software and associated documentation files (the "Software"), to deal in
|
||||
# the Software without restriction, including without limitation the rights to
|
||||
# use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
# the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
# subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
# FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
# COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
# IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.2.0"
|
||||
|
||||
|
||||
# True if we are running on Python 3.
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform.startswith("java"):
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result)
|
||||
# This is a bit ugly, but it avoids running this again.
|
||||
delattr(tp, self.name)
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
|
||||
class _MovedItems(types.ModuleType):
|
||||
"""Lazy loading of moved objects"""
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
del attr
|
||||
|
||||
moves = sys.modules[__name__ + ".moves"] = _MovedItems("moves")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
_meth_self = "__self__"
|
||||
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
|
||||
_iterkeys = "keys"
|
||||
_itervalues = "values"
|
||||
_iteritems = "items"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
|
||||
_func_code = "func_code"
|
||||
_func_defaults = "func_defaults"
|
||||
|
||||
_iterkeys = "iterkeys"
|
||||
_itervalues = "itervalues"
|
||||
_iteritems = "iteritems"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
next = advance_iterator
|
||||
|
||||
|
||||
try:
|
||||
callable = callable
|
||||
except NameError:
|
||||
def callable(obj):
|
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
|
||||
|
||||
if PY3:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound.im_func
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
return type(self).__next__(self)
|
||||
|
||||
callable = callable
|
||||
_add_doc(get_unbound_function,
|
||||
"""Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func)
|
||||
get_method_self = operator.attrgetter(_meth_self)
|
||||
get_function_code = operator.attrgetter(_func_code)
|
||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
|
||||
|
||||
def iterkeys(d):
|
||||
"""Return an iterator over the keys of a dictionary."""
|
||||
return iter(getattr(d, _iterkeys)())
|
||||
|
||||
def itervalues(d):
|
||||
"""Return an iterator over the values of a dictionary."""
|
||||
return iter(getattr(d, _itervalues)())
|
||||
|
||||
def iteritems(d):
|
||||
"""Return an iterator over the (key, value) pairs of a dictionary."""
|
||||
return iter(getattr(d, _iteritems)())
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
def u(s):
|
||||
return s
|
||||
if sys.version_info[1] <= 1:
|
||||
def int2byte(i):
|
||||
return bytes((i,))
|
||||
else:
|
||||
# This is about 2x faster than the implementation above on 3.2+
|
||||
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
def u(s):
|
||||
if isinstance(s, unicode):
|
||||
return s
|
||||
return unicode(s, "unicode_escape")
|
||||
int2byte = chr
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
if PY3:
|
||||
import builtins
|
||||
exec_ = getattr(builtins, "exec")
|
||||
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
|
||||
|
||||
print_ = getattr(builtins, "print")
|
||||
del builtins
|
||||
|
||||
else:
|
||||
def exec_(_code_, _globs_=None, _locs_=None):
|
||||
"""Execute code in a namespace."""
|
||||
if _globs_ is None:
|
||||
frame = sys._getframe(1)
|
||||
_globs_ = frame.f_globals
|
||||
if _locs_ is None:
|
||||
_locs_ = frame.f_locals
|
||||
del frame
|
||||
elif _locs_ is None:
|
||||
_locs_ = _globs_
|
||||
exec("""exec _code_ in _globs_, _locs_""")
|
||||
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb
|
||||
""")
|
||||
|
||||
|
||||
def print_(*args, **kwargs):
|
||||
"""The new-style print function."""
|
||||
fp = kwargs.pop("file", sys.stdout)
|
||||
if fp is None:
|
||||
return
|
||||
def write(data):
|
||||
if not isinstance(data, basestring):
|
||||
data = str(data)
|
||||
fp.write(data)
|
||||
want_unicode = False
|
||||
sep = kwargs.pop("sep", None)
|
||||
if sep is not None:
|
||||
if isinstance(sep, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(sep, str):
|
||||
raise TypeError("sep must be None or a string")
|
||||
end = kwargs.pop("end", None)
|
||||
if end is not None:
|
||||
if isinstance(end, unicode):
|
||||
want_unicode = True
|
||||
elif not isinstance(end, str):
|
||||
raise TypeError("end must be None or a string")
|
||||
if kwargs:
|
||||
raise TypeError("invalid keyword arguments to print()")
|
||||
if not want_unicode:
|
||||
for arg in args:
|
||||
if isinstance(arg, unicode):
|
||||
want_unicode = True
|
||||
break
|
||||
if want_unicode:
|
||||
newline = unicode("\n")
|
||||
space = unicode(" ")
|
||||
else:
|
||||
newline = "\n"
|
||||
space = " "
|
||||
if sep is None:
|
||||
sep = space
|
||||
if end is None:
|
||||
end = newline
|
||||
for i, arg in enumerate(args):
|
||||
if i:
|
||||
write(sep)
|
||||
write(arg)
|
||||
write(end)
|
||||
|
||||
_add_doc(reraise, """Reraise an exception.""")
|
||||
|
||||
|
||||
def with_metaclass(meta, base=object):
|
||||
"""Create a base class with a metaclass."""
|
||||
return meta("NewBase", (base,), {})
|
||||
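# A hedged usage sketch (not part of the vendored module): the helpers above
# let one codebase run under both Python 2 and 3. with_metaclass() inserts a
# metaclass without the Py2 `__metaclass__` / Py3 `metaclass=` syntax split,
# while iteritems() and print_() paper over the dict and print differences.
#
#     class Meta(type):
#         pass
#     class MyClass(with_metaclass(Meta)):   # works on both 2.x and 3.x
#         pass
#     assert type(MyClass) is Meta
#     for key, value in iteritems({"a": 1}):
#         print_(key, value)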
663
bin/python/ecdsa/test_pyecdsa.py
Normal file
@@ -0,0 +1,663 @@
from __future__ import with_statement, division

import unittest
import os
import time
import shutil
import subprocess
from binascii import hexlify, unhexlify
from hashlib import sha1, sha256, sha512

from .six import b, print_, binary_type
from .keys import SigningKey, VerifyingKey
from .keys import BadSignatureError
from . import util
from .util import sigencode_der, sigencode_strings
from .util import sigdecode_der, sigdecode_strings
from .curves import Curve, UnknownCurveError
from .curves import NIST192p, NIST224p, NIST256p, NIST384p, NIST521p, SECP256k1
from .ellipticcurve import Point
from . import der
from . import rfc6979

class SubprocessError(Exception):
    pass

def run_openssl(cmd):
    OPENSSL = "openssl"
    p = subprocess.Popen([OPENSSL] + cmd.split(),
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT)
    stdout, ignored = p.communicate()
    if p.returncode != 0:
        raise SubprocessError("cmd '%s %s' failed: rc=%s, stdout/err was %s" %
                              (OPENSSL, cmd, p.returncode, stdout))
    return stdout.decode()
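# A minimal usage sketch (not part of the original test module): run_openssl()
# shells out to the `openssl` binary on PATH, returns its combined
# stdout/stderr as text, and raises SubprocessError on a non-zero exit code.
# For example, probing the installed version:
#
#     banner = run_openssl("version")   # e.g. "OpenSSL 1.0.0 29 Mar 2010"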
BENCH = False

class ECDSA(unittest.TestCase):
    def test_basic(self):
        priv = SigningKey.generate()
        pub = priv.get_verifying_key()

        data = b("blahblah")
        sig = priv.sign(data)

        self.assertTrue(pub.verify(sig, data))
        self.assertRaises(BadSignatureError, pub.verify, sig, data+b("bad"))

        pub2 = VerifyingKey.from_string(pub.to_string())
        self.assertTrue(pub2.verify(sig, data))

    def test_deterministic(self):
        data = b("blahblah")
        secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16)

        priv = SigningKey.from_secret_exponent(secexp, SECP256k1, sha256)
        pub = priv.get_verifying_key()

        k = rfc6979.generate_k(
            SECP256k1.generator.order(), secexp, sha256, sha256(data).digest())

        sig1 = priv.sign(data, k=k)
        self.assertTrue(pub.verify(sig1, data))

        sig2 = priv.sign(data, k=k)
        self.assertTrue(pub.verify(sig2, data))

        sig3 = priv.sign_deterministic(data, sha256)
        self.assertTrue(pub.verify(sig3, data))

        self.assertEqual(sig1, sig2)
        self.assertEqual(sig1, sig3)

    def test_bad_usage(self):
        # sk=SigningKey() is wrong
        self.assertRaises(TypeError, SigningKey)
        self.assertRaises(TypeError, VerifyingKey)

    def test_lengths(self):
        default = NIST192p
        priv = SigningKey.generate()
        pub = priv.get_verifying_key()
        self.assertEqual(len(pub.to_string()), default.verifying_key_length)
        sig = priv.sign(b("data"))
        self.assertEqual(len(sig), default.signature_length)
        if BENCH:
            print_()
        for curve in (NIST192p, NIST224p, NIST256p, NIST384p, NIST521p):
            start = time.time()
            priv = SigningKey.generate(curve=curve)
            pub1 = priv.get_verifying_key()
            keygen_time = time.time() - start
            pub2 = VerifyingKey.from_string(pub1.to_string(), curve)
            self.assertEqual(pub1.to_string(), pub2.to_string())
            self.assertEqual(len(pub1.to_string()),
                             curve.verifying_key_length)
            start = time.time()
            sig = priv.sign(b("data"))
            sign_time = time.time() - start
            self.assertEqual(len(sig), curve.signature_length)
            if BENCH:
                start = time.time()
                pub1.verify(sig, b("data"))
                verify_time = time.time() - start
                print_("%s: siglen=%d, keygen=%0.3fs, sign=%0.3f, verify=%0.3f" \
                       % (curve.name, curve.signature_length,
                          keygen_time, sign_time, verify_time))

    def test_serialize(self):
        seed = b("secret")
        curve = NIST192p
        secexp1 = util.randrange_from_seed__trytryagain(seed, curve.order)
        secexp2 = util.randrange_from_seed__trytryagain(seed, curve.order)
        self.assertEqual(secexp1, secexp2)
        priv1 = SigningKey.from_secret_exponent(secexp1, curve)
        priv2 = SigningKey.from_secret_exponent(secexp2, curve)
        self.assertEqual(hexlify(priv1.to_string()),
                         hexlify(priv2.to_string()))
        self.assertEqual(priv1.to_pem(), priv2.to_pem())
        pub1 = priv1.get_verifying_key()
        pub2 = priv2.get_verifying_key()
        data = b("data")
        sig1 = priv1.sign(data)
        sig2 = priv2.sign(data)
        self.assertTrue(pub1.verify(sig1, data))
        self.assertTrue(pub2.verify(sig1, data))
        self.assertTrue(pub1.verify(sig2, data))
        self.assertTrue(pub2.verify(sig2, data))
        self.assertEqual(hexlify(pub1.to_string()),
                         hexlify(pub2.to_string()))

    def test_nonrandom(self):
        s = b("all the entropy in the entire world, compressed into one line")
        def not_much_entropy(numbytes):
            return s[:numbytes]
        # we control the entropy source, these two keys should be identical:
        priv1 = SigningKey.generate(entropy=not_much_entropy)
        priv2 = SigningKey.generate(entropy=not_much_entropy)
        self.assertEqual(hexlify(priv1.get_verifying_key().to_string()),
                         hexlify(priv2.get_verifying_key().to_string()))
        # likewise, signatures should be identical. Obviously you'd never
        # want to do this with keys you care about, because the secrecy of
        # the private key depends upon using different random numbers for
        # each signature
        sig1 = priv1.sign(b("data"), entropy=not_much_entropy)
        sig2 = priv2.sign(b("data"), entropy=not_much_entropy)
        self.assertEqual(hexlify(sig1), hexlify(sig2))

    def assertTruePrivkeysEqual(self, priv1, priv2):
        self.assertEqual(priv1.privkey.secret_multiplier,
                         priv2.privkey.secret_multiplier)
        self.assertEqual(priv1.privkey.public_key.generator,
                         priv2.privkey.public_key.generator)

    def failIfPrivkeysEqual(self, priv1, priv2):
        self.failIfEqual(priv1.privkey.secret_multiplier,
                         priv2.privkey.secret_multiplier)

    def test_privkey_creation(self):
        s = b("all the entropy in the entire world, compressed into one line")
        def not_much_entropy(numbytes):
            return s[:numbytes]
        priv1 = SigningKey.generate()
        self.assertEqual(priv1.baselen, NIST192p.baselen)

        priv1 = SigningKey.generate(curve=NIST224p)
        self.assertEqual(priv1.baselen, NIST224p.baselen)

        priv1 = SigningKey.generate(entropy=not_much_entropy)
        self.assertEqual(priv1.baselen, NIST192p.baselen)
        priv2 = SigningKey.generate(entropy=not_much_entropy)
        self.assertEqual(priv2.baselen, NIST192p.baselen)
        self.assertTruePrivkeysEqual(priv1, priv2)

        priv1 = SigningKey.from_secret_exponent(secexp=3)
        self.assertEqual(priv1.baselen, NIST192p.baselen)
        priv2 = SigningKey.from_secret_exponent(secexp=3)
        self.assertTruePrivkeysEqual(priv1, priv2)

        priv1 = SigningKey.from_secret_exponent(secexp=4, curve=NIST224p)
        self.assertEqual(priv1.baselen, NIST224p.baselen)

    def test_privkey_strings(self):
        priv1 = SigningKey.generate()
        s1 = priv1.to_string()
        self.assertEqual(type(s1), binary_type)
        self.assertEqual(len(s1), NIST192p.baselen)
        priv2 = SigningKey.from_string(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        s1 = priv1.to_pem()
        self.assertEqual(type(s1), binary_type)
        self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
        self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
        priv2 = SigningKey.from_pem(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        s1 = priv1.to_der()
        self.assertEqual(type(s1), binary_type)
        priv2 = SigningKey.from_der(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        priv1 = SigningKey.generate(curve=NIST256p)
        s1 = priv1.to_pem()
        self.assertEqual(type(s1), binary_type)
        self.assertTrue(s1.startswith(b("-----BEGIN EC PRIVATE KEY-----")))
        self.assertTrue(s1.strip().endswith(b("-----END EC PRIVATE KEY-----")))
        priv2 = SigningKey.from_pem(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

        s1 = priv1.to_der()
        self.assertEqual(type(s1), binary_type)
        priv2 = SigningKey.from_der(s1)
        self.assertTruePrivkeysEqual(priv1, priv2)

    def assertTruePubkeysEqual(self, pub1, pub2):
        self.assertEqual(pub1.pubkey.point, pub2.pubkey.point)
        self.assertEqual(pub1.pubkey.generator, pub2.pubkey.generator)
        self.assertEqual(pub1.curve, pub2.curve)

    def test_pubkey_strings(self):
        priv1 = SigningKey.generate()
        pub1 = priv1.get_verifying_key()
        s1 = pub1.to_string()
        self.assertEqual(type(s1), binary_type)
        self.assertEqual(len(s1), NIST192p.verifying_key_length)
        pub2 = VerifyingKey.from_string(s1)
        self.assertTruePubkeysEqual(pub1, pub2)

        priv1 = SigningKey.generate(curve=NIST256p)
        pub1 = priv1.get_verifying_key()
        s1 = pub1.to_string()
        self.assertEqual(type(s1), binary_type)
        self.assertEqual(len(s1), NIST256p.verifying_key_length)
        pub2 = VerifyingKey.from_string(s1, curve=NIST256p)
        self.assertTruePubkeysEqual(pub1, pub2)

        pub1_der = pub1.to_der()
        self.assertEqual(type(pub1_der), binary_type)
        pub2 = VerifyingKey.from_der(pub1_der)
        self.assertTruePubkeysEqual(pub1, pub2)

        self.assertRaises(der.UnexpectedDER,
                          VerifyingKey.from_der, pub1_der+b("junk"))
        badpub = VerifyingKey.from_der(pub1_der)
        class FakeGenerator:
            def order(self): return 123456789
        badcurve = Curve("unknown", None, None, FakeGenerator(), (1,2,3,4,5,6))
        badpub.curve = badcurve
        badder = badpub.to_der()
        self.assertRaises(UnknownCurveError, VerifyingKey.from_der, badder)

        pem = pub1.to_pem()
        self.assertEqual(type(pem), binary_type)
        self.assertTrue(pem.startswith(b("-----BEGIN PUBLIC KEY-----")), pem)
        self.assertTrue(pem.strip().endswith(b("-----END PUBLIC KEY-----")), pem)
        pub2 = VerifyingKey.from_pem(pem)
        self.assertTruePubkeysEqual(pub1, pub2)

    def test_signature_strings(self):
        priv1 = SigningKey.generate()
        pub1 = priv1.get_verifying_key()
        data = b("data")

        sig = priv1.sign(data)
        self.assertEqual(type(sig), binary_type)
        self.assertEqual(len(sig), NIST192p.signature_length)
        self.assertTrue(pub1.verify(sig, data))

        sig = priv1.sign(data, sigencode=sigencode_strings)
        self.assertEqual(type(sig), tuple)
        self.assertEqual(len(sig), 2)
        self.assertEqual(type(sig[0]), binary_type)
        self.assertEqual(type(sig[1]), binary_type)
        self.assertEqual(len(sig[0]), NIST192p.baselen)
        self.assertEqual(len(sig[1]), NIST192p.baselen)
        self.assertTrue(pub1.verify(sig, data, sigdecode=sigdecode_strings))

        sig_der = priv1.sign(data, sigencode=sigencode_der)
        self.assertEqual(type(sig_der), binary_type)
        self.assertTrue(pub1.verify(sig_der, data, sigdecode=sigdecode_der))

    def test_hashfunc(self):
        sk = SigningKey.generate(curve=NIST256p, hashfunc=sha256)
        data = b("security level is 128 bits")
        sig = sk.sign(data)
        vk = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
                                      curve=NIST256p, hashfunc=sha256)
        self.assertTrue(vk.verify(sig, data))

        sk2 = SigningKey.generate(curve=NIST256p)
        sig2 = sk2.sign(data, hashfunc=sha256)
        vk2 = VerifyingKey.from_string(sk2.get_verifying_key().to_string(),
                                       curve=NIST256p, hashfunc=sha256)
        self.assertTrue(vk2.verify(sig2, data))

        vk3 = VerifyingKey.from_string(sk.get_verifying_key().to_string(),
                                       curve=NIST256p)
        self.assertTrue(vk3.verify(sig, data, hashfunc=sha256))


class OpenSSL(unittest.TestCase):
    # test interoperability with OpenSSL tools. Note that openssl's ECDSA
    # sign/verify arguments changed between 0.9.8 and 1.0.0: the early
    # versions require "-ecdsa-with-SHA1", the later versions want just
    # "-SHA1" (or to leave out that argument entirely, which means the
    # signature will use some default digest algorithm, probably determined
    # by the key, probably always SHA1).
    #
    # openssl ecparam -name secp224r1 -genkey -out privkey.pem
    # openssl ec -in privkey.pem -text -noout  # get the priv/pub keys
    # openssl dgst -ecdsa-with-SHA1 -sign privkey.pem -out data.sig data.txt
    # openssl asn1parse -in data.sig -inform DER
    #  data.sig is 64 bytes, probably 56b plus ASN1 overhead
    # openssl dgst -ecdsa-with-SHA1 -prverify privkey.pem -signature data.sig data.txt ; echo $?
    # openssl ec -in privkey.pem -pubout -out pubkey.pem
    # openssl ec -in privkey.pem -pubout -outform DER -out pubkey.der

    def get_openssl_messagedigest_arg(self):
        v = run_openssl("version")
        # e.g. "OpenSSL 1.0.0 29 Mar 2010", or "OpenSSL 1.0.0a 1 Jun 2010",
        # or "OpenSSL 0.9.8o 01 Jun 2010"
        vs = v.split()[1].split(".")
        if vs >= ["1","0","0"]:
            return "-SHA1"
        else:
            return "-ecdsa-with-SHA1"

    # sk: 1:OpenSSL->python  2:python->OpenSSL
    # vk: 3:OpenSSL->python  4:python->OpenSSL
    # sig: 5:OpenSSL->python 6:python->OpenSSL

    def test_from_openssl_nist192p(self):
        return self.do_test_from_openssl(NIST192p)
    def test_from_openssl_nist224p(self):
        return self.do_test_from_openssl(NIST224p)
    def test_from_openssl_nist256p(self):
        return self.do_test_from_openssl(NIST256p)
    def test_from_openssl_nist384p(self):
        return self.do_test_from_openssl(NIST384p)
    def test_from_openssl_nist521p(self):
        return self.do_test_from_openssl(NIST521p)
    def test_from_openssl_secp256k1(self):
        return self.do_test_from_openssl(SECP256k1)

    def do_test_from_openssl(self, curve):
        curvename = curve.openssl_name
        assert curvename
        # OpenSSL: create sk, vk, sign.
        # Python: read vk(3), checksig(5), read sk(1), sign, check
        mdarg = self.get_openssl_messagedigest_arg()
        if os.path.isdir("t"):
            shutil.rmtree("t")
        os.mkdir("t")
        run_openssl("ecparam -name %s -genkey -out t/privkey.pem" % curvename)
        run_openssl("ec -in t/privkey.pem -pubout -out t/pubkey.pem")
        data = b("data")
        with open("t/data.txt","wb") as e: e.write(data)
        run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig t/data.txt" % mdarg)
        run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig t/data.txt" % mdarg)
        with open("t/pubkey.pem","rb") as e: pubkey_pem = e.read()
        vk = VerifyingKey.from_pem(pubkey_pem) # 3
        with open("t/data.sig","rb") as e: sig_der = e.read()
        self.assertTrue(vk.verify(sig_der, data, # 5
                                  hashfunc=sha1, sigdecode=sigdecode_der))

        with open("t/privkey.pem") as e: fp = e.read()
        sk = SigningKey.from_pem(fp) # 1
        sig = sk.sign(data)
        self.assertTrue(vk.verify(sig, data))

    def test_to_openssl_nist192p(self):
        self.do_test_to_openssl(NIST192p)
    def test_to_openssl_nist224p(self):
        self.do_test_to_openssl(NIST224p)
    def test_to_openssl_nist256p(self):
        self.do_test_to_openssl(NIST256p)
    def test_to_openssl_nist384p(self):
        self.do_test_to_openssl(NIST384p)
    def test_to_openssl_nist521p(self):
        self.do_test_to_openssl(NIST521p)
    def test_to_openssl_secp256k1(self):
        self.do_test_to_openssl(SECP256k1)

    def do_test_to_openssl(self, curve):
        curvename = curve.openssl_name
        assert curvename
        # Python: create sk, vk, sign.
        # OpenSSL: read vk(4), checksig(6), read sk(2), sign, check
        mdarg = self.get_openssl_messagedigest_arg()
        if os.path.isdir("t"):
            shutil.rmtree("t")
        os.mkdir("t")
        sk = SigningKey.generate(curve=curve)
        vk = sk.get_verifying_key()
        data = b("data")
        with open("t/pubkey.der","wb") as e: e.write(vk.to_der()) # 4
        with open("t/pubkey.pem","wb") as e: e.write(vk.to_pem()) # 4
        sig_der = sk.sign(data, hashfunc=sha1, sigencode=sigencode_der)

        with open("t/data.sig","wb") as e: e.write(sig_der) # 6
        with open("t/data.txt","wb") as e: e.write(data)
        with open("t/baddata.txt","wb") as e: e.write(data+b("corrupt"))

        self.assertRaises(SubprocessError, run_openssl,
                          "dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/baddata.txt" % mdarg)
        run_openssl("dgst %s -verify t/pubkey.der -keyform DER -signature t/data.sig t/data.txt" % mdarg)

        with open("t/privkey.pem","wb") as e: e.write(sk.to_pem()) # 2
        run_openssl("dgst %s -sign t/privkey.pem -out t/data.sig2 t/data.txt" % mdarg)
        run_openssl("dgst %s -verify t/pubkey.pem -signature t/data.sig2 t/data.txt" % mdarg)

class DER(unittest.TestCase):
    def test_oids(self):
        oid_ecPublicKey = der.encode_oid(1, 2, 840, 10045, 2, 1)
        self.assertEqual(hexlify(oid_ecPublicKey), b("06072a8648ce3d0201"))
        self.assertEqual(hexlify(NIST224p.encoded_oid), b("06052b81040021"))
        self.assertEqual(hexlify(NIST256p.encoded_oid),
                         b("06082a8648ce3d030107"))
        x = oid_ecPublicKey + b("more")
        x1, rest = der.remove_object(x)
        self.assertEqual(x1, (1, 2, 840, 10045, 2, 1))
        self.assertEqual(rest, b("more"))

    def test_integer(self):
        self.assertEqual(der.encode_integer(0), b("\x02\x01\x00"))
        self.assertEqual(der.encode_integer(1), b("\x02\x01\x01"))
        self.assertEqual(der.encode_integer(127), b("\x02\x01\x7f"))
        self.assertEqual(der.encode_integer(128), b("\x02\x02\x00\x80"))
        self.assertEqual(der.encode_integer(256), b("\x02\x02\x01\x00"))
        #self.assertEqual(der.encode_integer(-1), b("\x02\x01\xff"))

        def s(n): return der.remove_integer(der.encode_integer(n) + b("junk"))
        self.assertEqual(s(0), (0, b("junk")))
        self.assertEqual(s(1), (1, b("junk")))
        self.assertEqual(s(127), (127, b("junk")))
        self.assertEqual(s(128), (128, b("junk")))
        self.assertEqual(s(256), (256, b("junk")))
        self.assertEqual(s(1234567890123456789012345678901234567890),
                         (1234567890123456789012345678901234567890,b("junk")))

    def test_number(self):
        self.assertEqual(der.encode_number(0), b("\x00"))
        self.assertEqual(der.encode_number(127), b("\x7f"))
        self.assertEqual(der.encode_number(128), b("\x81\x00"))
        self.assertEqual(der.encode_number(3*128+7), b("\x83\x07"))
        #self.assertEqual(der.read_number("\x81\x9b"+"more"), (155, 2))
        #self.assertEqual(der.encode_number(155), b("\x81\x9b"))
        for n in (0, 1, 2, 127, 128, 3*128+7, 840, 10045): #, 155):
            x = der.encode_number(n) + b("more")
            n1, llen = der.read_number(x)
            self.assertEqual(n1, n)
            self.assertEqual(x[llen:], b("more"))

    def test_length(self):
        self.assertEqual(der.encode_length(0), b("\x00"))
        self.assertEqual(der.encode_length(127), b("\x7f"))
        self.assertEqual(der.encode_length(128), b("\x81\x80"))
        self.assertEqual(der.encode_length(255), b("\x81\xff"))
        self.assertEqual(der.encode_length(256), b("\x82\x01\x00"))
        self.assertEqual(der.encode_length(3*256+7), b("\x82\x03\x07"))
        self.assertEqual(der.read_length(b("\x81\x9b")+b("more")), (155, 2))
        self.assertEqual(der.encode_length(155), b("\x81\x9b"))
        for n in (0, 1, 2, 127, 128, 255, 256, 3*256+7, 155):
            x = der.encode_length(n) + b("more")
            n1, llen = der.read_length(x)
            self.assertEqual(n1, n)
            self.assertEqual(x[llen:], b("more"))

    def test_sequence(self):
        x = der.encode_sequence(b("ABC"), b("DEF")) + b("GHI")
        self.assertEqual(x, b("\x30\x06ABCDEFGHI"))
        x1, rest = der.remove_sequence(x)
        self.assertEqual(x1, b("ABCDEF"))
        self.assertEqual(rest, b("GHI"))

    def test_constructed(self):
        x = der.encode_constructed(0, NIST224p.encoded_oid)
        self.assertEqual(hexlify(x), b("a007") + b("06052b81040021"))
        x = der.encode_constructed(1, unhexlify(b("0102030a0b0c")))
        self.assertEqual(hexlify(x), b("a106") + b("0102030a0b0c"))

class Util(unittest.TestCase):
    def test_trytryagain(self):
        tta = util.randrange_from_seed__trytryagain
        for i in range(1000):
            seed = "seed-%d" % i
            for order in (2**8-2, 2**8-1, 2**8, 2**8+1, 2**8+2,
                          2**16-1, 2**16+1):
                n = tta(seed, order)
                self.assertTrue(1 <= n < order, (1, n, order))
        # this trytryagain *does* provide long-term stability
        self.assertEqual(("%x"%(tta("seed", NIST224p.order))).encode(),
                         b("6fa59d73bf0446ae8743cf748fc5ac11d5585a90356417e97155c3bc"))

    def test_randrange(self):
        # util.randrange does not provide long-term stability: we might
        # change the algorithm in the future.
        for i in range(1000):
            entropy = util.PRNG("seed-%d" % i)
            for order in (2**8-2, 2**8-1, 2**8,
                          2**16-1, 2**16+1,
                          ):
                # that oddball 2**16+1 takes half our runtime
                n = util.randrange(order, entropy=entropy)
                self.assertTrue(1 <= n < order, (1, n, order))

    def OFF_test_prove_uniformity(self):
        order = 2**8-2
        counts = dict([(i, 0) for i in range(1, order)])
        assert 0 not in counts
        assert order not in counts
        for i in range(1000000):
            seed = "seed-%d" % i
            n = util.randrange_from_seed__trytryagain(seed, order)
            counts[n] += 1
        # this technique should use the full range
        self.assertTrue(counts[order-1])
        for i in range(1, order):
            print_("%3d: %s" % (i, "*"*(counts[i]//100)))

class RFC6979(unittest.TestCase):
    # https://tools.ietf.org/html/rfc6979#appendix-A.1
    def _do(self, generator, secexp, hsh, hash_func, expected):
        actual = rfc6979.generate_k(generator.order(), secexp, hash_func, hsh)
        self.assertEqual(expected, actual)

    def test_SECP256k1(self):
        '''The RFC doesn't contain test vectors for SECP256k1, which is used
        in Bitcoin; this vector has been computed with the Go reference
        implementation instead.'''
        self._do(
            generator = SECP256k1.generator,
            secexp = int("9d0219792467d7d37b4d43298a7d0c05", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("8fa1f95d514760e498f28957b824ee6ec39ed64826ff4fecc2b5739ec45b91cd", 16))

    def test_SECP256k1_2(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=int("cca9fbcc1b41e5a95d369eaa6ddcff73b61a4efaa279cfc6567e8daa39cbaf50", 16),
            hsh=sha256(b("sample")).digest(),
            hash_func=sha256,
            expected=int("2df40ca70e639d89528a6b670d9d48d9165fdc0febc0974056bdce192b8e16a3", 16))

    def test_SECP256k1_3(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0x1,
            hsh=sha256(b("Satoshi Nakamoto")).digest(),
            hash_func=sha256,
            expected=0x8F8A276C19F4149656B280621E358CCE24F5F52542772691EE69063B74F15D15)

    def test_SECP256k1_4(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0x1,
            hsh=sha256(b("All those moments will be lost in time, like tears in rain. Time to die...")).digest(),
            hash_func=sha256,
            expected=0x38AA22D72376B4DBC472E06C3BA403EE0A394DA63FC58D88686C611ABA98D6B3)

    def test_SECP256k1_5(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364140,
            hsh=sha256(b("Satoshi Nakamoto")).digest(),
            hash_func=sha256,
            expected=0x33A19B60E25FB6F4435AF53A3D42D493644827367E6453928554F43E49AA6F90)

    def test_SECP256k1_6(self):
        self._do(
            generator=SECP256k1.generator,
            secexp=0xf8b8af8ce3c7cca5e300d33939540c10d45ce001b8f252bfbc57ba0342904181,
            hsh=sha256(b("Alan Turing")).digest(),
            hash_func=sha256,
            expected=0x525A82B70E67874398067543FD84C83D30C175FDC45FDEEE082FE13B1D7CFDF1)

    def test_1(self):
        # Basic example from the RFC; it also exercises the 'try-try-again'
        # loop from Step H of RFC 6979
        self._do(
            generator = Point(None, 0, 0, int("4000000000000000000020108A2E0CC0D99F8A5EF", 16)),
            secexp = int("09A4D6792295A7F730FC3F2B49CBC0F62E862272F", 16),
            hsh = unhexlify(b("AF2BDBE1AA9B6EC1E2ADE1D694F41FC71A831D0268E9891562113D8A62ADD1BF")),
            hash_func = sha256,
            expected = int("23AF4074C90A02B3FE61D286D5C87F425E6BDD81B", 16))

    def test_2(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha1(b("sample")).digest(),
            hash_func = sha1,
            expected = int("37D7CA00D2C7B0E5E412AC03BD44BA837FDD5B28CD3B0021", 16))

    def test_3(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("32B1B6D7D42A05CB449065727A84804FB1A3E34D8F261496", 16))

    def test_4(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha512(b("sample")).digest(),
            hash_func = sha512,
            expected = int("A2AC7AB055E4F20692D49209544C203A7D1F2C0BFBC75DB1", 16))

    def test_5(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha1(b("test")).digest(),
            hash_func = sha1,
            expected = int("D9CF9C3D3297D3260773A1DA7418DB5537AB8DD93DE7FA25", 16))

    def test_6(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha256(b("test")).digest(),
            hash_func = sha256,
            expected = int("5C4CE89CF56D9E7C77C8585339B006B97B5F0680B4306C6C", 16))

    def test_7(self):
        self._do(
            generator=NIST192p.generator,
            secexp = int("6FAB034934E4C0FC9AE67F5B5659A9D7D1FEFD187EE09FD4", 16),
            hsh = sha512(b("test")).digest(),
            hash_func = sha512,
            expected = int("0758753A5254759C7CFBAD2E2D9B0792EEE44136C9480527", 16))

    def test_8(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha1(b("sample")).digest(),
            hash_func = sha1,
            expected = int("089C071B419E1C2820962321787258469511958E80582E95D8378E0C2CCDB3CB42BEDE42F50E3FA3C71F5A76724281D31D9C89F0F91FC1BE4918DB1C03A5838D0F9", 16))

    def test_9(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha256(b("sample")).digest(),
            hash_func = sha256,
            expected = int("0EDF38AFCAAECAB4383358B34D67C9F2216C8382AAEA44A3DAD5FDC9C32575761793FEF24EB0FC276DFC4F6E3EC476752F043CF01415387470BCBD8678ED2C7E1A0", 16))

    def test_10(self):
        self._do(
            generator=NIST521p.generator,
            secexp = int("0FAD06DAA62BA3B25D2FB40133DA757205DE67F5BB0018FEE8C86E1B68C7E75CAA896EB32F1F47C70855836A6D16FCC1466F6D8FBEC67DB89EC0C08B0E996B83538", 16),
            hsh = sha512(b("test")).digest(),
            hash_func = sha512,
            expected = int("16200813020EC986863BEDFC1B121F605C1215645018AEA1A7B215A564DE9EB1B38A67AA1128B80CE391C4FB71187654AAA3431027BFC7F395766CA988C964DC56D", 16))

def __main__():
    unittest.main()
if __name__ == "__main__":
    __main__()
247
bin/python/ecdsa/util.py
Normal file
@@ -0,0 +1,247 @@
from __future__ import division

import os
import math
import binascii
from hashlib import sha256
from . import der
from .curves import orderlen
from .six import PY3, int2byte, b, next

# RFC5480:
#   The "unrestricted" algorithm identifier is:
#     id-ecPublicKey OBJECT IDENTIFIER ::= {
#       iso(1) member-body(2) us(840) ansi-X9-62(10045) keyType(2) 1 }

oid_ecPublicKey = (1, 2, 840, 10045, 2, 1)
encoded_oid_ecPublicKey = der.encode_oid(*oid_ecPublicKey)

def randrange(order, entropy=None):
    """Return a random integer k such that 1 <= k < order, uniformly
    distributed across that range. For simplicity, this only behaves well if
    'order' is fairly close (but below) a power of 256. The try-try-again
    algorithm we use takes longer and longer time (on average) to complete as
    'order' falls, rising to a maximum of avg=512 loops for the worst-case
    (256**k)+1 . All of the standard curves behave well. There is a cutoff at
    10k loops (which raises RuntimeError) to prevent an infinite loop when
    something is really broken like the entropy function not working.

    Note that this function is not declared to be forwards-compatible: we may
    change the behavior in future releases. The entropy= argument (which
    should get a callable that behaves like os.urandom) can be used to
    achieve stability within a given release (for repeatable unit tests), but
    should not be used as a long-term-compatible key generation algorithm.
    """
    # we could handle arbitrary orders (even 256**k+1) better if we created
    # candidates bit-wise instead of byte-wise, which would reduce the
    # worst-case behavior to avg=2 loops, but that would be more complex. The
    # change would be to round the order up to a power of 256, subtract one
    # (to get 0xffff..), use that to get a byte-long mask for the top byte,
    # generate the len-1 entropy bytes, generate one extra byte and mask off
    # the top bits, then combine it with the rest. Requires jumping back and
    # forth between strings and integers a lot.

    if entropy is None:
        entropy = os.urandom
    assert order > 1
    bytes = orderlen(order)
    dont_try_forever = 10000 # gives about 2**-60 failures for worst case
    while dont_try_forever > 0:
        dont_try_forever -= 1
        candidate = string_to_number(entropy(bytes)) + 1
        if 1 <= candidate < order:
            return candidate
        continue
    raise RuntimeError("randrange() tried hard but gave up, either something"
                       " is very wrong or you got realllly unlucky. Order was"
                       " %x" % order)
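# A minimal usage sketch (not part of the original module): passing a
# deterministic entropy callable, such as the PRNG class defined just below,
# makes randrange() repeatable within a release, which is exactly what the
# unit tests rely on. With entropy=None it falls back to os.urandom.
#
#     k1 = randrange(2**192, entropy=PRNG("fixed seed"))
#     k2 = randrange(2**192, entropy=PRNG("fixed seed"))
#     assert k1 == k2          # same seed, same candidate stream
#     assert 1 <= k1 < 2**192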
class PRNG:
    # this returns a callable which, when invoked with an integer N, will
    # return N pseudorandom bytes. Note: this is a short-term PRNG, meant
    # primarily for the needs of randrange_from_seed__trytryagain(), which
    # only needs to run it a few times per seed. It does not provide
    # protection against state compromise (forward security).
    def __init__(self, seed):
        self.generator = self.block_generator(seed)

    def __call__(self, numbytes):
        a = [next(self.generator) for i in range(numbytes)]

        if PY3:
            return bytes(a)
        else:
            return "".join(a)

    def block_generator(self, seed):
        counter = 0
        while True:
            for byte in sha256(("prng-%d-%s" % (counter, seed)).encode()).digest():
                yield byte
            counter += 1

def randrange_from_seed__overshoot_modulo(seed, order):
    # hash the data, then turn the digest into a number in [1,order).
    #
    # We use David-Sarah Hopwood's suggestion: turn it into a number that's
    # sufficiently larger than the group order, then modulo it down to fit.
    # This should give adequate (but not perfect) uniformity, and simple
    # code. There are other choices: try-try-again is the main one.
    base = PRNG(seed)(2*orderlen(order))
    number = (int(binascii.hexlify(base), 16) % (order-1)) + 1
    assert 1 <= number < order, (1, number, order)
    return number

def lsb_of_ones(numbits):
    return (1 << numbits) - 1

def bits_and_bytes(order):
    bits = int(math.log(order-1, 2)+1)
    bytes = bits // 8
    extrabits = bits % 8
    return bits, bytes, extrabits

# the following randrange_from_seed__METHOD() functions take an
# arbitrarily-sized secret seed and turn it into a number that obeys the same
# range limits as randrange() above. They are meant for deriving consistent
# signing keys from a secret rather than generating them randomly, for
# example a protocol in which three signing keys are derived from a master
# secret. You should use a uniformly-distributed unguessable seed with about
# curve.baselen bytes of entropy. To use one, do this:
#   seed = os.urandom(curve.baselen) # or other starting point
#   secexp = ecdsa.util.randrange_from_seed__trytryagain(seed, curve.order)
#   sk = SigningKey.from_secret_exponent(secexp, curve)

def randrange_from_seed__truncate_bytes(seed, order, hashmod=sha256):
    # hash the seed, then turn the digest into a number in [1,order), but
    # don't worry about trying to uniformly fill the range. This will lose,
    # on average, four bits of entropy.
    bits, bytes, extrabits = bits_and_bytes(order)
    if extrabits:
        bytes += 1
    base = hashmod(seed).digest()[:bytes]
    base = "\x00"*(bytes-len(base)) + base
    number = 1+int(binascii.hexlify(base), 16)
    assert 1 <= number < order
    return number

def randrange_from_seed__truncate_bits(seed, order, hashmod=sha256):
    # like randrange_from_seed__truncate_bytes, but only lose an average of
    # half a bit
    bits = int(math.log(order-1, 2)+1)
    maxbytes = (bits+7) // 8
    base = hashmod(seed).digest()[:maxbytes]
    base = "\x00"*(maxbytes-len(base)) + base
    topbits = 8*maxbytes - bits
    if topbits:
        base = int2byte(ord(base[0]) & lsb_of_ones(topbits)) + base[1:]
    number = 1+int(binascii.hexlify(base), 16)
    assert 1 <= number < order
    return number

def randrange_from_seed__trytryagain(seed, order):
    # figure out exactly how many bits we need (rounded up to the nearest
    # bit), so we can reduce the chance of looping to less than 0.5 . This is
    # specified to feed from a byte-oriented PRNG, and discards the
    # high-order bits of the first byte as necessary to get the right number
    # of bits. The average number of loops will range from 1.0 (when
    # order=2**k-1) to 2.0 (when order=2**k+1).
    assert order > 1
    bits, bytes, extrabits = bits_and_bytes(order)
    generate = PRNG(seed)
    while True:
        extrabyte = b("")
        if extrabits:
            extrabyte = int2byte(ord(generate(1)) & lsb_of_ones(extrabits))
        guess = string_to_number(extrabyte + generate(bytes)) + 1
        if 1 <= guess < order:
            return guess


def number_to_string(num, order):
    l = orderlen(order)
    fmt_str = "%0" + str(2*l) + "x"
    string = binascii.unhexlify((fmt_str % num).encode())
    assert len(string) == l, (len(string), l)
    return string

def number_to_string_crop(num, order):
    l = orderlen(order)
    fmt_str = "%0" + str(2*l) + "x"
    string = binascii.unhexlify((fmt_str % num).encode())
    return string[:l]

def string_to_number(string):
    return int(binascii.hexlify(string), 16)

def string_to_number_fixedlen(string, order):
    l = orderlen(order)
    assert len(string) == l, (len(string), l)
    return int(binascii.hexlify(string), 16)

# these methods are useful for the sigencode= argument to SK.sign() and the
# sigdecode= argument to VK.verify(), and control how the signature is packed
# or unpacked.

def sigencode_strings(r, s, order):
    r_str = number_to_string(r, order)
    s_str = number_to_string(s, order)
    return (r_str, s_str)

def sigencode_string(r, s, order):
    # for any given curve, the size of the signature numbers is
    # fixed, so just use simple concatenation
    r_str, s_str = sigencode_strings(r, s, order)
    return r_str + s_str

def sigencode_der(r, s, order):
    return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))

# canonical versions of sigencode methods
# these enforce low S values, by negating the value (modulo the order) if above order/2
# see CECKey::Sign() https://github.com/bitcoin/bitcoin/blob/master/src/key.cpp#L214
def sigencode_strings_canonize(r, s, order):
    if s > order / 2:
        s = order - s
    return sigencode_strings(r, s, order)

def sigencode_string_canonize(r, s, order):
    if s > order / 2:
        s = order - s
    return sigencode_string(r, s, order)

def sigencode_der_canonize(r, s, order):
    if s > order / 2:
        s = order - s
    return sigencode_der(r, s, order)


def sigdecode_string(signature, order):
    l = orderlen(order)
    assert len(signature) == 2*l, (len(signature), 2*l)
    r = string_to_number_fixedlen(signature[:l], order)
    s = string_to_number_fixedlen(signature[l:], order)
    return r, s

def sigdecode_strings(rs_strings, order):
    (r_str, s_str) = rs_strings
    l = orderlen(order)
    assert len(r_str) == l, (len(r_str), l)
    assert len(s_str) == l, (len(s_str), l)
    r = string_to_number_fixedlen(r_str, order)
    s = string_to_number_fixedlen(s_str, order)
    return r, s

def sigdecode_der(sig_der, order):
    #return der.encode_sequence(der.encode_integer(r), der.encode_integer(s))
    rs_strings, empty = der.remove_sequence(sig_der)
    if empty != b(""):
        raise der.UnexpectedDER("trailing junk after DER sig: %s" %
                                binascii.hexlify(empty))
    r, rest = der.remove_integer(rs_strings)
    s, empty = der.remove_integer(rest)
    if empty != b(""):
        raise der.UnexpectedDER("trailing junk after DER numbers: %s" %
                                binascii.hexlify(empty))
    return r, s
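# A hedged round-trip sketch (not part of the original module): the
# sigencode_*/sigdecode_* pairs above are inverses of each other, and the
# *_canonize variants only differ by flipping s to order - s when it falls in
# the high half of the range, so verifiers that insist on low-S signatures
# (as Bitcoin does) will accept them.
#
#     order = 2**192 - 2**64 - 1          # illustrative modulus only
#     r, s = 12345, order - 7             # a deliberately high s
#     der_sig = sigencode_der_canonize(r, s, order)
#     r2, s2 = sigdecode_der(der_sig, order)
#     assert r2 == r and s2 == order - s  # s was canonicalized to the low half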
105
bin/python/ed25519.py
Normal file
@@ -0,0 +1,105 @@
import hashlib

b = 256
q = 2**255 - 19
l = 2**252 + 27742317777372353535851937790883648493

def H(m):
    return hashlib.sha512(m).digest()

def expmod(b,e,m):
    if e == 0: return 1
    t = expmod(b,e/2,m)**2 % m
    if e & 1: t = (t*b) % m
    return t

def inv(x):
    return expmod(x,q-2,q)

d = -121665 * inv(121666)
I = expmod(2,(q-1)/4,q)

def xrecover(y):
    xx = (y*y-1) * inv(d*y*y+1)
    x = expmod(xx,(q+3)/8,q)
    if (x*x - xx) % q != 0: x = (x*I) % q
    if x % 2 != 0: x = q-x
    return x

By = 4 * inv(5)
Bx = xrecover(By)
B = [Bx % q,By % q]

def edwards(P,Q):
    x1 = P[0]
    y1 = P[1]
    x2 = Q[0]
    y2 = Q[1]
    x3 = (x1*y2+x2*y1) * inv(1+d*x1*x2*y1*y2)
    y3 = (y1*y2+x1*x2) * inv(1-d*x1*x2*y1*y2)
    return [x3 % q,y3 % q]

def scalarmult(P,e):
    if e == 0: return [0,1]
    Q = scalarmult(P,e/2)
    Q = edwards(Q,Q)
    if e & 1: Q = edwards(Q,P)
    return Q

def encodeint(y):
    bits = [(y >> i) & 1 for i in range(b)]
    return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(b/8)])

def encodepoint(P):
    x = P[0]
    y = P[1]
    bits = [(y >> i) & 1 for i in range(b - 1)] + [x & 1]
    return ''.join([chr(sum([bits[i * 8 + j] << j for j in range(8)])) for i in range(b/8)])

def bit(h,i):
    return (ord(h[i/8]) >> (i%8)) & 1

def publickey(sk):
    h = H(sk)
    a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
    A = scalarmult(B,a)
    return encodepoint(A)

def Hint(m):
    h = H(m)
    return sum(2**i * bit(h,i) for i in range(2*b))

def signature(m,sk,pk):
    h = H(sk)
    a = 2**(b-2) + sum(2**i * bit(h,i) for i in range(3,b-2))
    r = Hint(''.join([h[i] for i in range(b/8,b/4)]) + m)
    R = scalarmult(B,r)
    S = (r + Hint(encodepoint(R) + pk + m) * a) % l
    return encodepoint(R) + encodeint(S)

def isoncurve(P):
    x = P[0]
    y = P[1]
    return (-x*x + y*y - 1 - d*x*x*y*y) % q == 0

def decodeint(s):
    return sum(2**i * bit(s,i) for i in range(0,b))

def decodepoint(s):
    y = sum(2**i * bit(s,i) for i in range(0,b-1))
    x = xrecover(y)
    if x & 1 != bit(s,b-1): x = q-x
    P = [x,y]
    if not isoncurve(P): raise Exception("decoding point that is not on curve")
    return P

def checkvalid(s,m,pk):
    if len(s) != b/4: raise Exception("signature length is wrong")
    if len(pk) != b/8: raise Exception("public-key length is wrong")
    R = decodepoint(s[0:b/8])
    A = decodepoint(pk)
    S = decodeint(s[b/8:b/4])
    h = Hint(encodepoint(R) + pk + m)
    if scalarmult(B,S) != edwards(R,scalarmult(A,h)):
        raise Exception("signature does not pass verification")
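# A hedged usage sketch (not part of the original module). This is the slow
# reference implementation of Ed25519; it relies on Python 2 semantics
# (classic integer division via `/`, byte strings indexed with ord()), so the
# example below assumes a Python 2 interpreter.
#
#     sk = 'x' * 32                   # 32-byte secret key (demo value only)
#     pk = publickey(sk)              # derive the 32-byte public key
#     sig = signature('msg', sk, pk)  # 64-byte signature R || S
#     checkvalid(sig, 'msg', pk)      # raises Exception on a bad signature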
4
bin/python/jsonpath_rw/__init__.py
Normal file
@@ -0,0 +1,4 @@
from .jsonpath import *
from .parser import parse

__version__ = '1.3.0'
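# A hedged usage sketch (not part of the vendored package): jsonpath_rw
# parses a JSONPath expression into the AST classes defined in jsonpath.py
# below, and find() returns DatumInContext matches whose full_path records
# where each value was found.
#
#     from jsonpath_rw import parse
#     matches = parse('foo[*].baz').find({'foo': [{'baz': 1}, {'baz': 2}]})
#     [m.value for m in matches]            # -> [1, 2]
#     [str(m.full_path) for m in matches]   # -> ['foo.[0].baz', 'foo.[1].baz']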
510
bin/python/jsonpath_rw/jsonpath.py
Normal file
@@ -0,0 +1,510 @@
from __future__ import unicode_literals, print_function, absolute_import, division, generators, nested_scopes
import logging
import six
from six.moves import xrange
from itertools import *

logger = logging.getLogger(__name__)

# Turn on/off the automatic creation of id attributes
# ... could be a kwarg pervasively but uses are rare and simple today
auto_id_field = None

class JSONPath(object):
    """
    The base class for JSONPath abstract syntax; those
    methods stubbed here are the interface to supported
    JSONPath semantics.
    """

    def find(self, data):
        """
        All `JSONPath` types support `find()`, which returns an iterable of `DatumInContext`s.
        They keep track of the path followed to the current location, so if the calling code
        has some opinion about that, it can be passed in here as a starting point.
        """
        raise NotImplementedError()

    def update(self, data, val):
        "Returns `data` with the specified path replaced by `val`"
        raise NotImplementedError()

    def child(self, child):
        """
        Equivalent to Child(self, next) but with some canonicalization
        """
        if isinstance(self, This) or isinstance(self, Root):
            return child
        elif isinstance(child, This):
            return self
        elif isinstance(child, Root):
            return child
        else:
            return Child(self, child)

    def make_datum(self, value):
        if isinstance(value, DatumInContext):
            return value
        else:
            return DatumInContext(value, path=Root(), context=None)

class DatumInContext(object):
    """
    Represents a datum along a path from a context.

    Essentially a zipper but with a structure represented by JsonPath,
    and where the context is more of a parent pointer than a proper
    representation of the context.

    For quick-and-dirty work, this proxies any non-special attributes
    to the underlying datum, but the actual datum can (and usually should)
    be retrieved via the `value` attribute.

    To place `datum` within another, use `datum.in_context(context=..., path=...)`
    which extends the path. If the datum already has a context, it places the entire
    context within that passed in, so an object can be built from the inside
    out.
    """
    @classmethod
    def wrap(cls, data):
        if isinstance(data, cls):
            return data
        else:
            return cls(data)

    def __init__(self, value, path=None, context=None):
        self.value = value
        self.path = path or This()
        self.context = None if context is None else DatumInContext.wrap(context)

    def in_context(self, context, path):
        context = DatumInContext.wrap(context)

        if self.context:
            return DatumInContext(value=self.value, path=self.path, context=context.in_context(path=path, context=context))
        else:
            return DatumInContext(value=self.value, path=path, context=context)

    @property
    def full_path(self):
        return self.path if self.context is None else self.context.full_path.child(self.path)

    @property
    def id_pseudopath(self):
        """
        Looks like a path, but with ids stuck in when available
        """
        try:
            pseudopath = Fields(str(self.value[auto_id_field]))
        except (TypeError, AttributeError, KeyError): # This may not be all the interesting exceptions
            pseudopath = self.path

        if self.context:
            return self.context.id_pseudopath.child(pseudopath)
        else:
            return pseudopath

    def __repr__(self):
        return '%s(value=%r, path=%r, context=%r)' % (self.__class__.__name__, self.value, self.path, self.context)

    def __eq__(self, other):
        return isinstance(other, DatumInContext) and other.value == self.value and other.path == self.path and self.context == other.context
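# A hedged sketch (not part of the vendored package) of how DatumInContext
# composes: each datum carries the path from its parent context, and
# full_path chains those segments back to the root.
#
#     root = DatumInContext({'a': {'b': 1}})   # path defaults to This()
#     child = DatumInContext(1, path=Fields('b'),
#                            context=DatumInContext({'b': 1}, path=Fields('a'),
#                                                   context=root))
#     str(child.full_path)                     # -> 'a.b'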
class AutoIdForDatum(DatumInContext):
    """
    This behaves like a DatumInContext, but the value is
    always the path leading up to it, not including the "id",
    and with any "id" fields along the way replacing the prior
    segment of the path

    For example, it will make "foo.bar.id" return a datum
    that behaves like DatumInContext(value="foo.bar", path="foo.bar.id").

    This is disabled by default; it can be turned on by
    setting the `auto_id_field` global to a value other
    than `None`.
    """

    def __init__(self, datum, id_field=None):
        """
        Invariant is that datum.path is the path from context to datum. The auto id
        will either be the id in the datum (if present) or the id of the context
        followed by the path to the datum.

        The path to this datum is always the path to the context, the path to the
        datum, and then the auto id field.
        """
        self.datum = datum
        self.id_field = id_field or auto_id_field

    @property
    def value(self):
        return str(self.datum.id_pseudopath)

    @property
    def path(self):
        return self.id_field

    @property
    def context(self):
        return self.datum

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.datum)

    def in_context(self, context, path):
        return AutoIdForDatum(self.datum.in_context(context=context, path=path))

    def __eq__(self, other):
        return isinstance(other, AutoIdForDatum) and other.datum == self.datum and self.id_field == other.id_field


class Root(JSONPath):
    """
    The JSONPath referring to the "root" object. Concrete syntax is '$'.
    The root is the topmost datum without any context attached.
    """

    def find(self, data):
        if not isinstance(data, DatumInContext):
            return [DatumInContext(data, path=Root(), context=None)]
        else:
            if data.context is None:
                return [DatumInContext(data.value, context=None, path=Root())]
            else:
                return Root().find(data.context)

    def update(self, data, val):
        return val

    def __str__(self):
        return '$'

    def __repr__(self):
        return 'Root()'

    def __eq__(self, other):
        return isinstance(other, Root)

class This(JSONPath):
    """
    The JSONPath referring to the current datum. Concrete syntax is '@'.
    """

    def find(self, datum):
        return [DatumInContext.wrap(datum)]

    def update(self, data, val):
        return val

    def __str__(self):
        return '`this`'

    def __repr__(self):
        return 'This()'

    def __eq__(self, other):
        return isinstance(other, This)

class Child(JSONPath):
    """
    JSONPath that first matches the left, then the right.
    Concrete syntax is <left> '.' <right>
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, datum):
        """
        Extra special case: auto ids do not have children,
        so cut it off right now rather than auto id the auto id
        """

        return [submatch
                for subdata in self.left.find(datum)
                if not isinstance(subdata, AutoIdForDatum)
                for submatch in self.right.find(subdata)]

    def __eq__(self, other):
        return isinstance(other, Child) and self.left == other.left and self.right == other.right

    def __str__(self):
        return '%s.%s' % (self.left, self.right)

    def __repr__(self):
        return '%s(%r, %r)' % (self.__class__.__name__, self.left, self.right)

class Parent(JSONPath):
    """
    JSONPath that matches the parent node of the current match.
    Will crash if no such parent exists.
    Available via named operator `parent`.
    """

    def find(self, datum):
        datum = DatumInContext.wrap(datum)
        return [datum.context]

    def __eq__(self, other):
        return isinstance(other, Parent)

    def __str__(self):
        return '`parent`'

    def __repr__(self):
        return 'Parent()'


class Where(JSONPath):
    """
    JSONPath that first matches the left, and then
    filters for only those nodes that have
    a match on the right.

    WARNING: Subject to change. May want to have "contains"
    or some other better word for it.
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, data):
        return [subdata for subdata in self.left.find(data) if self.right.find(data)]

    def __str__(self):
        return '%s where %s' % (self.left, self.right)

    def __eq__(self, other):
        return isinstance(other, Where) and other.left == self.left and other.right == self.right

class Descendants(JSONPath):
    """
    JSONPath that matches first the left expression then any descendant
    of it which matches the right expression.
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, datum):
        # <left> .. <right> ==> <left> . (<right> | *..<right> | [*]..<right>)
        #
        # With a wonky caveat that since Slice() has funky coercions
        # we cannot just delegate to that equivalence or we'll hit an
        # infinite loop. So right here we implement the coercion-free version.

        # Get all left matches into a list
        left_matches = self.left.find(datum)
        if not isinstance(left_matches, list):
            left_matches = [left_matches]

        def match_recursively(datum):
            right_matches = self.right.find(datum)

            # Manually do the * or [*] to avoid coercion and recurse just the right-hand pattern
            if isinstance(datum.value, list):
                recursive_matches = [submatch
                                     for i in range(0, len(datum.value))
                                     for submatch in match_recursively(DatumInContext(datum.value[i], context=datum, path=Index(i)))]

            elif isinstance(datum.value, dict):
                recursive_matches = [submatch
                                     for field in datum.value.keys()
                                     for submatch in match_recursively(DatumInContext(datum.value[field], context=datum, path=Fields(field)))]

            else:
                recursive_matches = []

            return right_matches + list(recursive_matches)

        # TODO: repeatable iterator instead of list?
        return [submatch
                for left_match in left_matches
                for submatch in match_recursively(left_match)]

    def is_singular(self):
        return False

    def __str__(self):
        return '%s..%s' % (self.left, self.right)

    def __eq__(self, other):
        return isinstance(other, Descendants) and self.left == other.left and self.right == other.right
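# A hedged sketch (not part of the vendored package): Descendants implements
# the recursive `..` operator by matching the right-hand pattern at the left
# match and at every list element or dict value reachable below it.
#
#     expr = Descendants(Root(), Fields('name'))   # same as parsing '$..name'
#     data = {'name': 'top', 'child': {'name': 'nested'}}
#     sorted(m.value for m in expr.find(data))     # -> ['nested', 'top']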
class Union(JSONPath):
    """
    JSONPath that returns the union of the results of each match.
    This is pretty shoddily implemented for now. The nicest semantics
    in case of mismatched bits (list vs atomic) is to put
    them all in a list, but I haven't done that yet.

    WARNING: Any appearance of this being the _concatenation_ is
    coincidence. It may even be a bug! (or laziness)
    """
    def __init__(self, left, right):
        self.left = left
        self.right = right

    def is_singular(self):
        return False

    def find(self, data):
        return self.left.find(data) + self.right.find(data)


class Intersect(JSONPath):
    """
    JSONPath for bits that match *both* patterns.

    This can be accomplished a couple of ways. The most
    efficient is to actually build the intersected
    AST, as in building a state machine for matching the
    intersection of regular languages. The next
    idea is to build a filtered data set and match against
    that.
    """
    def __init__(self, left, right):
        self.left = left
        self.right = right

    def is_singular(self):
        return False

    def find(self, data):
        raise NotImplementedError()

class Fields(JSONPath):
    """
    JSONPath referring to some field of the current object.
    Concrete syntax is comma-separated field names.

    WARNING: If '*' is any of the field names, then they will
    all be returned.
    """

    def __init__(self, *fields):
        self.fields = fields

    def get_field_datum(self, datum, field):
        if field == auto_id_field:
            return AutoIdForDatum(datum)
        else:
            try:
                field_value = datum.value[field]  # Do NOT use `val.get(field)` since that confuses None as a value with None due to `get`
                return DatumInContext(value=field_value, path=Fields(field), context=datum)
            except (TypeError, KeyError, AttributeError):
                return None

    def reified_fields(self, datum):
        if '*' not in self.fields:
            return self.fields
        else:
            try:
                fields = tuple(datum.value.keys())
                return fields if auto_id_field is None else fields + (auto_id_field,)
            except AttributeError:
                return ()

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        return [field_datum
                for field_datum in [self.get_field_datum(datum, field) for field in self.reified_fields(datum)]
                if field_datum is not None]

    def __str__(self):
        return ','.join(self.fields)

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, ','.join(map(repr, self.fields)))

    def __eq__(self, other):
        return isinstance(other, Fields) and tuple(self.fields) == tuple(other.fields)

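# Illustrative sketch (assumes jsonpath_rw.parser is importable): a comma-
# separated field list matches each named field, and '*' expands to every
# field of the current object.
#
#   >>> from jsonpath_rw.parser import parse
#   >>> [m.value for m in parse('a,b').find({'a': 1, 'b': 2, 'c': 3})]
#   [1, 2]
#   >>> sorted(m.value for m in parse('*').find({'a': 1, 'b': 2}))
#   [1, 2]
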
class Index(JSONPath):
    """
    JSONPath that matches indices of the current datum, or none if not large enough.
    Concrete syntax is brackets.

    WARNING: If the datum is not long enough, it will not crash but will not match anything.
    NOTE: For the concrete syntax of `[*]`, the abstract syntax is a Slice() with no parameters (equivalent to `[:]`).
    """

    def __init__(self, index):
        self.index = index

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        if len(datum.value) > self.index:
            return [DatumInContext(datum.value[self.index], path=self, context=datum)]
        else:
            return []

    def __eq__(self, other):
        return isinstance(other, Index) and self.index == other.index

    def __str__(self):
        return '[%i]' % self.index

class Slice(JSONPath):
    """
    JSONPath matching a slice of an array.

    Because of a mismatch between JSON and XML when schema-unaware,
    this always returns an iterable; if the incoming data
    was not a list, then it returns a one-element list _containing_ that
    data.

    Consider these two docs, and their schema-unaware translation to JSON:

    <a><b>hello</b></a> ==> {"a": {"b": "hello"}}
    <a><b>hello</b><b>goodbye</b></a> ==> {"a": {"b": ["hello", "goodbye"]}}

    If there were a schema, it would be known that "b" should always be an
    array (unless the schema were wonky, but that is too much to fix here),
    so when querying with JSON, if the one writing the JSON knows that it
    should be an array, they can write a slice operator and it will coerce
    a non-array value to an array.

    This may be a bit unfortunate because it would be nice to always have
    an iterator, but dictionaries and other objects may also be iterable,
    so this is the compromise.
    """
    def __init__(self, start=None, end=None, step=None):
        self.start = start
        self.end = end
        self.step = step

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        # Here's the hack. If it is a dictionary or some kind of constant,
        # put it in a single-element list
        if (isinstance(datum.value, dict) or isinstance(datum.value, six.integer_types) or isinstance(datum.value, six.string_types)):
            return self.find(DatumInContext([datum.value], path=datum.path, context=datum.context))

        # Some iterators do not support slicing but we can still
        # at least work for '*'
        if self.start is None and self.end is None and self.step is None:
            return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in range(0, len(datum.value))]
        else:
            return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in range(0, len(datum.value))[self.start:self.end:self.step]]

    def __str__(self):
        if self.start is None and self.end is None and self.step is None:
            return '[*]'
        else:
            return '[%s%s%s]' % (self.start or '',
                                 ':%d' % self.end if self.end else '',
                                 ':%d' % self.step if self.step else '')

    def __repr__(self):
        return '%s(start=%r,end=%r,step=%r)' % (self.__class__.__name__, self.start, self.end, self.step)

    def __eq__(self, other):
        return isinstance(other, Slice) and other.start == self.start and self.end == other.end and other.step == self.step
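# Illustrative sketch (assumes jsonpath_rw.parser is importable): a slice
# coerces a non-array value into a one-element array, per the docstring.
#
#   >>> from jsonpath_rw.parser import parse
#   >>> [m.value for m in parse('a.b.[*]').find({'a': {'b': 'hello'}})]
#   ['hello']
#   >>> [m.value for m in parse('a.b.[*]').find({'a': {'b': ['hello', 'goodbye']}})]
#   ['hello', 'goodbye']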
171  bin/python/jsonpath_rw/lexer.py  Normal file
@@ -0,0 +1,171 @@
from __future__ import unicode_literals, print_function, absolute_import, division, generators, nested_scopes
import sys
import logging

import ply.lex

logger = logging.getLogger(__name__)


class JsonPathLexerError(Exception):
    pass


class JsonPathLexer(object):
    '''
    A Lexical analyzer for JsonPath.
    '''

    def __init__(self, debug=False):
        self.debug = debug
        if self.__doc__ is None:
            raise JsonPathLexerError('Docstrings have been removed! By design of PLY, jsonpath-rw requires docstrings. You must not use PYTHONOPTIMIZE=2 or python -OO.')

    def tokenize(self, string):
        '''
        Maps a string to an iterator over tokens. In other words: [char] -> [token]
        '''

        new_lexer = ply.lex.lex(module=self, debug=self.debug, errorlog=logger)
        new_lexer.latest_newline = 0
        new_lexer.string_value = None
        new_lexer.input(string)

        while True:
            t = new_lexer.token()
            if t is None:
                break
            t.col = t.lexpos - new_lexer.latest_newline
            yield t

        if new_lexer.string_value is not None:
            raise JsonPathLexerError('Unexpected EOF in string literal or identifier')

    # ============== PLY Lexer specification ==================
    #
    # This probably should be private but:
    #   - the parser requires access to `tokens` (perhaps they should be defined in a third, shared dependency)
    #   - things like `literals` might be a legitimate part of the public interface.
    #
    # Anyhow, it is pythonic to give some rope to hang oneself with :-)

    literals = ['*', '.', '[', ']', '(', ')', '$', ',', ':', '|', '&']

    reserved_words = {'where': 'WHERE'}

    tokens = ['DOUBLEDOT', 'NUMBER', 'ID', 'NAMED_OPERATOR'] + list(reserved_words.values())

    states = [('singlequote', 'exclusive'),
              ('doublequote', 'exclusive'),
              ('backquote', 'exclusive')]

    # Normal lexing, rather easy
    t_DOUBLEDOT = r'\.\.'
    t_ignore = ' \t'

    def t_ID(self, t):
        r'[a-zA-Z_@][a-zA-Z0-9_@\-]*'
        t.type = self.reserved_words.get(t.value, 'ID')
        return t

    def t_NUMBER(self, t):
        r'-?\d+'
        t.value = int(t.value)
        return t

    # Single-quoted strings
    t_singlequote_ignore = ''

    def t_singlequote(self, t):
        r"'"
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('singlequote')

    def t_singlequote_content(self, t):
        r"[^'\\]+"
        t.lexer.string_value += t.value

    def t_singlequote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_singlequote_end(self, t):
        r"'"
        t.value = t.lexer.string_value
        t.type = 'ID'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_singlequote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing singlequoted field: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))

    # Double-quoted strings
    t_doublequote_ignore = ''

    def t_doublequote(self, t):
        r'"'
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('doublequote')

    def t_doublequote_content(self, t):
        r'[^"\\]+'
        t.lexer.string_value += t.value

    def t_doublequote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_doublequote_end(self, t):
        r'"'
        t.value = t.lexer.string_value
        t.type = 'ID'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_doublequote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing doublequoted field: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))

    # Back-quoted "magic" operators
    t_backquote_ignore = ''

    def t_backquote(self, t):
        r'`'
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('backquote')

    def t_backquote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_backquote_content(self, t):
        r"[^`\\]+"
        t.lexer.string_value += t.value

    def t_backquote_end(self, t):
        r'`'
        t.value = t.lexer.string_value
        t.type = 'NAMED_OPERATOR'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_backquote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing backquoted operator: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))

    # Counting lines, handling errors
    def t_newline(self, t):
        r'\n'
        t.lexer.lineno += 1
        t.lexer.latest_newline = t.lexpos

    def t_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))


if __name__ == '__main__':
    logging.basicConfig()
    lexer = JsonPathLexer(debug=True)
    for token in lexer.tokenize(sys.stdin.read()):
        print('%-20s%s' % (token.value, token.type))
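# Illustrative sketch of driving the lexer directly (assumes this module is
# importable as jsonpath_rw.lexer):
#
#   >>> from jsonpath_rw.lexer import JsonPathLexer
#   >>> [(t.type, t.value) for t in JsonPathLexer().tokenize('foo..baz')]
#   [('ID', 'foo'), ('DOUBLEDOT', '..'), ('ID', 'baz')]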
187  bin/python/jsonpath_rw/parser.py  Normal file
@@ -0,0 +1,187 @@
from __future__ import print_function, absolute_import, division, generators, nested_scopes
import sys
import os.path
import logging

import ply.yacc

from jsonpath_rw.jsonpath import *
from jsonpath_rw.lexer import JsonPathLexer

logger = logging.getLogger(__name__)


def parse(string):
    return JsonPathParser().parse(string)


class JsonPathParser(object):
    '''
    An LALR-parser for JsonPath
    '''

    tokens = JsonPathLexer.tokens

    def __init__(self, debug=False, lexer_class=None):
        if self.__doc__ is None:
            raise Exception('Docstrings have been removed! By design of PLY, jsonpath-rw requires docstrings. You must not use PYTHONOPTIMIZE=2 or python -OO.')

        self.debug = debug
        self.lexer_class = lexer_class or JsonPathLexer  # Crufty but works around statefulness in PLY

    def parse(self, string, lexer=None):
        lexer = lexer or self.lexer_class()
        return self.parse_token_stream(lexer.tokenize(string))

    def parse_token_stream(self, token_iterator, start_symbol='jsonpath'):

        # Since PLY has some crufty aspects and dumps files, we try to keep them local.
        # However, we need to derive the name of the output Python file :-/
        output_directory = os.path.dirname(__file__)
        try:
            module_name = os.path.splitext(os.path.split(__file__)[1])[0]
        except Exception:
            module_name = __name__

        parsing_table_module = '_'.join([module_name, start_symbol, 'parsetab'])

        # And we regenerate the parse table every time; it doesn't actually take that long!
        new_parser = ply.yacc.yacc(module=self,
                                   debug=self.debug,
                                   tabmodule=parsing_table_module,
                                   outputdir=output_directory,
                                   write_tables=0,
                                   start=start_symbol,
                                   errorlog=logger)

        return new_parser.parse(lexer=IteratorToTokenStream(token_iterator))

    # ===================== PLY Parser specification =====================

    precedence = [
        ('left', ','),
        ('left', 'DOUBLEDOT'),
        ('left', '.'),
        ('left', '|'),
        ('left', '&'),
        ('left', 'WHERE'),
    ]

    def p_error(self, t):
        raise Exception('Parse error at %s:%s near token %s (%s)' % (t.lineno, t.col, t.value, t.type))

    def p_jsonpath_binop(self, p):
        """jsonpath : jsonpath '.' jsonpath
                    | jsonpath DOUBLEDOT jsonpath
                    | jsonpath WHERE jsonpath
                    | jsonpath '|' jsonpath
                    | jsonpath '&' jsonpath"""
        op = p[2]

        if op == '.':
            p[0] = Child(p[1], p[3])
        elif op == '..':
            p[0] = Descendants(p[1], p[3])
        elif op == 'where':
            p[0] = Where(p[1], p[3])
        elif op == '|':
            p[0] = Union(p[1], p[3])
        elif op == '&':
            p[0] = Intersect(p[1], p[3])

    def p_jsonpath_fields(self, p):
        "jsonpath : fields_or_any"
        p[0] = Fields(*p[1])

    def p_jsonpath_named_operator(self, p):
        "jsonpath : NAMED_OPERATOR"
        if p[1] == 'this':
            p[0] = This()
        elif p[1] == 'parent':
            p[0] = Parent()
        else:
            raise Exception('Unknown named operator `%s` at %s:%s' % (p[1], p.lineno(1), p.lexpos(1)))

    def p_jsonpath_root(self, p):
        "jsonpath : '$'"
        p[0] = Root()

    def p_jsonpath_idx(self, p):
        "jsonpath : '[' idx ']'"
        p[0] = p[2]

    def p_jsonpath_slice(self, p):
        "jsonpath : '[' slice ']'"
        p[0] = p[2]

    def p_jsonpath_fieldbrackets(self, p):
        "jsonpath : '[' fields ']'"
        p[0] = Fields(*p[2])

    def p_jsonpath_child_fieldbrackets(self, p):
        "jsonpath : jsonpath '[' fields ']'"
        p[0] = Child(p[1], Fields(*p[3]))

    def p_jsonpath_child_idxbrackets(self, p):
        "jsonpath : jsonpath '[' idx ']'"
        p[0] = Child(p[1], p[3])

    def p_jsonpath_child_slicebrackets(self, p):
        "jsonpath : jsonpath '[' slice ']'"
        p[0] = Child(p[1], p[3])

    def p_jsonpath_parens(self, p):
        "jsonpath : '(' jsonpath ')'"
        p[0] = p[2]

    # Because fields in brackets cannot be '*' - that is reserved for array indices
    def p_fields_or_any(self, p):
        """fields_or_any : fields
                         | '*' """
        if p[1] == '*':
            p[0] = ['*']
        else:
            p[0] = p[1]

    def p_fields_id(self, p):
        "fields : ID"
        p[0] = [p[1]]

    def p_fields_comma(self, p):
        "fields : fields ',' fields"
        p[0] = p[1] + p[3]

    def p_idx(self, p):
        "idx : NUMBER"
        p[0] = Index(p[1])

    def p_slice_any(self, p):
        "slice : '*'"
        p[0] = Slice()

    def p_slice(self, p):  # Currently does not support `step`
        "slice : maybe_int ':' maybe_int"
        p[0] = Slice(start=p[1], end=p[3])

    def p_maybe_int(self, p):
        """maybe_int : NUMBER
                     | empty"""
        p[0] = p[1]

    def p_empty(self, p):
        'empty :'
        p[0] = None


class IteratorToTokenStream(object):
    def __init__(self, iterator):
        self.iterator = iterator

    def token(self):
        try:
            return next(self.iterator)
        except StopIteration:
            return None


if __name__ == '__main__':
    logging.basicConfig()
    parser = JsonPathParser(debug=True)
    print(parser.parse(sys.stdin.read()))
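# Illustrative sketch of the module-level entry point (assumes the package
# is importable as jsonpath_rw):
#
#   >>> from jsonpath_rw.parser import parse
#   >>> expr = parse('foo[*].baz')
#   >>> [m.value for m in expr.find({'foo': [{'baz': 1}, {'baz': 2}]})]
#   [1, 2]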
4  bin/python/ply/__init__.py  Normal file
@@ -0,0 +1,4 @@
# PLY package
# Author: David Beazley (dave@dabeaz.com)

__all__ = ['lex', 'yacc']
898  bin/python/ply/cpp.py  Normal file
@@ -0,0 +1,898 @@
# -----------------------------------------------------------------------------
# cpp.py
#
# Author: David Beazley (http://www.dabeaz.com)
# Copyright (C) 2007
# All rights reserved
#
# This module implements an ANSI-C style lexical preprocessor for PLY.
# -----------------------------------------------------------------------------
from __future__ import generators

# -----------------------------------------------------------------------------
# Default preprocessor lexer definitions. These tokens are enough to get
# a basic preprocessor working. Other modules may import these if they want
# -----------------------------------------------------------------------------

tokens = (
    'CPP_ID', 'CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT', 'CPP_POUND', 'CPP_DPOUND'
)

literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""

# Whitespace
def t_CPP_WS(t):
    r'\s+'
    t.lexer.lineno += t.value.count("\n")
    return t

t_CPP_POUND = r'\#'
t_CPP_DPOUND = r'\#\#'

# Identifier
t_CPP_ID = r'[A-Za-z_][\w_]*'

# Integer literal
def CPP_INTEGER(t):
    r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU]|[lL]|[uU][lL]|[lL][uU])?)'
    return t

t_CPP_INTEGER = CPP_INTEGER

# Floating literal
t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

# String literal
def t_CPP_STRING(t):
    r'\"([^\\\n]|(\\(.|\n)))*?\"'
    t.lexer.lineno += t.value.count("\n")
    return t

# Character constant 'c' or L'c'
def t_CPP_CHAR(t):
    r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
    t.lexer.lineno += t.value.count("\n")
    return t

# Comment
def t_CPP_COMMENT(t):
    r'(/\*(.|\n)*?\*/)|(//.*?\n)'
    t.lexer.lineno += t.value.count("\n")
    return t

def t_error(t):
    t.type = t.value[0]
    t.value = t.value[0]
    t.lexer.skip(1)
    return t

import re
import copy
import time
import os.path

# -----------------------------------------------------------------------------
# trigraph()
#
# Given an input string, this function replaces all trigraph sequences.
# The following mapping is used:
#
#     ??=    #
#     ??/    \
#     ??'    ^
#     ??(    [
#     ??)    ]
#     ??!    |
#     ??<    {
#     ??>    }
#     ??-    ~
# -----------------------------------------------------------------------------

_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
_trigraph_rep = {
    '=':'#',
    '/':'\\',
    "'":'^',
    '(':'[',
    ')':']',
    '!':'|',
    '<':'{',
    '>':'}',
    '-':'~'
}

def trigraph(input):
    return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]], input)
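# A quick illustration of the trigraph mapping above (not part of the module):
#
#   >>> trigraph('??=define ARR(x) x??(0??)')
#   '#define ARR(x) x[0]'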
# ------------------------------------------------------------------
# Macro object
#
# This object holds information about preprocessor macros
#
#    .name      - Macro name (string)
#    .value     - Macro value (a list of tokens)
#    .arglist   - List of argument names
#    .variadic  - Boolean indicating whether or not variadic macro
#    .vararg    - Name of the variadic parameter
#
# When a macro is created, the macro replacement token sequence is
# pre-scanned and used to create patch lists that are later used
# during macro expansion
# ------------------------------------------------------------------

class Macro(object):
    def __init__(self, name, value, arglist=None, variadic=False):
        self.name = name
        self.value = value
        self.arglist = arglist
        self.variadic = variadic
        if variadic:
            self.vararg = arglist[-1]
        self.source = None

# ------------------------------------------------------------------
# Preprocessor object
#
# Object representing a preprocessor. Contains macro definitions,
# include directories, and other information
# ------------------------------------------------------------------

class Preprocessor(object):
    def __init__(self, lexer=None):
        if lexer is None:
            import ply.lex as lex  # fall back to the most recently built lexer
            lexer = lex.lexer
        self.lexer = lexer
        self.macros = { }
        self.path = []
        self.temp_path = []

        # Probe the lexer for selected tokens
        self.lexprobe()

        tm = time.localtime()
        self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y", tm))
        self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S", tm))
        self.parser = None
    # -----------------------------------------------------------------------------
    # tokenize()
    #
    # Utility function. Given a string of text, tokenize into a list of tokens
    # -----------------------------------------------------------------------------

    def tokenize(self, text):
        tokens = []
        self.lexer.input(text)
        while True:
            tok = self.lexer.token()
            if not tok: break
            tokens.append(tok)
        return tokens

    # ---------------------------------------------------------------------
    # error()
    #
    # Report a preprocessor error/warning of some kind
    # ----------------------------------------------------------------------

    def error(self, file, line, msg):
        print("%s:%d %s" % (file, line, msg))

    # ----------------------------------------------------------------------
    # lexprobe()
    #
    # This method probes the preprocessor lexer object to discover
    # the token types of symbols that are important to the preprocessor.
    # If this works right, the preprocessor will simply "work"
    # with any suitable lexer regardless of how tokens have been named.
    # ----------------------------------------------------------------------

    def lexprobe(self):

        # Determine the token type for identifiers
        self.lexer.input("identifier")
        tok = self.lexer.token()
        if not tok or tok.value != "identifier":
            print("Couldn't determine identifier type")
        else:
            self.t_ID = tok.type

        # Determine the token type for integers
        self.lexer.input("12345")
        tok = self.lexer.token()
        if not tok or int(tok.value) != 12345:
            print("Couldn't determine integer type")
        else:
            self.t_INTEGER = tok.type
            self.t_INTEGER_TYPE = type(tok.value)

        # Determine the token type for strings enclosed in double quotes
        self.lexer.input("\"filename\"")
        tok = self.lexer.token()
        if not tok or tok.value != "\"filename\"":
            print("Couldn't determine string type")
        else:
            self.t_STRING = tok.type

        # Determine the token type for whitespace--if any
        self.lexer.input(" ")
        tok = self.lexer.token()
        if not tok or tok.value != " ":
            self.t_SPACE = None
        else:
            self.t_SPACE = tok.type

        # Determine the token type for newlines
        self.lexer.input("\n")
        tok = self.lexer.token()
        if not tok or tok.value != "\n":
            self.t_NEWLINE = None
            print("Couldn't determine token for newlines")
        else:
            self.t_NEWLINE = tok.type

        self.t_WS = (self.t_SPACE, self.t_NEWLINE)

        # Check for other characters used by the preprocessor
        chars = [ '<','>','#','##','\\','(',')',',','.']
        for c in chars:
            self.lexer.input(c)
            tok = self.lexer.token()
            if not tok or tok.value != c:
                print("Unable to lex '%s' required for preprocessor" % c)

    # ----------------------------------------------------------------------
    # add_path()
    #
    # Adds a search path to the preprocessor.
    # ----------------------------------------------------------------------

    def add_path(self, path):
        self.path.append(path)

    # ----------------------------------------------------------------------
    # group_lines()
    #
    # Given an input string, this function splits it into lines. Trailing whitespace
    # is removed. Any line ending with \ is grouped with the next line. This
    # function forms the lowest level of the preprocessor---grouping text into
    # a line-by-line format.
    # ----------------------------------------------------------------------

    def group_lines(self, input):
        lex = self.lexer.clone()
        lines = [x.rstrip() for x in input.splitlines()]
        for i in xrange(len(lines)):
            j = i + 1
            while lines[i].endswith('\\') and (j < len(lines)):
                lines[i] = lines[i][:-1] + lines[j]
                lines[j] = ""
                j += 1

        input = "\n".join(lines)
        lex.input(input)
        lex.lineno = 1

        current_line = []
        while True:
            tok = lex.token()
            if not tok:
                break
            current_line.append(tok)
            if tok.type in self.t_WS and '\n' in tok.value:
                yield current_line
                current_line = []

        if current_line:
            yield current_line
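    # Illustrative sketch of group_lines() (assumes a Preprocessor `p` built
    # as in the __main__ block at the end of this module): backslash
    # continuations are folded into one logical line.
    #
    #   >>> ["".join(t.value for t in line) for line in p.group_lines('#define A \\\n 1\n')]
    #   ['#define A  1\n']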
    # ----------------------------------------------------------------------
    # tokenstrip()
    #
    # Remove leading/trailing whitespace tokens from a token list
    # ----------------------------------------------------------------------

    def tokenstrip(self, tokens):
        i = 0
        while i < len(tokens) and tokens[i].type in self.t_WS:
            i += 1
        del tokens[:i]
        i = len(tokens) - 1
        while i >= 0 and tokens[i].type in self.t_WS:
            i -= 1
        del tokens[i+1:]
        return tokens


    # ----------------------------------------------------------------------
    # collect_args()
    #
    # Collects comma separated arguments from a list of tokens. The arguments
    # must be enclosed in parenthesis. Returns a tuple (tokencount, args, positions)
    # where tokencount is the number of tokens consumed, args is a list of arguments,
    # and positions is a list of integers containing the starting index of each
    # argument. Each argument is represented by a list of tokens.
    #
    # When collecting arguments, leading and trailing whitespace is removed
    # from each argument.
    #
    # This function properly handles nested parenthesis and commas---these do not
    # define new arguments.
    # ----------------------------------------------------------------------

    def collect_args(self, tokenlist):
        args = []
        positions = []
        current_arg = []
        nesting = 1
        tokenlen = len(tokenlist)

        # Search for the opening '('.
        i = 0
        while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
            i += 1

        if (i < tokenlen) and (tokenlist[i].value == '('):
            positions.append(i+1)
        else:
            self.error(self.source, tokenlist[0].lineno, "Missing '(' in macro arguments")
            return 0, [], []

        i += 1

        while i < tokenlen:
            t = tokenlist[i]
            if t.value == '(':
                current_arg.append(t)
                nesting += 1
            elif t.value == ')':
                nesting -= 1
                if nesting == 0:
                    if current_arg:
                        args.append(self.tokenstrip(current_arg))
                        positions.append(i)
                    return i+1, args, positions
                current_arg.append(t)
            elif t.value == ',' and nesting == 1:
                args.append(self.tokenstrip(current_arg))
                positions.append(i+1)
                current_arg = []
            else:
                current_arg.append(t)
            i += 1

        # Missing end argument
        self.error(self.source, tokenlist[-1].lineno, "Missing ')' in macro arguments")
        return 0, [], []
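    # Illustrative sketch of collect_args() (assumes a Preprocessor `p` built
    # as in the __main__ block at the end of this module): nested parentheses
    # do not split arguments.
    #
    #   >>> toks = p.tokenize('(a, (b, c), d)')
    #   >>> count, args, positions = p.collect_args(toks)
    #   >>> ["".join(t.value for t in a) for a in args]
    #   ['a', '(b, c)', 'd']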
    # ----------------------------------------------------------------------
    # macro_prescan()
    #
    # Examine the macro value (token sequence) and identify patch points
    # This is used to speed up macro expansion later on---we'll know
    # right away where to apply patches to the value to form the expansion
    # ----------------------------------------------------------------------

    def macro_prescan(self, macro):
        macro.patch = []             # Standard macro arguments
        macro.str_patch = []         # String conversion expansion
        macro.var_comma_patch = []   # Variadic macro comma patch
        i = 0
        while i < len(macro.value):
            if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
                argnum = macro.arglist.index(macro.value[i].value)
                # Conversion of argument to a string
                if i > 0 and macro.value[i-1].value == '#':
                    macro.value[i] = copy.copy(macro.value[i])
                    macro.value[i].type = self.t_STRING
                    del macro.value[i-1]
                    macro.str_patch.append((argnum, i-1))
                    continue
                # Concatenation
                elif (i > 0 and macro.value[i-1].value == '##'):
                    macro.patch.append(('c', argnum, i-1))
                    del macro.value[i-1]
                    continue
                elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
                    macro.patch.append(('c', argnum, i))
                    i += 1
                    continue
                # Standard expansion
                else:
                    macro.patch.append(('e', argnum, i))
            elif macro.value[i].value == '##':
                if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
                        ((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
                        (macro.value[i+1].value == macro.vararg):
                    macro.var_comma_patch.append(i-1)
            i += 1
        macro.patch.sort(key=lambda x: x[2], reverse=True)

    # ----------------------------------------------------------------------
    # macro_expand_args()
    #
    # Given a Macro and list of arguments (each a token list), this method
    # returns an expanded version of a macro. The return value is a token sequence
    # representing the replacement macro tokens
    # ----------------------------------------------------------------------

    def macro_expand_args(self, macro, args):
        # Make a copy of the macro token sequence
        rep = [copy.copy(_x) for _x in macro.value]

        # Make string expansion patches. These do not alter the length of the replacement sequence
        str_expansion = {}
        for argnum, i in macro.str_patch:
            if argnum not in str_expansion:
                str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\", "\\\\")
            rep[i] = copy.copy(rep[i])
            rep[i].value = str_expansion[argnum]

        # Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid of the comma
        comma_patch = False
        if macro.variadic and not args[-1]:
            for i in macro.var_comma_patch:
                rep[i] = None
                comma_patch = True

        # Make all other patches. The order of these matters. It is assumed that the patch list
        # has been sorted in reverse order of patch location since replacements will cause the
        # size of the replacement sequence to expand from the patch point.
        expanded = { }
        for ptype, argnum, i in macro.patch:
            # Concatenation. Argument is left unexpanded
            if ptype == 'c':
                rep[i:i+1] = args[argnum]
            # Normal expansion. Argument is macro expanded first
            elif ptype == 'e':
                if argnum not in expanded:
                    expanded[argnum] = self.expand_macros(args[argnum])
                rep[i:i+1] = expanded[argnum]

        # Get rid of removed comma if necessary
        if comma_patch:
            rep = [_i for _i in rep if _i]

        return rep
    # ----------------------------------------------------------------------
    # expand_macros()
    #
    # Given a list of tokens, this function performs macro expansion.
    # The expanded argument is a dictionary that contains macros already
    # expanded. This is used to prevent infinite recursion.
    # ----------------------------------------------------------------------

    def expand_macros(self, tokens, expanded=None):
        if expanded is None:
            expanded = {}
        i = 0
        while i < len(tokens):
            t = tokens[i]
            if t.type == self.t_ID:
                if t.value in self.macros and t.value not in expanded:
                    # Yes, we found a macro match
                    expanded[t.value] = True

                    m = self.macros[t.value]
                    if not m.arglist:
                        # A simple macro
                        ex = self.expand_macros([copy.copy(_x) for _x in m.value], expanded)
                        for e in ex:
                            e.lineno = t.lineno
                        tokens[i:i+1] = ex
                        i += len(ex)
                    else:
                        # A macro with arguments
                        j = i + 1
                        while j < len(tokens) and tokens[j].type in self.t_WS:
                            j += 1
                        if j < len(tokens) and tokens[j].value == '(':
                            tokcount, args, positions = self.collect_args(tokens[j:])
                            if not m.variadic and len(args) != len(m.arglist):
                                self.error(self.source, t.lineno, "Macro %s requires %d arguments" % (t.value, len(m.arglist)))
                                i = j + tokcount
                            elif m.variadic and len(args) < len(m.arglist)-1:
                                if len(m.arglist) > 2:
                                    self.error(self.source, t.lineno, "Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
                                else:
                                    self.error(self.source, t.lineno, "Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
                                i = j + tokcount
                            else:
                                if m.variadic:
                                    if len(args) == len(m.arglist)-1:
                                        args.append([])
                                    else:
                                        args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
                                        del args[len(m.arglist):]

                                # Get macro replacement text
                                rep = self.macro_expand_args(m, args)
                                rep = self.expand_macros(rep, expanded)
                                for r in rep:
                                    r.lineno = t.lineno
                                tokens[i:j+tokcount] = rep
                                i += len(rep)
                        else:
                            # Name not followed by '(' is not a macro
                            # invocation; skip past it so we make progress.
                            i = j
                    del expanded[t.value]
                    continue
                elif t.value == '__LINE__':
                    t.type = self.t_INTEGER
                    t.value = self.t_INTEGER_TYPE(t.lineno)

            i += 1
        return tokens
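    # Illustrative sketch of macro expansion (assumes a Preprocessor `p` built
    # as in the __main__ block at the end of this module):
    #
    #   >>> p.define('SQUARE(x) ((x)*(x))')
    #   >>> "".join(t.value for t in p.expand_macros(p.tokenize('SQUARE(n+1)')))
    #   '((n+1)*(n+1))'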
    # ----------------------------------------------------------------------
    # evalexpr()
    #
    # Evaluate an expression token sequence for the purposes of evaluating
    # integral expressions.
    # ----------------------------------------------------------------------

    def evalexpr(self, tokens):
        # tokens = tokenize(line)
        # Search for defined macros
        i = 0
        while i < len(tokens):
            if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
                j = i + 1
                needparen = False
                result = "0L"
                while j < len(tokens):
                    if tokens[j].type in self.t_WS:
                        j += 1
                        continue
                    elif tokens[j].type == self.t_ID:
                        if tokens[j].value in self.macros:
                            result = "1L"
                        else:
                            result = "0L"
                        if not needparen: break
                    elif tokens[j].value == '(':
                        needparen = True
                    elif tokens[j].value == ')':
                        break
                    else:
                        self.error(self.source, tokens[i].lineno, "Malformed defined()")
                    j += 1
                tokens[i].type = self.t_INTEGER
                tokens[i].value = self.t_INTEGER_TYPE(result)
                del tokens[i+1:j+1]
            i += 1
        tokens = self.expand_macros(tokens)
        for i, t in enumerate(tokens):
            if t.type == self.t_ID:
                tokens[i] = copy.copy(t)
                tokens[i].type = self.t_INTEGER
                tokens[i].value = self.t_INTEGER_TYPE("0L")
            elif t.type == self.t_INTEGER:
                tokens[i] = copy.copy(t)
                # Strip off any trailing suffixes
                tokens[i].value = str(tokens[i].value)
                while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
                    tokens[i].value = tokens[i].value[:-1]

        expr = "".join([str(x.value) for x in tokens])
        expr = expr.replace("&&", " and ")
        expr = expr.replace("||", " or ")
        # Protect "!=" before rewriting bare "!" as "not"; otherwise
        # "a != b" would be mangled into "a  not = b".
        expr = expr.replace("!=", " __NE__ ")
        expr = expr.replace("!", " not ")
        expr = expr.replace(" __NE__ ", " != ")
        try:
            result = eval(expr)
        except StandardError:
            self.error(self.source, tokens[0].lineno, "Couldn't evaluate expression")
            result = 0
        return result
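    # Illustrative sketch of evalexpr() on a #if-style expression (assumes a
    # Preprocessor `p` built as in the __main__ block at the end of this
    # module):
    #
    #   >>> p.define('FOO 1')
    #   >>> p.evalexpr(p.tokenize('defined(FOO) && FOO != 2'))
    #   True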
    # ----------------------------------------------------------------------
    # parsegen()
    #
    # Parse an input string
    # ----------------------------------------------------------------------
    def parsegen(self, input, source=None):

        # Replace trigraph sequences
        t = trigraph(input)
        lines = self.group_lines(t)

        if not source:
            source = ""

        self.define("__FILE__ \"%s\"" % source)

        self.source = source
        chunk = []
        enable = True
        iftrigger = False
        ifstack = []

        for x in lines:
            for i, tok in enumerate(x):
                if tok.type not in self.t_WS: break
            if tok.value == '#':
                # Preprocessor directive

                # Keep the newline tokens of directive lines so line numbering stays intact
                for tok in x:
                    if tok.type in self.t_WS and '\n' in tok.value:
                        chunk.append(tok)

                dirtokens = self.tokenstrip(x[i+1:])
                if dirtokens:
                    name = dirtokens[0].value
                    args = self.tokenstrip(dirtokens[1:])
                else:
                    name = ""
                    args = []

                if name == 'define':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.define(args)
                elif name == 'include':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        oldfile = self.macros['__FILE__']
                        for tok in self.include(args):
                            yield tok
                        self.macros['__FILE__'] = oldfile
                        self.source = source
                elif name == 'undef':
                    if enable:
                        for tok in self.expand_macros(chunk):
                            yield tok
                        chunk = []
                        self.undef(args)
                elif name == 'ifdef':
                    ifstack.append((enable, iftrigger))
                    if enable:
                        if not args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'ifndef':
                    ifstack.append((enable, iftrigger))
                    if enable:
                        if args[0].value in self.macros:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'if':
                    ifstack.append((enable, iftrigger))
                    if enable:
                        result = self.evalexpr(args)
                        if not result:
                            enable = False
                            iftrigger = False
                        else:
                            iftrigger = True
                elif name == 'elif':
                    if ifstack:
                        if ifstack[-1][0]:        # We only pay attention if outer "if" allows this
                            if enable:            # If already true, we flip enable False
                                enable = False
                            elif not iftrigger:   # If False, but not triggered yet, we'll check expression
                                result = self.evalexpr(args)
                                if result:
                                    enable = True
                                    iftrigger = True
                    else:
                        self.error(self.source, dirtokens[0].lineno, "Misplaced #elif")

                elif name == 'else':
                    if ifstack:
                        if ifstack[-1][0]:
                            if enable:
                                enable = False
                            elif not iftrigger:
                                enable = True
                                iftrigger = True
                    else:
                        self.error(self.source, dirtokens[0].lineno, "Misplaced #else")

                elif name == 'endif':
                    if ifstack:
                        enable, iftrigger = ifstack.pop()
                    else:
                        self.error(self.source, dirtokens[0].lineno, "Misplaced #endif")
                else:
                    # Unknown preprocessor directive
                    pass

            else:
                # Normal text
                if enable:
                    chunk.extend(x)

        for tok in self.expand_macros(chunk):
            yield tok
        chunk = []
    # ----------------------------------------------------------------------
    # include()
    #
    # Implementation of file-inclusion
    # ----------------------------------------------------------------------

    def include(self, tokens):
        # Try to extract the filename and then process an include file
        if not tokens:
            return
        if tokens:
            if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
                tokens = self.expand_macros(tokens)

            if tokens[0].value == '<':
                # Include <...>
                i = 1
                while i < len(tokens):
                    if tokens[i].value == '>':
                        break
                    i += 1
                else:
                    print("Malformed #include <...>")
                    return
                filename = "".join([x.value for x in tokens[1:i]])
                path = self.path + [""] + self.temp_path
            elif tokens[0].type == self.t_STRING:
                filename = tokens[0].value[1:-1]
                path = self.temp_path + [""] + self.path
            else:
                print("Malformed #include statement")
                return
        for p in path:
            iname = os.path.join(p, filename)
            try:
                data = open(iname, "r").read()
                dname = os.path.dirname(iname)
                if dname:
                    self.temp_path.insert(0, dname)
                for tok in self.parsegen(data, filename):
                    yield tok
                if dname:
                    del self.temp_path[0]
                break
            except IOError:
                pass
        else:
            print("Couldn't find '%s'" % filename)
    # ----------------------------------------------------------------------
    # define()
    #
    # Define a new macro
    # ----------------------------------------------------------------------

    def define(self, tokens):
        if isinstance(tokens, (str, unicode)):
            tokens = self.tokenize(tokens)

        linetok = tokens
        try:
            name = linetok[0]
            if len(linetok) > 1:
                mtype = linetok[1]
            else:
                mtype = None
            if not mtype:
                m = Macro(name.value, [])
                self.macros[name.value] = m
            elif mtype.type in self.t_WS:
                # A normal macro
                m = Macro(name.value, self.tokenstrip(linetok[2:]))
                self.macros[name.value] = m
            elif mtype.value == '(':
                # A macro with arguments
                tokcount, args, positions = self.collect_args(linetok[1:])
                variadic = False
                for a in args:
                    if variadic:
                        print("No more arguments may follow a variadic argument")
                        break
                    astr = "".join([str(_i.value) for _i in a])
                    if astr == "...":
                        variadic = True
                        a[0].type = self.t_ID
                        a[0].value = '__VA_ARGS__'
                        del a[1:]
                        continue
                    elif astr[-3:] == "..." and a[0].type == self.t_ID:
                        variadic = True
                        del a[1:]
                        # If, for some reason, "." is part of the identifier, strip off the name for the purposes
                        # of macro expansion
                        if a[0].value[-3:] == '...':
                            a[0].value = a[0].value[:-3]
                        continue
                    if len(a) > 1 or a[0].type != self.t_ID:
                        print("Invalid macro argument")
                        break
                else:
                    mvalue = self.tokenstrip(linetok[1+tokcount:])
                    i = 0
                    while i < len(mvalue):
                        if i+1 < len(mvalue):
                            if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
                                del mvalue[i]
                                continue
                            elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
                                del mvalue[i+1]
                        i += 1
                    m = Macro(name.value, mvalue, [x[0].value for x in args], variadic)
                    self.macro_prescan(m)
                    self.macros[name.value] = m
            else:
                print("Bad macro definition")
        except LookupError:
            print("Bad macro definition")
    # ----------------------------------------------------------------------
    # undef()
    #
    # Undefine a macro
    # ----------------------------------------------------------------------

    def undef(self, tokens):
        id = tokens[0].value
        try:
            del self.macros[id]
        except LookupError:
            pass

    # ----------------------------------------------------------------------
    # parse()
    #
    # Parse input text.
    # ----------------------------------------------------------------------
    def parse(self, input, source=None, ignore={}):
        self.ignore = ignore
        self.parser = self.parsegen(input, source)

    # ----------------------------------------------------------------------
    # token()
    #
    # Method to return individual tokens
    # ----------------------------------------------------------------------
    def token(self):
        try:
            while True:
                tok = next(self.parser)
                if tok.type not in self.ignore: return tok
        except StopIteration:
            self.parser = None
            return None

if __name__ == '__main__':
    import ply.lex as lex
    lexer = lex.lex()

    # Run a preprocessor
    import sys
    f = open(sys.argv[1])
    input = f.read()

    p = Preprocessor(lexer)
    p.parse(input, sys.argv[1])
    while True:
        tok = p.token()
        if not tok: break
        print(p.source, tok)
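# Illustrative end-to-end sketch (assumes this module is importable as
# ply.cpp, and that ply.lex can build the CPP_* lexer defined above):
#
#   >>> import ply.lex as lex, ply.cpp
#   >>> p = ply.cpp.Preprocessor(lex.lex(module=ply.cpp))
#   >>> p.parse('#define GREETING "hi"\nGREETING\n', 'demo.c')
#   >>> [t.value for t in iter(p.token, None) if t.value.strip()]
#   ['"hi"']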
133  bin/python/ply/ctokens.py  Normal file
@@ -0,0 +1,133 @@
# ----------------------------------------------------------------------
# ctokens.py
#
# Token specifications for symbols in ANSI C and C++. This file is
# meant to be used as a library in other tokenizers.
# ----------------------------------------------------------------------

# Reserved words

tokens = [
    # Literals (identifier, integer constant, float constant, string constant, char const)
    'ID', 'TYPEID', 'ICONST', 'FCONST', 'SCONST', 'CCONST',

    # Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
    'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
    'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
    'LOR', 'LAND', 'LNOT',
    'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',

    # Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
    'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
    'LSHIFTEQUAL', 'RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',

    # Increment/decrement (++,--)
    'PLUSPLUS', 'MINUSMINUS',

    # Structure dereference (->)
    'ARROW',

    # Ternary operator (?)
    'TERNARY',

    # Delimiters ( ) [ ] { } , . ; :
    'LPAREN', 'RPAREN',
    'LBRACKET', 'RBRACKET',
    'LBRACE', 'RBRACE',
    'COMMA', 'PERIOD', 'SEMI', 'COLON',

    # Ellipsis (...)
    'ELLIPSIS',

    # Comments (declared so the rules below may return them)
    'COMMENT', 'CPPCOMMENT',
]

# Operators (rule names must match the token names declared above)
t_PLUS = r'\+'
t_MINUS = r'-'
t_TIMES = r'\*'
t_DIVIDE = r'/'
t_MOD = r'%'
t_OR = r'\|'
t_AND = r'&'
t_NOT = r'~'
t_XOR = r'\^'
t_LSHIFT = r'<<'
t_RSHIFT = r'>>'
t_LOR = r'\|\|'
t_LAND = r'&&'
t_LNOT = r'!'
t_LT = r'<'
t_GT = r'>'
t_LE = r'<='
t_GE = r'>='
t_EQ = r'=='
t_NE = r'!='

# Assignment operators

t_EQUALS = r'='
t_TIMESEQUAL = r'\*='
t_DIVEQUAL = r'/='
t_MODEQUAL = r'%='
t_PLUSEQUAL = r'\+='
t_MINUSEQUAL = r'-='
t_LSHIFTEQUAL = r'<<='
t_RSHIFTEQUAL = r'>>='
t_ANDEQUAL = r'&='
t_OREQUAL = r'\|='
t_XOREQUAL = r'\^='

# Increment/decrement
t_PLUSPLUS = r'\+\+'
t_MINUSMINUS = r'--'

# ->
t_ARROW = r'->'

# ?
t_TERNARY = r'\?'

# Delimiters
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_LBRACE = r'\{'
t_RBRACE = r'\}'
t_COMMA = r','
t_PERIOD = r'\.'
t_SEMI = r';'
t_COLON = r':'
t_ELLIPSIS = r'\.\.\.'

# Identifiers
t_ID = r'[A-Za-z_][A-Za-z0-9_]*'

# Integer literal
t_ICONST = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'

# Floating literal
t_FCONST = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'

# String literal
t_SCONST = r'\"([^\\\n]|(\\.))*?\"'

# Character constant 'c' or L'c'
t_CCONST = r'(L)?\'([^\\\n]|(\\.))*?\''

# Comment (C-Style)
def t_COMMENT(t):
    r'/\*(.|\n)*?\*/'
    t.lexer.lineno += t.value.count('\n')
    return t

# Comment (C++-Style)
def t_CPPCOMMENT(t):
    r'//.*\n'
    t.lexer.lineno += 1
    return t
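# Illustrative sketch of consuming this library (a hypothetical consumer
# module, not part of ply): ctokens is meant to be star-imported into a
# tokenizer module, which then adds whitespace and error handling of its
# own before building the lexer.
#
#   # mylexer.py (hypothetical)
#   from ply.ctokens import *
#   import ply.lex as lex
#
#   t_ignore = ' \t'
#
#   def t_error(t):
#       t.lexer.skip(1)
#
#   lexer = lex.lex()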
1058  bin/python/ply/lex.py  Normal file (diff suppressed: file too large)
3276  bin/python/ply/yacc.py  Normal file (diff suppressed: file too large)
187  bin/python/ripple/ledger/Args.py  Normal file
@@ -0,0 +1,187 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import argparse
import importlib
import os

from ripple.ledger import LedgerNumber
from ripple.util import File
from ripple.util import Log
from ripple.util import PrettyPrint
from ripple.util import Range
from ripple.util.Function import Function

NAME = 'LedgerTool'
VERSION = '0.1'
NONE = '(none)'

_parser = argparse.ArgumentParser(
    prog=NAME,
    description='Retrieve and process Ripple ledgers.',
    epilog=LedgerNumber.HELP,
)

# Positional arguments.
_parser.add_argument(
    'command',
    nargs='*',
    help='Command to execute.'
)

# Flag arguments.
_parser.add_argument(
    '--binary',
    action='store_true',
    help='If true, searches are binary; by default, linear search is used.',
)

_parser.add_argument(
    '--cache',
    default='~/.local/share/ripple/ledger',
    help='The cache directory.',
)

_parser.add_argument(
    '--complete',
    action='store_true',
    help='If set, only match complete ledgers.',
)

_parser.add_argument(
    '--condition', '-c',
    help='The name of a condition function used to match ledgers.',
)

_parser.add_argument(
    '--config',
    help='The rippled configuration file name.',
)

_parser.add_argument(
    '--database', '-d',
    nargs='*',
    default=NONE,
    help='Specify a database.',
)

_parser.add_argument(
    '--display',
    help='Specify a function to display ledgers.',
)

_parser.add_argument(
    '--full', '-f',
    action='store_true',
    help='If true, request full ledgers.',
)

_parser.add_argument(
    '--indent', '-i',
    type=int,
    default=2,
    help='How many spaces to indent when displaying as JSON.',
)

_parser.add_argument(
    '--offline', '-o',
    action='store_true',
    help='If true, work entirely from cache; do not try to contact the server.',
)

_parser.add_argument(
    '--position', '-p',
    choices=['all', 'first', 'last'],
    default='last',
    help='Select which ledgers to display.',
)

_parser.add_argument(
    '--rippled', '-r',
    help='The filename of a rippled binary for retrieving ledgers.',
)

_parser.add_argument(
    '--server', '-s',
    help='IP address of a rippled JSON server.',
)

_parser.add_argument(
    '--utc', '-u',
    action='store_true',
    help='If true, display times in UTC rather than local time.',
)

_parser.add_argument(
    '--validations',
    default=3,
    help='The number of validations needed before considering a ledger valid.',
)

_parser.add_argument(
    '--version',
    action='version',
    version='%(prog)s ' + VERSION,
    help='Print the current version of %(prog)s',
)

_parser.add_argument(
    '--verbose', '-v',
    action='store_true',
    help='If true, give status messages on stderr.',
)

_parser.add_argument(
    '--window', '-w',
    type=int,
    default=0,
    help='How many ledgers to display around the matching ledger.',
)

_parser.add_argument(
    '--yes', '-y',
    action='store_true',
    help='If true, don\'t ask for confirmation on large commands.',
)

# Read the arguments from the command line.
ARGS = _parser.parse_args()
ARGS.NONE = NONE

Log.VERBOSE = ARGS.verbose

# Now remove any items that look like ledger numbers from the command line,
# splitting the positional arguments into commands and ledger ranges
# (see the sketch at the end of this file).
_command = ARGS.command
_parts = (ARGS.command, ARGS.ledgers) = ([], [])

for c in _command:
    _parts[Range.is_range(c, *LedgerNumber.LEDGERS)].append(c)

ARGS.command = ARGS.command or ['print' if ARGS.ledgers else 'info']

ARGS.cache = File.normalize(ARGS.cache)

if not ARGS.ledgers:
    if ARGS.condition:
        Log.warn('--condition needs a range of ledgers')
    if ARGS.display:
        Log.warn('--display needs a range of ledgers')

ARGS.condition = Function(
    ARGS.condition or 'all_ledgers', 'ripple.ledger.conditions')
ARGS.display = Function(
    ARGS.display or 'ledger_number', 'ripple.ledger.displays')

if ARGS.window < 0:
    raise ValueError('Window cannot be negative: --window=%d' %
                     ARGS.window)

PrettyPrint.INDENT = (ARGS.indent * ' ')

_loaders = (ARGS.database != NONE) + bool(ARGS.rippled) + bool(ARGS.server)

if not _loaders:
    ARGS.rippled = 'rippled'

elif _loaders > 1:
    raise ValueError('At most one of --database, --rippled and --server '
                     'may be specified')
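# A sketch of the boolean-indexing trick used above (illustrative only):
# `_parts` aliases the pair ([commands], [ledgers]), and Range.is_range(...)
# returns False (0) or True (1), which selects the destination list.
#
#   >>> parts = (commands, ledgers) = ([], [])
#   >>> for word in ['info', '100-200']:
#   ...     parts[looks_like_range(word)].append(word)  # looks_like_range is hypothetical
#   >>> commands, ledgers
#   (['info'], ['100-200'])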
bin/python/ripple/ledger/DatabaseReader.py (new file, 78 lines)
@@ -0,0 +1,78 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import json
import os
import subprocess

from ripple.ledger.Args import ARGS
from ripple.util import ConfigFile
from ripple.util import Database
from ripple.util import File
from ripple.util import Log
from ripple.util import Range

LEDGER_QUERY = """
SELECT
     L.*, count(1) validations
  FROM
     (select LedgerHash, LedgerSeq from Ledgers ORDER BY LedgerSeq DESC) L
     JOIN Validations V
     ON (V.LedgerHash = L.LedgerHash)
  GROUP BY L.LedgerHash
  HAVING validations >= {validation_quorum}
  ORDER BY 2;
"""

COMPLETE_QUERY = """
SELECT
     L.LedgerSeq, count(*) validations
  FROM
     (select LedgerHash, LedgerSeq from Ledgers ORDER BY LedgerSeq) L
     JOIN Validations V
     ON (V.LedgerHash = L.LedgerHash)
  GROUP BY L.LedgerHash
  HAVING validations >= :validation_quorum
  ORDER BY 2;
"""

_DATABASE_NAME = 'ledger.db'

USE_PLACEHOLDERS = False

class DatabaseReader(object):
    def __init__(self, config):
        assert ARGS.database != ARGS.NONE
        database = ARGS.database or config['database_path']
        if not database.endswith(_DATABASE_NAME):
            database = os.path.join(database, _DATABASE_NAME)
        if USE_PLACEHOLDERS:
            cursor = Database.fetchall(
                database, COMPLETE_QUERY, config)
        else:
            cursor = Database.fetchall(
                database, LEDGER_QUERY.format(**config), {})
        self.complete = [c[1] for c in cursor]

    def name_to_ledger_index(self, ledger_name, is_full=False):
        if not self.complete:
            return None
        if ledger_name == 'closed':
            return self.complete[-1]
        if ledger_name == 'current':
            return None
        if ledger_name == 'validated':
            return self.complete[-1]

    def get_ledger(self, name, is_full=False):
        # NOTE: this method mirrors RippledReader.get_ledger; the _command
        # helper and _ERROR_TEXT table it relies on are defined there, not
        # on DatabaseReader.
        cmd = ['ledger', str(name)]
        if is_full:
            cmd.append('full')
        response = self._command(*cmd)
        result = response.get('ledger')
        if result:
            return result
        error = response['error']
        etext = _ERROR_TEXT.get(error)
        if etext:
            error = '%s (%s)' % (etext, error)
        Log.fatal(error)
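A sketch of how the non-placeholder query path is built: the config dict read from rippled.cfg supplies validation_quorum through str.format (the fragment below is hypothetical):

config = {'validation_quorum': '3'}
sql = LEDGER_QUERY.format(**config)
assert 'HAVING validations >= 3' in sql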
bin/python/ripple/ledger/LedgerNumber.py (new file, 18 lines)
@@ -0,0 +1,18 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import Range

FIRST_EVER = 32570

LEDGERS = {
    'closed': 'the most recently closed ledger',
    'current': 'the current ledger',
    'first': 'the first complete ledger on this server',
    'last': 'the last complete ledger on this server',
    'validated': 'the most recently validated ledger',
}

HELP = """
Ledgers are either represented by a number, or one of the special ledgers:
""" + ',\n'.join('%s, %s' % (k, v) for k, v in sorted(LEDGERS.items()))
bin/python/ripple/ledger/RippledReader.py (new file, 68 lines)
@@ -0,0 +1,68 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import json
import os
import subprocess

from ripple.ledger.Args import ARGS
from ripple.util import File
from ripple.util import Log
from ripple.util import Range

_ERROR_CODE_REASON = {
    62: 'No rippled server is running.',
}

_ERROR_TEXT = {
    'lgrNotFound': 'The ledger you requested was not found.',
    'noCurrent': 'The server has no current ledger.',
    'noNetwork': 'The server did not respond to your request.',
}

_DEFAULT_ERROR_ = "Couldn't connect to server."

class RippledReader(object):
    def __init__(self, config):
        fname = File.normalize(ARGS.rippled)
        if not os.path.exists(fname):
            raise Exception('No rippled found at %s.' % fname)
        self.cmd = [fname]
        if ARGS.config:
            self.cmd.extend(['--conf', File.normalize(ARGS.config)])
        self.info = self._command('server_info')['info']
        c = self.info.get('complete_ledgers')
        if c == 'empty':
            self.complete = []
        else:
            self.complete = sorted(Range.from_string(c))

    def name_to_ledger_index(self, ledger_name, is_full=False):
        return self.get_ledger(ledger_name, is_full)['ledger_index']

    def get_ledger(self, name, is_full=False):
        cmd = ['ledger', str(name)]
        if is_full:
            cmd.append('full')
        response = self._command(*cmd)
        result = response.get('ledger')
        if result:
            return result
        error = response['error']
        etext = _ERROR_TEXT.get(error)
        if etext:
            error = '%s (%s)' % (etext, error)
        Log.fatal(error)

    def _command(self, *cmds):
        cmd = self.cmd + list(cmds)
        try:
            data = subprocess.check_output(cmd, stderr=subprocess.PIPE)
        except subprocess.CalledProcessError as e:
            raise Exception(_ERROR_CODE_REASON.get(
                e.returncode, _DEFAULT_ERROR_))

        part = json.loads(data)
        try:
            return part['result']
        except:
            raise ValueError(part.get('error', 'unknown error'))
bin/python/ripple/ledger/SField.py (new file, 51 lines)
@@ -0,0 +1,51 @@
# Constants from ripple/protocol/SField.h

# special types
STI_UNKNOWN = -2
STI_DONE = -1
STI_NOTPRESENT = 0

# types (common)
STI_UINT16 = 1
STI_UINT32 = 2
STI_UINT64 = 3
STI_HASH128 = 4
STI_HASH256 = 5
STI_AMOUNT = 6
STI_VL = 7
STI_ACCOUNT = 8
# 9-13 are reserved
STI_OBJECT = 14
STI_ARRAY = 15

# types (uncommon)
STI_UINT8 = 16
STI_HASH160 = 17
STI_PATHSET = 18
STI_VECTOR256 = 19

# high level types
# cannot be serialized inside other types
STI_TRANSACTION = 10001
STI_LEDGERENTRY = 10002
STI_VALIDATION = 10003
STI_METADATA = 10004

def field_code(sti, name):
    if sti < 16:
        if name < 16:
            bytes = [(sti << 4) + name]
        else:
            bytes = [sti << 4, name]
    elif name < 16:
        bytes = [name, sti]
    else:
        bytes = [0, sti, name]
    return ''.join(chr(i) for i in bytes)

# Selected constants from SField.cpp

sfSequence = field_code(STI_UINT32, 4)
sfPublicKey = field_code(STI_VL, 1)
sfSigningPubKey = field_code(STI_VL, 3)
sfSignature = field_code(STI_VL, 6)
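A quick check of the nibble-packing above (Python 2, where chr() yields a one-byte string): when both the type and field codes are below 16 they share a single byte.

assert field_code(STI_UINT32, 4) == chr((2 << 4) + 4)  # sfSequence, '\x24'
assert field_code(STI_VL, 1) == chr((7 << 4) + 1)      # sfPublicKey, '\x71'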
bin/python/ripple/ledger/SearchLedgers.py (new file, 24 lines)
@@ -0,0 +1,24 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import sys

from ripple.ledger.Args import ARGS
from ripple.util import Log
from ripple.util import Range
from ripple.util import Search

def search(server):
    """Yields a stream of ledger numbers that match the given condition."""
    condition = lambda number: ARGS.condition(server, number)
    ledgers = server.ledgers
    if ARGS.binary:
        try:
            position = Search.FIRST if ARGS.position == 'first' else Search.LAST
            yield Search.binary_search(
                ledgers[0], ledgers[-1], condition, position)
        except:
            Log.fatal('No ledgers matching condition "%s".' % condition,
                      file=sys.stderr)
    else:
        for x in Search.linear_search(ledgers, condition):
            yield x
bin/python/ripple/ledger/Server.py (new file, 55 lines)
@@ -0,0 +1,55 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import json
import os

from ripple.ledger import DatabaseReader, RippledReader
from ripple.ledger.Args import ARGS
from ripple.util.FileCache import FileCache
from ripple.util import ConfigFile
from ripple.util import File
from ripple.util import Range

class Server(object):
    def __init__(self):
        cfg_file = File.normalize(ARGS.config or 'rippled.cfg')
        self.config = ConfigFile.read(open(cfg_file))
        if ARGS.database != ARGS.NONE:
            reader = DatabaseReader.DatabaseReader(self.config)
        else:
            reader = RippledReader.RippledReader(self.config)

        self.reader = reader
        self.complete = reader.complete

        names = {
            'closed': reader.name_to_ledger_index('closed'),
            'current': reader.name_to_ledger_index('current'),
            'validated': reader.name_to_ledger_index('validated'),
            'first': self.complete[0] if self.complete else None,
            'last': self.complete[-1] if self.complete else None,
        }
        self.__dict__.update(names)
        self.ledgers = sorted(Range.join_ranges(*ARGS.ledgers, **names))

        def make_cache(is_full):
            name = 'full' if is_full else 'summary'
            filepath = os.path.join(ARGS.cache, name)
            creator = lambda n: reader.get_ledger(n, is_full)
            return FileCache(filepath, creator)
        self._caches = [make_cache(False), make_cache(True)]

    def info(self):
        return self.reader.info

    def cache(self, is_full):
        return self._caches[is_full]

    def get_ledger(self, number, is_full=False):
        num = int(number)
        save_in_cache = num in self.complete
        can_create = (not ARGS.offline and
                      self.complete and
                      self.complete[0] <= num <= self.complete[-1])
        cache = self.cache(is_full)
        return cache.get_data(number, save_in_cache, can_create)
bin/python/ripple/ledger/ServerReader.py (new file, 5 lines)
@@ -0,0 +1,5 @@
from __future__ import absolute_import, division, print_function, unicode_literals

class ServerReader(object):
    def __init__(self, config):
        raise ValueError('Direct server connections are not yet implemented.')
bin/python/ripple/ledger/commands/Cache.py (new file, 34 lines)
@@ -0,0 +1,34 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.ledger.Args import ARGS
from ripple.util import Log
from ripple.util import Range
from ripple.util.PrettyPrint import pretty_print

SAFE = True

HELP = """cache [clear]
List the ledger caches, or clear them."""

def cache(server, clear=False):
    cache = server.cache(ARGS.full)
    name = ['summary', 'full'][ARGS.full]
    files = cache.file_count()
    if not files:
        Log.error('No files in %s cache.' % name)

    elif clear:
        if not clear.strip() == 'clear':
            raise Exception("Don't understand 'clear %s'." % clear)
        if not ARGS.yes:
            yes = raw_input('OK to clear %s cache? (y/N) ' % name)
            if not yes.lower().startswith('y'):
                Log.out('Cancelled.')
                return
        cache.clear()
        Log.out('%s cache cleared - %d file%s deleted.' %
                (name.capitalize(), files, '' if files == 1 else 's'))

    else:
        caches = (int(c) for c in cache.cache_list())
        Log.out(Range.to_string(caches))
bin/python/ripple/ledger/commands/Info.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.ledger.Args import ARGS
from ripple.util import Log
from ripple.util import Range
from ripple.util.PrettyPrint import pretty_print

SAFE = True

HELP = 'info - return server_info'

def info(server):
    Log.out('first =', server.first)
    Log.out('last =', server.last)
    Log.out('closed =', server.closed)
    Log.out('current =', server.current)
    Log.out('validated =', server.validated)
    Log.out('complete =', Range.to_string(server.complete))

    if ARGS.full:
        Log.out(pretty_print(server.info()))
bin/python/ripple/ledger/commands/Print.py (new file, 15 lines)
@@ -0,0 +1,15 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.ledger.Args import ARGS
from ripple.ledger import SearchLedgers

import json

SAFE = True

HELP = """print

Print the ledgers to stdout.  The default command."""

def run_print(server):
    ARGS.display(print, server, SearchLedgers.search(server))
bin/python/ripple/ledger/conditions/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
from __future__ import absolute_import, division, print_function, unicode_literals

def all_ledgers(server, ledger_number):
    return True
bin/python/ripple/ledger/displays/__init__.py (new file, 89 lines)
@@ -0,0 +1,89 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from functools import wraps

import jsonpath_rw

from ripple.ledger.Args import ARGS
from ripple.util import Dict
from ripple.util import Log
from ripple.util import Range
from ripple.util.Decimal import Decimal
from ripple.util.PrettyPrint import pretty_print, Streamer

TRANSACT_FIELDS = (
    'accepted',
    'close_time_human',
    'closed',
    'ledger_index',
    'total_coins',
    'transactions',
)

LEDGER_FIELDS = (
    'accepted',
    'accountState',
    'close_time_human',
    'closed',
    'ledger_index',
    'total_coins',
    'transactions',
)

def _dict_filter(d, keys):
    return dict((k, v) for (k, v) in d.items() if k in keys)

def ledger_number(print, server, numbers):
    print(Range.to_string(numbers))

def display(f):
    @wraps(f)
    def wrapper(printer, server, numbers, *args):
        streamer = Streamer(printer=printer)
        for number in numbers:
            ledger = server.get_ledger(number, ARGS.full)
            if ledger:
                streamer.add(number, f(ledger, *args))
        streamer.finish()
    return wrapper

def extractor(f):
    @wraps(f)
    def wrapper(printer, server, numbers, *paths):
        try:
            find = jsonpath_rw.parse('|'.join(paths)).find
        except:
            raise ValueError("Can't understand jsonpath '%s'." % '|'.join(paths))
        def fn(ledger, *args):
            return f(find(ledger), *args)
        display(fn)(printer, server, numbers)
    return wrapper

@display
def ledger(ledger, full=False):
    if ARGS.full:
        if full:
            return ledger

        ledger = Dict.prune(ledger, 1, False)

    return _dict_filter(ledger, LEDGER_FIELDS)

@display
def prune(ledger, level=1):
    return Dict.prune(ledger, level, False)

@display
def transact(ledger):
    return _dict_filter(ledger, TRANSACT_FIELDS)

@extractor
def extract(finds):
    return dict((str(f.full_path), str(f.value)) for f in finds)

@extractor
def sum(finds):
    d = Decimal()
    for f in finds:
        d.accumulate(f.value)
    return [str(d), len(finds)]
bin/python/ripple/util/Base58.py (new file, 94 lines)
@@ -0,0 +1,94 @@
#!/usr/bin/env python

from hashlib import sha256

#
# Human strings are base-58 with a
# version prefix and a checksum suffix.
#
# Copied from ripple/protocol/RippleAddress.h
#

VER_NONE = 1
VER_NODE_PUBLIC = 28
VER_NODE_PRIVATE = 32
VER_ACCOUNT_ID = 0
VER_ACCOUNT_PUBLIC = 35
VER_ACCOUNT_PRIVATE = 34
VER_FAMILY_GENERATOR = 41
VER_FAMILY_SEED = 33

ALPHABET = 'rpshnaf39wBUDNEGHJKLM4PQRST7VWXYZ2bcdeCg65jkm8oFqi1tuvAxyz'

VERSION_NAME = {
    VER_NONE: 'VER_NONE',
    VER_NODE_PUBLIC: 'VER_NODE_PUBLIC',
    VER_NODE_PRIVATE: 'VER_NODE_PRIVATE',
    VER_ACCOUNT_ID: 'VER_ACCOUNT_ID',
    VER_ACCOUNT_PUBLIC: 'VER_ACCOUNT_PUBLIC',
    VER_ACCOUNT_PRIVATE: 'VER_ACCOUNT_PRIVATE',
    VER_FAMILY_GENERATOR: 'VER_FAMILY_GENERATOR',
    VER_FAMILY_SEED: 'VER_FAMILY_SEED',
}

class Alphabet(object):
    def __init__(self, radix, digit_to_char, char_to_digit):
        self.radix = radix
        self.digit_to_char = digit_to_char
        self.char_to_digit = char_to_digit

    def transcode_from(self, s, source_alphabet):
        n, zero_count = source_alphabet._digits_to_number(s)
        digits = []
        while n > 0:
            n, digit = divmod(n, self.radix)
            digits.append(self.digit_to_char(digit))

        s = ''.join(digits)
        return self.digit_to_char(0) * zero_count + s[::-1]

    def _digits_to_number(self, digits):
        stripped = digits.lstrip(self.digit_to_char(0))
        n = 0
        for d in stripped:
            n *= self.radix
            n += self.char_to_digit(d)
        return n, len(digits) - len(stripped)

_INVERSE_INDEX = dict((c, i) for (i, c) in enumerate(ALPHABET))

# In base 58 encoding, the digits come from the ALPHABET string.
BASE58 = Alphabet(len(ALPHABET), ALPHABET.__getitem__, _INVERSE_INDEX.get)

# In base 256 encoding, each digit is just a character between 0 and 255.
BASE256 = Alphabet(256, chr, ord)

def encode(b):
    return BASE58.transcode_from(b, BASE256)

def decode(b):
    return BASE256.transcode_from(b, BASE58)

def checksum(b):
    """Returns a 4-byte checksum of a binary."""
    return sha256(sha256(b).digest()).digest()[:4]

def encode_version(ver, b):
    """Encodes a version encoding and a binary as a human string."""
    b = chr(ver) + b
    return encode(b + checksum(b))

def decode_version(s):
    """Decodes a human base-58 string into its version encoding and binary."""
    b = decode(s)
    body, check = b[:-4], b[-4:]
    assert check == checksum(body), ('Bad checksum for', s)
    return ord(body[0]), body[1:]

def version_name(ver):
    return VERSION_NAME.get(ver) or ('(unknown version %s)' % ver)

def check_version(version, expected):
    if version != expected:
        raise ValueError('Expected version %s but got %s' % (
            version_name(expected), version_name(version)))
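A round-trip sketch of the helpers above (Python 2 byte strings; the all-zero seed is made up):

seed = '\x00' * 16                       # hypothetical 16-byte seed
human = encode_version(VER_FAMILY_SEED, seed)
ver, body = decode_version(human)
assert (ver, body) == (VER_FAMILY_SEED, seed)
check_version(ver, VER_FAMILY_SEED)      # passes silently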
bin/python/ripple/util/Cache.py (new file, 40 lines)
@@ -0,0 +1,40 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from collections import defaultdict

class Cache(object):
    def __init__(self):
        self._value_to_index = {}
        self._index_to_value = []

    def value_to_index(self, value, **kwds):
        index = self._value_to_index.get(value, None)
        if index is None:
            index = len(self._index_to_value)
            self._index_to_value.append((value, kwds))
            self._value_to_index[value] = index
        return index

    def index_to_value(self, index):
        return self._index_to_value[index]

def NamedCache():
    return defaultdict(Cache)

def cache_by_key(d, keyfunc=None, exclude=None):
    # NOTE: as committed this function is unfinished: visit() is defined but
    # never invoked, and keyfunc/exclude are unused.
    cache = defaultdict(Cache)
    exclude = exclude or None
    keyfunc = keyfunc or (lambda x: x)

    def visit(item):
        if isinstance(item, list):
            for i, x in enumerate(item):
                item[i] = visit(x)

        elif isinstance(item, dict):
            for k, v in item.items():
                item[k] = visit(v)

        return item

    return cache
bin/python/ripple/util/CommandList.py (new file, 77 lines)
@@ -0,0 +1,77 @@
from __future__ import absolute_import, division, print_function, unicode_literals

# Code taken from github/rec/grit.

import os
import sys

from collections import namedtuple

from ripple.ledger.Args import ARGS
from ripple.util import Log

Command = namedtuple('Command', 'function help safe')

def make_command(module):
    name = module.__name__.split('.')[-1].lower()
    return name, Command(getattr(module, name, None) or
                         getattr(module, 'run_' + name),
                         getattr(module, 'HELP'),
                         getattr(module, 'SAFE', False))

class CommandList(object):
    def __init__(self, *args, **kwds):
        self.registry = {}
        self.register(*args, **kwds)

    def register(self, *modules, **kwds):
        for module in modules:
            name, command = make_command(module)
            self.registry[name] = command

        for k, v in kwds.items():
            if not isinstance(v, (list, tuple)):
                v = [v]
            self.register_one(k, *v)

    def keys(self):
        return self.registry.keys()

    def register_one(self, name, function, help='', safe=False):
        assert name not in self.registry
        self.registry[name] = Command(function, help, safe)

    def _get(self, command):
        command = command.lower()
        c = self.registry.get(command)
        if c:
            return command, c
        commands = [c for c in self.registry if c.startswith(command)]
        if len(commands) == 1:
            command = commands[0]
            return command, self.registry[command]
        if not commands:
            raise ValueError('No such command: %s.  Commands are %s.' %
                             (command, ', '.join(sorted(self.registry))))
        if len(commands) > 1:
            raise ValueError('Command %s was ambiguous: %s.' %
                             (command, ', '.join(commands)))

    def get(self, command):
        return self._get(command)[1]

    def run(self, command, *args):
        return self.get(command).function(*args)

    def run_safe(self, command, *args):
        name, cmd = self._get(command)
        if not (ARGS.yes or cmd.safe):
            confirm = raw_input('OK to execute "rl %s %s"? (y/N) ' %
                                (name, ' '.join(args)))
            if not confirm.lower().startswith('y'):
                Log.error('Cancelled.')
                return
        cmd.function(*args)

    def help(self, command):
        return self.get(command).help
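A sketch of how CommandList resolves abbreviated commands, registering the command modules added in this diff:

from ripple.ledger.commands import Cache, Info, Print

COMMANDS = CommandList(Cache, Info, Print)
assert COMMANDS.get('inf').function is Info.info    # unambiguous prefix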
bin/python/ripple/util/ConfigFile.py (new file, 54 lines)
@@ -0,0 +1,54 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import json

"""Ripple has a proprietary format for their .cfg files, so we need a reader
for them."""

def read(lines):
    sections = []
    section = []
    for line in lines:
        line = line.strip()
        if (not line) or line[0] == '#':
            continue
        if line.startswith('['):
            if section:
                sections.append(section)
                section = []
        section.append(line)
    if section:
        sections.append(section)

    result = {}
    for section in sections:
        option = section.pop(0)
        assert section, ('No value for option "%s".' % option)
        assert option.startswith('[') and option.endswith(']'), (
            'No option name in block "%s"' % option)
        option = option[1:-1]
        assert option not in result, 'Duplicate option "%s".' % option

        subdict = {}
        items = []
        for part in section:
            if '=' in part:
                assert not items, 'Dictionary mixed with list.'
                k, v = part.split('=', 1)
                assert k not in subdict, 'Repeated dictionary entry ' + k
                subdict[k] = v
            else:
                assert not subdict, 'List mixed with dictionary.'
                if part.startswith('{'):
                    items.append(json.loads(part))
                else:
                    words = part.split()
                    if len(words) > 1:
                        items.append(words)
                    else:
                        items.append(part)
        if len(items) == 1:
            result[option] = items[0]
        else:
            result[option] = items or subdict
    return result
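A sketch of what read() produces for a hypothetical rippled.cfg fragment (StringIO stands in for an open file; Python 2):

from StringIO import StringIO

text = '[validation_quorum]\n3\n\n[ips_fixed]\n127.0.0.1 51235\n'
cfg = read(StringIO(text))
assert cfg['validation_quorum'] == '3'
assert cfg['ips_fixed'] == ['127.0.0.1', '51235']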
bin/python/ripple/util/Database.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import sqlite3

def fetchall(database, query, kwds):
    conn = sqlite3.connect(database)
    try:
        cursor = conn.execute(query, kwds)
        return cursor.fetchall()

    finally:
        conn.close()
bin/python/ripple/util/Decimal.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from __future__ import absolute_import, division, print_function, unicode_literals

"""Fixed point numbers."""

POSITIONS = 10
POSITIONS_SHIFT = 10 ** POSITIONS

class Decimal(object):
    def __init__(self, desc='0'):
        if isinstance(desc, int):
            self.value = desc
            return
        if desc.startswith('-'):
            sign = -1
            desc = desc[1:]
        else:
            sign = 1
        parts = desc.split('.')
        if len(parts) == 1:
            parts.append('0')
        elif len(parts) > 2:
            raise Exception('Too many decimals in "%s"' % desc)
        number, decimal = parts
        # Fix the number of positions.
        decimal = (decimal + POSITIONS * '0')[:POSITIONS]
        self.value = sign * int(number + decimal)

    def accumulate(self, item):
        if not isinstance(item, Decimal):
            item = Decimal(item)
        self.value += item.value

    def __str__(self):
        if self.value >= 0:
            sign = ''
            value = self.value
        else:
            sign = '-'
            value = -self.value
        number = value // POSITIONS_SHIFT
        # Zero-pad the fractional digits so that e.g. 1.05 prints as 1.05,
        # not 1.5.
        decimal = str(value % POSITIONS_SHIFT).rjust(POSITIONS, '0')

        if int(decimal):
            return '%s%s.%s' % (sign, number, decimal.rstrip('0'))
        else:
            return '%s%s' % (sign, number)
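A behavior sketch for the fixed-point class above:

d = Decimal('1.05')
d.accumulate('2.5')
assert str(d) == '3.55'
d.accumulate(Decimal('-3.55'))
assert str(d) == '0'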
bin/python/ripple/util/Dict.py (new file, 33 lines)
@@ -0,0 +1,33 @@
from __future__ import absolute_import, division, print_function, unicode_literals

def count_all_subitems(x):
    """Count the subitems of a Python object, including the object itself."""
    if isinstance(x, list):
        return 1 + sum(count_all_subitems(i) for i in x)
    if isinstance(x, dict):
        return 1 + sum(count_all_subitems(i) for i in x.itervalues())
    return 1

def prune(item, level, count_recursively=True):
    def subitems(x):
        i = count_all_subitems(x) - 1 if count_recursively else len(x)
        return '1 subitem' if i == 1 else '%d subitems' % i

    assert level >= 0
    if not item:
        return item

    if isinstance(item, list):
        if level:
            return [prune(i, level - 1, count_recursively) for i in item]
        else:
            return '[list with %s]' % subitems(item)

    if isinstance(item, dict):
        if level:
            return dict((k, prune(v, level - 1, count_recursively))
                        for k, v in item.iteritems())
        else:
            return '{dict with %s}' % subitems(item)

    return item
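A pruning sketch: one level of structure is kept and deeper levels are summarized (Python 2, since the module uses iteritems/itervalues):

data = {'ledger_index': 100, 'transactions': [{'a': 1}, {'b': 2}]}
assert prune(data, 1) == {
    'ledger_index': 100,
    'transactions': '[list with 4 subitems]',
}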
bin/python/ripple/util/File.py (new file, 7 lines)
@@ -0,0 +1,7 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import os

def normalize(f):
    f = os.path.join(*f.split('/'))  # For Windows users.
    return os.path.abspath(os.path.expanduser(f))
bin/python/ripple/util/FileCache.py (new file, 56 lines)
@@ -0,0 +1,56 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import gzip
import json
import os

_NONE = object()

class FileCache(object):
    """A two-level cache, which stores expensive results in memory and on disk.
    """
    def __init__(self, cache_directory, creator, open=gzip.open, suffix='.gz'):
        self.cache_directory = cache_directory
        self.creator = creator
        self.open = open
        self.suffix = suffix
        self.cached_data = {}
        if not os.path.exists(self.cache_directory):
            os.makedirs(self.cache_directory)

    def get_file_data(self, name):
        filename = os.path.join(self.cache_directory, name) + self.suffix
        if os.path.exists(filename):
            return json.load(self.open(filename))

        result = self.creator(name)
        return result

    def get_data(self, name, save_in_cache, can_create, default=None):
        name = str(name)
        result = self.cached_data.get(name, _NONE)
        if result is _NONE:
            filename = os.path.join(self.cache_directory, name) + self.suffix
            if os.path.exists(filename):
                result = json.load(self.open(filename)) or _NONE
            if result is _NONE and can_create:
                result = self.creator(name)
                if save_in_cache:
                    json.dump(result, self.open(filename, 'w'))
        return default if result is _NONE else result

    def _files(self):
        return os.listdir(self.cache_directory)

    def cache_list(self):
        for f in self._files():
            if f.endswith(self.suffix):
                yield f[:-len(self.suffix)]

    def file_count(self):
        return len(self._files())

    def clear(self):
        """Clears both local files and memory."""
        self.cached_data = {}
        for f in self._files():
            os.remove(os.path.join(self.cache_directory, f))
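A usage sketch: a FileCache whose creator fabricates a ledger stub (the directory and creator are hypothetical):

cache = FileCache('/tmp/ledger-cache/summary',
                  lambda name: {'ledger_index': int(name)})
data = cache.get_data(100, save_in_cache=True, can_create=True)
assert data == {'ledger_index': 100}       # created, then written to disk
assert '100' in list(cache.cache_list())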
bin/python/ripple/util/Function.py (new file, 82 lines)
@@ -0,0 +1,82 @@
from __future__ import absolute_import, division, print_function, unicode_literals

"""A function that can be specified at the command line, with an argument."""

import importlib
import re
import tokenize

from StringIO import StringIO

MATCHER = re.compile(r'([\w.]+)(.*)')

REMAPPINGS = {
    'false': False,
    'true': True,
    'null': None,
    'False': False,
    'True': True,
    'None': None,
}

def eval_arguments(args):
    args = args.strip()
    if not args or (args == '()'):
        return ()
    tokens = list(tokenize.generate_tokens(StringIO(args).readline))
    def remap():
        for type, name, _, _, _ in tokens:
            if type == tokenize.NAME and name not in REMAPPINGS:
                yield tokenize.STRING, '"%s"' % name
            else:
                yield type, name
    untok = tokenize.untokenize(remap())
    if untok[1:-1].strip():
        untok = untok[:-1] + ',)'  # Force a tuple.
    try:
        return eval(untok, REMAPPINGS)
    except Exception as e:
        raise ValueError('Couldn\'t evaluate expression "%s" (became "%s"), '
                         'error "%s"' % (args, untok, str(e)))

class Function(object):
    def __init__(self, desc='', default_path=''):
        self.desc = desc.strip()
        if not self.desc:
            # Make an empty function that does nothing.
            self.args = ()
            self.function = lambda *args, **kwds: None
            return

        m = MATCHER.match(desc)
        if not m:
            raise ValueError('"%s" is not a function' % desc)
        self.function, self.args = (g.strip() for g in m.groups())
        self.args = eval_arguments(self.args)

        if '.' not in self.function:
            if default_path and not default_path.endswith('.'):
                default_path += '.'
            self.function = default_path + self.function
        p, m = self.function.rsplit('.', 1)
        mod = importlib.import_module(p)
        # Errors in modules are swallowed here.
        # except:
        #     raise ValueError('Can\'t find Python module "%s"' % p)

        try:
            self.function = getattr(mod, m)
        except:
            raise ValueError('No function "%s" in module "%s"' % (m, p))

    def __str__(self):
        return self.desc

    def __call__(self, *args, **kwds):
        return self.function(*(args + self.args), **kwds)

    def __eq__(self, other):
        return self.function == other.function and self.args == other.args

    def __ne__(self, other):
        return not (self == other)
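A sketch of the Function syntax: a dotted name plus an optional Python-ish argument list (math.pow is just a convenient stdlib target):

f = Function('math.pow(2)')
assert f(10) == 100.0            # calls math.pow(10, 2)
assert str(f) == 'math.pow(2)'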
bin/python/ripple/util/Log.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import sys

VERBOSE = False

def out(*args, **kwds):
    # Pop our own keyword so it isn't passed through to the printer.
    printer = kwds.pop('print', print)
    kwds.setdefault('file', sys.stdout)
    printer(*args, **kwds)

def info(*args, **kwds):
    if VERBOSE:
        out(*args, **kwds)

def warn(*args, **kwds):
    out('WARNING:', *args, **kwds)

def error(*args, **kwds):
    out('ERROR:', *args, **kwds)

def fatal(*args, **kwds):
    raise Exception('FATAL: ' + ' '.join(str(a) for a in args))
bin/python/ripple/util/PrettyPrint.py (new file, 42 lines)
@@ -0,0 +1,42 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from functools import wraps
import json

SEPARATORS = ',', ': '
INDENT = ' '

def pretty_print(item):
    return json.dumps(item,
                      sort_keys=True,
                      indent=len(INDENT),
                      separators=SEPARATORS)

class Streamer(object):
    def __init__(self, printer=print):
        # No automatic spacing or carriage returns.
        self.printer = lambda *args: printer(*args, end='', sep='')
        self.first_key = True

    def add(self, key, value):
        if self.first_key:
            self.first_key = False
            self.printer('{')
        else:
            self.printer(',')

        self.printer('\n', INDENT, '"', str(key), '": ')

        pp = pretty_print(value).splitlines()
        if len(pp) > 1:
            for i, line in enumerate(pp):
                if i > 0:
                    self.printer('\n', INDENT)
                self.printer(line)
        else:
            self.printer(pp[0])

    def finish(self):
        if not self.first_key:
            self.first_key = True
            self.printer('\n}')
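A sketch of Streamer's incremental output: keys are emitted one at a time, so a large result set never has to be built in memory first:

s = Streamer()                   # defaults to the built-in print
s.add(100, {'closed': True})
s.add(101, {'closed': False})
s.finish()                       # closes the surrounding JSON object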
bin/python/ripple/util/Range.py (new file, 53 lines)
@@ -0,0 +1,53 @@
from __future__ import absolute_import, division, print_function, unicode_literals

"""
Convert a discontiguous range of integers to and from a human-friendly form.

Real-world example is server_info.complete_ledgers:
  8252899-8403772,8403824,8403827-8403830,8403834-8403876
"""

def from_string(desc, **aliases):
    if not desc:
        return []
    result = set()
    for d in desc.split(','):
        nums = [int(aliases.get(x) or x) for x in d.split('-')]
        if len(nums) == 1:
            result.add(nums[0])
        elif len(nums) == 2:
            result.update(range(nums[0], nums[1] + 1))
    return result

def to_string(r):
    groups = []
    next_group = []
    for i, x in enumerate(sorted(r)):
        if next_group and (x - next_group[-1]) > 1:
            groups.append(next_group)
            next_group = []
        next_group.append(x)
    if next_group:
        groups.append(next_group)

    def display(g):
        if len(g) == 1:
            return str(g[0])
        else:
            return '%s-%s' % (g[0], g[-1])

    return ','.join(display(g) for g in groups)

def is_range(desc, *names):
    try:
        from_string(desc, **dict((n, 1) for n in names))
        return True
    except ValueError:
        return False

def join_ranges(*ranges, **aliases):
    result = set()
    for r in ranges:
        result.update(from_string(r, **aliases))
    return result
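A round-trip sketch using the complete_ledgers form from the docstring:

r = from_string('8252899-8252901,8252903')
assert sorted(r) == [8252899, 8252900, 8252901, 8252903]
assert to_string(r) == '8252899-8252901,8252903'
assert is_range('100-200,closed', 'closed', 'current')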
bin/python/ripple/util/Search.py (new file, 46 lines)
@@ -0,0 +1,46 @@
from __future__ import absolute_import, division, print_function, unicode_literals

FIRST, LAST = range(2)

def binary_search(begin, end, condition, location=FIRST):
    """Search for an i in the interval [begin, end] where condition(i) is true.
    If location is FIRST, return the first such i.
    If location is LAST, return the last such i.
    If there is no such i, then throw an exception.
    """
    b = condition(begin)
    e = condition(end)
    if b and e:
        return begin if location == FIRST else end

    if not (b or e):
        raise ValueError('%d/%d' % (begin, end))

    if b and location is FIRST:
        return begin

    if e and location is LAST:
        return end

    width = end - begin + 1
    if width == 1:
        if not b:
            raise ValueError('%d/%d' % (begin, end))
        return begin
    if width == 2:
        return begin if b else end

    mid = (begin + end) // 2
    m = condition(mid)

    if m == b:
        return binary_search(mid, end, condition, location)
    else:
        return binary_search(begin, mid, condition, location)

def linear_search(items, condition):
    """Yields each i in items where condition(i) is true."""
    for i in items:
        if condition(i):
            yield i
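A sketch of the search contract: condition must be monotonic over the interval (true on a prefix or a suffix); here it turns true at 103:

cond = lambda i: i >= 103
assert binary_search(100, 110, cond, FIRST) == 103
assert binary_search(100, 110, cond, LAST) == 110
assert list(linear_search(range(100, 106), cond)) == [103, 104, 105]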
bin/python/ripple/util/Sign.py (new file, 164 lines)
@@ -0,0 +1,164 @@
#!/usr/bin/env python

from __future__ import print_function

import base64, hashlib, os, random, struct, sys
import ed25519
import ecdsa
from ripple.util import Base58
from ripple.ledger import SField

ED25519_BYTE = chr(0xed)
WRAP_COLUMNS = 60

USAGE = """\
Usage:
    create
        Create a new master public/secret key pair.

    check <key>
        Check an existing key for validity.

    sign <sequence> <validator-public> <master-secret>
        Create a new signed manifest with the given sequence
        number, validator public key, and master secret key.
"""

def prepend_length_byte(b):
    assert len(b) <= 192, 'Too long'
    return chr(len(b)) + b

def to_int32(i):
    return struct.pack('>I', i)

#-----------------------------------------------------------

def make_seed(urandom=os.urandom):
    # This is not used.
    return urandom(16)

def make_ed25519_keypair(urandom=os.urandom):
    private_key = urandom(32)
    return private_key, ed25519.publickey(private_key)

def make_ecdsa_keypair():
    # This is not used.
    private_key = ecdsa.SigningKey.generate(curve=ecdsa.SECP256k1)
    # Can't be unit tested easily - need a mock for ecdsa.
    vk = private_key.get_verifying_key()
    sig = private_key.sign('message')
    assert vk.verify(sig, 'message')
    return private_key, vk

def make_seed_from_passphrase(passphrase):
    # For convenience, say when testing against rippled, we can hash a
    # passphrase to get the seed.  validation_create (Josh may have killed it
    # by now) takes an optional arg, which can be a base58 encoded seed, or a
    # passphrase.
    return hashlib.sha512(passphrase).digest()[:16]

def make_manifest(public_key, validator_public_key, seq):
    return ''.join([
        SField.sfSequence,
        to_int32(seq),
        SField.sfPublicKey,      # Master public key.
        prepend_length_byte(public_key),
        SField.sfSigningPubKey,  # Ephemeral public key.
        prepend_length_byte(validator_public_key)])

def sign_manifest(manifest, private_key, public_key):
    sig = ed25519.signature('MAN\0' + manifest, private_key, public_key)
    return manifest + SField.sfSignature + prepend_length_byte(sig)

def wrap(s, cols=WRAP_COLUMNS):
    if s:
        size = max((len(s) + cols - 1) / cols, 1)
        w = len(s) / size
        s = '\n'.join(s[i:i + w] for i in range(0, len(s), w))
    return s

def create_ed_keys(urandom=os.urandom):
    private_key, public_key = make_ed25519_keypair(urandom)
    public_key_human = Base58.encode_version(
        Base58.VER_NODE_PUBLIC, ED25519_BYTE + public_key)
    private_key_human = Base58.encode_version(
        Base58.VER_NODE_PRIVATE, private_key)
    return public_key_human, private_key_human

def check_validator_public(v, validator_public_key):
    Base58.check_version(v, Base58.VER_NODE_PUBLIC)
    if len(validator_public_key) != 33:
        raise ValueError('Validator key should be length 33, is %s' %
                         len(validator_public_key))
    b = ord(validator_public_key[0])
    if b not in (2, 3):
        raise ValueError('First validator key byte must be 2 or 3, is %d' % b)

def check_master_secret(v, private_key):
    Base58.check_version(v, Base58.VER_NODE_PRIVATE)
    if len(private_key) != 32:
        raise ValueError('Length of master secret should be 32, is %s' %
                         len(private_key))


def get_signature(seq, validator_public_key_human, private_key_human):
    v, validator_public_key = Base58.decode_version(validator_public_key_human)
    check_validator_public(v, validator_public_key)

    v, private_key = Base58.decode_version(private_key_human)
    check_master_secret(v, private_key)

    pk = ed25519.publickey(private_key)
    apk = ED25519_BYTE + pk
    m = make_manifest(apk, validator_public_key, seq)
    m1 = sign_manifest(m, private_key, pk)
    return base64.b64encode(m1)


# Testable versions of functions.
def perform_create(urandom=os.urandom, print=print):
    public, private = create_ed_keys(urandom)
    print('[validator_keys]', public, '', '[master_secret]', private, sep='\n')

def perform_check(s, print=print):
    version, b = Base58.decode_version(s)
    print('version = ' + Base58.version_name(version))
    print('decoded length = ' + str(len(b)))
    assert Base58.encode_version(version, b) == s

def perform_sign(
        seq, validator_public_key_human, private_key_human, print=print):
    print('[validation_manifest]')
    print(wrap(get_signature(
        int(seq), validator_public_key_human, private_key_human)))

# Externally visible versions of functions.
def create():
    perform_create()

def check(s):
    perform_check(s)

def sign(seq, validator_public_key_human, private_key_human):
    perform_sign(seq, validator_public_key_human, private_key_human)


def usage(*errors):
    if errors:
        print(*errors)
    print(USAGE)
    return not errors

_COMMANDS = dict((f.__name__, f) for f in (create, check, sign))

def run_command(args):
    if not args:
        return usage()
    name = args[0]
    command = _COMMANDS.get(name)
    if not command:
        return usage('No such command:', name)
    try:
        command(*args[1:])
    except TypeError:
        return usage('Wrong number of arguments for:', name)
    return True
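A byte-layout sketch of the manifest assembled above (Python 2 byte strings; the 33-byte keys and 4-byte "signature" are made-up stand-ins, not real ed25519 output):

m = ''.join([SField.sfSequence, to_int32(1),
             SField.sfPublicKey, prepend_length_byte('M' * 33),
             SField.sfSigningPubKey, prepend_length_byte('E' * 33)])
assert len(m) == 1 + 4 + 1 + 34 + 1 + 34
signed = m + SField.sfSignature + prepend_length_byte('S' * 4)
assert len(signed) == len(m) + 1 + 5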
bin/python/ripple/util/Time.py (new file, 21 lines)
@@ -0,0 +1,21 @@
from __future__ import absolute_import, division, print_function, unicode_literals

import datetime

# Format for human-readable dates in rippled.
_DATE_FORMAT = '%Y-%b-%d'
_TIME_FORMAT = '%H:%M:%S'
_DATETIME_FORMAT = '%s %s' % (_DATE_FORMAT, _TIME_FORMAT)

_FORMATS = _DATE_FORMAT, _TIME_FORMAT, _DATETIME_FORMAT

def parse_datetime(desc):
    for fmt in _FORMATS:
        try:
            return datetime.datetime.strptime(desc, fmt)
        except ValueError:
            pass
    raise ValueError("Can't understand date '%s'." % desc)

def format_datetime(dt):
    return dt.strftime(_DATETIME_FORMAT)
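A round-trip sketch with the rippled date format:

dt = parse_datetime('2015-Apr-04 21:40:32')
assert format_datetime(dt) == '2015-Apr-04 21:40:32'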
bin/python/ripple/util/ValidatorManifestTest.py (new executable file, 682 lines)
@@ -0,0 +1,682 @@
#!/usr/bin/env python
"""
Test for setting ephemeral keys for the validator manifest.
"""

from __future__ import (
    absolute_import, division, print_function, unicode_literals
)

import argparse
import contextlib
from contextlib import contextmanager
import json
import os
import platform
import shutil
import subprocess
import time

DELAY_WHILE_PROCESS_STARTS_UP = 1.5
ARGS = None

NOT_FOUND = -1       # not in log
ACCEPTED_NEW = 0     # added new manifest
ACCEPTED_UPDATE = 1  # replaced old manifest with new
UNTRUSTED = 2        # don't trust master key
STALE = 3            # seq is too old
REVOKED = 4          # revoked validator key
INVALID = 5          # invalid signature

MANIFEST_ACTION_STR_TO_ID = {
    'NotFound': NOT_FOUND,  # not found in log
    'AcceptedNew': ACCEPTED_NEW,
    'AcceptedUpdate': ACCEPTED_UPDATE,
    'Untrusted': UNTRUSTED,
    'Stale': STALE,
    'Revoked': REVOKED,
    'Invalid': INVALID,
}

MANIFEST_ACTION_ID_TO_STR = {
    v: k for k, v in MANIFEST_ACTION_STR_TO_ID.items()
}

CONF_TEMPLATE = """
[server]
port_rpc
port_peer
port_wss_admin

[port_rpc]
port = {rpc_port}
ip = 127.0.0.1
admin = 127.0.0.1
protocol = https

[port_peer]
port = {peer_port}
ip = 0.0.0.0
protocol = peer

[port_wss_admin]
port = {wss_port}
ip = 127.0.0.1
admin = 127.0.0.1
protocol = wss

[node_size]
medium

[node_db]
type={node_db_type}
path={node_db_path}
open_files=2000
filter_bits=12
cache_mb=256
file_size_mb=8
file_size_mult=2
online_delete=256
advisory_delete=0

[database_path]
{db_path}

[debug_logfile]
{debug_logfile}

[sntp_servers]
time.windows.com
time.apple.com
time.nist.gov
pool.ntp.org

[ips]
r.ripple.com 51235

[ips_fixed]
{sibling_ip} {sibling_port}

[validators]
n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7 RL1
n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj RL2
n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C RL3
n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS RL4
n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA RL5

[validation_quorum]
3

[validation_seed]
{validation_seed}
#validation_public_key: {validation_public_key}

# Other rippleds trusting this validator need this key
[validator_keys]
{all_validator_keys}

[peer_private]
1

[overlay]
expire = 1
auto_connect = 1

[validation_manifest]
{validation_manifest}

[rpc_startup]
{{ "command": "log_level", "severity": "debug" }}

[ssl_verify]
0
"""
# End config template
def static_vars(**kwargs):
    def decorate(func):
        for k in kwargs:
            setattr(func, k, kwargs[k])
        return func
    return decorate


@static_vars(rpc=5005, peer=51235, wss=6006)
def checkout_port_nums():
    """Returns a tuple of port nums for rpc, peer, and wss_admin"""
    checkout_port_nums.rpc += 1
    checkout_port_nums.peer += 1
    checkout_port_nums.wss += 1
    return (
        checkout_port_nums.rpc,
        checkout_port_nums.peer,
        checkout_port_nums.wss
    )


def is_windows():
    return platform.system() == 'Windows'


def manifest_create():
    """Returns a dict with keys: 'validator_keys', 'master_secret'"""
    to_run = ['python', ARGS.ripple_home + '/bin/python/Manifest.py', 'create']
    r = subprocess.check_output(to_run)
    result = {}
    k = None
    for l in r.splitlines():
        l = l.strip()
        if not l:
            continue
        elif l == '[validator_keys]':
            k = l[1:-1]
        elif l == '[master_secret]':
            k = l[1:-1]
        elif l.startswith('['):
            raise ValueError(
                'Unexpected key: {} from `manifest create`'.format(l))
        else:
            if not k:
                raise ValueError('Value with no key')
            result[k] = l
            k = None

    if k in result:
        raise ValueError('Repeat key from `manifest create`: ' + k)
    if len(result) != 2:
        raise ValueError(
            'Expected 2 keys from `manifest create` but got {} keys instead ({})'.
            format(len(result), result))

    return result


def sign_manifest(seq, validation_pk, master_secret):
    """Returns the signed manifest as a string"""
    to_run = ['python', ARGS.ripple_home + '/bin/python/Manifest.py', 'sign',
              str(seq), validation_pk, master_secret]
    try:
        r = subprocess.check_output(to_run)
    except subprocess.CalledProcessError as e:
        print('Error in sign_manifest: ', e.output)
        raise e
    result = []
    for l in r.splitlines():
        l = l.strip()
        if not l or l == '[validation_manifest]':
            continue
        result.append(l)
    return '\n'.join(result)


def get_ripple_exe():
    """Find the rippled executable"""
    prefix = ARGS.ripple_home + '/build/'
    exe = ['rippled', 'RippleD.exe']
    to_test = [prefix + t + '.debug/' + e
               for t in ['clang', 'gcc', 'msvc'] for e in exe]
    for e in exe:
        to_test.append(prefix + '/' + e)
    for t in to_test:
        if os.path.isfile(t):
            return t


class RippledServer(object):
    def __init__(self, exe, config_file, server_out):
        self.config_file = config_file
        self.exe = exe
        self.process = None
        self.server_out = server_out
        self.reinit(config_file)

    def reinit(self, config_file):
        self.config_file = config_file
        self.to_run = [self.exe, '--verbose', '--conf', self.config_file]

    @property
    def config_root(self):
        return os.path.dirname(self.config_file)

    @property
    def master_secret_file(self):
        return self.config_root + '/master_secret.txt'

    def startup(self):
        if ARGS.verbose:
            print('starting rippled: ' + self.config_file)
        fout = open(self.server_out, 'w')
        self.process = subprocess.Popen(
            self.to_run, stdout=fout, stderr=subprocess.STDOUT)

    def shutdown(self):
        if not self.process:
            return
        fout = open(os.devnull, 'w')
        subprocess.Popen(
            self.to_run + ['stop'], stdout=fout, stderr=subprocess.STDOUT)
        self.process.wait()
        self.process = None

    def rotate_logfile(self):
        if self.server_out == os.devnull:
            return
        for i in range(100):
            backup_name = '{}.{}'.format(self.server_out, i)
            if not os.path.exists(backup_name):
                os.rename(self.server_out, backup_name)
                return
        raise ValueError('Could not rotate logfile: {}'.
                         format(self.server_out))

    def validation_create(self):
        """Returns a dict with keys:
        'validation_key', 'validation_public_key', 'validation_seed'
        """
        to_run = [self.exe, '-q', '--conf', self.config_file,
                  '--', 'validation_create']
        try:
            return json.loads(subprocess.check_output(to_run))['result']
        except subprocess.CalledProcessError as e:
            print('Error in validation_create: ', e.output)
            raise e


@contextmanager
def rippled_server(config_file, server_out=os.devnull):
    """Start a rippled server"""
    try:
        server = None
        server = RippledServer(ARGS.ripple_exe, config_file, server_out)
        server.startup()
        yield server
    finally:
        if server:
            server.shutdown()


@contextmanager
def pause_server(server, config_file):
    """Shut down and then restart a rippled server"""
    try:
        server.shutdown()
        server.rotate_logfile()
        yield server
    finally:
        server.reinit(config_file)
        server.startup()


def parse_date(d, t):
    """Return the timestamp of a line, or None if the line has no timestamp"""
    try:
        return time.strptime(d + ' ' + t, '%Y-%B-%d %H:%M:%S')
    except ValueError:
        return None


def to_dict(l):
    """Given a line of the form `Key0: Value0;Key2: Value2;` return a dict"""
    fields = l.split(';')
    result = {}
    for f in fields:
        if f:
            v = f.split(':')
            assert len(v) == 2
            result[v[0].strip()] = v[1].strip()
    return result
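A parsing sketch for to_dict on a manifest log payload (the line is hypothetical):

assert to_dict('Manifest: AcceptedNew;Pk: n9Kxyz;Seq: 2;') == {
    'Manifest': 'AcceptedNew', 'Pk': 'n9Kxyz', 'Seq': '2'}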
def check_ephemeral_key(validator_key,
                        log_file,
                        seq,
                        change_time):
    """
    Detect when a server is informed of a validator's ephemeral key change.
    `change_time` and `seq` may be None, in which case they are ignored.
    """
    manifest_prefix = 'Manifest:'
    # a manifest line has the form Manifest: action; Key: value;
    # Key can be Pk (public key), Seq, OldSeq,
    for l in open(log_file):
        sa = l.split()
        if len(sa) < 5 or sa[4] != manifest_prefix:
            continue

        d = to_dict(' '.join(sa[4:]))
        # check the seq number and validator_key
        if d['Pk'] != validator_key:
            continue
        if seq is not None and int(d['Seq']) != seq:
            continue

        if change_time:
            t = parse_date(sa[0], sa[1])
            if not t or t < change_time:
                continue
        action = d['Manifest']
        return MANIFEST_ACTION_STR_TO_ID[action]
    return NOT_FOUND


def check_ephemeral_keys(validator_key,
                         log_files,
                         seq,
                         change_time=None,
                         timeout_s=60):
    result = [NOT_FOUND for i in range(len(log_files))]
    if timeout_s < 10:
        sleep_time = 1
    elif timeout_s < 60:
        sleep_time = 5
    else:
        sleep_time = 10
    n = timeout_s//sleep_time
    if n == 0:
        n = 1
    start_time = time.time()
    for _ in range(n):
        for i, lf in enumerate(log_files):
            if result[i] != NOT_FOUND:
                continue
            result[i] = check_ephemeral_key(validator_key,
                                            lf,
                                            seq,
                                            change_time)
            if result[i] != NOT_FOUND:
                if all(r != NOT_FOUND for r in result):
                    return result
                else:
                    server_dir = os.path.basename(os.path.dirname(log_files[i]))
                    if ARGS.verbose:
                        print('Check for {}: {}'.format(
                            server_dir, MANIFEST_ACTION_ID_TO_STR[result[i]]))
        tsf = time.time() - start_time
        if tsf > 20:
            if ARGS.verbose:
                print('Waiting for key to propagate: ', tsf)
        time.sleep(sleep_time)
    return result


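A sketch of the kind of debug-log line check_ephemeral_key matches (the
partition/severity tokens and key value here are hypothetical; only the
positions matter: sa[0] and sa[1] feed parse_date, sa[4] must be 'Manifest:'):

    2015-March-10 12:34:56 Validations NFO Manifest: AcceptedNew;Pk: n9Kabc;Seq: 2;
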
def get_validator_key(config_file):
    in_validator_keys = False
    for l in open(config_file):
        sl = l.strip()
        if not in_validator_keys and sl == '[validator_keys]':
            in_validator_keys = True
            continue
        if in_validator_keys:
            if sl.startswith('['):
                raise ValueError('ThisServer validator key not found')
            if sl.startswith('#'):
                continue
            s = sl.split()
            if len(s) == 2 and s[1] == 'ThisServer':
                return s[0]


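get_validator_key scans for the [validator_keys] stanza that get_configs
writes further down, e.g. (keys hypothetical):

    [validator_keys]
    n9Kabc ThisServer
    n9Mdef NextInRing
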
def new_config_ephemeral_key(
        server, seq, rm_dbs=False, master_secret_file=None):
    """Generate a new ephemeral key, add to config, restart server"""
    config_root = server.config_root
    config_file = config_root + '/rippled.cfg'
    db_dir = config_root + '/db'
    if not master_secret_file:
        master_secret_file = server.master_secret_file
    with open(master_secret_file) as f:
        master_secret = f.read()
    v = server.validation_create()
    signed = sign_manifest(seq, v['validation_public_key'], master_secret)
    with pause_server(server, config_file):
        if rm_dbs and os.path.exists(db_dir):
            shutil.rmtree(db_dir)
            os.makedirs(db_dir)
        # replace the validation_manifest section with `signed`
        bak = config_file + '.bak'
        if is_windows() and os.path.isfile(bak):
            os.remove(bak)
        os.rename(config_file, bak)
        in_manifest = False
        with open(bak, 'r') as src:
            with open(config_file, 'w') as out:
                for l in src:
                    sl = l.strip()
                    if not in_manifest and sl == '[validation_manifest]':
                        in_manifest = True
                    elif in_manifest:
                        if sl.startswith('[') or sl.startswith('#'):
                            in_manifest = False
                            out.write(signed)
                            out.write('\n\n')
                        else:
                            continue
                    out.write(l)
    return (bak, config_file)


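The section being swapped out has the shape below; the signed, base64 manifest
body is wrapped across lines (see Sign.wrap and test_Sign.py later in this
diff; the body here is abbreviated):

    [validation_manifest]
    JAAAABdxIe2DIKUZd9jDjKikknxnDfWCHkSXYZReFenvsmo
    VCdIw6nMhAnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dn
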
def parse_args():
    parser = argparse.ArgumentParser(
        description=('Create config files for n validators')
    )

    parser.add_argument(
        '--ripple_home', '-r',
        default=os.sep.join(os.path.realpath(__file__).split(os.sep)[:-5]),
        help=('Root directory of the ripple repo'), )
    parser.add_argument('--num_validators', '-n',
                        default=2,
                        help=('Number of validators'), )
    parser.add_argument('--conf', '-c', help=('rippled config file'), )
    parser.add_argument('--out', '-o',
                        default='test_output',
                        help=('config root directory'), )
    parser.add_argument(
        '--existing', '-e',
        action='store_true',
        help=('use existing config files'), )
    parser.add_argument(
        '--generate', '-g',
        action='store_true',
        help=('generate conf files only'), )
    parser.add_argument(
        '--verbose', '-v',
        action='store_true',
        help=('verbose status reporting'), )
    parser.add_argument(
        '--quiet', '-q',
        action='store_true',
        help=('quiet status reporting'), )

    return parser.parse_args()


def get_configs(manifest_seq):
    global ARGS
    ARGS.ripple_home = os.path.expanduser(ARGS.ripple_home)

    n = int(ARGS.num_validators)
    if n < 2:
        raise ValueError(
            'Need at least 2 rippled servers. Specified: {}'.format(n))
    config_root = ARGS.out
    ARGS.ripple_exe = get_ripple_exe()
    if not ARGS.ripple_exe:
        raise ValueError('No Exe Found')

    if ARGS.existing:
        return [
            os.path.abspath('{}/validator_{}/rippled.cfg'.format(config_root, i))
            for i in range(n)
        ]

    initial_config = ARGS.conf

    manifests = [manifest_create() for i in range(n)]
    port_nums = [checkout_port_nums() for i in range(n)]
    with rippled_server(initial_config) as server:
        time.sleep(DELAY_WHILE_PROCESS_STARTS_UP)
        validations = [server.validation_create() for i in range(n)]

    signed_manifests = [sign_manifest(manifest_seq,
                                      v['validation_public_key'],
                                      m['master_secret'])
                        for m, v in zip(manifests, validations)]
    node_db_type = 'RocksDB' if not is_windows() else 'NuDB'
    node_db_filename = node_db_type.lower()

    config_files = []
    for i, (m, v, s) in enumerate(zip(manifests, validations, signed_manifests)):
        sibling_index = (i - 1) % len(manifests)
        all_validator_keys = '\n'.join([
            m['validator_keys'] + ' ThisServer',
            manifests[sibling_index]['validator_keys'] + ' NextInRing'])
        this_validator_dir = os.path.abspath(
            '{}/validator_{}'.format(config_root, i))
        db_path = this_validator_dir + '/db'
        node_db_path = db_path + '/' + node_db_filename
        log_path = this_validator_dir + '/log'
        debug_logfile = log_path + '/debug.log'
        rpc_port, peer_port, wss_port = port_nums[i]
        sibling_ip = '127.0.0.1'
        sibling_port = port_nums[sibling_index][1]
        d = {
            'validation_manifest': s,
            'all_validator_keys': all_validator_keys,
            'node_db_type': node_db_type,
            'node_db_path': node_db_path,
            'db_path': db_path,
            'debug_logfile': debug_logfile,
            'rpc_port': rpc_port,
            'peer_port': peer_port,
            'wss_port': wss_port,
            'sibling_ip': sibling_ip,
            'sibling_port': sibling_port,
        }
        d.update(m)
        d.update(v)

        for p in [this_validator_dir, db_path, log_path]:
            if not os.path.exists(p):
                os.makedirs(p)

        config_files.append('{}/rippled.cfg'.format(this_validator_dir))
        with open(config_files[-1], 'w') as f:
            f.write(CONF_TEMPLATE.format(**d))

        with open('{}/master_secret.txt'.format(this_validator_dir), 'w') as f:
            f.write(m['master_secret'])

    return config_files


def update_ephemeral_key(
        server, new_seq, log_files,
        expected=None, rm_dbs=False, master_secret_file=None,
        restore_original_conf=False, timeout_s=300):
    if not expected:
        expected = {}

    change_time = time.gmtime()
    back_conf, new_conf = new_config_ephemeral_key(
        server,
        new_seq,
        rm_dbs,
        master_secret_file
    )
    validator_key = get_validator_key(server.config_file)
    start_time = time.time()
    ck = check_ephemeral_keys(validator_key,
                              log_files,
                              seq=new_seq,
                              change_time=change_time,
                              timeout_s=timeout_s)
    if ARGS.verbose:
        print('Check finished: {} secs.'.format(int(time.time() - start_time)))
    all_success = True
    for i, r in enumerate(ck):
        e = expected.get(i, UNTRUSTED)
        server_dir = os.path.basename(os.path.dirname(log_files[i]))
        status = 'OK' if e == r else 'FAIL'
        print('{}: Server: {} Expected: {} Got: {}'.
              format(status, server_dir,
                     MANIFEST_ACTION_ID_TO_STR[e], MANIFEST_ACTION_ID_TO_STR[r]))
        all_success = all_success and (e == r)
    if restore_original_conf:
        if is_windows() and os.path.isfile(new_conf):
            os.remove(new_conf)
        os.rename(back_conf, new_conf)
    return all_success


def run_main():
    global ARGS
    ARGS = parse_args()
    manifest_seq = 1
    config_files = get_configs(manifest_seq)
    if ARGS.generate:
        return
    if len(config_files) <= 1:
        print('Script requires at least 2 servers. Actual #: {}'.
              format(len(config_files)))
        return
    # contextlib.nested is Python 2 only; this script targets Python 2.
    with contextlib.nested(*(rippled_server(c, os.path.dirname(c)+'/log.txt')
                             for c in config_files)) as servers:
        log_files = [os.path.dirname(cf)+'/log.txt' for cf in config_files[1:]]
        validator_key = get_validator_key(config_files[0])
        start_time = time.time()
        ck = check_ephemeral_keys(validator_key,
                                  [log_files[0]],
                                  seq=None,
                                  timeout_s=60)
        if ARGS.verbose:
            print('Check finished: {} secs.'.format(
                int(time.time() - start_time)))
        if any(r == NOT_FOUND for r in ck):
            print('FAIL: Initial key did not propagate to all servers')
            return

        manifest_seq += 2
        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = ACCEPTED_UPDATE
        if not ARGS.quiet:
            print('Testing key update')
        kr = update_ephemeral_key(servers[0], manifest_seq, log_files, expected)
        if not kr:
            print('\nFail: Key Update Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = STALE
        if not ARGS.quiet:
            print('Testing stale key')
        kr = update_ephemeral_key(
            servers[0], manifest_seq-1, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Stale Key Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = STALE
        if not ARGS.quiet:
            print('Testing stale key 2')
        kr = update_ephemeral_key(
            servers[0], manifest_seq, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Stale Key Test. Exiting')
            return

        expected = {i: UNTRUSTED for i in range(len(log_files))}
        expected[0] = REVOKED
        if not ARGS.quiet:
            print('Testing revoked key')
        kr = update_ephemeral_key(
            servers[0], 0xffffffff, log_files, expected, rm_dbs=True)
        if not kr:
            print('\nFail: Revoked Key Test. Exiting')
            return
        print('\nOK: All tests passed')

if __name__ == '__main__':
    run_main()

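A possible invocation, using the flags defined in parse_args (the script's
file name is not shown in this hunk, so the one here is hypothetical):

    python manifest_test.py --conf rippled.cfg --num_validators 2 --verbose
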
bin/python/ripple/util/test_Base58.py (new file, 47 lines)
@@ -0,0 +1,47 @@
from __future__ import absolute_import, division, print_function

from ripple.util import Base58

from unittest import TestCase

BINARY = 'nN9kfUnKTf7PpgLG'

class test_Base58(TestCase):
    def run_test(self, before, after):
        self.assertEquals(Base58.decode(before), after)
        self.assertEquals(Base58.encode(after), before)

    def test_trivial(self):
        self.run_test('', '')

    def test_zeroes(self):
        for before, after in (('', ''), ('abc', 'I\x8b')):
            for i in range(1, 257):
                self.run_test('r' * i + before, '\0' * i + after)

    def test_single_digits(self):
        for i, c in enumerate(Base58.ALPHABET):
            self.run_test(c, chr(i))

    def test_various(self):
        # Test three random numbers.
        self.run_test('88Mw', '\x88L\xed')
        self.run_test(
            'nN9kfUnKTf7PpgLG', '\x03\xdc\x9co\xdea\xefn\xd3\xb8\xe2\xc1')
        self.run_test(
            'zzWWb4C5p6kNrVa4fEBoZpZKd3XQLXch7QJbLCuLdoS1CWr8qdAZHEmwMiJy8Hwp',
            'xN\x82\xfcQ\x1f\xb3~\xdf\xc7\xb37#\xc6~A\xe9\xf6-\x1f\xcb"\xfab'
            '(\'\xccv\x9e\x85\xc3\xd1\x19\x941{\x8et\xfbS}\x86.k\x07\xb5\xb3')

    def test_check(self):
        self.assertEquals(Base58.checksum(BINARY), '\xaa\xaar\x9d')

    def test_encode(self):
        self.assertEquals(
            Base58.encode_version(Base58.VER_ACCOUNT_PUBLIC, BINARY),
            'sB49XwJgmdEZDo8LmYwki7FYkiaN7')

    def test_decode(self):
        ver, b = Base58.decode_version('sB49XwJgmdEZDo8LmYwki7FYkiaN7')
        self.assertEquals(ver, Base58.VER_ACCOUNT_PUBLIC)
        self.assertEquals(b, BINARY)

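A minimal sketch of the round-trip property these tests pin down, assuming
bin/python is on PYTHONPATH:

    from ripple.util import Base58
    raw = '\x88L\xed'
    assert Base58.decode(Base58.encode(raw)) == raw
    ver, body = Base58.decode_version(
        Base58.encode_version(Base58.VER_ACCOUNT_PUBLIC, 'nN9kfUnKTf7PpgLG'))
    assert (ver, body) == (Base58.VER_ACCOUNT_PUBLIC, 'nN9kfUnKTf7PpgLG')
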
bin/python/ripple/util/test_Cache.py (new file, 12 lines)
@@ -0,0 +1,12 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util.Cache import NamedCache

from unittest import TestCase

class test_Cache(TestCase):
    def setUp(self):
        self.cache = NamedCache()

    def test_trivial(self):
        pass

bin/python/ripple/util/test_ConfigFile.py (new file, 163 lines)
@@ -0,0 +1,163 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import ConfigFile

from unittest import TestCase

class test_ConfigFile(TestCase):
    def test_trivial(self):
        self.assertEquals(ConfigFile.read(''), {})

    def test_full(self):
        self.assertEquals(ConfigFile.read(FULL.splitlines()), RESULT)

RESULT = {
    'websocket_port': '6206',
    'database_path': '/development/alpha/db',
    'sntp_servers':
        ['time.windows.com', 'time.apple.com', 'time.nist.gov', 'pool.ntp.org'],
    'validation_seed': 'sh1T8T9yGuV7Jb6DPhqSzdU2s5LcV',
    'node_size': 'medium',
    'rpc_startup': {
        'command': 'log_level',
        'severity': 'debug'},
    'ips': ['r.ripple.com', '51235'],
    'node_db': {
        'file_size_mult': '2',
        'file_size_mb': '8',
        'cache_mb': '256',
        'path': '/development/alpha/db/rocksdb',
        'open_files': '2000',
        'type': 'RocksDB',
        'filter_bits': '12'},
    'peer_port': '53235',
    'ledger_history': 'full',
    'rpc_ip': '127.0.0.1',
    'websocket_public_ip': '0.0.0.0',
    'rpc_allow_remote': '0',
    'validators':
        [['n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7', 'RL1'],
         ['n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj', 'RL2'],
         ['n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C', 'RL3'],
         ['n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS', 'RL4'],
         ['n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA', 'RL5']],
    'debug_logfile': '/development/alpha/debug.log',
    'websocket_public_port': '5206',
    'peer_ip': '0.0.0.0',
    'rpc_port': '5205',
    'validation_quorum': '3',
    'websocket_ip': '127.0.0.1'}

FULL = """
[ledger_history]
full

# Allow other peers to connect to this server.
#
[peer_ip]
0.0.0.0

[peer_port]
53235

# Allow untrusted clients to connect to this server.
#
[websocket_public_ip]
0.0.0.0

[websocket_public_port]
5206

# Provide trusted websocket ADMIN access to the localhost.
#
[websocket_ip]
127.0.0.1

[websocket_port]
6206

# Provide trusted json-rpc ADMIN access to the localhost.
#
[rpc_ip]
127.0.0.1

[rpc_port]
5205

[rpc_allow_remote]
0

[node_size]
medium

# This is primary persistent datastore for rippled. This includes transaction
# metadata, account states, and ledger headers. Helpful information can be
# found here: https://ripple.com/wiki/NodeBackEnd
[node_db]
type=RocksDB
path=/development/alpha/db/rocksdb
open_files=2000
filter_bits=12
cache_mb=256
file_size_mb=8
file_size_mult=2

[database_path]
/development/alpha/db

# This needs to be an absolute directory reference, not a relative one.
# Modify this value as required.
[debug_logfile]
/development/alpha/debug.log

[sntp_servers]
time.windows.com
time.apple.com
time.nist.gov
pool.ntp.org

# Where to find some other servers speaking the Ripple protocol.
#
[ips]
r.ripple.com 51235

# The latest validators can be obtained from
# https://ripple.com/ripple.txt
#
[validators]
n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7 RL1
n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj RL2
n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C RL3
n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS RL4
n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA RL5

# Ditto.
[validation_quorum]
3

[validation_seed]
sh1T8T9yGuV7Jb6DPhqSzdU2s5LcV

# Turn down default logging to save disk space in the long run.
# Valid values here are trace, debug, info, warning, error, and fatal
[rpc_startup]
{ "command": "log_level", "severity": "debug" }

# Configure SSL for WebSockets. Not enabled by default because not everybody
# has an SSL cert on their server, but if you uncomment the following lines and
# set the path to the SSL certificate and private key the WebSockets protocol
# will be protected by SSL/TLS.
#[websocket_secure]
#1

#[websocket_ssl_cert]
#/etc/ssl/certs/server.crt

#[websocket_ssl_key]
#/etc/ssl/private/server.key

# Defaults to 0 ("no") so that you can use self-signed SSL certificates for
# development, or internally.
#[ssl_verify]
#0
""".strip()

bin/python/ripple/util/test_Decimal.py (new file, 20 lines)
@@ -0,0 +1,20 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util.Decimal import Decimal

from unittest import TestCase

class test_Decimal(TestCase):
    def test_construct(self):
        self.assertEquals(str(Decimal('')), '0')
        self.assertEquals(str(Decimal('0')), '0')
        self.assertEquals(str(Decimal('0.2')), '0.2')
        self.assertEquals(str(Decimal('-0.2')), '-0.2')
        self.assertEquals(str(Decimal('3.1416')), '3.1416')

    def test_accumulate(self):
        d = Decimal()
        d.accumulate('0.5')
        d.accumulate('3.1416')
        d.accumulate('-23.34234')
        self.assertEquals(str(d), '-19.70074')

bin/python/ripple/util/test_Dict.py (new file, 56 lines)
@@ -0,0 +1,56 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import Dict

from unittest import TestCase

class test_Dict(TestCase):
    def test_count_all_subitems(self):
        self.assertEquals(Dict.count_all_subitems({}), 1)
        self.assertEquals(Dict.count_all_subitems({'a': {}}), 2)
        self.assertEquals(Dict.count_all_subitems([1]), 2)
        self.assertEquals(Dict.count_all_subitems([1, 2]), 3)
        self.assertEquals(Dict.count_all_subitems([1, {2: 3}]), 4)
        self.assertEquals(Dict.count_all_subitems([1, {2: [3]}]), 5)
        self.assertEquals(Dict.count_all_subitems([1, {2: [3, 4]}]), 6)

    def test_prune(self):
        self.assertEquals(Dict.prune({}, 0), {})
        self.assertEquals(Dict.prune({}, 1), {})

        self.assertEquals(Dict.prune({1: 2}, 0), '{dict with 1 subitem}')
        self.assertEquals(Dict.prune({1: 2}, 1), {1: 2})
        self.assertEquals(Dict.prune({1: 2}, 2), {1: 2})

        self.assertEquals(Dict.prune([1, 2, 3], 0), '[list with 3 subitems]')
        self.assertEquals(Dict.prune([1, 2, 3], 1), [1, 2, 3])

        self.assertEquals(Dict.prune([{1: [2, 3]}], 0),
                          '[list with 4 subitems]')
        self.assertEquals(Dict.prune([{1: [2, 3]}], 1),
                          ['{dict with 3 subitems}'])
        self.assertEquals(Dict.prune([{1: [2, 3]}], 2),
                          [{1: u'[list with 2 subitems]'}])
        self.assertEquals(Dict.prune([{1: [2, 3]}], 3),
                          [{1: [2, 3]}])

    def test_prune_nosub(self):
        self.assertEquals(Dict.prune({}, 0, False), {})
        self.assertEquals(Dict.prune({}, 1, False), {})

        self.assertEquals(Dict.prune({1: 2}, 0, False), '{dict with 1 subitem}')
        self.assertEquals(Dict.prune({1: 2}, 1, False), {1: 2})
        self.assertEquals(Dict.prune({1: 2}, 2, False), {1: 2})

        self.assertEquals(Dict.prune([1, 2, 3], 0, False),
                          '[list with 3 subitems]')
        self.assertEquals(Dict.prune([1, 2, 3], 1, False), [1, 2, 3])

        self.assertEquals(Dict.prune([{1: [2, 3]}], 0, False),
                          '[list with 1 subitem]')
        self.assertEquals(Dict.prune([{1: [2, 3]}], 1, False),
                          ['{dict with 1 subitem}'])
        self.assertEquals(Dict.prune([{1: [2, 3]}], 2, False),
                          [{1: u'[list with 2 subitems]'}])
        self.assertEquals(Dict.prune([{1: [2, 3]}], 3, False),
                          [{1: [2, 3]}])

bin/python/ripple/util/test_Function.py (new file, 37 lines)
@@ -0,0 +1,37 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util.Function import Function, MATCHER

from unittest import TestCase

def FN(*args, **kwds):
    return args, kwds

class test_Function(TestCase):
    def match_test(self, item, *results):
        self.assertEquals(MATCHER.match(item).groups(), results)

    def test_simple(self):
        self.match_test('function', 'function', '')
        self.match_test('f(x)', 'f', '(x)')

    def test_empty_function(self):
        self.assertEquals(Function()(), None)

    def test_empty_args(self):
        f = Function('ripple.util.test_Function.FN()')
        self.assertEquals(f(), ((), {}))

    def test_function(self):
        f = Function('ripple.util.test_Function.FN(True, {1: 2}, None)')
        self.assertEquals(f(), ((True, {1: 2}, None), {}))
        self.assertEquals(f('hello', foo='bar'),
                          (('hello', True, {1: 2}, None), {'foo': 'bar'}))
        self.assertEquals(
            f, Function('ripple.util.test_Function.FN(true, {1: 2}, null)'))

    def test_quoting(self):
        f = Function('ripple.util.test_Function.FN(testing)')
        self.assertEquals(f(), (('testing',), {}))
        f = Function('ripple.util.test_Function.FN(testing, true, false, null)')
        self.assertEquals(f(), (('testing', True, False, None), {}))

bin/python/ripple/util/test_PrettyPrint.py (new file, 56 lines)
@@ -0,0 +1,56 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import PrettyPrint

from unittest import TestCase

class test_PrettyPrint(TestCase):
    def setUp(self):
        self._results = []
        self.printer = PrettyPrint.Streamer(printer=self.printer)

    def printer(self, *args, **kwds):
        self._results.extend(args)

    def run_test(self, expected, *args):
        for i in range(0, len(args), 2):
            self.printer.add(args[i], args[i + 1])
        self.printer.finish()
        self.assertEquals(''.join(self._results), expected)

    def test_simple_printer(self):
        self.run_test(
            '{\n    "foo": "bar"\n}',
            'foo', 'bar')

    def test_multiple_lines(self):
        self.run_test(
            '{\n    "foo": "bar",\n    "baz": 5\n}',
            'foo', 'bar', 'baz', 5)

    def test_nested_dict(self):
        # Renamed from a second `test_multiple_lines`, which would have
        # shadowed the method above and silently kept it from running.
        self.run_test(
            """
{
    "foo": {
        "bar": 1,
        "baz": true
    },
    "bang": "bing"
}
""".strip(), 'foo', {'bar': 1, 'baz': True}, 'bang', 'bing')

    def test_multiple_lines_with_list(self):
        self.run_test(
            """
{
    "foo": [
        "bar",
        1
    ],
    "baz": [
        23,
        42
    ]
}
""".strip(), 'foo', ['bar', 1], 'baz', [23, 42])

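An inferred usage sketch for Streamer, based only on the calls these tests
make (the printer callable receives string fragments, which the tests join
with ''):

    from ripple.util import PrettyPrint

    def emit(*args, **kwds):
        print(''.join(args))

    streamer = PrettyPrint.Streamer(printer=emit)
    streamer.add('foo', {'bar': 1, 'baz': True})
    streamer.finish()
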
bin/python/ripple/util/test_Range.py (new file, 28 lines)
@@ -0,0 +1,28 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util import Range

from unittest import TestCase

class test_Range(TestCase):
    def round_trip(self, s, *items):
        self.assertEquals(Range.from_string(s), set(items))
        self.assertEquals(Range.to_string(items), s)

    def test_complete(self):
        self.round_trip('10,19', 10, 19)
        self.round_trip('10', 10)
        self.round_trip('10-12', 10, 11, 12)
        self.round_trip('10,19,42-45', 10, 19, 42, 43, 44, 45)

    def test_names(self):
        self.assertEquals(
            Range.from_string('first,last,current', first=1, last=3, current=5),
            set([1, 3, 5]))

    def test_is_range(self):
        self.assertTrue(Range.is_range(''))
        self.assertTrue(Range.is_range('10'))
        self.assertTrue(Range.is_range('10,12'))
        self.assertFalse(Range.is_range('10,12,fred'))
        self.assertTrue(Range.is_range('10,12,fred', 'fred'))

bin/python/ripple/util/test_Search.py (new file, 44 lines)
@@ -0,0 +1,44 @@
from __future__ import absolute_import, division, print_function, unicode_literals

from ripple.util.Search import binary_search, linear_search, FIRST, LAST

from unittest import TestCase

class test_Search(TestCase):
    def condition(self, i):
        return 10 <= i < 15

    def test_linear_full(self):
        self.assertEquals(list(linear_search(range(21), self.condition)),
                          [10, 11, 12, 13, 14])

    def test_linear_partial(self):
        self.assertEquals(list(linear_search(range(8, 14), self.condition)),
                          [10, 11, 12, 13])
        self.assertEquals(list(linear_search(range(11, 14), self.condition)),
                          [11, 12, 13])
        self.assertEquals(list(linear_search(range(12, 18), self.condition)),
                          [12, 13, 14])

    def test_linear_empty(self):
        self.assertEquals(list(linear_search(range(1, 4), self.condition)), [])

    def test_binary_first(self):
        self.assertEquals(binary_search(0, 14, self.condition, FIRST), 10)
        self.assertEquals(binary_search(10, 19, self.condition, FIRST), 10)
        self.assertEquals(binary_search(14, 14, self.condition, FIRST), 14)
        self.assertEquals(binary_search(14, 15, self.condition, FIRST), 14)
        self.assertEquals(binary_search(13, 15, self.condition, FIRST), 13)

    def test_binary_last(self):
        self.assertEquals(binary_search(10, 20, self.condition, LAST), 14)
        self.assertEquals(binary_search(0, 14, self.condition, LAST), 14)
        self.assertEquals(binary_search(14, 14, self.condition, LAST), 14)
        self.assertEquals(binary_search(14, 15, self.condition, LAST), 14)
        self.assertEquals(binary_search(13, 15, self.condition, LAST), 14)

    def test_binary_throws(self):
        self.assertRaises(
            ValueError, binary_search, 0, 20, self.condition, LAST)
        self.assertRaises(
            ValueError, binary_search, 0, 20, self.condition, FIRST)

bin/python/ripple/util/test_Sign.py (new file, 127 lines)
@@ -0,0 +1,127 @@
from __future__ import absolute_import, division, print_function

from ripple.util import Sign
from ripple.util import Base58
from ripple.ledger import SField

from unittest import TestCase

BINARY = 'nN9kfUnKTf7PpgLG'

class test_Sign(TestCase):
    SEQUENCE = 23
    SIGNATURE = (
        'JAAAABdxIe2DIKUZd9jDjKikknxnDfWCHkSXYZReFenvsmoVCdIw6nMhAnZ2dnZ2'
        'dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dkDOjlWtQSvRTjuwe+4iNusg0sJM'
        'zqkBJwDz30b2SkxZ7Fte/Vx4htM/kkfUfJCaxmxE5N4dHSKuiO9iDHsktqIA')

    def setUp(self):
        self.results = []

    def print(self, *args, **kwds):
        self.results.append([list(args), kwds])

    def test_field_code(self):
        self.assertEquals(SField.field_code(SField.STI_UINT32, 4), '$')
        self.assertEquals(SField.field_code(SField.STI_VL, 1), 'q')
        self.assertEquals(SField.field_code(SField.STI_VL, 3), 's')
        self.assertEquals(SField.field_code(SField.STI_VL, 6), 'v')

    def test_strvl(self):
        self.assertEquals(Sign.prepend_length_byte(BINARY),
                          '\x10nN9kfUnKTf7PpgLG')

    def urandom(self, bytes):
        return '\5' * bytes

    def test_make_seed(self):
        self.assertEquals(Sign.make_seed(self.urandom),
                          '\5\5\5\5\5\5\5\5\5\5\5\5\5\5\5\5')

    def test_make_ed(self):
        private, public = Sign.make_ed25519_keypair(self.urandom)
        self.assertEquals(private,
                          '\5\5\5\5\5\5\5\5\5\5\5\5\5\5\5\5'
                          '\5\5\5\5\5\5\5\5\5\5\5\5\5\5\5\5')
        self.assertEquals(public,
                          'nz\x1c\xdd)\xb0\xb7\x8f\xd1:\xf4\xc5Y\x8f\xef\xf4'
                          '\xef*\x97\x16n<\xa6\xf2\xe4\xfb\xfc\xcd\x80P[\xf1')

    def test_make_manifest(self):
        _, pk = Sign.make_ed25519_keypair(self.urandom)
        m = Sign.make_manifest(pk, 'verify', 12345)
        self.assertEquals(
            m, '$\x00\x0009q nz\x1c\xdd)\xb0\xb7\x8f\xd1:\xf4\xc5Y\x8f\xef\xf4'
            '\xef*\x97\x16n<\xa6\xf2\xe4\xfb\xfc\xcd\x80P[\xf1s\x06verify')

    def test_sign_manifest(self):
        sk, pk = Sign.make_ed25519_keypair(self.urandom)
        s = Sign.sign_manifest('manifest', sk, pk)
        self.assertEquals(
            s, 'manifestv@\xe5\x84\xbe\xc4\x80N\xa0v"\xb2\x80A\x88\x06\xc0'
            '\xd2\xbae\x92\x89\xa8\'!\xdd\x00\x88\x06s\xe0\xf74\xe3Yg\xad{$'
            '\x17\xd3\x99\xaa\x16\xb0\xeaZ\xd7]\r\xb3\xdc\x1b\x8f\xc1Z\xdfHU'
            '\xb5\x92\xac\x82jI\x02')

    def test_wrap(self):
        wrap = lambda s: Sign.wrap(s, 5)
        self.assertEquals(wrap(''), '')
        self.assertEquals(wrap('12345'), '12345')
        self.assertEquals(wrap('123456'), '123\n456')
        self.assertEquals(wrap('12345678'), '1234\n5678')
        self.assertEquals(wrap('1234567890'), '12345\n67890')
        self.assertEquals(wrap('12345678901'), '123\n456\n789\n01')

    def test_create_ed_keys(self):
        pkh, skh = Sign.create_ed_keys(self.urandom)
        self.assertEquals(
            pkh, 'nHUUaKHpxyRP4TZZ79tTpXuTpoM8pRNs5crZpGVA5jdrjib5easY')
        self.assertEquals(
            skh, 'pnEp13Zu7xTeKQVQ2RZVaUraE9GXKqFtnXQVUFKXbTE6wsP4wne')

    def get_test_keypair(self):
        public = (Base58.VER_NODE_PUBLIC, '\x02' + (32 * 'v'))
        private = (Base58.VER_NODE_PRIVATE, 32 * 'k')

        Sign.check_validator_public(*public)
        Sign.check_master_secret(*private)

        return (Base58.encode_version(*public), Base58.encode_version(*private))

    def test_get_signature(self):
        signature = Sign.get_signature(self.SEQUENCE, *self.get_test_keypair())
        self.assertEquals(
            signature,
            'JAAAABdxIe2DIKUZd9jDjKikknxnDfWCHkSXYZReFenvsmoVCdIw6nMhAnZ2dnZ2'
            'dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dkDOjlWtQSvRTjuwe+4iNusg0sJM'
            'zqkBJwDz30b2SkxZ7Fte/Vx4htM/kkfUfJCaxmxE5N4dHSKuiO9iDHsktqIA')

    def test_check(self):
        public = Base58.encode_version(Base58.VER_NODE_PRIVATE, 32 * 'k')
        Sign.perform_check(public, self.print)
        self.assertEquals(self.results,
                          [[['version = VER_NODE_PRIVATE'], {}],
                           [['decoded length = 32'], {}]])

    def test_create(self):
        Sign.perform_create(self.urandom, self.print)
        self.assertEquals(
            self.results,
            [[['[validator_keys]',
               'nHUUaKHpxyRP4TZZ79tTpXuTpoM8pRNs5crZpGVA5jdrjib5easY',
               '',
               '[master_secret]',
               'pnEp13Zu7xTeKQVQ2RZVaUraE9GXKqFtnXQVUFKXbTE6wsP4wne'],
              {'sep': '\n'}]])

    def test_sign(self):
        public, private = self.get_test_keypair()
        Sign.perform_sign(self.SEQUENCE, public, private, print=self.print)
        self.assertEquals(
            self.results,
            [[['[validation_manifest]'], {}],
             [['JAAAABdxIe2DIKUZd9jDjKikknxnDfWCHkSXYZReFenvsmo\n'
               'VCdIw6nMhAnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dnZ2dn\n'
               'Z2dnZ2dkDOjlWtQSvRTjuwe+4iNusg0sJMzqkBJwDz30b2S\n'
               'kxZ7Fte/Vx4htM/kkfUfJCaxmxE5N4dHSKuiO9iDHsktqIA'],
              {}]])

bin/python/six.py (new file, 747 lines)
@@ -0,0 +1,747 @@
|
||||
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||
|
||||
# Copyright (c) 2010-2014 Benjamin Peterson
|
||||
#
|
||||
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||
# of this software and associated documentation files (the "Software"), to deal
|
||||
# in the Software without restriction, including without limitation the rights
|
||||
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||
# copies of the Software, and to permit persons to whom the Software is
|
||||
# furnished to do so, subject to the following conditions:
|
||||
#
|
||||
# The above copyright notice and this permission notice shall be included in all
|
||||
# copies or substantial portions of the Software.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||
# SOFTWARE.
|
||||
|
||||
import functools
|
||||
import operator
|
||||
import sys
|
||||
import types
|
||||
|
||||
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||
__version__ = "1.7.3"
|
||||
|
||||
|
||||
# Useful for very coarse version differentiation.
|
||||
PY2 = sys.version_info[0] == 2
|
||||
PY3 = sys.version_info[0] == 3
|
||||
|
||||
if PY3:
|
||||
string_types = str,
|
||||
integer_types = int,
|
||||
class_types = type,
|
||||
text_type = str
|
||||
binary_type = bytes
|
||||
|
||||
MAXSIZE = sys.maxsize
|
||||
else:
|
||||
string_types = basestring,
|
||||
integer_types = (int, long)
|
||||
class_types = (type, types.ClassType)
|
||||
text_type = unicode
|
||||
binary_type = str
|
||||
|
||||
if sys.platform.startswith("java"):
|
||||
# Jython always uses 32 bits.
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||
class X(object):
|
||||
def __len__(self):
|
||||
return 1 << 31
|
||||
try:
|
||||
len(X())
|
||||
except OverflowError:
|
||||
# 32-bit
|
||||
MAXSIZE = int((1 << 31) - 1)
|
||||
else:
|
||||
# 64-bit
|
||||
MAXSIZE = int((1 << 63) - 1)
|
||||
del X
|
||||
|
||||
|
||||
def _add_doc(func, doc):
|
||||
"""Add documentation to a function."""
|
||||
func.__doc__ = doc
|
||||
|
||||
|
||||
def _import_module(name):
|
||||
"""Import module, returning the module after the last dot."""
|
||||
__import__(name)
|
||||
return sys.modules[name]
|
||||
|
||||
|
||||
class _LazyDescr(object):
|
||||
|
||||
def __init__(self, name):
|
||||
self.name = name
|
||||
|
||||
def __get__(self, obj, tp):
|
||||
result = self._resolve()
|
||||
setattr(obj, self.name, result) # Invokes __set__.
|
||||
# This is a bit ugly, but it avoids running this again.
|
||||
delattr(obj.__class__, self.name)
|
||||
return result
|
||||
|
||||
|
||||
class MovedModule(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old, new=None):
|
||||
super(MovedModule, self).__init__(name)
|
||||
if PY3:
|
||||
if new is None:
|
||||
new = name
|
||||
self.mod = new
|
||||
else:
|
||||
self.mod = old
|
||||
|
||||
def _resolve(self):
|
||||
return _import_module(self.mod)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
_module = self._resolve()
|
||||
value = getattr(_module, attr)
|
||||
setattr(self, attr, value)
|
||||
return value
|
||||
|
||||
|
||||
class _LazyModule(types.ModuleType):
|
||||
|
||||
def __init__(self, name):
|
||||
super(_LazyModule, self).__init__(name)
|
||||
self.__doc__ = self.__class__.__doc__
|
||||
|
||||
def __dir__(self):
|
||||
attrs = ["__doc__", "__name__"]
|
||||
attrs += [attr.name for attr in self._moved_attributes]
|
||||
return attrs
|
||||
|
||||
# Subclasses should override this
|
||||
_moved_attributes = []
|
||||
|
||||
|
||||
class MovedAttribute(_LazyDescr):
|
||||
|
||||
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||
super(MovedAttribute, self).__init__(name)
|
||||
if PY3:
|
||||
if new_mod is None:
|
||||
new_mod = name
|
||||
self.mod = new_mod
|
||||
if new_attr is None:
|
||||
if old_attr is None:
|
||||
new_attr = name
|
||||
else:
|
||||
new_attr = old_attr
|
||||
self.attr = new_attr
|
||||
else:
|
||||
self.mod = old_mod
|
||||
if old_attr is None:
|
||||
old_attr = name
|
||||
self.attr = old_attr
|
||||
|
||||
def _resolve(self):
|
||||
module = _import_module(self.mod)
|
||||
return getattr(module, self.attr)
|
||||
|
||||
|
||||
class _SixMetaPathImporter(object):
|
||||
"""
|
||||
A meta path importer to import six.moves and its submodules.
|
||||
|
||||
This class implements a PEP302 finder and loader. It should be compatible
|
||||
with Python 2.5 and all existing versions of Python3
|
||||
"""
|
||||
def __init__(self, six_module_name):
|
||||
self.name = six_module_name
|
||||
self.known_modules = {}
|
||||
|
||||
def _add_module(self, mod, *fullnames):
|
||||
for fullname in fullnames:
|
||||
self.known_modules[self.name + "." + fullname] = mod
|
||||
|
||||
def _get_module(self, fullname):
|
||||
return self.known_modules[self.name + "." + fullname]
|
||||
|
||||
def find_module(self, fullname, path=None):
|
||||
if fullname in self.known_modules:
|
||||
return self
|
||||
return None
|
||||
|
||||
def __get_module(self, fullname):
|
||||
try:
|
||||
return self.known_modules[fullname]
|
||||
except KeyError:
|
||||
raise ImportError("This loader does not know module " + fullname)
|
||||
|
||||
def load_module(self, fullname):
|
||||
try:
|
||||
# in case of a reload
|
||||
return sys.modules[fullname]
|
||||
except KeyError:
|
||||
pass
|
||||
mod = self.__get_module(fullname)
|
||||
if isinstance(mod, MovedModule):
|
||||
mod = mod._resolve()
|
||||
else:
|
||||
mod.__loader__ = self
|
||||
sys.modules[fullname] = mod
|
||||
return mod
|
||||
|
||||
def is_package(self, fullname):
|
||||
"""
|
||||
Return true, if the named module is a package.
|
||||
|
||||
We need this method to get correct spec objects with
|
||||
Python 3.4 (see PEP451)
|
||||
"""
|
||||
return hasattr(self.__get_module(fullname), "__path__")
|
||||
|
||||
def get_code(self, fullname):
|
||||
"""Return None
|
||||
|
||||
Required, if is_package is implemented"""
|
||||
self.__get_module(fullname) # eventually raises ImportError
|
||||
return None
|
||||
get_source = get_code # same as get_code
|
||||
|
||||
_importer = _SixMetaPathImporter(__name__)
|
||||
|
||||
|
||||
class _MovedItems(_LazyModule):
|
||||
"""Lazy loading of moved objects"""
|
||||
__path__ = [] # mark as package
|
||||
|
||||
|
||||
_moved_attributes = [
|
||||
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||
MovedAttribute("StringIO", "StringIO", "io"),
|
||||
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||
MovedAttribute("UserList", "UserList", "collections"),
|
||||
MovedAttribute("UserString", "UserString", "collections"),
|
||||
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||
|
||||
MovedModule("builtins", "__builtin__"),
|
||||
MovedModule("configparser", "ConfigParser"),
|
||||
MovedModule("copyreg", "copy_reg"),
|
||||
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||
MovedModule("http_client", "httplib", "http.client"),
|
||||
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||
MovedModule("cPickle", "cPickle", "pickle"),
|
||||
MovedModule("queue", "Queue"),
|
||||
MovedModule("reprlib", "repr"),
|
||||
MovedModule("socketserver", "SocketServer"),
|
||||
MovedModule("_thread", "thread", "_thread"),
|
||||
MovedModule("tkinter", "Tkinter"),
|
||||
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
|
||||
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||
"tkinter.colorchooser"),
|
||||
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||
"tkinter.commondialog"),
|
||||
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||
"tkinter.simpledialog"),
|
||||
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
|
||||
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
|
||||
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||
MovedModule("winreg", "_winreg"),
|
||||
]
|
||||
for attr in _moved_attributes:
|
||||
setattr(_MovedItems, attr.name, attr)
|
||||
if isinstance(attr, MovedModule):
|
||||
_importer._add_module(attr, "moves." + attr.name)
|
||||
del attr
|
||||
|
||||
_MovedItems._moved_attributes = _moved_attributes
|
||||
|
||||
moves = _MovedItems(__name__ + ".moves")
|
||||
_importer._add_module(moves, "moves")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_parse(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||
|
||||
|
||||
_urllib_parse_moved_attributes = [
|
||||
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
|
||||
MovedAttribute("quote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||
]
|
||||
for attr in _urllib_parse_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||
"moves.urllib_parse", "moves.urllib.parse")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_error(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||
|
||||
|
||||
_urllib_error_moved_attributes = [
|
||||
MovedAttribute("URLError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
|
||||
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
|
||||
]
|
||||
for attr in _urllib_error_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_error, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||
"moves.urllib_error", "moves.urllib.error")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_request(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_request"""
|
||||
|
||||
|
||||
_urllib_request_moved_attributes = [
|
||||
MovedAttribute("urlopen", "urllib2", "urllib.request"),
|
||||
MovedAttribute("install_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("build_opener", "urllib2", "urllib.request"),
|
||||
MovedAttribute("pathname2url", "urllib", "urllib.request"),
|
||||
MovedAttribute("url2pathname", "urllib", "urllib.request"),
|
||||
MovedAttribute("getproxies", "urllib", "urllib.request"),
|
||||
MovedAttribute("Request", "urllib2", "urllib.request"),
|
||||
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
|
||||
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
|
||||
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
|
||||
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
|
||||
MovedAttribute("URLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
|
||||
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
|
||||
]
|
||||
for attr in _urllib_request_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_request, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
|
||||
"moves.urllib_request", "moves.urllib.request")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_response(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_response"""
|
||||
|
||||
|
||||
_urllib_response_moved_attributes = [
|
||||
MovedAttribute("addbase", "urllib", "urllib.response"),
|
||||
MovedAttribute("addclosehook", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfo", "urllib", "urllib.response"),
|
||||
MovedAttribute("addinfourl", "urllib", "urllib.response"),
|
||||
]
|
||||
for attr in _urllib_response_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_response, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
|
||||
"moves.urllib_response", "moves.urllib.response")
|
||||
|
||||
|
||||
class Module_six_moves_urllib_robotparser(_LazyModule):
|
||||
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
|
||||
|
||||
|
||||
_urllib_robotparser_moved_attributes = [
|
||||
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
|
||||
]
|
||||
for attr in _urllib_robotparser_moved_attributes:
|
||||
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
|
||||
del attr
|
||||
|
||||
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
|
||||
"moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||
|
||||
|
||||
class Module_six_moves_urllib(types.ModuleType):
|
||||
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
|
||||
__path__ = [] # mark as package
|
||||
parse = _importer._get_module("moves.urllib_parse")
|
||||
error = _importer._get_module("moves.urllib_error")
|
||||
request = _importer._get_module("moves.urllib_request")
|
||||
response = _importer._get_module("moves.urllib_response")
|
||||
robotparser = _importer._get_module("moves.urllib_robotparser")
|
||||
|
||||
def __dir__(self):
|
||||
return ['parse', 'error', 'request', 'response', 'robotparser']
|
||||
|
||||
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
|
||||
"moves.urllib")
|
||||
|
||||
|
||||
def add_move(move):
|
||||
"""Add an item to six.moves."""
|
||||
setattr(_MovedItems, move.name, move)
|
||||
|
||||
|
||||
def remove_move(name):
|
||||
"""Remove item from six.moves."""
|
||||
try:
|
||||
delattr(_MovedItems, name)
|
||||
except AttributeError:
|
||||
try:
|
||||
del moves.__dict__[name]
|
||||
except KeyError:
|
||||
raise AttributeError("no such move, %r" % (name,))
|
||||
|
||||
|
||||
if PY3:
|
||||
_meth_func = "__func__"
|
||||
_meth_self = "__self__"
|
||||
|
||||
_func_closure = "__closure__"
|
||||
_func_code = "__code__"
|
||||
_func_defaults = "__defaults__"
|
||||
_func_globals = "__globals__"
|
||||
else:
|
||||
_meth_func = "im_func"
|
||||
_meth_self = "im_self"
|
||||
|
||||
_func_closure = "func_closure"
|
||||
_func_code = "func_code"
|
||||
_func_defaults = "func_defaults"
|
||||
_func_globals = "func_globals"
|
||||
|
||||
|
||||
try:
|
||||
advance_iterator = next
|
||||
except NameError:
|
||||
def advance_iterator(it):
|
||||
return it.next()
|
||||
next = advance_iterator
|
||||
|
||||
|
||||
try:
|
||||
callable = callable
|
||||
except NameError:
|
||||
def callable(obj):
|
||||
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||
|
||||
|
||||
if PY3:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound
|
||||
|
||||
create_bound_method = types.MethodType
|
||||
|
||||
Iterator = object
|
||||
else:
|
||||
def get_unbound_function(unbound):
|
||||
return unbound.im_func
|
||||
|
||||
def create_bound_method(func, obj):
|
||||
return types.MethodType(func, obj, obj.__class__)
|
||||
|
||||
class Iterator(object):
|
||||
|
||||
def next(self):
|
||||
return type(self).__next__(self)
|
||||
|
||||
callable = callable
|
||||
_add_doc(get_unbound_function,
|
||||
"""Get the function out of a possibly unbound function""")
|
||||
|
||||
|
||||
get_method_function = operator.attrgetter(_meth_func)
|
||||
get_method_self = operator.attrgetter(_meth_self)
|
||||
get_function_closure = operator.attrgetter(_func_closure)
|
||||
get_function_code = operator.attrgetter(_func_code)
|
||||
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||
get_function_globals = operator.attrgetter(_func_globals)
|
||||
|
||||
|
||||
if PY3:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.keys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.values(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.items(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.lists(**kw))
|
||||
else:
|
||||
def iterkeys(d, **kw):
|
||||
return iter(d.iterkeys(**kw))
|
||||
|
||||
def itervalues(d, **kw):
|
||||
return iter(d.itervalues(**kw))
|
||||
|
||||
def iteritems(d, **kw):
|
||||
return iter(d.iteritems(**kw))
|
||||
|
||||
def iterlists(d, **kw):
|
||||
return iter(d.iterlists(**kw))
|
||||
|
||||
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
|
||||
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
|
||||
_add_doc(iteritems,
|
||||
"Return an iterator over the (key, value) pairs of a dictionary.")
|
||||
_add_doc(iterlists,
|
||||
"Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||
|
||||
|
||||
if PY3:
|
||||
def b(s):
|
||||
return s.encode("latin-1")
|
||||
def u(s):
|
||||
return s
|
||||
unichr = chr
|
||||
if sys.version_info[1] <= 1:
|
||||
def int2byte(i):
|
||||
return bytes((i,))
|
||||
else:
|
||||
# This is about 2x faster than the implementation above on 3.2+
|
||||
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||
byte2int = operator.itemgetter(0)
|
||||
indexbytes = operator.getitem
|
||||
iterbytes = iter
|
||||
import io
|
||||
StringIO = io.StringIO
|
||||
BytesIO = io.BytesIO
|
||||
else:
|
||||
def b(s):
|
||||
return s
|
||||
# Workaround for standalone backslash
|
||||
def u(s):
|
||||
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
|
||||
unichr = unichr
|
||||
int2byte = chr
|
||||
def byte2int(bs):
|
||||
return ord(bs[0])
|
||||
def indexbytes(buf, i):
|
||||
return ord(buf[i])
|
||||
def iterbytes(buf):
|
||||
return (ord(byte) for byte in buf)
|
||||
import StringIO
|
||||
StringIO = BytesIO = StringIO.StringIO
|
||||
_add_doc(b, """Byte literal""")
|
||||
_add_doc(u, """Text literal""")
|
||||
|
||||
|
||||
if PY3:
|
||||
exec_ = getattr(moves.builtins, "exec")
|
||||
|
||||
|
||||
def reraise(tp, value, tb=None):
|
||||
if value.__traceback__ is not tb:
|
||||
raise value.with_traceback(tb)
|
||||
raise value
|
||||
|
||||
else:
|
||||
def exec_(_code_, _globs_=None, _locs_=None):
|
||||
"""Execute code in a namespace."""
|
||||
if _globs_ is None:
|
||||
frame = sys._getframe(1)
|
||||
_globs_ = frame.f_globals
|
||||
if _locs_ is None:
|
||||
_locs_ = frame.f_locals
|
||||
del frame
|
||||
elif _locs_ is None:
|
||||
_locs_ = _globs_
|
||||
exec("""exec _code_ in _globs_, _locs_""")
|
||||
|
||||
|
||||
exec_("""def reraise(tp, value, tb=None):
|
||||
raise tp, value, tb
|
||||
""")


print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return

        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                    isinstance(data, unicode) and
                    fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
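
# Illustrative usage sketch (not from the six sources): print_() exposes
# the keyword interface of the Python 3 print() built-in even on 2.4/2.5.
#
#   >>> print_("warning:", "low disk", sep=" ", end="\n", file=sys.stderr)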

_add_doc(reraise, """Reraise an exception.""")


if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped):
        def wrapper(f):
            f = functools.wraps(wrapped)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps
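
# Illustrative usage sketch (not from the six sources): this wraps() shim
# also pins __wrapped__ to the original function, which plain
# functools.wraps does not reliably do before Python 3.4.
#
#   >>> def logged(func):
#   ...     @wraps(func)
#   ...     def inner(*args, **kwargs):
#   ...         print_("calling", func.__name__)
#   ...         return func(*args, **kwargs)
#   ...     return inner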


def with_metaclass(meta, *bases):
    """Create a base class with a metaclass."""
    # This requires a bit of explanation: the basic idea is to make a dummy
    # metaclass for one level of class instantiation that replaces itself
    # with the actual metaclass.
    class metaclass(meta):
        def __new__(cls, name, this_bases, d):
            return meta(name, bases, d)
    return type.__new__(metaclass, 'temporary_class', (), {})
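
# Illustrative usage sketch (not from the six sources): with_metaclass()
# avoids the two incompatible metaclass spellings (the metaclass= keyword
# on Python 3, the __metaclass__ attribute on Python 2) via a temporary
# base class.
#
#   >>> class Meta(type):
#   ...     pass
#   >>> class MyClass(with_metaclass(Meta, object)):
#   ...     pass
#   >>> type(MyClass) is Meta
#   True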


def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass."""
    def wrapper(cls):
        orig_vars = cls.__dict__.copy()
        orig_vars.pop('__dict__', None)
        orig_vars.pop('__weakref__', None)
        slots = orig_vars.get('__slots__')
        if slots is not None:
            if isinstance(slots, str):
                slots = [slots]
            for slots_var in slots:
                orig_vars.pop(slots_var)
        return metaclass(cls.__name__, cls.__bases__, orig_vars)
    return wrapper
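
# Illustrative usage sketch (not from the six sources): add_metaclass() is
# the decorator counterpart of with_metaclass(), rebuilding the decorated
# class under the desired metaclass (Meta as in the sketch above).
#
#   >>> @add_metaclass(Meta)
#   ... class MyOtherClass(object):
#   ...     pass
#   >>> type(MyOtherClass) is Meta
#   True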


# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools
# does this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: another "instance" of the six module
        # might be floating around, so we can't use isinstance() to check
        # for the six meta path importer; the other six instance will have
        # inserted an importer with a different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)