mirror of
https://github.com/XRPLF/rippled.git
synced 2025-11-09 05:35:51 +00:00
Compare commits
1057 Commits
0.16.0
...
0.26.4-sp3
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
00791d2151 | ||
|
|
b141598f9b | ||
|
|
d1618d79b0 | ||
|
|
00c84dfe5c | ||
|
|
95f31b98a8 | ||
|
|
10d74ed100 | ||
|
|
8a7f612d5b | ||
|
|
a46ae4efec | ||
|
|
62777a794e | ||
|
|
bb44bdd047 | ||
|
|
6904e66384 | ||
|
|
05a04aa801 | ||
|
|
6591c21ace | ||
|
|
e8d03c7b9b | ||
|
|
6fbce4c2f7 | ||
|
|
c168d54495 | ||
|
|
2cce22052b | ||
|
|
4e19d5f625 | ||
|
|
5b667da526 | ||
|
|
f9fc9a3518 | ||
|
|
e005cfd70e | ||
|
|
feb997481c | ||
|
|
2c8e90c9d8 | ||
|
|
ec96d5afa0 | ||
|
|
8be8853c33 | ||
|
|
c228f5a244 | ||
|
|
d4c8b4e3ac | ||
|
|
6564f6c164 | ||
|
|
1e37a5509c | ||
|
|
1e9503deaa | ||
|
|
ab1f36c565 | ||
|
|
5a212cd626 | ||
|
|
856fd9d69f | ||
|
|
4606d99951 | ||
|
|
dbd75169e5 | ||
|
|
f5b39ee911 | ||
|
|
db5d52b4b2 | ||
|
|
dfeb9967b8 | ||
|
|
673e860c18 | ||
|
|
9deae34b20 | ||
|
|
ec92344fb4 | ||
|
|
44c68d6174 | ||
|
|
5b7f172d03 | ||
|
|
65125eac87 | ||
|
|
761902864a | ||
|
|
af24d541d1 | ||
|
|
3ad68a617e | ||
|
|
9e1a6589d4 | ||
|
|
da8ceed07e | ||
|
|
35935adc98 | ||
|
|
5b4a501f68 | ||
|
|
5425a90f16 | ||
|
|
7eaca149c1 | ||
|
|
4b5fd95657 | ||
|
|
96dedf553e | ||
|
|
23219f2662 | ||
|
|
af78ed608e | ||
|
|
51dc59e019 | ||
|
|
afc102e90a | ||
|
|
fc560179e0 | ||
|
|
d26241de0e | ||
|
|
00310f4f10 | ||
|
|
8caae219cf | ||
|
|
2264ae9247 | ||
|
|
29225bbe75 | ||
|
|
4b5625fd59 | ||
|
|
7c0c2419f7 | ||
|
|
5f59282ba1 | ||
|
|
db03ce939c | ||
|
|
68bcbbb701 | ||
|
|
8bdf7b3983 | ||
|
|
4ab427d315 | ||
|
|
9a0a434dd8 | ||
|
|
33d1dda954 | ||
|
|
8e9efb4ceb | ||
|
|
8835af11d5 | ||
|
|
cfb6b678f1 | ||
|
|
365500da98 | ||
|
|
f14d75e798 | ||
|
|
0f71b4a378 | ||
|
|
b651e0146d | ||
|
|
a0dbbb2d84 | ||
|
|
a85fbf69e0 | ||
|
|
92b8c7961b | ||
|
|
225f8ac12f | ||
|
|
1161511207 | ||
|
|
ca8eda412e | ||
|
|
ec4ec48fb8 | ||
|
|
c0b69e8ef7 | ||
|
|
4241dbb600 | ||
|
|
f54280aaad | ||
|
|
6069400538 | ||
|
|
616be1d76c | ||
|
|
8e91ce67c5 | ||
|
|
c1ecd661c3 | ||
|
|
b27e2aad07 | ||
|
|
5ce508e09d | ||
|
|
3cfa5a41b1 | ||
|
|
6c072f37ef | ||
|
|
dbd993ed2b | ||
|
|
45b5c4ba7a | ||
|
|
7933e5d1f9 | ||
|
|
01e52e6f9f | ||
|
|
40a955e192 | ||
|
|
a8296f7301 | ||
|
|
590c3b876b | ||
|
|
6dfc805eaa | ||
|
|
5ce6068df5 | ||
|
|
bf9b8f4d1b | ||
|
|
d618581060 | ||
|
|
2936bbfae8 | ||
|
|
47b08bfc02 | ||
|
|
da4f77ca1f | ||
|
|
1c0a75d467 | ||
|
|
659cf0c221 | ||
|
|
430229fd84 | ||
|
|
81699a0971 | ||
|
|
c54aff74b3 | ||
|
|
7f43ab9097 | ||
|
|
d78f740250 | ||
|
|
cd1bd18a49 | ||
|
|
f81b084448 | ||
|
|
02d9c77402 | ||
|
|
a0c903c68c | ||
|
|
6aa325d3da | ||
|
|
041f874d4c | ||
|
|
526ecd6a81 | ||
|
|
d373054fc4 | ||
|
|
b6d9f1d4b2 | ||
|
|
3fef916972 | ||
|
|
89a51e5b91 | ||
|
|
f87a6ccc7a | ||
|
|
f65cea66ef | ||
|
|
4239880acb | ||
|
|
1dcd06a1c1 | ||
|
|
0f30191d10 | ||
|
|
8fb9d5daaa | ||
|
|
ed3c942ff1 | ||
|
|
80436d4a8b | ||
|
|
cfc702c766 | ||
|
|
88ae15ea8e | ||
|
|
6bafca7386 | ||
|
|
379e842080 | ||
|
|
c41ce469d0 | ||
|
|
a1ca68473d | ||
|
|
3345d03433 | ||
|
|
81a426608a | ||
|
|
2ad6f0a65e | ||
|
|
ee8bd8ddae | ||
|
|
319ac14e7d | ||
|
|
0215a7400d | ||
|
|
79db0ca7a6 | ||
|
|
1a7eafb699 | ||
|
|
81a06ea6cd | ||
|
|
de4be649ab | ||
|
|
d90ec5f06c | ||
|
|
32065ced6e | ||
|
|
b5224a2227 | ||
|
|
c55777738f | ||
|
|
c72dff5a24 | ||
|
|
6b09e49c08 | ||
|
|
413218c4c4 | ||
|
|
16c04b50ee | ||
|
|
56c18f7768 | ||
|
|
22ca13bc78 | ||
|
|
4c7fd18230 | ||
|
|
39730fc13e | ||
|
|
889c0a0d0f | ||
|
|
624a803955 | ||
|
|
9f5c21f80e | ||
|
|
a3fe089367 | ||
|
|
85d5cd3118 | ||
|
|
61006e626d | ||
|
|
15aad1cb24 | ||
|
|
95c1c5f54e | ||
|
|
c65fb91878 | ||
|
|
d5a7e1331e | ||
|
|
04bcd93ba3 | ||
|
|
f97ef7039a | ||
|
|
9160b46c1e | ||
|
|
aa4b116498 | ||
|
|
612bb71165 | ||
|
|
5c67f99ef9 | ||
|
|
101a4808a0 | ||
|
|
1d38671f5e | ||
|
|
7f25d88f02 | ||
|
|
be830d3dad | ||
|
|
5bc949d70f | ||
|
|
43817bd722 | ||
|
|
61623d6d75 | ||
|
|
9aad60f56d | ||
|
|
e7cf3e8084 | ||
|
|
e024e7c2ec | ||
|
|
6fc136ae9a | ||
|
|
2b69ded1ea | ||
|
|
8dd799aa6f | ||
|
|
7230ef41ee | ||
|
|
a86f0a743c | ||
|
|
af75b55ef7 | ||
|
|
9ecb37dd4f | ||
|
|
2e3784a914 | ||
|
|
019c1af435 | ||
|
|
5322955f2b | ||
|
|
a8ea4ce283 | ||
|
|
c12862f60d | ||
|
|
e889183fc5 | ||
|
|
7be695c6bd | ||
|
|
956901ae02 | ||
|
|
d562c5b2d5 | ||
|
|
d7b054c3f6 | ||
|
|
901ccad0cf | ||
|
|
b9454e0f0c | ||
|
|
26181907fc | ||
|
|
8368798ad2 | ||
|
|
ed597e5e99 | ||
|
|
2c88c15f7f | ||
|
|
af7cd3cc04 | ||
|
|
9552551f9a | ||
|
|
1c73a0f649 | ||
|
|
e3698b2a07 | ||
|
|
c841f8b360 | ||
|
|
50f9b68d61 | ||
|
|
27b48bc16e | ||
|
|
bccdbaed2b | ||
|
|
398095a667 | ||
|
|
80095824b9 | ||
|
|
d91c1f96cc | ||
|
|
1c005a0292 | ||
|
|
c7ced496ac | ||
|
|
d4ff18834c | ||
|
|
f86d9fd626 | ||
|
|
854604f724 | ||
|
|
cfd3642cb1 | ||
|
|
f493590604 | ||
|
|
8c084a3de8 | ||
|
|
985aa803a4 | ||
|
|
9e319d7bd6 | ||
|
|
a122e176d7 | ||
|
|
88a6f2931e | ||
|
|
54f3a83e25 | ||
|
|
0955c0d8d3 | ||
|
|
6bb5be5216 | ||
|
|
c9cd7e4be0 | ||
|
|
ce2cecf046 | ||
|
|
6e934ee6a1 | ||
|
|
723d7d1263 | ||
|
|
298572893e | ||
|
|
405f6f7368 | ||
|
|
648ccc7c17 | ||
|
|
f5afe0587f | ||
|
|
91a227a475 | ||
|
|
4b905fe9ff | ||
|
|
0f409b7bec | ||
|
|
295c8de858 | ||
|
|
e5252f90af | ||
|
|
c2276155bf | ||
|
|
dbe49bcd87 | ||
|
|
7b936de32c | ||
|
|
9eb34f542c | ||
|
|
194304e544 | ||
|
|
c59fc332d5 | ||
|
|
b43832fe57 | ||
|
|
c24a497a23 | ||
|
|
4096fcd1bf | ||
|
|
9a0e806f78 | ||
|
|
20c9632996 | ||
|
|
65a628ca88 | ||
|
|
d82dbba096 | ||
|
|
58547f6997 | ||
|
|
e6f4eedb1e | ||
|
|
c5b963141f | ||
|
|
5df40bd746 | ||
|
|
403f15dc48 | ||
|
|
704d7451a0 | ||
|
|
fa11071443 | ||
|
|
87351c8a0c | ||
|
|
2f5fb1e68e | ||
|
|
96e1ec6d31 | ||
|
|
ac3cf05f1a | ||
|
|
6335e34395 | ||
|
|
02c2029ac1 | ||
|
|
6914aa3e27 | ||
|
|
f4fcb1cc9a | ||
|
|
b6eec21ec0 | ||
|
|
0ce3aeb189 | ||
|
|
713c8efcbe | ||
|
|
9fa5e39872 | ||
|
|
ace53fa405 | ||
|
|
3c06980107 | ||
|
|
1f26fbb5af | ||
|
|
84c6622122 | ||
|
|
63f099f2f6 | ||
|
|
373ce72984 | ||
|
|
4cf29455e4 | ||
|
|
07db5d497c | ||
|
|
f1bb0afc4e | ||
|
|
d791fe3013 | ||
|
|
c16e22a5c6 | ||
|
|
db7a720445 | ||
|
|
6c09a02099 | ||
|
|
1b3356cafd | ||
|
|
5869902f2c | ||
|
|
28898031f0 | ||
|
|
1ce0f94638 | ||
|
|
f876ad973f | ||
|
|
6014b13234 | ||
|
|
3e9c702c47 | ||
|
|
3ff919ccf1 | ||
|
|
8dc0844c79 | ||
|
|
a2764b68ca | ||
|
|
bbbae072ea | ||
|
|
b4735b5931 | ||
|
|
418638ad16 | ||
|
|
9fb09d3109 | ||
|
|
d7e08f96a5 | ||
|
|
4d49d272eb | ||
|
|
faa6890950 | ||
|
|
9c390f6da4 | ||
|
|
6842277977 | ||
|
|
ddf68d464d | ||
|
|
b2f19e8dc6 | ||
|
|
5714b42975 | ||
|
|
c4e9c49c10 | ||
|
|
9210efb051 | ||
|
|
b9f1b05625 | ||
|
|
828c2e3c71 | ||
|
|
10150a7352 | ||
|
|
baaa45f8c7 | ||
|
|
322af30d6a | ||
|
|
206efbf30d | ||
|
|
a96dee85d2 | ||
|
|
d307568cbc | ||
|
|
0ee27b143c | ||
|
|
7bfb4a9ba5 | ||
|
|
110c73fc8d | ||
|
|
424d9b8385 | ||
|
|
1b48ccc868 | ||
|
|
fac82204b6 | ||
|
|
61f114e655 | ||
|
|
24410bf1bb | ||
|
|
aa24969eee | ||
|
|
a5297d13c4 | ||
|
|
b06bdb83cb | ||
|
|
d06092212f | ||
|
|
914778eae1 | ||
|
|
e14c700c60 | ||
|
|
0848e348bb | ||
|
|
3d5ae42660 | ||
|
|
f207b6b4c9 | ||
|
|
ed2c5078ad | ||
|
|
aec792f5b8 | ||
|
|
17d64de3d5 | ||
|
|
f6bea08535 | ||
|
|
feab6c39b3 | ||
|
|
e999c76882 | ||
|
|
686cc599a2 | ||
|
|
92983556a0 | ||
|
|
837872c3f3 | ||
|
|
55222dc5d1 | ||
|
|
adce6ae851 | ||
|
|
9dc32cb791 | ||
|
|
23dc08c925 | ||
|
|
488a44b88e | ||
|
|
530bdf975e | ||
|
|
d7a6627a1f | ||
|
|
d6066183b9 | ||
|
|
3e2c3ba035 | ||
|
|
e24cba8c35 | ||
|
|
a23013abc1 | ||
|
|
27a4f44de5 | ||
|
|
4e07dbbefc | ||
|
|
dcf4ad2c21 | ||
|
|
04dd861fe3 | ||
|
|
c8ee6c6f6d | ||
|
|
a57e4263d7 | ||
|
|
3d58f0d941 | ||
|
|
a52c9232c4 | ||
|
|
7a059c7a73 | ||
|
|
506910147f | ||
|
|
cf3eb24eb0 | ||
|
|
d965b23b2a | ||
|
|
f660743065 | ||
|
|
4559bd9030 | ||
|
|
3ac98fb101 | ||
|
|
aff52db289 | ||
|
|
ea27dfe08d | ||
|
|
27620af1bf | ||
|
|
bf116308d4 | ||
|
|
3fb27d98ab | ||
|
|
7e45c17730 | ||
|
|
626533d4a7 | ||
|
|
39719f4c17 | ||
|
|
526bd88dc4 | ||
|
|
37201ecaa6 | ||
|
|
02ed879837 | ||
|
|
8b881d3a77 | ||
|
|
f25456ce25 | ||
|
|
dfb1db4ab3 | ||
|
|
4362cb660b | ||
|
|
1aa0749ba8 | ||
|
|
c7f1f6a91f | ||
|
|
888a3fec21 | ||
|
|
8514b88974 | ||
|
|
724ec46129 | ||
|
|
4f1d1d2a8a | ||
|
|
39a387b54c | ||
|
|
2b0034667d | ||
|
|
98202c56ae | ||
|
|
1e06ddf13c | ||
|
|
560071bb68 | ||
|
|
2e49ec47a3 | ||
|
|
e70d618aff | ||
|
|
3e4cf426bd | ||
|
|
3f10924594 | ||
|
|
f8182c335a | ||
|
|
1d8d6a6d68 | ||
|
|
69fccdf5c6 | ||
|
|
27e8d44a56 | ||
|
|
72bc4ebf37 | ||
|
|
353f32e6af | ||
|
|
9bbaa9a2ae | ||
|
|
096fcefae9 | ||
|
|
0f1e292e34 | ||
|
|
195957a7cc | ||
|
|
eb122f45f9 | ||
|
|
c51ac9d6da | ||
|
|
97c9d02c43 | ||
|
|
0c06939f38 | ||
|
|
5db677d74d | ||
|
|
251ce4efbc | ||
|
|
386eabb61f | ||
|
|
b35eabe161 | ||
|
|
9ad9845d69 | ||
|
|
74eb25f9b5 | ||
|
|
044f390fe0 | ||
|
|
06d6e4901e | ||
|
|
3ca9646329 | ||
|
|
4b3e629dfd | ||
|
|
568fae9878 | ||
|
|
a844026f6b | ||
|
|
b677cacb8c | ||
|
|
1d66169ef1 | ||
|
|
74653e57e6 | ||
|
|
29d1d5f062 | ||
|
|
d8e8693d9a | ||
|
|
be737e0047 | ||
|
|
83ef06748d | ||
|
|
5a93f9991a | ||
|
|
8b0602a582 | ||
|
|
eb24ca6def | ||
|
|
22af79606a | ||
|
|
bee12fb89d | ||
|
|
1a9fbab165 | ||
|
|
9a4b9aa69f | ||
|
|
227043e51f | ||
|
|
db3a387224 | ||
|
|
0075f36bbc | ||
|
|
52f45669d1 | ||
|
|
294a13d653 | ||
|
|
eed66894db | ||
|
|
1434695c47 | ||
|
|
390ea65e15 | ||
|
|
6f2bcc6fb0 | ||
|
|
66a762d504 | ||
|
|
aaabec0b55 | ||
|
|
6339800946 | ||
|
|
a3e4a34021 | ||
|
|
6fcf3fedb6 | ||
|
|
34cbb26e47 | ||
|
|
07d0379edd | ||
|
|
96e8cddfc2 | ||
|
|
3c5e4e440b | ||
|
|
5ffcbb9b65 | ||
|
|
ec0fe312af | ||
|
|
3025d8611b | ||
|
|
98612a7cd6 | ||
|
|
6e428054ef | ||
|
|
3eb1c7bd6f | ||
|
|
2ed2158309 | ||
|
|
112d383698 | ||
|
|
11d0c89a59 | ||
|
|
7cffd0e0f5 | ||
|
|
d1f5006e44 | ||
|
|
a10c48182f | ||
|
|
b5348980e2 | ||
|
|
73c5a867c6 | ||
|
|
b8162884d9 | ||
|
|
ebae927a3e | ||
|
|
3dde8b4345 | ||
|
|
6f6720b346 | ||
|
|
1ba0d40bb4 | ||
|
|
3b87b140c9 | ||
|
|
912d74e805 | ||
|
|
6ae329f4a6 | ||
|
|
360db3c7ca | ||
|
|
c39fd4e64d | ||
|
|
f4aec40fc1 | ||
|
|
7fed69fc13 | ||
|
|
f26d95d0bb | ||
|
|
29ee4899bc | ||
|
|
6596c94eb9 | ||
|
|
6f8145b696 | ||
|
|
65f496fefb | ||
|
|
3ca8bf5f98 | ||
|
|
62354350a3 | ||
|
|
898b7eb6f0 | ||
|
|
398edd39f8 | ||
|
|
5fddf374f4 | ||
|
|
2ed8edc19d | ||
|
|
feb88c4f7f | ||
|
|
ebcedcbe93 | ||
|
|
e3995ac776 | ||
|
|
64ee0d07d0 | ||
|
|
524f41177c | ||
|
|
04ea9ff74c | ||
|
|
53bf5e7f36 | ||
|
|
bd60a93cd6 | ||
|
|
8a608b5829 | ||
|
|
151388692a | ||
|
|
bec876d062 | ||
|
|
81238e21a5 | ||
|
|
cc354ee9f2 | ||
|
|
90842073bf | ||
|
|
a5488c3727 | ||
|
|
166b8963bb | ||
|
|
c51644f1b9 | ||
|
|
f124a34730 | ||
|
|
30e882196f | ||
|
|
14830c7cb4 | ||
|
|
5a21269da4 | ||
|
|
cc5466d450 | ||
|
|
a068bff9c1 | ||
|
|
1e54472a1d | ||
|
|
be9e18ddb8 | ||
|
|
5d1aec6280 | ||
|
|
0442b3a683 | ||
|
|
bbc8a8fdc4 | ||
|
|
428872731a | ||
|
|
015d91b50d | ||
|
|
436061be22 | ||
|
|
d447a1db39 | ||
|
|
a4a7dd4314 | ||
|
|
4886772ca3 | ||
|
|
0f19efac2d | ||
|
|
b2d97703c6 | ||
|
|
936f45c7ec | ||
|
|
a865149c65 | ||
|
|
547019bd1b | ||
|
|
953805d97e | ||
|
|
e344cb3d62 | ||
|
|
693a5927a9 | ||
|
|
07e2f87079 | ||
|
|
9b152d8098 | ||
|
|
8daecb5430 | ||
|
|
f1462de729 | ||
|
|
3cb3e5273f | ||
|
|
cad50c68a8 | ||
|
|
c581ffb8a4 | ||
|
|
556778dbdb | ||
|
|
816fab000d | ||
|
|
66d1dd6f81 | ||
|
|
3eae68fc34 | ||
|
|
8545aee4c7 | ||
|
|
29a4f61551 | ||
|
|
c2fd1215f5 | ||
|
|
e3c1375f36 | ||
|
|
4e357601ec | ||
|
|
3674b01634 | ||
|
|
f66aee4e7f | ||
|
|
b2f06dbe3c | ||
|
|
b78076670e | ||
|
|
a08a0703b6 | ||
|
|
f80561e6aa | ||
|
|
f5765b71f2 | ||
|
|
6a6be847c0 | ||
|
|
4e26108a06 | ||
|
|
c517d261bb | ||
|
|
5522a23670 | ||
|
|
64345f90a8 | ||
|
|
879c0d6b78 | ||
|
|
2e8df429df | ||
|
|
7aa1222310 | ||
|
|
2b5142ee4d | ||
|
|
e8d7d339ef | ||
|
|
5324a6d59d | ||
|
|
569b3a46a1 | ||
|
|
fc129e43fd | ||
|
|
b22a0f4a97 | ||
|
|
cf63b5c859 | ||
|
|
b4593a274d | ||
|
|
163daf012a | ||
|
|
63df034bcf | ||
|
|
60787be80c | ||
|
|
f42ae3af21 | ||
|
|
b1fea3c51e | ||
|
|
16d2bbd6e5 | ||
|
|
2505a908c5 | ||
|
|
4b71673ee9 | ||
|
|
242494a9ad | ||
|
|
3a684ce280 | ||
|
|
756da7b844 | ||
|
|
9c465487ae | ||
|
|
e8002a7a62 | ||
|
|
5a53b48026 | ||
|
|
b3f725d9fd | ||
|
|
50f5d5bdf3 | ||
|
|
6c0edd2190 | ||
|
|
d9a0181189 | ||
|
|
09570996a9 | ||
|
|
adc4c855ca | ||
|
|
8b1df06a94 | ||
|
|
7c81eec30c | ||
|
|
faa999d6d0 | ||
|
|
370bfb7a22 | ||
|
|
ae649ec917 | ||
|
|
93b44fcdc1 | ||
|
|
1fe57720c4 | ||
|
|
fca8fa1b1b | ||
|
|
81082ad10f | ||
|
|
2c515636cb | ||
|
|
2f7ac98e34 | ||
|
|
37b39ed1a1 | ||
|
|
73485d5a23 | ||
|
|
28c7827f14 | ||
|
|
e055dc1513 | ||
|
|
c173f14fc0 | ||
|
|
cd30e552a7 | ||
|
|
9bf1a76e91 | ||
|
|
2957b688fd | ||
|
|
add40d524f | ||
|
|
04a55e35b6 | ||
|
|
955ce45448 | ||
|
|
995e64a205 | ||
|
|
bf085f0ef3 | ||
|
|
1374b37882 | ||
|
|
e275f4eb9d | ||
|
|
652d809129 | ||
|
|
c186519bcf | ||
|
|
870fb4f291 | ||
|
|
7cd63489f4 | ||
|
|
616a53888e | ||
|
|
a336cc26f9 | ||
|
|
382088c456 | ||
|
|
ff80531db4 | ||
|
|
105cf3cd1e | ||
|
|
38ba7e695a | ||
|
|
996326a00e | ||
|
|
f2beb82b97 | ||
|
|
41c0702408 | ||
|
|
66b5f75142 | ||
|
|
c2a16ddbab | ||
|
|
5bb194cf89 | ||
|
|
645b9a01c7 | ||
|
|
3a1a5d12de | ||
|
|
12748e7539 | ||
|
|
2f69d4c8ee | ||
|
|
8dbf8b9038 | ||
|
|
fdfe047f3e | ||
|
|
38c3f84c9f | ||
|
|
9c5b071556 | ||
|
|
46dc52e449 | ||
|
|
f469e3853d | ||
|
|
a681a4fcd4 | ||
|
|
575b0bb7b0 | ||
|
|
588cf4bfca | ||
|
|
9cc8c341e7 | ||
|
|
9b657ba224 | ||
|
|
9bc6e83f8a | ||
|
|
572aae320d | ||
|
|
2906899811 | ||
|
|
1a6bf88900 | ||
|
|
e60b28980a | ||
|
|
a253b2ef4b | ||
|
|
0c2e35edc9 | ||
|
|
d0970397a6 | ||
|
|
0afbda0351 | ||
|
|
2009f8b1ca | ||
|
|
35715a0146 | ||
|
|
20e27ceb04 | ||
|
|
7fb614f7af | ||
|
|
83442825e5 | ||
|
|
123c482a69 | ||
|
|
3f091fce59 | ||
|
|
6c5f88aa25 | ||
|
|
23eccebf5b | ||
|
|
15112c1a27 | ||
|
|
d0ca81ff36 | ||
|
|
3e218c494d | ||
|
|
88bd5b12a4 | ||
|
|
5df5983f88 | ||
|
|
aa5ca7cea5 | ||
|
|
02483b2e0b | ||
|
|
f7817866ba | ||
|
|
d62287d54b | ||
|
|
aad074cd8e | ||
|
|
580d179dd0 | ||
|
|
616a514c4d | ||
|
|
bac8d41954 | ||
|
|
f295bb20a1 | ||
|
|
b5ffa2351a | ||
|
|
1b37b52071 | ||
|
|
b1ffd10079 | ||
|
|
39235f5b91 | ||
|
|
8825d94636 | ||
|
|
c8a7b2af56 | ||
|
|
d22b25c030 | ||
|
|
d475994e02 | ||
|
|
f0bb3dfdfb | ||
|
|
e7f0b8eca6 | ||
|
|
0bab6a9fec | ||
|
|
9486fc416c | ||
|
|
fb63aa737a | ||
|
|
58a6ca1d3d | ||
|
|
505f029edb | ||
|
|
815659b898 | ||
|
|
07d16f280c | ||
|
|
f88ddc947c | ||
|
|
65ffdff40c | ||
|
|
c95dccfec6 | ||
|
|
fe83f471f5 | ||
|
|
d2953f602e | ||
|
|
9d07ddeae1 | ||
|
|
ef7810bc95 | ||
|
|
486539b3d3 | ||
|
|
990fb20a2a | ||
|
|
9b61a83721 | ||
|
|
f753519976 | ||
|
|
663e38dcdd | ||
|
|
7570b6489d | ||
|
|
c341d1a71e | ||
|
|
fa10e90c9d | ||
|
|
68501763dd | ||
|
|
cac1d555be | ||
|
|
25ff77c2fd | ||
|
|
2870c7f457 | ||
|
|
ab8d7a86ae | ||
|
|
809359e81e | ||
|
|
6e4cd5bc9c | ||
|
|
de018bd582 | ||
|
|
544642a6ea | ||
|
|
06737bb36f | ||
|
|
7efbfa2d20 | ||
|
|
4d5df92cbc | ||
|
|
1fcb2872b9 | ||
|
|
00c87ca2dd | ||
|
|
d474d68566 | ||
|
|
93e03804d0 | ||
|
|
4591658160 | ||
|
|
a2109b4bda | ||
|
|
deafea9c88 | ||
|
|
93f1a05f5c | ||
|
|
95a573b755 | ||
|
|
2a4623814c | ||
|
|
1017adf743 | ||
|
|
34fb12344c | ||
|
|
ce3358bdf8 | ||
|
|
1159dadfdb | ||
|
|
62516ef07f | ||
|
|
eecd305efd | ||
|
|
a83fa6b2b2 | ||
|
|
b2feafa94c | ||
|
|
2d234e500d | ||
|
|
fee0e7b20e | ||
|
|
a0f6429652 | ||
|
|
8f8b2ae4a3 | ||
|
|
9abdd16721 | ||
|
|
1e5963aeeb | ||
|
|
e25a83bb39 | ||
|
|
e3a67b13ff | ||
|
|
750cbb8399 | ||
|
|
045beb5f36 | ||
|
|
cd8234acba | ||
|
|
de85a7c2bd | ||
|
|
087301933a | ||
|
|
52333b8bd4 | ||
|
|
3768b3c3ca | ||
|
|
09b39e107d | ||
|
|
4b1155bf32 | ||
|
|
3c7fc31c95 | ||
|
|
da3881a486 | ||
|
|
ff12d9adaa | ||
|
|
528cf56f80 | ||
|
|
0ebe3f1f35 | ||
|
|
328680b6cd | ||
|
|
f9dca105a6 | ||
|
|
3664db61e7 | ||
|
|
49677aa799 | ||
|
|
27b771e2ba | ||
|
|
6527cdfa97 | ||
|
|
b2dbe8ef83 | ||
|
|
9bdb0774ad | ||
|
|
8c2ec2cfbe | ||
|
|
00f959ab7e | ||
|
|
f9c4070ad3 | ||
|
|
074325a7ea | ||
|
|
2f521a6a91 | ||
|
|
07959b3cc9 | ||
|
|
21faf8eaeb | ||
|
|
3e2b5dcc3d | ||
|
|
88a8433d31 | ||
|
|
81d418007a | ||
|
|
f88fcf55a3 | ||
|
|
561c8dea08 | ||
|
|
de92ac9e0b | ||
|
|
deead04a6a | ||
|
|
d4771a9b36 | ||
|
|
27b8415d0c | ||
|
|
e1e81e5d97 | ||
|
|
3705680d68 | ||
|
|
a01f546ae3 | ||
|
|
eabc905bac | ||
|
|
37588b6808 | ||
|
|
071db75f04 | ||
|
|
1e00940a90 | ||
|
|
b984566480 | ||
|
|
51aef120a1 | ||
|
|
636d722e8d | ||
|
|
e6da61120a | ||
|
|
b901e0dcf3 | ||
|
|
177b3c93c4 | ||
|
|
9996e4a57e | ||
|
|
4751b6a65c | ||
|
|
b3c79f5c2f | ||
|
|
0e53105ab5 | ||
|
|
0f2a657196 | ||
|
|
89aa2c7a6a | ||
|
|
4c843b6c66 | ||
|
|
130c7c5c58 | ||
|
|
76c364ec2d | ||
|
|
a549c94a15 | ||
|
|
6de8a6907f | ||
|
|
fc31562052 | ||
|
|
cdaa65c07a | ||
|
|
8a278cf9d6 | ||
|
|
97cecc18ce | ||
|
|
536db23e14 | ||
|
|
39d801fb9f | ||
|
|
6d707b21b2 | ||
|
|
2316fe5bbe | ||
|
|
16ec9d2bdb | ||
|
|
2ce4ce4309 | ||
|
|
858c3a5362 | ||
|
|
c124ad0dcd | ||
|
|
ed64c8bb29 | ||
|
|
2678360715 | ||
|
|
54e504dd5a | ||
|
|
b632a6b2cf | ||
|
|
b9e2ac38fa | ||
|
|
4d1c2a5798 | ||
|
|
c69d8a13b3 | ||
|
|
7c358cda17 | ||
|
|
1954a0eb2e | ||
|
|
5500701661 | ||
|
|
21918922f4 | ||
|
|
fad52c5917 | ||
|
|
306811d2a7 | ||
|
|
8b72f2ad79 | ||
|
|
1a1cb696f7 | ||
|
|
582c17b06b | ||
|
|
b156a49cff | ||
|
|
3943cfea06 | ||
|
|
97346c6618 | ||
|
|
474b824902 | ||
|
|
02b5572ccc | ||
|
|
4577ad60c7 | ||
|
|
e683c380e5 | ||
|
|
b660d82516 | ||
|
|
c0dda06499 | ||
|
|
65abd6307d | ||
|
|
6e713dc3b8 | ||
|
|
596a35acca | ||
|
|
4ad84a339f | ||
|
|
833435f8c2 | ||
|
|
4e4942e357 | ||
|
|
1fc8f0a33b | ||
|
|
1f433dea97 | ||
|
|
d8707cad2c | ||
|
|
a399b571ac | ||
|
|
d0e71225c4 | ||
|
|
19d4bf0ea5 | ||
|
|
9e519af887 | ||
|
|
29aa462bfd | ||
|
|
7a91872ee5 | ||
|
|
68307d1012 | ||
|
|
48cb707bb6 | ||
|
|
9322233b37 | ||
|
|
1daf1b9932 | ||
|
|
a3024352ba | ||
|
|
58f07a573f | ||
|
|
a05f33f6a7 | ||
|
|
968624971f | ||
|
|
57e77a5bd2 | ||
|
|
74c65cfdc5 | ||
|
|
399760fda9 | ||
|
|
67b8f95b1e | ||
|
|
d4d6acdf68 | ||
|
|
472baa8bac | ||
|
|
dd74c19858 | ||
|
|
b5f8d447a0 | ||
|
|
5f4a1917a6 | ||
|
|
cf71680aee | ||
|
|
f04b9131cc | ||
|
|
46861fac48 | ||
|
|
4620b667e7 | ||
|
|
49f43ccc0a | ||
|
|
63aa7284c4 | ||
|
|
286ade2d17 | ||
|
|
066d92ecfa | ||
|
|
c5ccabec38 | ||
|
|
548fedb859 | ||
|
|
8d5378a2ca | ||
|
|
bf1843be9e | ||
|
|
d50439cc4d | ||
|
|
6a8f313394 | ||
|
|
c211094d3e | ||
|
|
072b4f3b73 | ||
|
|
08cbcba4ee | ||
|
|
2a9171c623 | ||
|
|
8573679fbb | ||
|
|
811f244fc2 | ||
|
|
a31b2556a3 | ||
|
|
a0ad5cdbfe | ||
|
|
59cf668348 | ||
|
|
09acc26c50 | ||
|
|
0ae7bcff52 | ||
|
|
2210dbac94 | ||
|
|
6b2f654a30 | ||
|
|
3296ac5628 | ||
|
|
6d06cb29df | ||
|
|
b08c7d15cd | ||
|
|
940d620a96 | ||
|
|
a39fa8ae5f | ||
|
|
f859bf160a | ||
|
|
e0512930ae | ||
|
|
7bbf6c553f | ||
|
|
aeb335ebdc | ||
|
|
73ab408b3c | ||
|
|
f333a33f3d | ||
|
|
42b841735e | ||
|
|
e710bd2183 | ||
|
|
84556ba76a | ||
|
|
4eebea91d3 | ||
|
|
4ddadb8792 | ||
|
|
8e65d6288d | ||
|
|
8e18deb74f | ||
|
|
7ae1ad524b | ||
|
|
1ba1b3983a | ||
|
|
aabd6980ac | ||
|
|
3b19310252 | ||
|
|
8201805b28 | ||
|
|
7277c8478b | ||
|
|
963a0dd934 | ||
|
|
3108d58791 | ||
|
|
31b1a6a7e6 | ||
|
|
9ff65d0da4 | ||
|
|
c11abb42d1 | ||
|
|
7b6d81d812 | ||
|
|
ca0daad11f | ||
|
|
a0c4e685c5 | ||
|
|
b30f7a622c | ||
|
|
23f44f12bd | ||
|
|
6c17002e8a | ||
|
|
5dda088335 | ||
|
|
2427cce2c8 | ||
|
|
1c41dae51c | ||
|
|
1a6d72b14c | ||
|
|
3dc646e03e | ||
|
|
96328a8632 | ||
|
|
2cc4488d8e | ||
|
|
a6c2fe4761 | ||
|
|
ed905d3c3d | ||
|
|
81eadbd05c | ||
|
|
ef1e2f8595 | ||
|
|
fae7082049 | ||
|
|
a63de23156 | ||
|
|
8e0dda8480 | ||
|
|
ad7b9ff8b5 | ||
|
|
04f2d0787a | ||
|
|
2ad98a025e | ||
|
|
67516766a6 | ||
|
|
3b2ead3476 | ||
|
|
fc5be2b911 | ||
|
|
466e623dd6 | ||
|
|
a1b487c512 | ||
|
|
0902af8eb5 | ||
|
|
a8a4caf0e4 | ||
|
|
33478517a6 | ||
|
|
0d5a8ca300 | ||
|
|
da22f06d85 | ||
|
|
a918924923 | ||
|
|
5a9416fbcf | ||
|
|
582b5bb3ac | ||
|
|
abe4f1ba03 | ||
|
|
9dfbffa4b8 | ||
|
|
a25ba91876 | ||
|
|
334f109415 | ||
|
|
eb9eb3aa53 | ||
|
|
978c196c78 | ||
|
|
20e7cac743 | ||
|
|
b5d51214ff | ||
|
|
485d4b4897 | ||
|
|
fb6ecebbd1 | ||
|
|
0b69378a03 | ||
|
|
256c12f150 | ||
|
|
8d0349eee0 | ||
|
|
ab0548c9af | ||
|
|
4868bc4df7 | ||
|
|
0900dfe46f | ||
|
|
8dfe53ff7a | ||
|
|
1ae3328642 | ||
|
|
b9e0208aee | ||
|
|
497cc74adc | ||
|
|
eac3814fb5 | ||
|
|
bb331abeba | ||
|
|
6e20bd2dcd | ||
|
|
09961845b4 | ||
|
|
7eacd3bf57 | ||
|
|
72681fa7fb | ||
|
|
500bddebff | ||
|
|
4d3d46f41d | ||
|
|
82d8d9a092 | ||
|
|
30ff139a29 | ||
|
|
dc8420d32d | ||
|
|
bb29c8ba85 | ||
|
|
625780621b | ||
|
|
ea2589dd9c | ||
|
|
942336c454 | ||
|
|
90282707ab | ||
|
|
70f6c41ff7 | ||
|
|
184cf74daa | ||
|
|
a23fb06409 | ||
|
|
3638485977 | ||
|
|
75f3c52d53 | ||
|
|
364973a523 | ||
|
|
678c241962 | ||
|
|
a2aa938e10 | ||
|
|
48eb92e366 | ||
|
|
2894059b63 | ||
|
|
66a272debd | ||
|
|
7d089561c3 | ||
|
|
53f8e73b65 | ||
|
|
37bcf7899e | ||
|
|
86f662aa4a | ||
|
|
a1b958eaac | ||
|
|
06189b2584 | ||
|
|
0bf006cdae | ||
|
|
3f51eb7b63 | ||
|
|
b76443dbde | ||
|
|
5fc823ae08 | ||
|
|
68aec74b47 | ||
|
|
acd23682d1 | ||
|
|
b042397b9f | ||
|
|
951a8208b8 | ||
|
|
9b3c74a095 | ||
|
|
647c0e302a | ||
|
|
b3b22d7595 | ||
|
|
1ba0139683 |
2
.gitattributes
vendored
2
.gitattributes
vendored
@@ -1,5 +1,5 @@
|
|||||||
# Set default behaviour, in case users don't have core.autocrlf set.
|
# Set default behaviour, in case users don't have core.autocrlf set.
|
||||||
* text=auto
|
#* text=auto
|
||||||
|
|
||||||
# These annoying files
|
# These annoying files
|
||||||
rippled.1 binary
|
rippled.1 binary
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -19,6 +19,7 @@
|
|||||||
*.o
|
*.o
|
||||||
build
|
build
|
||||||
tags
|
tags
|
||||||
|
TAGS
|
||||||
bin/rippled
|
bin/rippled
|
||||||
Debug/*.*
|
Debug/*.*
|
||||||
Release/*.*
|
Release/*.*
|
||||||
@@ -72,3 +73,6 @@ My Amplifier XE Results - RippleD
|
|||||||
|
|
||||||
# Compiler intermediate output
|
# Compiler intermediate output
|
||||||
/out.txt
|
/out.txt
|
||||||
|
|
||||||
|
# Build Log
|
||||||
|
rippled-build.log
|
||||||
73
.travis.yml
Normal file
73
.travis.yml
Normal file
@@ -0,0 +1,73 @@
|
|||||||
|
language: cpp
|
||||||
|
compiler:
|
||||||
|
- clang
|
||||||
|
- gcc
|
||||||
|
before_install:
|
||||||
|
- sudo apt-get update -qq
|
||||||
|
- sudo apt-get install -qq python-software-properties
|
||||||
|
- sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
|
||||||
|
- sudo add-apt-repository -y ppa:boost-latest/ppa
|
||||||
|
- sudo apt-get update -qq
|
||||||
|
- sudo apt-get install -qq g++-4.8
|
||||||
|
- sudo apt-get install -qq libboost1.55-all-dev
|
||||||
|
# We want debug symbols for boost as we install gdb later
|
||||||
|
- sudo apt-get install -qq libboost1.55-dbg
|
||||||
|
- | # Setup the BOOST_ROOT
|
||||||
|
export BOOST_ROOT=$HOME/boost_root
|
||||||
|
mkdir -p $BOOST_ROOT/stage
|
||||||
|
ln -s /usr/lib/x86_64-linux-gnu $BOOST_ROOT/stage/lib
|
||||||
|
ln -s /usr/include/boost $BOOST_ROOT/boost
|
||||||
|
- | # Try to patch boost
|
||||||
|
sudo patch /usr/include/boost/bimap/detail/debug/static_error.hpp Builds/travis/static_error.boost.patch
|
||||||
|
sudo patch /usr/include/boost/config/compiler/clang.hpp Builds/travis/clang.boost.patch
|
||||||
|
- sudo apt-get install -qq mlocate
|
||||||
|
- sudo updatedb
|
||||||
|
- sudo locate libboost | grep /lib | grep -e ".a$"
|
||||||
|
- sudo apt-get install -qq protobuf-compiler libprotobuf-dev libssl-dev exuberant-ctags
|
||||||
|
# We need gcc >= 4.8 for some c++11 features
|
||||||
|
- sudo apt-get install -qq gcc-4.8
|
||||||
|
- sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 40 --slave /usr/bin/g++ g++ /usr/bin/g++-4.8
|
||||||
|
- sudo update-alternatives --set gcc /usr/bin/gcc-4.8
|
||||||
|
# Stuff is gold. Nuff said ;)
|
||||||
|
- sudo apt-get -y install binutils-gold
|
||||||
|
# We can get a backtrace if the guy crashes
|
||||||
|
- sudo apt-get -y install gdb
|
||||||
|
# What versions are we ACTUALLY running?
|
||||||
|
- g++ -v
|
||||||
|
- clang -v
|
||||||
|
# Avoid `spurious errors` caused by ~/.npm permission issues
|
||||||
|
# Does it already exist? Who owns? What permissions?
|
||||||
|
- ls -lah ~/.npm || mkdir ~/.npm
|
||||||
|
# Make sure we own it
|
||||||
|
- sudo chown -R $USER ~/.npm
|
||||||
|
|
||||||
|
script:
|
||||||
|
# Set so any failing command will abort the build
|
||||||
|
- set -e
|
||||||
|
# $CC will be either `clang` or `gcc` (If only we could do -j12 ;)
|
||||||
|
- scons $CC.debug
|
||||||
|
# We can be sure we're using the build/$CC.debug variant (-f so never err)
|
||||||
|
- rm -f build/rippled
|
||||||
|
- export RIPPLED_PATH="$PWD/build/$CC.debug/rippled"
|
||||||
|
# See what we've actually built
|
||||||
|
- ldd $RIPPLED_PATH
|
||||||
|
# Run unittests (under gdb)
|
||||||
|
- | # create gdb script
|
||||||
|
echo "set env MALLOC_CHECK_=3" > script.gdb
|
||||||
|
echo "run" >> script.gdb
|
||||||
|
echo "backtrace full" >> script.gdb
|
||||||
|
# gdb --help
|
||||||
|
- cat script.gdb | gdb --ex 'set print thread-events off' --return-child-result --args $RIPPLED_PATH --unittest
|
||||||
|
- npm install
|
||||||
|
# Use build/(gcc|clang).debug/rippled
|
||||||
|
- |
|
||||||
|
echo "exports.default_server_config = {\"rippled_path\" : \"$RIPPLED_PATH\"};" > test/config.js
|
||||||
|
|
||||||
|
# Run integration tests
|
||||||
|
- npm test
|
||||||
|
notifications:
|
||||||
|
email:
|
||||||
|
false
|
||||||
|
irc:
|
||||||
|
channels:
|
||||||
|
- "chat.freenode.net#ripple-dev"
|
||||||
41
Builds/ArchLinux/PKGBUILD
Normal file
41
Builds/ArchLinux/PKGBUILD
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
# Maintainer: Roberto Catini <roberto.catini@gmail.com>
|
||||||
|
|
||||||
|
pkgname=rippled
|
||||||
|
pkgrel=1
|
||||||
|
pkgver=0
|
||||||
|
pkgdesc="Ripple peer-to-peer network daemon"
|
||||||
|
arch=('i686' 'x86_64')
|
||||||
|
url="https://github.com/ripple/rippled"
|
||||||
|
license=('custom:ISC')
|
||||||
|
depends=('protobuf' 'openssl' 'boost-libs')
|
||||||
|
makedepends=('git' 'scons' 'boost')
|
||||||
|
checkdepends=('nodejs')
|
||||||
|
backup=("etc/$pkgname/rippled.cfg")
|
||||||
|
source=("git://github.com/ripple/rippled.git#branch=master")
|
||||||
|
sha512sums=('SKIP')
|
||||||
|
|
||||||
|
pkgver() {
|
||||||
|
cd "$srcdir/$pkgname"
|
||||||
|
git describe --long --tags | sed -r 's/([^-]*-g)/r\1/;s/-/./g'
|
||||||
|
}
|
||||||
|
|
||||||
|
build() {
|
||||||
|
cd "$srcdir/$pkgname"
|
||||||
|
scons build/rippled
|
||||||
|
}
|
||||||
|
|
||||||
|
check() {
|
||||||
|
cd "$srcdir/$pkgname"
|
||||||
|
npm install
|
||||||
|
npm test
|
||||||
|
build/rippled --unittest
|
||||||
|
}
|
||||||
|
|
||||||
|
package() {
|
||||||
|
cd "$srcdir/$pkgname"
|
||||||
|
install -D -m644 LICENSE "$pkgdir/usr/share/licenses/$pkgname/LICENSE"
|
||||||
|
install -D build/rippled "$pkgdir/usr/bin/rippled"
|
||||||
|
install -D -m644 doc/rippled-example.cfg "$pkgdir/etc/$pkgname/rippled.cfg"
|
||||||
|
mkdir -p "$pkgdir/var/lib/$pkgname/db"
|
||||||
|
mkdir -p "$pkgdir/var/log/$pkgname"
|
||||||
|
}
|
||||||
30
Builds/Docker/Dockerfile
Normal file
30
Builds/Docker/Dockerfile
Normal file
@@ -0,0 +1,30 @@
|
|||||||
|
# rippled
|
||||||
|
|
||||||
|
# use the ubuntu base image
|
||||||
|
FROM ubuntu
|
||||||
|
MAINTAINER Roberto Catini roberto.catini@gmail.com
|
||||||
|
|
||||||
|
# make sure the package repository is up to date
|
||||||
|
RUN apt-get update
|
||||||
|
RUN apt-get -y upgrade
|
||||||
|
|
||||||
|
# install the dependencies
|
||||||
|
RUN apt-get -y install git scons pkg-config protobuf-compiler libprotobuf-dev libssl-dev libboost1.55-all-dev
|
||||||
|
|
||||||
|
# download source code from official repository
|
||||||
|
RUN git clone https://github.com/ripple/rippled.git src; cd src/; git checkout master
|
||||||
|
|
||||||
|
# compile
|
||||||
|
RUN cd src/; scons build/rippled
|
||||||
|
|
||||||
|
# move to root directory and strip
|
||||||
|
RUN cp src/build/rippled rippled; strip rippled
|
||||||
|
|
||||||
|
# copy default config
|
||||||
|
RUN cp src/doc/rippled-example.cfg rippled.cfg
|
||||||
|
|
||||||
|
# clean source
|
||||||
|
RUN rm -r src
|
||||||
|
|
||||||
|
# launch rippled when launching the container
|
||||||
|
ENTRYPOINT ./rippled
|
||||||
@@ -45,7 +45,6 @@ linux-g++:QMAKE_CXXFLAGS += \
|
|||||||
-pthread
|
-pthread
|
||||||
|
|
||||||
INCLUDEPATH += \
|
INCLUDEPATH += \
|
||||||
"../../src" \
|
|
||||||
"../../src/leveldb/" \
|
"../../src/leveldb/" \
|
||||||
"../../src/leveldb/port" \
|
"../../src/leveldb/port" \
|
||||||
"../../src/leveldb/include" \
|
"../../src/leveldb/include" \
|
||||||
@@ -63,39 +62,42 @@ UI_HEADERS_DIR += ../../src/ripple_basics
|
|||||||
# New style
|
# New style
|
||||||
#
|
#
|
||||||
SOURCES += \
|
SOURCES += \
|
||||||
../../src/ripple/beast/ripple_beast.cpp \
|
../../src/ripple/beast/ripple_beast.unity.cpp \
|
||||||
../../src/ripple/beast/ripple_beastc.c \
|
../../src/ripple/beast/ripple_beastc.c \
|
||||||
../../src/ripple/http/ripple_http.cpp \
|
../../src/ripple/common/ripple_common.unity.cpp \
|
||||||
../../src/ripple/json/ripple_json.cpp \
|
../../src/ripple/http/ripple_http.unity.cpp \
|
||||||
../../src/ripple/peerfinder/ripple_peerfinder.cpp \
|
../../src/ripple/json/ripple_json.unity.cpp \
|
||||||
../../src/ripple/rpc/ripple_rpc.cpp \
|
../../src/ripple/peerfinder/ripple_peerfinder.unity.cpp \
|
||||||
../../src/ripple/sophia/ripple_sophia.c \
|
../../src/ripple/radmap/ripple_radmap.unity.cpp \
|
||||||
../../src/ripple/sslutil/ripple_sslutil.cpp \
|
../../src/ripple/resource/ripple_resource.unity.cpp \
|
||||||
../../src/ripple/testoverlay/ripple_testoverlay.cpp \
|
../../src/ripple/sitefiles/ripple_sitefiles.unity.cpp \
|
||||||
../../src/ripple/types/ripple_types.cpp \
|
../../src/ripple/sslutil/ripple_sslutil.unity.cpp \
|
||||||
../../src/ripple/validators/ripple_validators.cpp
|
../../src/ripple/testoverlay/ripple_testoverlay.unity.cpp \
|
||||||
|
../../src/ripple/types/ripple_types.unity.cpp \
|
||||||
|
../../src/ripple/validators/ripple_validators.unity.cpp
|
||||||
|
|
||||||
# ---------
|
# ---------
|
||||||
# Old style
|
# Old style
|
||||||
#
|
#
|
||||||
SOURCES += \
|
SOURCES += \
|
||||||
../../src/ripple_app/ripple_app.cpp \
|
../../src/ripple_app/ripple_app.unity.cpp \
|
||||||
../../src/ripple_app/ripple_app_pt1.cpp \
|
../../src/ripple_app/ripple_app_pt1.unity.cpp \
|
||||||
../../src/ripple_app/ripple_app_pt2.cpp \
|
../../src/ripple_app/ripple_app_pt2.unity.cpp \
|
||||||
../../src/ripple_app/ripple_app_pt3.cpp \
|
../../src/ripple_app/ripple_app_pt3.unity.cpp \
|
||||||
../../src/ripple_app/ripple_app_pt4.cpp \
|
../../src/ripple_app/ripple_app_pt4.unity.cpp \
|
||||||
../../src/ripple_app/ripple_app_pt5.cpp \
|
../../src/ripple_app/ripple_app_pt5.unity.cpp \
|
||||||
../../src/ripple_app/ripple_app_pt6.cpp \
|
../../src/ripple_app/ripple_app_pt6.unity.cpp \
|
||||||
../../src/ripple_app/ripple_app_pt7.cpp \
|
../../src/ripple_app/ripple_app_pt7.unity.cpp \
|
||||||
../../src/ripple_app/ripple_app_pt8.cpp \
|
../../src/ripple_app/ripple_app_pt8.unity.cpp \
|
||||||
../../src/ripple_basics/ripple_basics.cpp \
|
../../src/ripple_basics/ripple_basics.unity.cpp \
|
||||||
../../src/ripple_core/ripple_core.cpp \
|
../../src/ripple_core/ripple_core.unity.cpp \
|
||||||
../../src/ripple_data/ripple_data.cpp \
|
../../src/ripple_data/ripple_data.unity.cpp \
|
||||||
../../src/ripple_hyperleveldb/ripple_hyperleveldb.cpp \
|
../../src/ripple_hyperleveldb/ripple_hyperleveldb.unity.cpp \
|
||||||
../../src/ripple_leveldb/ripple_leveldb.cpp \
|
../../src/ripple_leveldb/ripple_leveldb.unity.cpp \
|
||||||
../../src/ripple_mdb/ripple_mdb.c \
|
../../src/ripple_net/ripple_net.unity.cpp \
|
||||||
../../src/ripple_net/ripple_net.cpp \
|
../../src/ripple_overlay/ripple_overlay.unity.cpp \
|
||||||
../../src/ripple_websocket/ripple_websocket.cpp
|
../../src/ripple_rpc/ripple_rpc.unity.cpp \
|
||||||
|
../../src/ripple_websocket/ripple_websocket.unity.cpp
|
||||||
|
|
||||||
LIBS += \
|
LIBS += \
|
||||||
-lboost_date_time-mt\
|
-lboost_date_time-mt\
|
||||||
|
|||||||
@@ -1,30 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
|
||||||
<ImportGroup Label="PropertySheets" />
|
|
||||||
<PropertyGroup Label="UserMacros">
|
|
||||||
<RepoDir>..\..</RepoDir>
|
|
||||||
<SrcDir>$(RepoDir)\src\cpp\ripple</SrcDir>
|
|
||||||
</PropertyGroup>
|
|
||||||
<PropertyGroup>
|
|
||||||
<OutDir>$(RepoDir)\build\VisualStudio2010\$(Configuration).$(Platform)\</OutDir>
|
|
||||||
</PropertyGroup>
|
|
||||||
<PropertyGroup>
|
|
||||||
<IntDir>$(RepoDir)\build\obj\VisualStudio2010\$(Configuration).$(Platform)\</IntDir>
|
|
||||||
</PropertyGroup>
|
|
||||||
<ItemDefinitionGroup>
|
|
||||||
<ClCompile>
|
|
||||||
<AdditionalIncludeDirectories>$(RepoDir);$(RepoDir)\src\cpp\leveldb;$(RepoDir)\src\cpp\leveldb\include;$(RepoDir)\src\cpp\protobuf\src;$(RepoDir)\src\cpp\protobuf\vsprojects;$(RepoDir)\build\proto;$(RepoDir)\Subtrees\beast;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
|
|
||||||
<PreprocessorDefinitions>USE_LEVELDB;BOOST_TEST_ALTERNATIVE_INIT_API;BOOST_TEST_NO_MAIN;_WIN32_WINNT=0x0600;_SCL_SECURE_NO_WARNINGS;_CRT_SECURE_NO_WARNINGS;WIN32;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
|
||||||
<WarningLevel>Level3</WarningLevel>
|
|
||||||
<MultiProcessorCompilation>true</MultiProcessorCompilation>
|
|
||||||
</ClCompile>
|
|
||||||
</ItemDefinitionGroup>
|
|
||||||
<ItemGroup>
|
|
||||||
<BuildMacro Include="RepoDir">
|
|
||||||
<Value>$(RepoDir)</Value>
|
|
||||||
</BuildMacro>
|
|
||||||
<BuildMacro Include="SrcDir">
|
|
||||||
<Value>$(SrcDir)</Value>
|
|
||||||
</BuildMacro>
|
|
||||||
</ItemGroup>
|
|
||||||
</Project>
|
|
||||||
@@ -1,20 +0,0 @@
|
|||||||
|
|
||||||
Microsoft Visual Studio Solution File, Format Version 11.00
|
|
||||||
# Visual Studio 2010
|
|
||||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RippleD", "RippleD.vcxproj", "{EE95954F-3D34-4FB1-ADBD-FE8395233026}"
|
|
||||||
EndProject
|
|
||||||
Global
|
|
||||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
|
||||||
Debug|Win32 = Debug|Win32
|
|
||||||
Release|Win32 = Release|Win32
|
|
||||||
EndGlobalSection
|
|
||||||
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
|
||||||
{EE95954F-3D34-4FB1-ADBD-FE8395233026}.Debug|Win32.ActiveCfg = Debug|Win32
|
|
||||||
{EE95954F-3D34-4FB1-ADBD-FE8395233026}.Debug|Win32.Build.0 = Debug|Win32
|
|
||||||
{EE95954F-3D34-4FB1-ADBD-FE8395233026}.Release|Win32.ActiveCfg = Release|Win32
|
|
||||||
{EE95954F-3D34-4FB1-ADBD-FE8395233026}.Release|Win32.Build.0 = Release|Win32
|
|
||||||
EndGlobalSection
|
|
||||||
GlobalSection(SolutionProperties) = preSolution
|
|
||||||
HideSolutionNode = FALSE
|
|
||||||
EndGlobalSection
|
|
||||||
EndGlobal
|
|
||||||
@@ -1,98 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<Project DefaultTargets="Build" ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
|
||||||
<ItemGroup Label="ProjectConfigurations">
|
|
||||||
<ProjectConfiguration Include="Debug|Win32">
|
|
||||||
<Configuration>Debug</Configuration>
|
|
||||||
<Platform>Win32</Platform>
|
|
||||||
</ProjectConfiguration>
|
|
||||||
<ProjectConfiguration Include="Release|Win32">
|
|
||||||
<Configuration>Release</Configuration>
|
|
||||||
<Platform>Win32</Platform>
|
|
||||||
</ProjectConfiguration>
|
|
||||||
</ItemGroup>
|
|
||||||
<PropertyGroup Label="Globals">
|
|
||||||
<ProjectGuid>{EE95954F-3D34-4FB1-ADBD-FE8395233026}</ProjectGuid>
|
|
||||||
<Keyword>Win32Proj</Keyword>
|
|
||||||
<RootNamespace>RippleD</RootNamespace>
|
|
||||||
</PropertyGroup>
|
|
||||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.Default.props" />
|
|
||||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'" Label="Configuration">
|
|
||||||
<ConfigurationType>Application</ConfigurationType>
|
|
||||||
<UseDebugLibraries>true</UseDebugLibraries>
|
|
||||||
<CharacterSet>NotSet</CharacterSet>
|
|
||||||
</PropertyGroup>
|
|
||||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'" Label="Configuration">
|
|
||||||
<ConfigurationType>Application</ConfigurationType>
|
|
||||||
<UseDebugLibraries>false</UseDebugLibraries>
|
|
||||||
<WholeProgramOptimization>true</WholeProgramOptimization>
|
|
||||||
<CharacterSet>NotSet</CharacterSet>
|
|
||||||
</PropertyGroup>
|
|
||||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.props" />
|
|
||||||
<ImportGroup Label="ExtensionSettings">
|
|
||||||
</ImportGroup>
|
|
||||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
|
||||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
|
||||||
<Import Project="RippleD.props" />
|
|
||||||
</ImportGroup>
|
|
||||||
<ImportGroup Label="PropertySheets" Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
|
||||||
<Import Project="$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props" Condition="exists('$(UserRootDir)\Microsoft.Cpp.$(Platform).user.props')" Label="LocalAppDataPlatform" />
|
|
||||||
</ImportGroup>
|
|
||||||
<PropertyGroup Label="UserMacros" />
|
|
||||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
|
||||||
<LinkIncremental>true</LinkIncremental>
|
|
||||||
</PropertyGroup>
|
|
||||||
<PropertyGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
|
||||||
<LinkIncremental>false</LinkIncremental>
|
|
||||||
</PropertyGroup>
|
|
||||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Debug|Win32'">
|
|
||||||
<ClCompile>
|
|
||||||
<PrecompiledHeader>
|
|
||||||
</PrecompiledHeader>
|
|
||||||
<WarningLevel>Level3</WarningLevel>
|
|
||||||
<Optimization>Disabled</Optimization>
|
|
||||||
<PreprocessorDefinitions>WIN32;_DEBUG;_WINDOWS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
|
||||||
<RuntimeLibrary>MultiThreadedDebug</RuntimeLibrary>
|
|
||||||
<MinimalRebuild>false</MinimalRebuild>
|
|
||||||
</ClCompile>
|
|
||||||
<Link>
|
|
||||||
<SubSystem>Windows</SubSystem>
|
|
||||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
|
||||||
</Link>
|
|
||||||
</ItemDefinitionGroup>
|
|
||||||
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|Win32'">
|
|
||||||
<ClCompile>
|
|
||||||
<WarningLevel>Level3</WarningLevel>
|
|
||||||
<PrecompiledHeader>
|
|
||||||
</PrecompiledHeader>
|
|
||||||
<Optimization>MaxSpeed</Optimization>
|
|
||||||
<FunctionLevelLinking>true</FunctionLevelLinking>
|
|
||||||
<IntrinsicFunctions>true</IntrinsicFunctions>
|
|
||||||
<PreprocessorDefinitions>WIN32;NDEBUG;_WINDOWS;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
|
||||||
<RuntimeLibrary>MultiThreaded</RuntimeLibrary>
|
|
||||||
<MinimalRebuild>false</MinimalRebuild>
|
|
||||||
</ClCompile>
|
|
||||||
<Link>
|
|
||||||
<SubSystem>Windows</SubSystem>
|
|
||||||
<GenerateDebugInformation>true</GenerateDebugInformation>
|
|
||||||
<EnableCOMDATFolding>true</EnableCOMDATFolding>
|
|
||||||
<OptimizeReferences>true</OptimizeReferences>
|
|
||||||
</Link>
|
|
||||||
</ItemDefinitionGroup>
|
|
||||||
<ItemGroup>
|
|
||||||
<ClCompile Include="..\..\modules\ripple_app\ripple_app_pt1.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_app\ripple_app_pt2.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_app\ripple_app_pt3.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_app\ripple_app_pt4.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_basics\ripple_basics.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_data\ripple_data.cpp" />
|
|
||||||
<ClCompile Include="..\..\src\cpp\database\sqlite3.c" />
|
|
||||||
<ClCompile Include="..\..\src\cpp\leveldb_core.cpp" />
|
|
||||||
<ClCompile Include="..\..\src\cpp\protobuf_core.cpp" />
|
|
||||||
<ClCompile Include="..\..\src\cpp\websocket_core.cpp" />
|
|
||||||
<ClCompile Include="..\..\Subtrees\beast\modules\beast_basics\beast_basics.cpp" />
|
|
||||||
<ClCompile Include="..\..\Subtrees\beast\modules\beast_core\beast_core.cpp" />
|
|
||||||
</ItemGroup>
|
|
||||||
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
|
|
||||||
<ImportGroup Label="ExtensionTargets">
|
|
||||||
</ImportGroup>
|
|
||||||
</Project>
|
|
||||||
@@ -1,37 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
|
||||||
<ItemGroup>
|
|
||||||
<ClCompile Include="..\..\modules\ripple_app\ripple_app_pt1.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_app\ripple_app_pt2.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_app\ripple_app_pt3.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_app\ripple_app_pt4.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_basics\ripple_basics.cpp" />
|
|
||||||
<ClCompile Include="..\..\modules\ripple_data\ripple_data.cpp" />
|
|
||||||
<ClCompile Include="..\..\Subtrees\beast\modules\beast_basics\beast_basics.cpp">
|
|
||||||
<Filter>Subtrees</Filter>
|
|
||||||
</ClCompile>
|
|
||||||
<ClCompile Include="..\..\Subtrees\beast\modules\beast_core\beast_core.cpp">
|
|
||||||
<Filter>Subtrees</Filter>
|
|
||||||
</ClCompile>
|
|
||||||
<ClCompile Include="..\..\src\cpp\leveldb_core.cpp">
|
|
||||||
<Filter>Subtrees</Filter>
|
|
||||||
</ClCompile>
|
|
||||||
<ClCompile Include="..\..\src\cpp\protobuf_core.cpp">
|
|
||||||
<Filter>Subtrees</Filter>
|
|
||||||
</ClCompile>
|
|
||||||
<ClCompile Include="..\..\src\cpp\websocket_core.cpp">
|
|
||||||
<Filter>Subtrees</Filter>
|
|
||||||
</ClCompile>
|
|
||||||
<ClCompile Include="..\..\src\cpp\database\sqlite3.c">
|
|
||||||
<Filter>Subtrees</Filter>
|
|
||||||
</ClCompile>
|
|
||||||
</ItemGroup>
|
|
||||||
<ItemGroup>
|
|
||||||
<Filter Include="Ripple">
|
|
||||||
<UniqueIdentifier>{63db902e-0e7a-42d1-b5f5-663e4b48786c}</UniqueIdentifier>
|
|
||||||
</Filter>
|
|
||||||
<Filter Include="Subtrees">
|
|
||||||
<UniqueIdentifier>{469e8a0a-64bf-4fa1-8b6f-81207db68577}</UniqueIdentifier>
|
|
||||||
</Filter>
|
|
||||||
</ItemGroup>
|
|
||||||
</Project>
|
|
||||||
@@ -1,36 +0,0 @@
|
|||||||
|
|
||||||
Microsoft Visual Studio Solution File, Format Version 12.00
|
|
||||||
# Visual Studio 2012
|
|
||||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RippleD", "RippleD.vcxproj", "{B7F39ECD-473C-484D-BC34-31F8362506A5}"
|
|
||||||
EndProject
|
|
||||||
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "beast", "..\..\src\beast\Builds\VisualStudio2012\beast.vcxproj", "{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}"
|
|
||||||
EndProject
|
|
||||||
Global
|
|
||||||
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
|
||||||
Debug|Win32 = Debug|Win32
|
|
||||||
Debug|x64 = Debug|x64
|
|
||||||
Release|Win32 = Release|Win32
|
|
||||||
Release|x64 = Release|x64
|
|
||||||
EndGlobalSection
|
|
||||||
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
|
||||||
{B7F39ECD-473C-484D-BC34-31F8362506A5}.Debug|Win32.ActiveCfg = Debug|Win32
|
|
||||||
{B7F39ECD-473C-484D-BC34-31F8362506A5}.Debug|Win32.Build.0 = Debug|Win32
|
|
||||||
{B7F39ECD-473C-484D-BC34-31F8362506A5}.Debug|x64.ActiveCfg = Debug|x64
|
|
||||||
{B7F39ECD-473C-484D-BC34-31F8362506A5}.Debug|x64.Build.0 = Debug|x64
|
|
||||||
{B7F39ECD-473C-484D-BC34-31F8362506A5}.Release|Win32.ActiveCfg = Release|Win32
|
|
||||||
{B7F39ECD-473C-484D-BC34-31F8362506A5}.Release|Win32.Build.0 = Release|Win32
|
|
||||||
{B7F39ECD-473C-484D-BC34-31F8362506A5}.Release|x64.ActiveCfg = Release|x64
|
|
||||||
{B7F39ECD-473C-484D-BC34-31F8362506A5}.Release|x64.Build.0 = Release|x64
|
|
||||||
{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Debug|Win32.ActiveCfg = Debug|Win32
|
|
||||||
{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Debug|Win32.Build.0 = Debug|Win32
|
|
||||||
{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Debug|x64.ActiveCfg = Debug|x64
|
|
||||||
{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Debug|x64.Build.0 = Debug|x64
|
|
||||||
{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Release|Win32.ActiveCfg = Release|Win32
|
|
||||||
{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Release|Win32.Build.0 = Release|Win32
|
|
||||||
{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Release|x64.ActiveCfg = Release|x64
|
|
||||||
{73C5A0F0-7629-4DE7-9194-BE7AC6C19535}.Release|x64.Build.0 = Release|x64
|
|
||||||
EndGlobalSection
|
|
||||||
GlobalSection(SolutionProperties) = preSolution
|
|
||||||
HideSolutionNode = FALSE
|
|
||||||
EndGlobalSection
|
|
||||||
EndGlobal
|
|
||||||
@@ -1,31 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
|
||||||
<ImportGroup Label="PropertySheets" />
|
|
||||||
<PropertyGroup Label="UserMacros">
|
|
||||||
<RepoDir>..\..</RepoDir>
|
|
||||||
</PropertyGroup>
|
|
||||||
<PropertyGroup>
|
|
||||||
<OutDir>$(RepoDir)\build\VisualStudio2012\$(Configuration).$(Platform)\</OutDir>
|
|
||||||
<IntDir>$(RepoDir)\build\obj\VisualStudio2012\$(Configuration).$(Platform)\</IntDir>
|
|
||||||
<TargetName>rippled</TargetName>
|
|
||||||
</PropertyGroup>
|
|
||||||
<ItemDefinitionGroup>
|
|
||||||
<ClCompile>
|
|
||||||
<PreprocessorDefinitions>_VARIADIC_MAX=10;_WIN32_WINNT=0x0600;_SCL_SECURE_NO_WARNINGS;_CRT_SECURE_NO_WARNINGS;WIN32;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
|
||||||
<MultiProcessorCompilation>true</MultiProcessorCompilation>
|
|
||||||
<WarningLevel>Level3</WarningLevel>
|
|
||||||
<AdditionalIncludeDirectories>$(RepoDir)\src\protobuf\src;$(RepoDir)\src\protobuf\vsprojects;$(RepoDir)\src;$(RepoDir)\src\leveldb;$(RepoDir)\src\leveldb\include;$(RepoDir)\build\proto;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
|
|
||||||
<AdditionalOptions>/bigobj %(AdditionalOptions)</AdditionalOptions>
|
|
||||||
<ExceptionHandling>Async</ExceptionHandling>
|
|
||||||
<DisableSpecificWarnings>4018;4244</DisableSpecificWarnings>
|
|
||||||
</ClCompile>
|
|
||||||
<Link>
|
|
||||||
<AdditionalDependencies>Shlwapi.lib;%(AdditionalDependencies)</AdditionalDependencies>
|
|
||||||
</Link>
|
|
||||||
</ItemDefinitionGroup>
|
|
||||||
<ItemGroup>
|
|
||||||
<BuildMacro Include="RepoDir">
|
|
||||||
<Value>$(RepoDir)</Value>
|
|
||||||
</BuildMacro>
|
|
||||||
</ItemGroup>
|
|
||||||
</Project>
|
|
||||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
4
Builds/VisualStudio2013/.gitattributes
vendored
Normal file
4
Builds/VisualStudio2013/.gitattributes
vendored
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
RippleD.vcxproj -text
|
||||||
|
RippleD.vcxproj.filters -text
|
||||||
|
|
||||||
|
|
||||||
3991
Builds/VisualStudio2013/RippleD.vcxproj
Normal file
3991
Builds/VisualStudio2013/RippleD.vcxproj
Normal file
File diff suppressed because it is too large
Load Diff
5359
Builds/VisualStudio2013/RippleD.vcxproj.filters
Normal file
5359
Builds/VisualStudio2013/RippleD.vcxproj.filters
Normal file
File diff suppressed because it is too large
Load Diff
26
Builds/VisualStudio2013/ripple.sln
Normal file
26
Builds/VisualStudio2013/ripple.sln
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
|
||||||
|
Microsoft Visual Studio Solution File, Format Version 12.00
|
||||||
|
# Visual Studio Express 2013 for Windows Desktop
|
||||||
|
VisualStudioVersion = 12.0.30110.0
|
||||||
|
MinimumVisualStudioVersion = 10.0.40219.1
|
||||||
|
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "RippleD", "RippleD.vcxproj", "{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}"
|
||||||
|
EndProject
|
||||||
|
Global
|
||||||
|
GlobalSection(SolutionConfigurationPlatforms) = preSolution
|
||||||
|
Debug|Win32 = Debug|Win32
|
||||||
|
Debug|x64 = Debug|x64
|
||||||
|
Release|Win32 = Release|Win32
|
||||||
|
Release|x64 = Release|x64
|
||||||
|
EndGlobalSection
|
||||||
|
GlobalSection(ProjectConfigurationPlatforms) = postSolution
|
||||||
|
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Debug|Win32.ActiveCfg = debug|x64
|
||||||
|
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Debug|x64.ActiveCfg = debug|x64
|
||||||
|
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Debug|x64.Build.0 = debug|x64
|
||||||
|
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Release|Win32.ActiveCfg = release|x64
|
||||||
|
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Release|x64.ActiveCfg = release|x64
|
||||||
|
{26B7D9AC-1A80-8EF8-6703-D061F1BECB75}.Release|x64.Build.0 = release|x64
|
||||||
|
EndGlobalSection
|
||||||
|
GlobalSection(SolutionProperties) = preSolution
|
||||||
|
HideSolutionNode = FALSE
|
||||||
|
EndGlobalSection
|
||||||
|
EndGlobal
|
||||||
53
Builds/rpm/rippled.spec
Normal file
53
Builds/rpm/rippled.spec
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
Name: rippled
|
||||||
|
Version: 0.26.4-sp3
|
||||||
|
Release: 1%{?dist}
|
||||||
|
Summary: Ripple peer-to-peer network daemon
|
||||||
|
|
||||||
|
Group: Applications/Internet
|
||||||
|
License: ISC
|
||||||
|
URL: https://github.com/ripple/rippled
|
||||||
|
|
||||||
|
# curl -L -o SOURCES/rippled-release.zip https://github.com/ripple/rippled/archive/release.zip
|
||||||
|
Source0: rippled-release.zip
|
||||||
|
BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
|
||||||
|
|
||||||
|
BuildRequires: gcc-c++ scons openssl-devel protobuf-devel
|
||||||
|
Requires: protobuf openssl
|
||||||
|
|
||||||
|
|
||||||
|
%description
|
||||||
|
Rippled is the server component of the Ripple network.
|
||||||
|
|
||||||
|
|
||||||
|
%prep
|
||||||
|
%setup -n rippled-release
|
||||||
|
|
||||||
|
|
||||||
|
%build
|
||||||
|
# Assume boost is manually installed
|
||||||
|
export RIPPLED_BOOST_HOME=/usr/local/boost_1_55_0
|
||||||
|
scons -j `grep -c processor /proc/cpuinfo` build/rippled
|
||||||
|
|
||||||
|
|
||||||
|
%install
|
||||||
|
rm -rf %{buildroot}
|
||||||
|
mkdir -p %{buildroot}/usr/share/%{name}
|
||||||
|
cp LICENSE %{buildroot}/usr/share/%{name}/
|
||||||
|
mkdir -p %{buildroot}/usr/bin
|
||||||
|
cp build/rippled %{buildroot}/usr/bin/rippled
|
||||||
|
mkdir -p %{buildroot}/etc/%{name}
|
||||||
|
cp doc/rippled-example.cfg %{buildroot}/etc/%{name}/rippled.cfg
|
||||||
|
mkdir -p %{buildroot}/var/lib/%{name}/db
|
||||||
|
mkdir -p %{buildroot}/var/log/%{name}
|
||||||
|
|
||||||
|
|
||||||
|
%clean
|
||||||
|
rm -rf %{buildroot}
|
||||||
|
|
||||||
|
|
||||||
|
%files
|
||||||
|
%defattr(-,root,root,-)
|
||||||
|
/usr/bin/rippled
|
||||||
|
/usr/share/rippled/LICENSE
|
||||||
|
/etc/rippled/rippled-example.cfg
|
||||||
|
|
||||||
13
Builds/travis/clang.boost.patch
Normal file
13
Builds/travis/clang.boost.patch
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
--- /usr/include/boost/config/compiler/clang.hpp 2013-07-20 13:17:10.000000000 -0400
|
||||||
|
+++ /usr/include/boost/config/compiler/clang.rippled.hpp 2014-03-11 16:40:51.000000000 -0400
|
||||||
|
@@ -39,6 +39,10 @@
|
||||||
|
// Clang supports "long long" in all compilation modes.
|
||||||
|
#define BOOST_HAS_LONG_LONG
|
||||||
|
|
||||||
|
+#if defined(__SIZEOF_INT128__)
|
||||||
|
+# define BOOST_HAS_INT128
|
||||||
|
+#endif
|
||||||
|
+
|
||||||
|
//
|
||||||
|
// Dynamic shared object (DSO) and dynamic-link library (DLL) support
|
||||||
|
//
|
||||||
10
Builds/travis/static_error.boost.patch
Normal file
10
Builds/travis/static_error.boost.patch
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
--- /usr/include/boost/bimap/detail/debug/static_error.hpp 2008-03-22 17:45:55.000000000 -0400
|
||||||
|
+++ /usr/include/boost/bimap/detail/debug/static_error.rippled.hpp 2014-03-12 19:40:05.000000000 -0400
|
||||||
|
@@ -25,7 +25,6 @@
|
||||||
|
// a static error.
|
||||||
|
/*===========================================================================*/
|
||||||
|
#define BOOST_BIMAP_STATIC_ERROR(MESSAGE,VARIABLES) \
|
||||||
|
- struct BOOST_PP_CAT(BIMAP_STATIC_ERROR__,MESSAGE) {}; \
|
||||||
|
BOOST_MPL_ASSERT_MSG(false, \
|
||||||
|
BOOST_PP_CAT(BIMAP_STATIC_ERROR__,MESSAGE), \
|
||||||
|
VARIABLES)
|
||||||
22
README.md
22
README.md
@@ -1,8 +1,8 @@
|
|||||||
#Ripple - P2P Payment Network
|
#rippled - Ripple P2P server
|
||||||
|
|
||||||
##[](https://ci.ripple.com/jenkins/job/rippled/)
|
##[](https://travis-ci.org/ripple/rippled)
|
||||||
|
|
||||||
This is the repository for Ripple's `rippled`, reference P2P network server.
|
This is the repository for Ripple's `rippled`, reference P2P server.
|
||||||
|
|
||||||
###Build instructions:
|
###Build instructions:
|
||||||
* https://ripple.com/wiki/Rippled_build_instructions
|
* https://ripple.com/wiki/Rippled_build_instructions
|
||||||
@@ -33,20 +33,8 @@ README for more details.
|
|||||||
Javascript / Mocha tests.
|
Javascript / Mocha tests.
|
||||||
|
|
||||||
## License
|
## License
|
||||||
|
Ripple is open source and permissively licensed under the ISC license. See the
|
||||||
Provided under the terms of the ISC License:
|
LICENSE file for more details.
|
||||||
|
|
||||||
Permission to use, copy, modify, and/or distribute this software for any
|
|
||||||
purpose with or without fee is hereby granted, provided that the above
|
|
||||||
copyright notice and this permission notice appear in all copies.
|
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
|
||||||
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
|
||||||
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
|
||||||
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
|
||||||
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
|
||||||
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
|
||||||
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
|
||||||
|
|
||||||
###For more information:
|
###For more information:
|
||||||
* https://ripple.com
|
* https://ripple.com
|
||||||
|
|||||||
876
SConstruct
876
SConstruct
@@ -1,287 +1,677 @@
|
|||||||
|
# rippled SConstruct
|
||||||
#
|
#
|
||||||
# Ripple - SConstruct
|
'''
|
||||||
#
|
|
||||||
|
|
||||||
import commands
|
Target Builds
|
||||||
import copy
|
----------------------------------------------------------------------------
|
||||||
import glob
|
|
||||||
|
<none> Same as 'install'
|
||||||
|
install Default target and copies it to build/rippled (default)
|
||||||
|
|
||||||
|
all All available variants
|
||||||
|
debug All available debug variants
|
||||||
|
release All available release variants
|
||||||
|
|
||||||
|
clang All clang variants
|
||||||
|
clang.debug clang debug variant
|
||||||
|
clang.release clang release variant
|
||||||
|
|
||||||
|
gcc All gcc variants
|
||||||
|
gcc.debug gcc debug variant
|
||||||
|
gcc.release gcc release variant
|
||||||
|
|
||||||
|
msvc All msvc variants
|
||||||
|
msvc.debug MSVC debug variant
|
||||||
|
msvc.release MSVC release variant
|
||||||
|
|
||||||
|
vcxproj Generate Visual Studio 2013 project file
|
||||||
|
|
||||||
|
If the clang toolchain is detected, then the default target will use it, else
|
||||||
|
the gcc toolchain will be used. On Windows environments, the MSVC toolchain is
|
||||||
|
also detected.
|
||||||
|
|
||||||
|
'''
|
||||||
|
#
|
||||||
|
'''
|
||||||
|
|
||||||
|
TODO
|
||||||
|
|
||||||
|
- Fix git-describe support
|
||||||
|
- Fix printing exemplar command lines
|
||||||
|
- Fix toolchain detection
|
||||||
|
|
||||||
|
|
||||||
|
'''
|
||||||
|
#-------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
import collections
|
||||||
import os
|
import os
|
||||||
import platform
|
import subprocess
|
||||||
import re
|
import sys
|
||||||
|
import textwrap
|
||||||
|
import time
|
||||||
|
import SCons.Action
|
||||||
|
|
||||||
OSX = bool(platform.mac_ver()[0])
|
sys.path.append(os.path.join('src', 'beast', 'site_scons'))
|
||||||
FreeBSD = bool('FreeBSD' == platform.system())
|
|
||||||
Linux = bool('Linux' == platform.system())
|
|
||||||
Ubuntu = bool(Linux and 'Ubuntu' == platform.linux_distribution()[0])
|
|
||||||
Debian = bool(Linux and 'debian' == platform.linux_distribution()[0])
|
|
||||||
Archlinux = bool(Linux and ('','','') == platform.linux_distribution()) #Arch still has issues with the platform module
|
|
||||||
|
|
||||||
#
|
import Beast
|
||||||
# We expect this to be set
|
|
||||||
#
|
#------------------------------------------------------------------------------
|
||||||
BOOST_HOME = os.environ.get("RIPPLED_BOOST_HOME", None)
|
|
||||||
|
def parse_time(t):
|
||||||
|
return time.strptime(t, '%a %b %d %H:%M:%S %Z %Y')
|
||||||
|
|
||||||
|
CHECK_PLATFORMS = 'Debian', 'Ubuntu'
|
||||||
|
CHECK_COMMAND = 'openssl version -a'
|
||||||
|
CHECK_LINE = 'built on: '
|
||||||
|
BUILD_TIME = 'Mon Apr 7 20:33:19 UTC 2014'
|
||||||
|
OPENSSL_ERROR = ('Your openSSL was built on %s; '
|
||||||
|
'rippled needs a version built on or after %s.')
|
||||||
|
UNITY_BUILD_DIRECTORY = 'src/ripple/unity/'
|
||||||
|
|
||||||
|
def check_openssl():
|
||||||
|
if Beast.system.platform in CHECK_PLATFORMS:
|
||||||
|
for line in subprocess.check_output(CHECK_COMMAND.split()).splitlines():
|
||||||
|
if line.startswith(CHECK_LINE):
|
||||||
|
line = line[len(CHECK_LINE):]
|
||||||
|
if parse_time(line) < parse_time(BUILD_TIME):
|
||||||
|
raise Exception(OPENSSL_ERROR % (line, BUILD_TIME))
|
||||||
|
else:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
raise Exception("Didn't find any '%s' line in '$ %s'" %
|
||||||
|
(CHECK_LINE, CHECK_COMMAND))
|
||||||
|
|
||||||
|
|
||||||
if OSX or Ubuntu or Debian or Archlinux:
|
def import_environ(env):
|
||||||
CTAGS = 'ctags'
|
'''Imports environment settings into the construction environment'''
|
||||||
elif FreeBSD:
|
def set(keys):
|
||||||
CTAGS = 'exctags'
|
if type(keys) == list:
|
||||||
else:
|
for key in keys:
|
||||||
CTAGS = 'exuberant-ctags'
|
set(key)
|
||||||
|
return
|
||||||
|
if keys in os.environ:
|
||||||
|
value = os.environ[keys]
|
||||||
|
env[keys] = value
|
||||||
|
set(['GNU_CC', 'GNU_CXX', 'GNU_LINK'])
|
||||||
|
set(['CLANG_CC', 'CLANG_CXX', 'CLANG_LINK'])
|
||||||
|
|
||||||
#
|
def detect_toolchains(env):
|
||||||
# scons tools
|
def is_compiler(comp_from, comp_to):
|
||||||
#
|
return comp_from and comp_to in comp_from
|
||||||
|
|
||||||
env = Environment(
|
def detect_clang(env):
|
||||||
tools = ['default', 'protoc']
|
n = sum(x in env for x in ['CLANG_CC', 'CLANG_CXX', 'CLANG_LINK'])
|
||||||
)
|
if n > 0:
|
||||||
|
if n == 3:
|
||||||
|
return True
|
||||||
|
raise ValueError('CLANG_CC, CLANG_CXX, and CLANG_LINK must be set together')
|
||||||
|
cc = env.get('CC')
|
||||||
|
cxx = env.get('CXX')
|
||||||
|
link = env.subst(env.get('LINK'))
|
||||||
|
if (cc and cxx and link and
|
||||||
|
is_compiler(cc, 'clang') and
|
||||||
|
is_compiler(cxx, 'clang') and
|
||||||
|
is_compiler(link, 'clang')):
|
||||||
|
env['CLANG_CC'] = cc
|
||||||
|
env['CLANG_CXX'] = cxx
|
||||||
|
env['CLANG_LINK'] = link
|
||||||
|
return True
|
||||||
|
cc = env.WhereIs('clang')
|
||||||
|
cxx = env.WhereIs('clang++')
|
||||||
|
link = cxx
|
||||||
|
if (is_compiler(cc, 'clang') and
|
||||||
|
is_compiler(cxx, 'clang') and
|
||||||
|
is_compiler(link, 'clang')):
|
||||||
|
env['CLANG_CC'] = cc
|
||||||
|
env['CLANG_CXX'] = cxx
|
||||||
|
env['CLANG_LINK'] = link
|
||||||
|
return True
|
||||||
|
env['CLANG_CC'] = 'clang'
|
||||||
|
env['CLANG_CXX'] = 'clang++'
|
||||||
|
env['CLANG_LINK'] = env['LINK']
|
||||||
|
return False
|
||||||
|
|
||||||
# Use a newer gcc on FreeBSD
|
def detect_gcc(env):
|
||||||
if FreeBSD:
|
n = sum(x in env for x in ['GNU_CC', 'GNU_CXX', 'GNU_LINK'])
|
||||||
env.Replace(CC = 'gcc46')
|
if n > 0:
|
||||||
env.Replace(CXX = 'g++46')
|
if n == 3:
|
||||||
env.Append(CCFLAGS = ['-Wl,-rpath=/usr/local/lib/gcc46'])
|
return True
|
||||||
env.Append(LINKFLAGS = ['-Wl,-rpath=/usr/local/lib/gcc46'])
|
raise ValueError('GNU_CC, GNU_CXX, and GNU_LINK must be set together')
|
||||||
|
cc = env.get('CC')
|
||||||
|
cxx = env.get('CXX')
|
||||||
|
link = env.subst(env.get('LINK'))
|
||||||
|
if (cc and cxx and link and
|
||||||
|
is_compiler(cc, 'gcc') and
|
||||||
|
is_compiler(cxx, 'g++') and
|
||||||
|
is_compiler(link, 'g++')):
|
||||||
|
env['GNU_CC'] = cc
|
||||||
|
env['GNU_CXX'] = cxx
|
||||||
|
env['GNU_LINK'] = link
|
||||||
|
return True
|
||||||
|
cc = env.WhereIs('gcc')
|
||||||
|
cxx = env.WhereIs('g++')
|
||||||
|
link = cxx
|
||||||
|
if (is_compiler(cc, 'gcc') and
|
||||||
|
is_compiler(cxx, 'g++') and
|
||||||
|
is_compiler(link, 'g++')):
|
||||||
|
env['GNU_CC'] = cc
|
||||||
|
env['GNU_CXX'] = cxx
|
||||||
|
env['GNU_LINK'] = link
|
||||||
|
return True
|
||||||
|
env['GNU_CC'] = 'gcc'
|
||||||
|
env['GNU_CXX'] = 'g++'
|
||||||
|
env['GNU_LINK'] = env['LINK']
|
||||||
|
return False
|
||||||
|
|
||||||
if OSX:
|
toolchains = []
|
||||||
env.Replace(CC= 'clang')
|
if detect_clang(env):
|
||||||
env.Replace(CXX= 'clang++')
|
toolchains.append('clang')
|
||||||
env.Append(CXXFLAGS = ['-std=c++11', '-stdlib=libc++'])
|
if detect_gcc(env):
|
||||||
env.Append(LINKFLAGS='-stdlib=libc++')
|
toolchains.append('gcc')
|
||||||
env['FRAMEWORKS'] = ['AppKit']
|
if env.Detect('cl'):
|
||||||
|
toolchains.append('msvc')
|
||||||
|
return toolchains
|
||||||
|
|
||||||
GCC_VERSION = re.split('\.', commands.getoutput(env['CXX'] + ' -dumpversion'))
|
def files(base):
|
||||||
|
def _iter(base):
|
||||||
|
for parent, _, files in os.walk(base):
|
||||||
|
for path in files:
|
||||||
|
path = os.path.join(parent, path)
|
||||||
|
yield os.path.normpath(path)
|
||||||
|
return list(_iter(base))
|
||||||
|
|
||||||
# Add support for ccache. Usage: scons ccache=1
|
def print_coms(target, source, env):
|
||||||
ccache = ARGUMENTS.get('ccache', 0)
|
'''Display command line exemplars for an environment'''
|
||||||
if int(ccache):
|
print ('Target: ' + Beast.yellow(str(target[0])))
|
||||||
env.Prepend(CC = ['ccache'])
|
# TODO Add 'PROTOCCOM' to this list and make it work
|
||||||
env.Prepend(CXX = ['ccache'])
|
Beast.print_coms(['CXXCOM', 'CCCOM', 'LINKCOM'], env)
|
||||||
ccache_dir = os.getenv('CCACHE_DIR')
|
|
||||||
if ccache_dir:
|
|
||||||
env.Replace(CCACHE_DIR = ccache_dir)
|
|
||||||
|
|
||||||
#
|
#-------------------------------------------------------------------------------
|
||||||
# Builder for CTags
|
|
||||||
#
|
|
||||||
ctags = Builder(action = '$CTAGS $CTAGSOPTIONS -f $TARGET $SOURCES')
|
|
||||||
env.Append(BUILDERS = { 'CTags' : ctags })
|
|
||||||
if OSX:
|
|
||||||
env.Replace(CTAGS = CTAGS)
|
|
||||||
else:
|
|
||||||
env.Replace(CTAGS = CTAGS, CTAGSOPTIONS = '--tag-relative')
|
|
||||||
|
|
||||||
# Use openssl
|
# Set construction variables for the base environment
|
||||||
env.ParseConfig('pkg-config --static --cflags --libs openssl')
|
def config_base(env):
|
||||||
# Use protobuf
|
if False:
|
||||||
env.ParseConfig('pkg-config --static --cflags --libs protobuf')
|
env.Replace(
|
||||||
|
CCCOMSTR='Compiling ' + Beast.blue('$SOURCES'),
|
||||||
|
CXXCOMSTR='Compiling ' + Beast.blue('$SOURCES'),
|
||||||
|
LINKCOMSTR='Linking ' + Beast.blue('$TARGET'),
|
||||||
|
)
|
||||||
|
check_openssl()
|
||||||
|
|
||||||
# Beast uses kvm on FreeBSD
|
env.Append(CPPDEFINES=['OPENSSL_NO_SSL2'])
|
||||||
if FreeBSD:
|
|
||||||
env.Append (
|
try:
|
||||||
LIBS = [
|
BOOST_ROOT = os.path.normpath(os.environ['BOOST_ROOT'])
|
||||||
'kvm'
|
env.Append(CPPPATH=[
|
||||||
|
BOOST_ROOT,
|
||||||
|
])
|
||||||
|
env.Append(LIBPATH=[
|
||||||
|
os.path.join(BOOST_ROOT, 'stage', 'lib'),
|
||||||
|
])
|
||||||
|
env['BOOST_ROOT'] = BOOST_ROOT
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
if Beast.system.windows:
|
||||||
|
try:
|
||||||
|
OPENSSL_ROOT = os.path.normpath(os.environ['OPENSSL_ROOT'])
|
||||||
|
env.Append(CPPPATH=[
|
||||||
|
os.path.join(OPENSSL_ROOT, 'include'),
|
||||||
|
])
|
||||||
|
env.Append(LIBPATH=[
|
||||||
|
os.path.join(OPENSSL_ROOT, 'lib', 'VC', 'static'),
|
||||||
|
])
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
elif Beast.system.osx:
|
||||||
|
OSX_OPENSSL_ROOT = '/usr/local/Cellar/openssl/'
|
||||||
|
most_recent = sorted(os.listdir(OSX_OPENSSL_ROOT))[-1]
|
||||||
|
openssl = os.path.join(OSX_OPENSSL_ROOT, most_recent)
|
||||||
|
env.Prepend(CPPPATH='%s/include' % openssl)
|
||||||
|
env.Prepend(LIBPATH=['%s/lib' % openssl])
|
||||||
|
|
||||||
|
# handle command-line arguments
|
||||||
|
profile_jemalloc = ARGUMENTS.get('profile-jemalloc')
|
||||||
|
if profile_jemalloc:
|
||||||
|
env.Append(CPPDEFINES={'PROFILE_JEMALLOC' : profile_jemalloc})
|
||||||
|
env.Append(LIBS=['jemalloc'])
|
||||||
|
env.Append(LIBPATH=[os.path.join(profile_jemalloc, 'lib')])
|
||||||
|
env.Append(CPPPATH=[os.path.join(profile_jemalloc, 'include')])
|
||||||
|
env.Append(LINKFLAGS=['-Wl,-rpath,' + os.path.join(profile_jemalloc, 'lib')])
|
||||||
|
|
||||||
|
# Set toolchain and variant specific construction variables
|
||||||
|
def config_env(toolchain, variant, env):
|
||||||
|
if variant == 'debug':
|
||||||
|
env.Append(CPPDEFINES=['DEBUG', '_DEBUG'])
|
||||||
|
|
||||||
|
elif variant == 'release':
|
||||||
|
env.Append(CPPDEFINES=['NDEBUG'])
|
||||||
|
|
||||||
|
if toolchain in Split('clang gcc'):
|
||||||
|
if Beast.system.linux:
|
||||||
|
env.ParseConfig('pkg-config --static --cflags --libs openssl')
|
||||||
|
env.ParseConfig('pkg-config --static --cflags --libs protobuf')
|
||||||
|
|
||||||
|
env.Prepend(CFLAGS=['-Wall'])
|
||||||
|
env.Prepend(CXXFLAGS=['-Wall'])
|
||||||
|
|
||||||
|
env.Append(CCFLAGS=[
|
||||||
|
'-Wno-sign-compare',
|
||||||
|
'-Wno-char-subscripts',
|
||||||
|
'-Wno-format',
|
||||||
|
'-g' # generate debug symbols
|
||||||
|
])
|
||||||
|
|
||||||
|
if toolchain == 'clang':
|
||||||
|
env.Append(CCFLAGS=['-Wno-redeclared-class-member'])
|
||||||
|
|
||||||
|
env.Append(CXXFLAGS=[
|
||||||
|
'-frtti',
|
||||||
|
'-std=c++11',
|
||||||
|
'-Wno-invalid-offsetof'])
|
||||||
|
|
||||||
|
if Beast.system.osx:
|
||||||
|
env.Append(CPPDEFINES={
|
||||||
|
'BEAST_COMPILE_OBJECTIVE_CPP': 1,
|
||||||
|
})
|
||||||
|
|
||||||
|
# These should be the same regardless of platform...
|
||||||
|
if Beast.system.osx:
|
||||||
|
env.Append(CCFLAGS=[
|
||||||
|
'-Wno-deprecated',
|
||||||
|
'-Wno-deprecated-declarations',
|
||||||
|
'-Wno-unused-variable',
|
||||||
|
'-Wno-unused-function',
|
||||||
|
])
|
||||||
|
else:
|
||||||
|
if toolchain == 'gcc':
|
||||||
|
env.Append(CCFLAGS=[
|
||||||
|
'-Wno-unused-but-set-variable'
|
||||||
|
])
|
||||||
|
|
||||||
|
boost_libs = [
|
||||||
|
'boost_coroutine',
|
||||||
|
'boost_context',
|
||||||
|
'boost_date_time',
|
||||||
|
'boost_filesystem',
|
||||||
|
'boost_program_options',
|
||||||
|
'boost_regex',
|
||||||
|
'boost_system',
|
||||||
]
|
]
|
||||||
)
|
# We prefer static libraries for boost
|
||||||
|
if env.get('BOOST_ROOT'):
|
||||||
|
# Need to add boost_thread. Not needed when dynamic linking is used.
|
||||||
|
boost_libs += ['boost_thread']
|
||||||
|
static_libs = ['%s/stage/lib/lib%s.a' % (env['BOOST_ROOT'], l) for
|
||||||
|
l in boost_libs]
|
||||||
|
if all(os.path.exists(f) for f in static_libs):
|
||||||
|
boost_libs = [File(f) for f in static_libs]
|
||||||
|
|
||||||
# The required version of boost is documented in the README file.
|
env.Append(LIBS=boost_libs)
|
||||||
BOOST_LIBS = [
|
env.Append(LIBS=['dl'])
|
||||||
'boost_date_time',
|
|
||||||
'boost_filesystem',
|
|
||||||
'boost_program_options',
|
|
||||||
'boost_regex',
|
|
||||||
'boost_system',
|
|
||||||
'boost_thread',
|
|
||||||
'boost_random',
|
|
||||||
]
|
|
||||||
|
|
||||||
# We whitelist platforms where the non -mt version is linked with pthreads. This
|
if Beast.system.osx:
|
||||||
# can be verified with: ldd libboost_filesystem.* If a threading library is
|
env.Append(LIBS=[
|
||||||
# included the platform can be whitelisted.
|
'crypto',
|
||||||
if FreeBSD or Ubuntu or Archlinux or OSX:
|
'protobuf',
|
||||||
# non-mt libs do link with pthreads.
|
'ssl',
|
||||||
env.Append(
|
])
|
||||||
LIBS = BOOST_LIBS
|
env.Append(FRAMEWORKS=[
|
||||||
)
|
'AppKit',
|
||||||
else:
|
'Foundation'
|
||||||
env.Append(
|
])
|
||||||
LIBS = [l + '-mt' for l in BOOST_LIBS]
|
else:
|
||||||
)
|
env.Append(LIBS=['rt'])
|
||||||
|
|
||||||
|
env.Append(LINKFLAGS=[
|
||||||
|
'-rdynamic'
|
||||||
|
])
|
||||||
|
|
||||||
|
if variant == 'release':
|
||||||
|
env.Append(CCFLAGS=[
|
||||||
|
'-O3',
|
||||||
|
'-fno-strict-aliasing'
|
||||||
|
])
|
||||||
|
|
||||||
|
if toolchain != 'msvc':
|
||||||
|
git = Beast.Git(env)
|
||||||
|
if git.exists:
|
||||||
|
id = '%s+%s.%s' % (git.tags, git.user, git.branch)
|
||||||
|
env.Append(CPPDEFINES={'GIT_COMMIT_ID' : '\'"%s"\'' % id })
|
||||||
|
|
||||||
|
if toolchain == 'clang':
|
||||||
|
if Beast.system.osx:
|
||||||
|
env.Replace(CC='clang', CXX='clang++', LINK='clang++')
|
||||||
|
elif 'CLANG_CC' in env and 'CLANG_CXX' in env and 'CLANG_LINK' in env:
|
||||||
|
env.Replace(CC=env['CLANG_CC'],
|
||||||
|
CXX=env['CLANG_CXX'],
|
||||||
|
LINK=env['CLANG_LINK'])
|
||||||
|
# C and C++
|
||||||
|
# Add '-Wshorten-64-to-32'
|
||||||
|
env.Append(CCFLAGS=[])
|
||||||
|
# C++ only
|
||||||
|
env.Append(CXXFLAGS=[
|
||||||
|
'-Wno-mismatched-tags',
|
||||||
|
'-Wno-deprecated-register',
|
||||||
|
])
|
||||||
|
|
||||||
|
elif toolchain == 'gcc':
|
||||||
|
if 'GNU_CC' in env and 'GNU_CXX' in env and 'GNU_LINK' in env:
|
||||||
|
env.Replace(CC=env['GNU_CC'],
|
||||||
|
CXX=env['GNU_CXX'],
|
||||||
|
LINK=env['GNU_LINK'])
|
||||||
|
# Why is this only for gcc?!
|
||||||
|
env.Append(CCFLAGS=['-Wno-unused-local-typedefs'])
|
||||||
|
|
||||||
|
# If we are in debug mode, use GCC-specific functionality to add
|
||||||
|
# extra error checking into the code (e.g. std::vector will throw
|
||||||
|
# for out-of-bounds conditions)
|
||||||
|
if variant == 'debug':
|
||||||
|
env.Append(CPPDEFINES={
|
||||||
|
'_FORTIFY_SOURCE': 2
|
||||||
|
})
|
||||||
|
env.Append(CCFLAGS=[
|
||||||
|
'-O0'
|
||||||
|
])
|
||||||
|
|
||||||
|
elif toolchain == 'msvc':
|
||||||
|
env.Append (CPPPATH=[
|
||||||
|
os.path.join('src', 'protobuf', 'src'),
|
||||||
|
os.path.join('src', 'protobuf', 'vsprojects'),
|
||||||
|
])
|
||||||
|
env.Append(CCFLAGS=[
|
||||||
|
'/bigobj', # Increase object file max size
|
||||||
|
'/EHa', # ExceptionHandling all
|
||||||
|
'/fp:precise', # Floating point behavior
|
||||||
|
'/Gd', # __cdecl calling convention
|
||||||
|
'/Gm-', # Minimal rebuild: disabled
|
||||||
|
'/GR', # Enable RTTI
|
||||||
|
'/Gy-', # Function level linking: disabled
|
||||||
|
'/FS',
|
||||||
|
'/MP', # Multiprocessor compilation
|
||||||
|
'/openmp-', # pragma omp: disabled
|
||||||
|
'/Zc:forScope', # Language extension: for scope
|
||||||
|
'/Zi', # Generate complete debug info
|
||||||
|
'/errorReport:none', # No error reporting to Internet
|
||||||
|
'/nologo', # Suppress login banner
|
||||||
|
#'/Fd${TARGET}.pdb', # Path: Program Database (.pdb)
|
||||||
|
'/W3', # Warning level 3
|
||||||
|
'/WX-', # Disable warnings as errors
|
||||||
|
'/wd"4018"',
|
||||||
|
'/wd"4244"',
|
||||||
|
'/wd"4267"',
|
||||||
|
'/wd"4800"', # Disable C4800 (int to bool performance)
|
||||||
|
])
|
||||||
|
env.Append(CPPDEFINES={
|
||||||
|
'_WIN32_WINNT' : '0x6000',
|
||||||
|
})
|
||||||
|
env.Append(CPPDEFINES=[
|
||||||
|
'_SCL_SECURE_NO_WARNINGS',
|
||||||
|
'_CRT_SECURE_NO_WARNINGS',
|
||||||
|
'WIN32_CONSOLE',
|
||||||
|
])
|
||||||
|
env.Append(LIBS=[
|
||||||
|
'ssleay32MT.lib',
|
||||||
|
'libeay32MT.lib',
|
||||||
|
'Shlwapi.lib',
|
||||||
|
'kernel32.lib',
|
||||||
|
'user32.lib',
|
||||||
|
'gdi32.lib',
|
||||||
|
'winspool.lib',
|
||||||
|
'comdlg32.lib',
|
||||||
|
'advapi32.lib',
|
||||||
|
'shell32.lib',
|
||||||
|
'ole32.lib',
|
||||||
|
'oleaut32.lib',
|
||||||
|
'uuid.lib',
|
||||||
|
'odbc32.lib',
|
||||||
|
'odbccp32.lib',
|
||||||
|
])
|
||||||
|
env.Append(LINKFLAGS=[
|
||||||
|
'/DEBUG',
|
||||||
|
'/DYNAMICBASE',
|
||||||
|
'/ERRORREPORT:NONE',
|
||||||
|
#'/INCREMENTAL',
|
||||||
|
'/MACHINE:X64',
|
||||||
|
'/MANIFEST',
|
||||||
|
#'''/MANIFESTUAC:"level='asInvoker' uiAccess='false'"''',
|
||||||
|
'/nologo',
|
||||||
|
'/NXCOMPAT',
|
||||||
|
'/SUBSYSTEM:CONSOLE',
|
||||||
|
'/TLBID:1',
|
||||||
|
])
|
||||||
|
|
||||||
|
if variant == 'debug':
|
||||||
|
env.Append(CCFLAGS=[
|
||||||
|
'/GS', # Buffers security check: enable
|
||||||
|
'/MTd', # Language: Multi-threaded Debug CRT
|
||||||
|
'/Od', # Optimization: Disabled
|
||||||
|
'/RTC1', # Run-time error checks:
|
||||||
|
])
|
||||||
|
env.Append(CPPDEFINES=[
|
||||||
|
'_CRTDBG_MAP_ALLOC'
|
||||||
|
])
|
||||||
|
else:
|
||||||
|
env.Append(CCFLAGS=[
|
||||||
|
'/MT', # Language: Multi-threaded CRT
|
||||||
|
'/Ox', # Optimization: Full
|
||||||
|
])
|
||||||
|
|
||||||
|
else:
|
||||||
|
raise SCons.UserError('Unknown toolchain == "%s"' % toolchain)
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
#
|
|
||||||
# VFALCO NOTE Clean area.
|
|
||||||
#
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
#
|
|
||||||
# Nothing having to do with directories, source files,
|
|
||||||
# or include paths should reside outside the boundaries.
|
|
||||||
#
|
|
||||||
|
|
||||||
# List of includes passed to the C++ compiler.
|
# Configure the base construction environment
|
||||||
# These are all relative to the repo dir.
|
root_dir = Dir('#').srcnode().get_abspath() # Path to this SConstruct file
|
||||||
#
|
build_dir = os.path.join('build')
|
||||||
INCLUDE_PATHS = [
|
base = Environment(
|
||||||
'.',
|
toolpath=[os.path.join ('src', 'beast', 'site_scons', 'site_tools')],
|
||||||
|
tools=['default', 'Protoc', 'VSProject'],
|
||||||
|
ENV=os.environ,
|
||||||
|
TARGET_ARCH='x86_64')
|
||||||
|
import_environ(base)
|
||||||
|
config_base(base)
|
||||||
|
base.Append(CPPPATH=[
|
||||||
'src',
|
'src',
|
||||||
'src/leveldb',
|
os.path.join('src', 'beast'),
|
||||||
'src/leveldb/port',
|
os.path.join(build_dir, 'proto'),
|
||||||
'src/leveldb/include',
|
])
|
||||||
'src/beast',
|
|
||||||
'build/proto'
|
|
||||||
]
|
|
||||||
|
|
||||||
# if BOOST_HOME:
|
# Configure the toolchains, variants, default toolchain, and default target
|
||||||
# INCLUDE_PATHS.append(BOOST_HOME)
|
variants = ['debug', 'release']
|
||||||
|
all_toolchains = ['clang', 'gcc', 'msvc']
|
||||||
#-------------------------------------------------------------------------------
|
if Beast.system.osx:
|
||||||
#
|
toolchains = ['clang']
|
||||||
# Compiled sources
|
default_toolchain = 'clang'
|
||||||
#
|
|
||||||
|
|
||||||
COMPILED_FILES = []
|
|
||||||
|
|
||||||
# -------------------
|
|
||||||
# Beast unity sources
|
|
||||||
#
|
|
||||||
if OSX:
|
|
||||||
# OSX: Use the Objective C++ version of beast_core
|
|
||||||
COMPILED_FILES.extend (['src/ripple/beast/ripple_beastobjc.mm'])
|
|
||||||
else:
|
else:
|
||||||
COMPILED_FILES.extend (['src/ripple/beast/ripple_beast.cpp'])
|
toolchains = detect_toolchains(base)
|
||||||
COMPILED_FILES.extend (['src/ripple/beast/ripple_beastc.c'])
|
if not toolchains:
|
||||||
|
raise ValueError('No toolchains detected!')
|
||||||
|
if 'msvc' in toolchains:
|
||||||
|
default_toolchain = 'msvc'
|
||||||
|
elif 'gcc' in toolchains:
|
||||||
|
if 'clang' in toolchains:
|
||||||
|
cxx = os.environ.get('CXX', 'g++')
|
||||||
|
default_toolchain = 'clang' if 'clang' in cxx else 'gcc'
|
||||||
|
else:
|
||||||
|
default_toolchain = 'gcc'
|
||||||
|
elif 'clang' in toolchains:
|
||||||
|
default_toolchain = 'clang'
|
||||||
|
else:
|
||||||
|
raise ValueError("Don't understand toolchains in " + str(toolchains))
|
||||||
|
default_variant = 'release'
|
||||||
|
default_target = None
|
||||||
|
|
||||||
# ------------------------------
|
for source in [
|
||||||
# New-style Ripple unity sources
|
'src/ripple/proto/ripple.proto',
|
||||||
#
|
]:
|
||||||
COMPILED_FILES.extend([
|
base.Protoc([],
|
||||||
'src/ripple/http/ripple_http.cpp',
|
source,
|
||||||
'src/ripple/json/ripple_json.cpp',
|
PROTOCPROTOPATH=[os.path.dirname(source)],
|
||||||
'src/ripple/peerfinder/ripple_peerfinder.cpp',
|
PROTOCOUTDIR=os.path.join(build_dir, 'proto'),
|
||||||
'src/ripple/rpc/ripple_rpc.cpp',
|
PROTOCPYTHONOUTDIR=None)
|
||||||
'src/ripple/sophia/ripple_sophia.c',
|
|
||||||
'src/ripple/sslutil/ripple_sslutil.cpp',
|
|
||||||
'src/ripple/testoverlay/ripple_testoverlay.cpp',
|
|
||||||
'src/ripple/types/ripple_types.cpp',
|
|
||||||
'src/ripple/validators/ripple_validators.cpp'
|
|
||||||
])
|
|
||||||
|
|
||||||
# ------------------------------
|
|
||||||
# Old-style Ripple unity sources
|
|
||||||
#
|
|
||||||
COMPILED_FILES.extend([
|
|
||||||
'src/ripple_app/ripple_app.cpp',
|
|
||||||
'src/ripple_app/ripple_app_pt1.cpp',
|
|
||||||
'src/ripple_app/ripple_app_pt2.cpp',
|
|
||||||
'src/ripple_app/ripple_app_pt3.cpp',
|
|
||||||
'src/ripple_app/ripple_app_pt4.cpp',
|
|
||||||
'src/ripple_app/ripple_app_pt5.cpp',
|
|
||||||
'src/ripple_app/ripple_app_pt6.cpp',
|
|
||||||
'src/ripple_app/ripple_app_pt7.cpp',
|
|
||||||
'src/ripple_app/ripple_app_pt8.cpp',
|
|
||||||
'src/ripple_basics/ripple_basics.cpp',
|
|
||||||
'src/ripple_core/ripple_core.cpp',
|
|
||||||
'src/ripple_data/ripple_data.cpp',
|
|
||||||
'src/ripple_hyperleveldb/ripple_hyperleveldb.cpp',
|
|
||||||
'src/ripple_leveldb/ripple_leveldb.cpp',
|
|
||||||
'src/ripple_mdb/ripple_mdb.c',
|
|
||||||
'src/ripple_net/ripple_net.cpp',
|
|
||||||
'src/ripple_websocket/ripple_websocket.cpp'
|
|
||||||
])
|
|
||||||
|
|
||||||
#
|
|
||||||
#
|
|
||||||
#-------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
# Map top level source directories to their location in the outputs
|
|
||||||
#
|
|
||||||
|
|
||||||
VariantDir('build/obj/src', 'src', duplicate=0)
|
|
||||||
|
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
|
|
||||||
# Add the list of includes to compiler include paths.
|
class ObjectBuilder(object):
|
||||||
#
|
def __init__(self, env, variant_dirs):
|
||||||
for path in INCLUDE_PATHS:
|
self.env = env
|
||||||
env.Append (CPPPATH = [ path ])
|
self.variant_dirs = variant_dirs
|
||||||
|
self.objects = []
|
||||||
|
|
||||||
if BOOST_HOME:
|
def add_source_files(self, *filenames, **kwds):
|
||||||
env.Prepend (CPPPATH = [ BOOST_HOME ])
|
for filename in filenames:
|
||||||
|
env = self.env
|
||||||
#-------------------------------------------------------------------------------
|
if kwds:
|
||||||
|
env = env.Clone()
|
||||||
# Apparently, only linux uses -ldl
|
env.Prepend(**kwds)
|
||||||
if Linux: # not FreeBSD:
|
path = UNITY_BUILD_DIRECTORY + filename
|
||||||
env.Append(
|
o = env.Object(Beast.variantFile(path, self.variant_dirs))
|
||||||
LIBS = [
|
self.objects.append(o)
|
||||||
'dl', # dynamic linking for linux
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
env.Append(
|
|
||||||
LIBS = \
|
|
||||||
# rt is for clock_nanosleep in beast
|
|
||||||
['rt'] if not OSX else [] +\
|
|
||||||
[
|
|
||||||
'z'
|
|
||||||
]
|
|
||||||
)
|
|
||||||
|
|
||||||
# We prepend, in case there's another BOOST somewhere on the path
|
|
||||||
# such, as installed into `/usr/lib/`
|
|
||||||
if BOOST_HOME is not None:
|
|
||||||
env.Prepend(
|
|
||||||
LIBPATH = ["%s/stage/lib" % BOOST_HOME])
|
|
||||||
|
|
||||||
if not OSX:
|
|
||||||
env.Append(LINKFLAGS = [
|
|
||||||
'-rdynamic', '-pthread',
|
|
||||||
])
|
|
||||||
|
|
||||||
DEBUGFLAGS = ['-g', '-DDEBUG', '-D_DEBUG']
|
|
||||||
|
|
||||||
env.Append(CCFLAGS = ['-pthread', '-Wall', '-Wno-sign-compare', '-Wno-char-subscripts']+DEBUGFLAGS)
|
|
||||||
env.Append(CXXFLAGS = ['-O1', '-pthread', '-Wno-invalid-offsetof', '-Wformat']+DEBUGFLAGS)
|
|
||||||
|
|
||||||
|
|
||||||
# RTTI is required for Beast and CountedObject.
|
# Declare the targets
|
||||||
#
|
aliases = collections.defaultdict(list)
|
||||||
env.Append(CXXFLAGS = ['-frtti'])
|
msvc_configs = []
|
||||||
|
for toolchain in all_toolchains:
|
||||||
|
for variant in variants:
|
||||||
|
# Configure this variant's construction environment
|
||||||
|
env = base.Clone()
|
||||||
|
config_env(toolchain, variant, env)
|
||||||
|
variant_name = '%s.%s' % (toolchain, variant)
|
||||||
|
variant_dir = os.path.join(build_dir, variant_name)
|
||||||
|
variant_dirs = {
|
||||||
|
os.path.join(variant_dir, 'src') :
|
||||||
|
'src',
|
||||||
|
os.path.join(variant_dir, 'proto') :
|
||||||
|
os.path.join (build_dir, 'proto'),
|
||||||
|
}
|
||||||
|
for dest, source in variant_dirs.iteritems():
|
||||||
|
env.VariantDir(dest, source, duplicate=0)
|
||||||
|
|
||||||
if (int(GCC_VERSION[0]) == 4 and int(GCC_VERSION[1]) == 6):
|
object_builder = ObjectBuilder(env, variant_dirs)
|
||||||
env.Append(CXXFLAGS = ['-std=c++0x'])
|
object_builder.add_source_files(
|
||||||
elif (int(GCC_VERSION[0]) > 4 or (int(GCC_VERSION[0]) == 4 and int(GCC_VERSION[1]) >= 7)):
|
'app.cpp',
|
||||||
env.Append(CXXFLAGS = ['-std=c++11'])
|
'app1.cpp',
|
||||||
|
'app2.cpp',
|
||||||
|
'app3.cpp',
|
||||||
|
'app4.cpp',
|
||||||
|
'app5.cpp',
|
||||||
|
'app6.cpp',
|
||||||
|
'app7.cpp',
|
||||||
|
'app8.cpp',
|
||||||
|
'app9.cpp',
|
||||||
|
'basics.cpp',
|
||||||
|
'beast.cpp',
|
||||||
|
'common.cpp',
|
||||||
|
'core.cpp',
|
||||||
|
'data.cpp',
|
||||||
|
'http.cpp',
|
||||||
|
'json.cpp',
|
||||||
|
'net.cpp',
|
||||||
|
'overlay.cpp',
|
||||||
|
'peerfinder.cpp',
|
||||||
|
'protobuf.cpp',
|
||||||
|
'ripple.proto.cpp',
|
||||||
|
'resource.cpp',
|
||||||
|
'rpcx.cpp',
|
||||||
|
'sitefiles.cpp',
|
||||||
|
'sslutil.cpp',
|
||||||
|
'types.cpp',
|
||||||
|
'validators.cpp',
|
||||||
|
'websocket.cpp',
|
||||||
|
)
|
||||||
|
|
||||||
# FreeBSD doesn't support O_DSYNC
|
object_builder.add_source_files(
|
||||||
if FreeBSD:
|
'beastc.c',
|
||||||
env.Append(CPPFLAGS = ['-DMDB_DSYNC=O_SYNC'])
|
CCFLAGS=['-Wno-array-bounds'])
|
||||||
|
|
||||||
if OSX:
|
object_builder.add_source_files(
|
||||||
env.Append(LINKFLAGS = ['-L/usr/local/opt/openssl/lib'])
|
'nodestore.cpp',
|
||||||
env.Append(CXXFLAGS = ['-I/usr/local/opt/openssl/include'])
|
CPPPATH=[
|
||||||
|
'src/leveldb/include',
|
||||||
|
#'src/hyperleveldb/include', # hyper
|
||||||
|
'src/rocksdb2/include',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
PROTO_SRCS = env.Protoc([], 'src/ripple_data/protocol/ripple.proto', PROTOCOUTDIR='build/proto', PROTOCPYTHONOUTDIR=None)
|
if 'gcc' in toolchain:
|
||||||
env.Clean(PROTO_SRCS, 'site_scons/site_tools/protoc.pyc')
|
no_uninitialized_warning = {'CCFLAGS': ['-Wno-maybe-uninitialized']}
|
||||||
|
else:
|
||||||
|
no_uninitialized_warning = {}
|
||||||
|
|
||||||
# Only tag actual Ripple files.
|
object_builder.add_source_files(
|
||||||
TAG_SRCS = copy.copy(COMPILED_FILES)
|
'leveldb.cpp',
|
||||||
|
CPPPATH=[
|
||||||
|
'src/leveldb/',
|
||||||
|
'src/leveldb/include',
|
||||||
|
'src/snappy/snappy',
|
||||||
|
'src/snappy/config',
|
||||||
|
],
|
||||||
|
**no_uninitialized_warning
|
||||||
|
)
|
||||||
|
|
||||||
# Derive the object files from the source files.
|
object_builder.add_source_files(
|
||||||
OBJECT_FILES = []
|
'hyperleveldb.cpp',
|
||||||
|
CPPPATH=[
|
||||||
|
'src/hyperleveldb',
|
||||||
|
'src/snappy/snappy',
|
||||||
|
'src/snappy/config',
|
||||||
|
],
|
||||||
|
**no_uninitialized_warning
|
||||||
|
)
|
||||||
|
|
||||||
OBJECT_FILES.append(PROTO_SRCS[0])
|
object_builder.add_source_files(
|
||||||
|
'rocksdb.cpp',
|
||||||
|
CPPPATH=[
|
||||||
|
'src/rocksdb2',
|
||||||
|
'src/rocksdb2/include',
|
||||||
|
'src/snappy/snappy',
|
||||||
|
'src/snappy/config',
|
||||||
|
],
|
||||||
|
**no_uninitialized_warning
|
||||||
|
)
|
||||||
|
|
||||||
for file in COMPILED_FILES:
|
object_builder.add_source_files(
|
||||||
OBJECT_FILES.append('build/obj/' + file)
|
'snappy.cpp',
|
||||||
|
CCFLAGS=['-Wno-unused-function'],
|
||||||
|
CPPPATH=[
|
||||||
|
'src/snappy/snappy',
|
||||||
|
'src/snappy/config',
|
||||||
|
]
|
||||||
|
)
|
||||||
|
|
||||||
#
|
if toolchain == "clang" and Beast.system.osx:
|
||||||
# Targets
|
object_builder.add_source_files('beastobjc.mm')
|
||||||
#
|
|
||||||
|
|
||||||
rippled = env.Program('build/rippled', OBJECT_FILES)
|
target = env.Program(
|
||||||
|
target=os.path.join(variant_dir, 'rippled'),
|
||||||
|
source=object_builder.objects
|
||||||
|
)
|
||||||
|
|
||||||
tags = env.CTags('tags', TAG_SRCS)
|
if toolchain == default_toolchain and variant == default_variant:
|
||||||
|
default_target = target
|
||||||
|
install_target = env.Install (build_dir, source=default_target)
|
||||||
|
env.Alias ('install', install_target)
|
||||||
|
env.Default (install_target)
|
||||||
|
aliases['all'].extend(install_target)
|
||||||
|
if toolchain == 'msvc':
|
||||||
|
config = env.VSProjectConfig(variant, 'x64', target, env)
|
||||||
|
msvc_configs.append(config)
|
||||||
|
if toolchain in toolchains:
|
||||||
|
aliases['all'].extend(target)
|
||||||
|
aliases[variant].extend(target)
|
||||||
|
aliases[toolchain].extend(target)
|
||||||
|
env.Alias(variant_name, target)
|
||||||
|
|
||||||
Default(rippled, tags)
|
for key, value in aliases.iteritems():
|
||||||
|
env.Alias(key, value)
|
||||||
|
|
||||||
|
vcxproj = base.VSProject(
|
||||||
|
os.path.join('Builds', 'VisualStudio2013', 'RippleD'),
|
||||||
|
source = [],
|
||||||
|
VSPROJECT_ROOT_DIRS = ['src/beast', 'src', '.'],
|
||||||
|
VSPROJECT_CONFIGS = msvc_configs)
|
||||||
|
base.Alias('vcxproj', vcxproj)
|
||||||
|
|||||||
24
bin/LedgerTool.py
Executable file
24
bin/LedgerTool.py
Executable file
@@ -0,0 +1,24 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
|
||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import sys
|
||||||
|
import traceback
|
||||||
|
|
||||||
|
from ripple.ledger import Server
|
||||||
|
from ripple.ledger.commands import Cache, Info, Print
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util import Log
|
||||||
|
from ripple.util.CommandList import CommandList
|
||||||
|
|
||||||
|
_COMMANDS = CommandList(Cache, Info, Print)
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
try:
|
||||||
|
server = Server.Server()
|
||||||
|
args = list(ARGS.command)
|
||||||
|
_COMMANDS.run_safe(args.pop(0), server, *args)
|
||||||
|
except Exception as e:
|
||||||
|
if ARGS.verbose:
|
||||||
|
print(traceback.format_exc(), sys.stderr)
|
||||||
|
Log.error(e)
|
||||||
8
bin/README.md
Normal file
8
bin/README.md
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
Unit Tests
|
||||||
|
==========
|
||||||
|
|
||||||
|
To run the Python unit tests, execute:
|
||||||
|
|
||||||
|
python -m unittest discover
|
||||||
|
|
||||||
|
from this directory.
|
||||||
251
bin/decorator.py
Normal file
251
bin/decorator.py
Normal file
@@ -0,0 +1,251 @@
|
|||||||
|
########################## LICENCE ###############################
|
||||||
|
|
||||||
|
# Copyright (c) 2005-2012, Michele Simionato
|
||||||
|
# All rights reserved.
|
||||||
|
|
||||||
|
# Redistribution and use in source and binary forms, with or without
|
||||||
|
# modification, are permitted provided that the following conditions are
|
||||||
|
# met:
|
||||||
|
|
||||||
|
# Redistributions of source code must retain the above copyright
|
||||||
|
# notice, this list of conditions and the following disclaimer.
|
||||||
|
# Redistributions in bytecode form must reproduce the above copyright
|
||||||
|
# notice, this list of conditions and the following disclaimer in
|
||||||
|
# the documentation and/or other materials provided with the
|
||||||
|
# distribution.
|
||||||
|
|
||||||
|
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||||
|
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||||
|
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||||
|
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||||
|
# HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
|
||||||
|
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
|
||||||
|
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
|
||||||
|
# OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
||||||
|
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
|
||||||
|
# TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
|
||||||
|
# USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
|
||||||
|
# DAMAGE.
|
||||||
|
|
||||||
|
"""
|
||||||
|
Decorator module, see http://pypi.python.org/pypi/decorator
|
||||||
|
for the documentation.
|
||||||
|
"""
|
||||||
|
|
||||||
|
__version__ = '3.4.0'
|
||||||
|
|
||||||
|
__all__ = ["decorator", "FunctionMaker", "contextmanager"]
|
||||||
|
|
||||||
|
import sys, re, inspect
|
||||||
|
if sys.version >= '3':
|
||||||
|
from inspect import getfullargspec
|
||||||
|
def get_init(cls):
|
||||||
|
return cls.__init__
|
||||||
|
else:
|
||||||
|
class getfullargspec(object):
|
||||||
|
"A quick and dirty replacement for getfullargspec for Python 2.X"
|
||||||
|
def __init__(self, f):
|
||||||
|
self.args, self.varargs, self.varkw, self.defaults = \
|
||||||
|
inspect.getargspec(f)
|
||||||
|
self.kwonlyargs = []
|
||||||
|
self.kwonlydefaults = None
|
||||||
|
def __iter__(self):
|
||||||
|
yield self.args
|
||||||
|
yield self.varargs
|
||||||
|
yield self.varkw
|
||||||
|
yield self.defaults
|
||||||
|
def get_init(cls):
|
||||||
|
return cls.__init__.im_func
|
||||||
|
|
||||||
|
DEF = re.compile('\s*def\s*([_\w][_\w\d]*)\s*\(')
|
||||||
|
|
||||||
|
# basic functionality
|
||||||
|
class FunctionMaker(object):
|
||||||
|
"""
|
||||||
|
An object with the ability to create functions with a given signature.
|
||||||
|
It has attributes name, doc, module, signature, defaults, dict and
|
||||||
|
methods update and make.
|
||||||
|
"""
|
||||||
|
def __init__(self, func=None, name=None, signature=None,
|
||||||
|
defaults=None, doc=None, module=None, funcdict=None):
|
||||||
|
self.shortsignature = signature
|
||||||
|
if func:
|
||||||
|
# func can be a class or a callable, but not an instance method
|
||||||
|
self.name = func.__name__
|
||||||
|
if self.name == '<lambda>': # small hack for lambda functions
|
||||||
|
self.name = '_lambda_'
|
||||||
|
self.doc = func.__doc__
|
||||||
|
self.module = func.__module__
|
||||||
|
if inspect.isfunction(func):
|
||||||
|
argspec = getfullargspec(func)
|
||||||
|
self.annotations = getattr(func, '__annotations__', {})
|
||||||
|
for a in ('args', 'varargs', 'varkw', 'defaults', 'kwonlyargs',
|
||||||
|
'kwonlydefaults'):
|
||||||
|
setattr(self, a, getattr(argspec, a))
|
||||||
|
for i, arg in enumerate(self.args):
|
||||||
|
setattr(self, 'arg%d' % i, arg)
|
||||||
|
if sys.version < '3': # easy way
|
||||||
|
self.shortsignature = self.signature = \
|
||||||
|
inspect.formatargspec(
|
||||||
|
formatvalue=lambda val: "", *argspec)[1:-1]
|
||||||
|
else: # Python 3 way
|
||||||
|
allargs = list(self.args)
|
||||||
|
allshortargs = list(self.args)
|
||||||
|
if self.varargs:
|
||||||
|
allargs.append('*' + self.varargs)
|
||||||
|
allshortargs.append('*' + self.varargs)
|
||||||
|
elif self.kwonlyargs:
|
||||||
|
allargs.append('*') # single star syntax
|
||||||
|
for a in self.kwonlyargs:
|
||||||
|
allargs.append('%s=None' % a)
|
||||||
|
allshortargs.append('%s=%s' % (a, a))
|
||||||
|
if self.varkw:
|
||||||
|
allargs.append('**' + self.varkw)
|
||||||
|
allshortargs.append('**' + self.varkw)
|
||||||
|
self.signature = ', '.join(allargs)
|
||||||
|
self.shortsignature = ', '.join(allshortargs)
|
||||||
|
self.dict = func.__dict__.copy()
|
||||||
|
# func=None happens when decorating a caller
|
||||||
|
if name:
|
||||||
|
self.name = name
|
||||||
|
if signature is not None:
|
||||||
|
self.signature = signature
|
||||||
|
if defaults:
|
||||||
|
self.defaults = defaults
|
||||||
|
if doc:
|
||||||
|
self.doc = doc
|
||||||
|
if module:
|
||||||
|
self.module = module
|
||||||
|
if funcdict:
|
||||||
|
self.dict = funcdict
|
||||||
|
# check existence required attributes
|
||||||
|
assert hasattr(self, 'name')
|
||||||
|
if not hasattr(self, 'signature'):
|
||||||
|
raise TypeError('You are decorating a non function: %s' % func)
|
||||||
|
|
||||||
|
def update(self, func, **kw):
|
||||||
|
"Update the signature of func with the data in self"
|
||||||
|
func.__name__ = self.name
|
||||||
|
func.__doc__ = getattr(self, 'doc', None)
|
||||||
|
func.__dict__ = getattr(self, 'dict', {})
|
||||||
|
func.func_defaults = getattr(self, 'defaults', ())
|
||||||
|
func.__kwdefaults__ = getattr(self, 'kwonlydefaults', None)
|
||||||
|
func.__annotations__ = getattr(self, 'annotations', None)
|
||||||
|
callermodule = sys._getframe(3).f_globals.get('__name__', '?')
|
||||||
|
func.__module__ = getattr(self, 'module', callermodule)
|
||||||
|
func.__dict__.update(kw)
|
||||||
|
|
||||||
|
def make(self, src_templ, evaldict=None, addsource=False, **attrs):
|
||||||
|
"Make a new function from a given template and update the signature"
|
||||||
|
src = src_templ % vars(self) # expand name and signature
|
||||||
|
evaldict = evaldict or {}
|
||||||
|
mo = DEF.match(src)
|
||||||
|
if mo is None:
|
||||||
|
raise SyntaxError('not a valid function template\n%s' % src)
|
||||||
|
name = mo.group(1) # extract the function name
|
||||||
|
names = set([name] + [arg.strip(' *') for arg in
|
||||||
|
self.shortsignature.split(',')])
|
||||||
|
for n in names:
|
||||||
|
if n in ('_func_', '_call_'):
|
||||||
|
raise NameError('%s is overridden in\n%s' % (n, src))
|
||||||
|
if not src.endswith('\n'): # add a newline just for safety
|
||||||
|
src += '\n' # this is needed in old versions of Python
|
||||||
|
try:
|
||||||
|
code = compile(src, '<string>', 'single')
|
||||||
|
# print >> sys.stderr, 'Compiling %s' % src
|
||||||
|
exec code in evaldict
|
||||||
|
except:
|
||||||
|
print >> sys.stderr, 'Error in generated code:'
|
||||||
|
print >> sys.stderr, src
|
||||||
|
raise
|
||||||
|
func = evaldict[name]
|
||||||
|
if addsource:
|
||||||
|
attrs['__source__'] = src
|
||||||
|
self.update(func, **attrs)
|
||||||
|
return func
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def create(cls, obj, body, evaldict, defaults=None,
|
||||||
|
doc=None, module=None, addsource=True, **attrs):
|
||||||
|
"""
|
||||||
|
Create a function from the strings name, signature and body.
|
||||||
|
evaldict is the evaluation dictionary. If addsource is true an attribute
|
||||||
|
__source__ is added to the result. The attributes attrs are added,
|
||||||
|
if any.
|
||||||
|
"""
|
||||||
|
if isinstance(obj, str): # "name(signature)"
|
||||||
|
name, rest = obj.strip().split('(', 1)
|
||||||
|
signature = rest[:-1] #strip a right parens
|
||||||
|
func = None
|
||||||
|
else: # a function
|
||||||
|
name = None
|
||||||
|
signature = None
|
||||||
|
func = obj
|
||||||
|
self = cls(func, name, signature, defaults, doc, module)
|
||||||
|
ibody = '\n'.join(' ' + line for line in body.splitlines())
|
||||||
|
return self.make('def %(name)s(%(signature)s):\n' + ibody,
|
||||||
|
evaldict, addsource, **attrs)
|
||||||
|
|
||||||
|
def decorator(caller, func=None):
|
||||||
|
"""
|
||||||
|
decorator(caller) converts a caller function into a decorator;
|
||||||
|
decorator(caller, func) decorates a function using a caller.
|
||||||
|
"""
|
||||||
|
if func is not None: # returns a decorated function
|
||||||
|
evaldict = func.func_globals.copy()
|
||||||
|
evaldict['_call_'] = caller
|
||||||
|
evaldict['_func_'] = func
|
||||||
|
return FunctionMaker.create(
|
||||||
|
func, "return _call_(_func_, %(shortsignature)s)",
|
||||||
|
evaldict, undecorated=func, __wrapped__=func)
|
||||||
|
else: # returns a decorator
|
||||||
|
if inspect.isclass(caller):
|
||||||
|
name = caller.__name__.lower()
|
||||||
|
callerfunc = get_init(caller)
|
||||||
|
doc = 'decorator(%s) converts functions/generators into ' \
|
||||||
|
'factories of %s objects' % (caller.__name__, caller.__name__)
|
||||||
|
fun = getfullargspec(callerfunc).args[1] # second arg
|
||||||
|
elif inspect.isfunction(caller):
|
||||||
|
name = '_lambda_' if caller.__name__ == '<lambda>' \
|
||||||
|
else caller.__name__
|
||||||
|
callerfunc = caller
|
||||||
|
doc = caller.__doc__
|
||||||
|
fun = getfullargspec(callerfunc).args[0] # first arg
|
||||||
|
else: # assume caller is an object with a __call__ method
|
||||||
|
name = caller.__class__.__name__.lower()
|
||||||
|
callerfunc = caller.__call__.im_func
|
||||||
|
doc = caller.__call__.__doc__
|
||||||
|
fun = getfullargspec(callerfunc).args[1] # second arg
|
||||||
|
evaldict = callerfunc.func_globals.copy()
|
||||||
|
evaldict['_call_'] = caller
|
||||||
|
evaldict['decorator'] = decorator
|
||||||
|
return FunctionMaker.create(
|
||||||
|
'%s(%s)' % (name, fun),
|
||||||
|
'return decorator(_call_, %s)' % fun,
|
||||||
|
evaldict, undecorated=caller, __wrapped__=caller,
|
||||||
|
doc=doc, module=caller.__module__)
|
||||||
|
|
||||||
|
######################### contextmanager ########################

def __call__(self, func):
    'Context manager decorator'
    # Wrap `func` so each invocation runs inside `self` used as a
    # context manager (`with _self_: ...`).
    return FunctionMaker.create(
        func, "with _self_: return _func_(%(shortsignature)s)",
        dict(_self_=self, _func_=func), __wrapped__=func)


try:  # Python >= 3.2
    from contextlib import _GeneratorContextManager

    ContextManager = type(
        'ContextManager', (_GeneratorContextManager,), dict(__call__=__call__))

except ImportError:  # Python >= 2.5
    from contextlib import GeneratorContextManager

    def __init__(self, f, *a, **k):
        # Older contextlib takes an already-started generator rather than
        # the factory plus its arguments.
        return GeneratorContextManager.__init__(self, f(*a, **k))

    ContextManager = type(
        'ContextManager', (GeneratorContextManager,),
        dict(__call__=__call__, __init__=__init__))

contextmanager = decorator(ContextManager)
|
||||||
4
bin/jsonpath_rw/__init__.py
Normal file
4
bin/jsonpath_rw/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# Package interface for jsonpath_rw: re-export the JSONPath AST node
# types and the top-level parse() entry point.
from .jsonpath import *
from .parser import parse

__version__ = '1.3.0'
|
||||||
510
bin/jsonpath_rw/jsonpath.py
Normal file
510
bin/jsonpath_rw/jsonpath.py
Normal file
@@ -0,0 +1,510 @@
|
|||||||
|
from __future__ import unicode_literals, print_function, absolute_import, division, generators, nested_scopes
|
||||||
|
import logging
|
||||||
|
import six
|
||||||
|
from six.moves import xrange
|
||||||
|
from itertools import *
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Turn on/off the automatic creation of id attributes
|
||||||
|
# ... could be a kwarg pervasively but uses are rare and simple today
|
||||||
|
auto_id_field = None
|
||||||
|
|
||||||
|
class JSONPath(object):
    """
    The base class for JSONPath abstract syntax; those
    methods stubbed here are the interface to supported
    JSONPath semantics.
    """

    def find(self, data):
        """
        All `JSONPath` types support `find()`, which returns an iterable of `DatumInContext`s.
        They keep track of the path followed to the current location, so if the calling code
        has some opinion about that, it can be passed in here as a starting point.
        """
        raise NotImplementedError()

    def update(self, data, val):
        "Returns `data` with the specified path replaced by `val`"
        raise NotImplementedError()

    def child(self, child):
        """
        Equivalent to Child(self, next) but with some canonicalization
        """
        # `this`/root on the left is a no-op; `this` on the right is a
        # no-op; root on the right restarts from the root.
        if isinstance(self, (This, Root)):
            return child
        if isinstance(child, This):
            return self
        if isinstance(child, Root):
            return child
        return Child(self, child)

    def make_datum(self, value):
        # Idempotently wrap a raw value as a root-anchored datum.
        if isinstance(value, DatumInContext):
            return value
        return DatumInContext(value, path=Root(), context=None)
|
||||||
|
|
||||||
|
class DatumInContext(object):
    """
    Represents a datum along a path from a context.

    Essentially a zipper but with a structure represented by JsonPath,
    and where the context is more of a parent pointer than a proper
    representation of the context.

    For quick-and-dirty work, this proxies any non-special attributes
    to the underlying datum, but the actual datum can (and usually should)
    be retrieved via the `value` attribute.

    To place `datum` within another, use `datum.in_context(context=..., path=...)`
    which extends the path. If the datum already has a context, it places the entire
    context within that passed in, so an object can be built from the inside
    out.
    """

    @classmethod
    def wrap(cls, data):
        # Idempotent: already-wrapped data passes through untouched.
        if isinstance(data, cls):
            return data
        return cls(data)

    def __init__(self, value, path=None, context=None):
        self.value = value
        # Default path is `this` (the identity path).
        self.path = path if path else This()
        self.context = DatumInContext.wrap(context) if context is not None else None

    def in_context(self, context, path):
        context = DatumInContext.wrap(context)

        if not self.context:
            return DatumInContext(value=self.value, path=path, context=context)
        # Datum already has a context: push the new context underneath the
        # existing chain so the structure can be built from the inside out.
        return DatumInContext(value=self.value,
                              path=self.path,
                              context=context.in_context(path=path, context=context))

    @property
    def full_path(self):
        # Concatenation of every path segment from the outermost context down.
        if self.context is None:
            return self.path
        return self.context.full_path.child(self.path)

    @property
    def id_pseudopath(self):
        """
        Looks like a path, but with ids stuck in when available
        """
        try:
            pseudopath = Fields(str(self.value[auto_id_field]))
        except (TypeError, AttributeError, KeyError):  # This may not be all the interesting exceptions
            pseudopath = self.path

        if not self.context:
            return pseudopath
        return self.context.id_pseudopath.child(pseudopath)

    def __repr__(self):
        return '%s(value=%r, path=%r, context=%r)' % (self.__class__.__name__, self.value, self.path, self.context)

    def __eq__(self, other):
        return (isinstance(other, DatumInContext)
                and other.value == self.value
                and other.path == self.path
                and self.context == other.context)
|
||||||
|
|
||||||
|
class AutoIdForDatum(DatumInContext):
    """
    This behaves like a DatumInContext, but the value is
    always the path leading up to it, not including the "id",
    and with any "id" fields along the way replacing the prior
    segment of the path

    For example, it will make "foo.bar.id" return a datum
    that behaves like DatumInContext(value="foo.bar", path="foo.bar.id").

    This is disabled by default; it can be turned on by
    settings the `auto_id_field` global to a value other
    than `None`.
    """

    def __init__(self, datum, id_field=None):
        """
        Invariant is that datum.path is the path from context to datum. The auto id
        will either be the id in the datum (if present) or the id of the context
        followed by the path to the datum.

        The path to this datum is always the path to the context, the path to the
        datum, and then the auto id field.
        """
        self.datum = datum
        # Fall back to the module-level auto_id_field setting.
        self.id_field = id_field if id_field else auto_id_field

    @property
    def value(self):
        # The "value" of an auto-id datum is the id-ified path string.
        return str(self.datum.id_pseudopath)

    @property
    def path(self):
        return self.id_field

    @property
    def context(self):
        return self.datum

    def __repr__(self):
        return '%s(%r)' % (self.__class__.__name__, self.datum)

    def in_context(self, context, path):
        # Re-contextualize the underlying datum and re-wrap it.
        return AutoIdForDatum(self.datum.in_context(context=context, path=path))

    def __eq__(self, other):
        return (isinstance(other, AutoIdForDatum)
                and other.datum == self.datum
                and self.id_field == other.id_field)
|
||||||
|
|
||||||
|
|
||||||
|
class Root(JSONPath):
    """
    The JSONPath referring to the "root" object. Concrete syntax is '$'.
    The root is the topmost datum without any context attached.
    """

    def find(self, data):
        # Walk context pointers upward until a datum with no context is
        # reached; that datum, re-anchored at Root(), is the single match.
        if isinstance(data, DatumInContext):
            if data.context is None:
                return [DatumInContext(data.value, context=None, path=Root())]
            return Root().find(data.context)
        return [DatumInContext(data, path=Root(), context=None)]

    def update(self, data, val):
        # Replacing the root replaces the whole document.
        return val

    def __str__(self):
        return '$'

    def __repr__(self):
        return 'Root()'

    def __eq__(self, other):
        return isinstance(other, Root)
|
||||||
|
|
||||||
|
class This(JSONPath):
    """
    The JSONPath referring to the current datum. Concrete syntax is '@'.
    """

    def find(self, datum):
        # The current datum matches itself (wrapped if it is a raw value).
        return [DatumInContext.wrap(datum)]

    def update(self, data, val):
        # Updating `this` replaces the datum outright.
        return val

    def __str__(self):
        return '`this`'

    def __repr__(self):
        return 'This()'

    def __eq__(self, other):
        return isinstance(other, This)
|
||||||
|
|
||||||
|
class Child(JSONPath):
    """
    JSONPath that first matches the left, then the right.
    Concrete syntax is <left> '.' <right>
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, datum):
        """
        Extra special case: auto ids do not have children,
        so cut it off right now rather than auto id the auto id
        """
        results = []
        for subdata in self.left.find(datum):
            if isinstance(subdata, AutoIdForDatum):
                continue
            results.extend(self.right.find(subdata))
        return results

    def __eq__(self, other):
        return isinstance(other, Child) and self.left == other.left and self.right == other.right

    def __str__(self):
        return '%s.%s' % (self.left, self.right)

    def __repr__(self):
        return '%s(%r, %r)' % (self.__class__.__name__, self.left, self.right)
|
||||||
|
|
||||||
|
class Parent(JSONPath):
    """
    JSONPath that matches the parent node of the current match.
    Will crash if no such parent exists.
    Available via named operator `parent`.
    """

    def find(self, datum):
        # NOTE(review): if the datum has no context this yields [None],
        # which downstream code will likely crash on (per the docstring).
        return [DatumInContext.wrap(datum).context]

    def __eq__(self, other):
        return isinstance(other, Parent)

    def __str__(self):
        return '`parent`'

    def __repr__(self):
        return 'Parent()'
|
||||||
|
|
||||||
|
|
||||||
|
class Where(JSONPath):
    """
    JSONPath that first matches the left, and then
    filters for only those nodes that have
    a match on the right.

    WARNING: Subject to change. May want to have "contains"
    or some other better word for it.
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, data):
        # BUG FIX: evaluate the right-hand expression against each individual
        # left-hand match (`subdata`), not against the original `data`.
        # Filtering on `data` made the predicate constant, so either every
        # candidate passed or none did.
        return [subdata for subdata in self.left.find(data) if self.right.find(subdata)]

    def __str__(self):
        return '%s where %s' % (self.left, self.right)

    def __eq__(self, other):
        return isinstance(other, Where) and other.left == self.left and other.right == self.right
|
||||||
|
|
||||||
|
class Descendants(JSONPath):
    """
    JSONPath that matches first the left expression then any descendant
    of it which matches the right expression.
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def find(self, datum):
        # <left> .. <right> ==> <left> . (<right> | *..<right> | [*]..<right>)
        #
        # With with a wonky caveat that since Slice() has funky coercions
        # we cannot just delegate to that equivalence or we'll hit an
        # infinite loop. So right here we implement the coercion-free version.

        # Get all left matches into a list
        left_matches = self.left.find(datum)
        if not isinstance(left_matches, list):
            left_matches = [left_matches]

        def match_recursively(datum):
            # Matches of the right-hand pattern directly at this node.
            right_matches = self.right.find(datum)

            # Manually do the * or [*] to avoid coercion and recurse just the right-hand pattern
            if isinstance(datum.value, list):
                recursive_matches = [submatch
                                     for i in range(0, len(datum.value))
                                     for submatch in match_recursively(DatumInContext(datum.value[i], context=datum, path=Index(i)))]

            elif isinstance(datum.value, dict):
                recursive_matches = [submatch
                                     for field in datum.value.keys()
                                     for submatch in match_recursively(DatumInContext(datum.value[field], context=datum, path=Fields(field)))]

            else:
                # Atomic value: nothing to descend into.
                recursive_matches = []

            return right_matches + list(recursive_matches)

        # TODO: repeatable iterator instead of list?
        return [submatch
                for left_match in left_matches
                for submatch in match_recursively(left_match)]

    def is_singular(self):
        # BUG FIX: the original definition omitted `self`, so calling
        # instance.is_singular() raised TypeError. Siblings Union and
        # Intersect already declare it correctly.
        return False

    def __str__(self):
        return '%s..%s' % (self.left, self.right)

    def __eq__(self, other):
        return isinstance(other, Descendants) and self.left == other.left and self.right == other.right
|
||||||
|
|
||||||
|
class Union(JSONPath):
    """
    JSONPath that returns the union of the results of each match.
    This is pretty shoddily implemented for now. The nicest semantics
    in case of mismatched bits (list vs atomic) is to put
    them all in a list, but I haven't done that yet.

    WARNING: Any appearance of this being the _concatenation_ is
    coincidence. It may even be a bug! (or laziness)
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def is_singular(self):
        # A union can always yield multiple results.
        return False

    def find(self, data):
        # Concatenate both result lists (see class docstring caveat).
        matches = self.left.find(data)
        return matches + self.right.find(data)
|
||||||
|
|
||||||
|
class Intersect(JSONPath):
    """
    JSONPath for bits that match *both* patterns.

    This can be accomplished a couple of ways. The most
    efficient is to actually build the intersected
    AST as in building a state machine for matching the
    intersection of regular languages. The next
    idea is to build a filtered data and match against
    that.
    """

    def __init__(self, left, right):
        self.left = left
        self.right = right

    def is_singular(self):
        # An intersection can always yield multiple results.
        return False

    def find(self, data):
        # Not yet implemented -- see the class docstring for candidate designs.
        raise NotImplementedError()
|
||||||
|
|
||||||
|
class Fields(JSONPath):
    """
    JSONPath referring to some field of the current object.
    Concrete syntax ix comma-separated field names.

    WARNING: If '*' is any of the field names, then they will
    all be returned.
    """

    def __init__(self, *fields):
        self.fields = fields

    def get_field_datum(self, datum, field):
        # The auto-id pseudo-field gets its special datum type.
        if field == auto_id_field:
            return AutoIdForDatum(datum)
        try:
            # Do NOT use `val.get(field)` since that confuses None as a value and None due to `get`
            field_value = datum.value[field]
            return DatumInContext(value=field_value, path=Fields(field), context=datum)
        except (TypeError, KeyError, AttributeError):
            # Missing field or non-subscriptable datum: no match.
            return None

    def reified_fields(self, datum):
        # Expand '*' into the datum's actual keys (plus the auto-id
        # pseudo-field when enabled); otherwise the list is literal.
        if '*' not in self.fields:
            return self.fields
        try:
            fields = tuple(datum.value.keys())
        except AttributeError:
            return ()
        return fields if auto_id_field is None else fields + (auto_id_field,)

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        results = []
        for field in self.reified_fields(datum):
            field_datum = self.get_field_datum(datum, field)
            if field_datum is not None:
                results.append(field_datum)
        return results

    def __str__(self):
        return ','.join(self.fields)

    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, ','.join(map(repr, self.fields)))

    def __eq__(self, other):
        return isinstance(other, Fields) and tuple(self.fields) == tuple(other.fields)
|
||||||
|
|
||||||
|
|
||||||
|
class Index(JSONPath):
    """
    JSONPath that matches indices of the current datum, or none if not large enough.
    Concrete syntax is brackets.

    WARNING: If the datum is not long enough, it will not crash but will not match anything.
    NOTE: For the concrete syntax of `[*]`, the abstract syntax is a Slice() with no parameters (equiv to `[:]`
    """

    def __init__(self, index):
        self.index = index

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        # Out-of-range indices simply yield no matches rather than raising.
        if len(datum.value) <= self.index:
            return []
        return [DatumInContext(datum.value[self.index], path=self, context=datum)]

    def __eq__(self, other):
        return isinstance(other, Index) and self.index == other.index

    def __str__(self):
        return '[%i]' % self.index
|
||||||
|
|
||||||
|
class Slice(JSONPath):
    """
    JSONPath matching a slice of an array.

    Because of a mismatch between JSON and XML when schema-unaware,
    this always returns an iterable; if the incoming data
    was not a list, then it returns a one element list _containing_ that
    data.

    Consider these two docs, and their schema-unaware translation to JSON:

    <a><b>hello</b></a> ==> {"a": {"b": "hello"}}
    <a><b>hello</b><b>goodbye</b></a> ==> {"a": {"b": ["hello", "goodbye"]}}

    If there were a schema, it would be known that "b" should always be an
    array (unless the schema were wonky, but that is too much to fix here)
    so when querying with JSON if the one writing the JSON knows that it
    should be an array, they can write a slice operator and it will coerce
    a non-array value to an array.

    This may be a bit unfortunate because it would be nice to always have
    an iterator, but dictionaries and other objects may also be iterable,
    so this is the compromise.
    """

    def __init__(self, start=None, end=None, step=None):
        self.start = start
        self.end = end
        self.step = step

    def find(self, datum):
        datum = DatumInContext.wrap(datum)

        # Here's the hack. If it is a dictionary or some kind of constant,
        # put it in a single-element list
        if (isinstance(datum.value, dict) or isinstance(datum.value, six.integer_types) or isinstance(datum.value, six.string_types)):
            return self.find(DatumInContext([datum.value], path=datum.path, context=datum.context))

        # Some iterators do not support slicing but we can still
        # at least work for '*'
        # IDIOM FIX: compare against None with `is`, not `==` (PEP 8);
        # behavior is unchanged for int/None bounds.
        if self.start is None and self.end is None and self.step is None:
            return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in xrange(0, len(datum.value))]
        else:
            return [DatumInContext(datum.value[i], path=Index(i), context=datum) for i in range(0, len(datum.value))[self.start:self.end:self.step]]

    def __str__(self):
        if self.start is None and self.end is None and self.step is None:
            return '[*]'
        else:
            return '[%s%s%s]' % (self.start or '',
                                 ':%d'%self.end if self.end else '',
                                 ':%d'%self.step if self.step else '')

    def __repr__(self):
        return '%s(start=%r,end=%r,step=%r)' % (self.__class__.__name__, self.start, self.end, self.step)

    def __eq__(self, other):
        return isinstance(other, Slice) and other.start == self.start and self.end == other.end and other.step == self.step
|
||||||
171
bin/jsonpath_rw/lexer.py
Normal file
171
bin/jsonpath_rw/lexer.py
Normal file
@@ -0,0 +1,171 @@
|
|||||||
|
from __future__ import unicode_literals, print_function, absolute_import, division, generators, nested_scopes
|
||||||
|
import sys
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import ply.lex
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
class JsonPathLexerError(Exception):
    """Raised for any lexical error encountered while tokenizing a JSONPath."""
    pass
|
||||||
|
|
||||||
|
class JsonPathLexer(object):
    '''
    A Lexical analyzer for JsonPath.

    NOTE: PLY derives token regexes from the docstrings of the t_* rule
    methods below; those docstrings are functional and must not change.
    '''

    def __init__(self, debug=False):
        self.debug = debug
        # PLY reads rule patterns out of docstrings, so running with
        # stripped docstrings would silently break lexing.
        if self.__doc__ is None:
            raise JsonPathLexerError('Docstrings have been removed! By design of PLY, jsonpath-rw requires docstrings. You must not use PYTHONOPTIMIZE=2 or python -OO.')

    def tokenize(self, string):
        '''
        Maps a string to an iterator over tokens. In other words: [char] -> [token]
        '''
        new_lexer = ply.lex.lex(module=self, debug=self.debug, errorlog=logger)
        new_lexer.latest_newline = 0
        new_lexer.string_value = None
        new_lexer.input(string)

        # Pull tokens until PLY signals the end with None, annotating each
        # with its column relative to the most recent newline.
        for t in iter(new_lexer.token, None):
            t.col = t.lexpos - new_lexer.latest_newline
            yield t

        # A leftover string_value means a quoted region was never closed.
        if new_lexer.string_value is not None:
            raise JsonPathLexerError('Unexpected EOF in string literal or identifier')

    # ============== PLY Lexer specification ==================
    #
    # This probably should be private but:
    #   - the parser requires access to `tokens` (perhaps they should be defined in a third, shared dependency)
    #   - things like `literals` might be a legitimate part of the public interface.
    #
    # Anyhow, it is pythonic to give some rope to hang oneself with :-)

    literals = ['*', '.', '[', ']', '(', ')', '$', ',', ':', '|', '&']

    reserved_words = { 'where': 'WHERE' }

    tokens = ['DOUBLEDOT', 'NUMBER', 'ID', 'NAMED_OPERATOR'] + list(reserved_words.values())

    # Exclusive states for the three quoting styles.
    states = [ ('singlequote', 'exclusive'),
               ('doublequote', 'exclusive'),
               ('backquote', 'exclusive') ]

    # Normal lexing, rather easy
    t_DOUBLEDOT = r'\.\.'
    t_ignore = ' \t'

    def t_ID(self, t):
        r'[a-zA-Z_@][a-zA-Z0-9_@\-]*'
        # Promote reserved words (e.g. `where`) to their own token type.
        t.type = self.reserved_words.get(t.value, 'ID')
        return t

    def t_NUMBER(self, t):
        r'-?\d+'
        t.value = int(t.value)
        return t

    # Single-quoted strings: accumulate into string_value until the
    # closing quote, then emit the whole thing as a single ID token.
    t_singlequote_ignore = ''

    def t_singlequote(self, t):
        r"'"
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('singlequote')

    def t_singlequote_content(self, t):
        r"[^'\\]+"
        t.lexer.string_value += t.value

    def t_singlequote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_singlequote_end(self, t):
        r"'"
        t.value = t.lexer.string_value
        t.type = 'ID'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_singlequote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing singlequoted field: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))

    # Double-quoted strings: same scheme as single quotes.
    t_doublequote_ignore = ''

    def t_doublequote(self, t):
        r'"'
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('doublequote')

    def t_doublequote_content(self, t):
        r'[^"\\]+'
        t.lexer.string_value += t.value

    def t_doublequote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_doublequote_end(self, t):
        r'"'
        t.value = t.lexer.string_value
        t.type = 'ID'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_doublequote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing doublequoted field: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))

    # Back-quoted "magic" operators: emitted as NAMED_OPERATOR tokens.
    t_backquote_ignore = ''

    def t_backquote(self, t):
        r'`'
        t.lexer.string_start = t.lexer.lexpos
        t.lexer.string_value = ''
        t.lexer.push_state('backquote')

    def t_backquote_escape(self, t):
        r'\\.'
        t.lexer.string_value += t.value[1]

    def t_backquote_content(self, t):
        r"[^`\\]+"
        t.lexer.string_value += t.value

    def t_backquote_end(self, t):
        r'`'
        t.value = t.lexer.string_value
        t.type = 'NAMED_OPERATOR'
        t.lexer.string_value = None
        t.lexer.pop_state()
        return t

    def t_backquote_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s while lexing backquoted operator: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))

    # Counting lines, handling errors
    def t_newline(self, t):
        r'\n'
        t.lexer.lineno += 1
        t.lexer.latest_newline = t.lexpos

    def t_error(self, t):
        raise JsonPathLexerError('Error on line %s, col %s: Unexpected character: %s ' % (t.lexer.lineno, t.lexpos - t.lexer.latest_newline, t.value[0]))
|
||||||
|
|
||||||
|
if __name__ == '__main__':
    # Ad-hoc driver: lex stdin and print one "value type" pair per token.
    logging.basicConfig()
    lexer = JsonPathLexer(debug=True)
    for tok in lexer.tokenize(sys.stdin.read()):
        print('%-20s%s' % (tok.value, tok.type))
|
||||||
187
bin/jsonpath_rw/parser.py
Normal file
187
bin/jsonpath_rw/parser.py
Normal file
@@ -0,0 +1,187 @@
|
|||||||
|
from __future__ import print_function, absolute_import, division, generators, nested_scopes
|
||||||
|
import sys
|
||||||
|
import os.path
|
||||||
|
import logging
|
||||||
|
|
||||||
|
import ply.yacc
|
||||||
|
|
||||||
|
from jsonpath_rw.jsonpath import *
|
||||||
|
from jsonpath_rw.lexer import JsonPathLexer
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
def parse(string):
    """Parse a JSONPath expression string into its AST."""
    # Build a fresh parser per call; JsonPathParser itself notes PLY's
    # statefulness as the reason for per-use instances.
    parser = JsonPathParser()
    return parser.parse(string)
|
||||||
|
|
||||||
|
class JsonPathParser(object):
    '''
    An LALR-parser for JsonPath

    NOTE: PLY derives grammar productions from the docstrings of the p_*
    methods below; those docstrings are functional and must not change.
    '''

    tokens = JsonPathLexer.tokens

    def __init__(self, debug=False, lexer_class=None):
        # PLY reads productions out of docstrings, so stripped docstrings
        # would silently break parsing.
        if self.__doc__ is None:
            raise Exception('Docstrings have been removed! By design of PLY, jsonpath-rw requires docstrings. You must not use PYTHONOPTIMIZE=2 or python -OO.')

        self.debug = debug
        self.lexer_class = lexer_class or JsonPathLexer # Crufty but works around statefulness in PLY

    def parse(self, string, lexer = None):
        lexer = lexer or self.lexer_class()
        return self.parse_token_stream(lexer.tokenize(string))

    def parse_token_stream(self, token_iterator, start_symbol='jsonpath'):
        # Since PLY has some crufty aspects and dumps files, we try to keep them local
        # However, we need to derive the name of the output Python file :-/
        output_directory = os.path.dirname(__file__)
        try:
            module_name = os.path.splitext(os.path.split(__file__)[1])[0]
        except:
            module_name = __name__

        parsing_table_module = '_'.join([module_name, start_symbol, 'parsetab'])

        # And we regenerate the parse table every time; it doesn't actually take that long!
        new_parser = ply.yacc.yacc(module=self,
                                   debug=self.debug,
                                   tabmodule = parsing_table_module,
                                   outputdir = output_directory,
                                   write_tables=0,
                                   start = start_symbol,
                                   errorlog = logger)

        return new_parser.parse(lexer = IteratorToTokenStream(token_iterator))

    # ===================== PLY Parser specification =====================

    precedence = [
        ('left', ','),
        ('left', 'DOUBLEDOT'),
        ('left', '.'),
        ('left', '|'),
        ('left', '&'),
        ('left', 'WHERE'),
    ]

    def p_error(self, t):
        raise Exception('Parse error at %s:%s near token %s (%s)' % (t.lineno, t.col, t.value, t.type))

    def p_jsonpath_binop(self, p):
        """jsonpath : jsonpath '.' jsonpath
                    | jsonpath DOUBLEDOT jsonpath
                    | jsonpath WHERE jsonpath
                    | jsonpath '|' jsonpath
                    | jsonpath '&' jsonpath"""
        # Map each binary operator to its AST node.
        op = p[2]
        if op == '.':
            p[0] = Child(p[1], p[3])
        elif op == '..':
            p[0] = Descendants(p[1], p[3])
        elif op == 'where':
            p[0] = Where(p[1], p[3])
        elif op == '|':
            p[0] = Union(p[1], p[3])
        elif op == '&':
            p[0] = Intersect(p[1], p[3])

    def p_jsonpath_fields(self, p):
        "jsonpath : fields_or_any"
        p[0] = Fields(*p[1])

    def p_jsonpath_named_operator(self, p):
        "jsonpath : NAMED_OPERATOR"
        if p[1] == 'this':
            p[0] = This()
        elif p[1] == 'parent':
            p[0] = Parent()
        else:
            raise Exception('Unknown named operator `%s` at %s:%s' % (p[1], p.lineno(1), p.lexpos(1)))

    def p_jsonpath_root(self, p):
        "jsonpath : '$'"
        p[0] = Root()

    def p_jsonpath_idx(self, p):
        "jsonpath : '[' idx ']'"
        p[0] = p[2]

    def p_jsonpath_slice(self, p):
        "jsonpath : '[' slice ']'"
        p[0] = p[2]

    def p_jsonpath_fieldbrackets(self, p):
        "jsonpath : '[' fields ']'"
        p[0] = Fields(*p[2])

    def p_jsonpath_child_fieldbrackets(self, p):
        "jsonpath : jsonpath '[' fields ']'"
        p[0] = Child(p[1], Fields(*p[3]))

    def p_jsonpath_child_idxbrackets(self, p):
        "jsonpath : jsonpath '[' idx ']'"
        p[0] = Child(p[1], p[3])

    def p_jsonpath_child_slicebrackets(self, p):
        "jsonpath : jsonpath '[' slice ']'"
        p[0] = Child(p[1], p[3])

    def p_jsonpath_parens(self, p):
        "jsonpath : '(' jsonpath ')'"
        p[0] = p[2]

    # Because fields in brackets cannot be '*' - that is reserved for array indices
    def p_fields_or_any(self, p):
        """fields_or_any : fields
                         | '*' """
        if p[1] == '*':
            p[0] = ['*']
        else:
            p[0] = p[1]

    def p_fields_id(self, p):
        "fields : ID"
        p[0] = [p[1]]

    def p_fields_comma(self, p):
        "fields : fields ',' fields"
        p[0] = p[1] + p[3]

    def p_idx(self, p):
        "idx : NUMBER"
        p[0] = Index(p[1])

    def p_slice_any(self, p):
        "slice : '*'"
        p[0] = Slice()

    def p_slice(self, p): # Currently does not support `step`
        "slice : maybe_int ':' maybe_int"
        p[0] = Slice(start=p[1], end=p[3])

    def p_maybe_int(self, p):
        """maybe_int : NUMBER
                     | empty"""
        p[0] = p[1]

    def p_empty(self, p):
        'empty :'
        p[0] = None
|
||||||
|
|
||||||
|
class IteratorToTokenStream(object):
|
||||||
|
def __init__(self, iterator):
|
||||||
|
self.iterator = iterator
|
||||||
|
|
||||||
|
def token(self):
|
||||||
|
try:
|
||||||
|
return next(self.iterator)
|
||||||
|
except StopIteration:
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
logging.basicConfig()
|
||||||
|
parser = JsonPathParser(debug=True)
|
||||||
|
print(parser.parse(sys.stdin.read()))
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
#!/bin/sh -x
|
|
||||||
|
|
||||||
cd ~/NewCoin
|
|
||||||
git pull
|
|
||||||
scons -j 2
|
|
||||||
cp -p newcoind bin/
|
|
||||||
@@ -1,10 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
# XXX Should not need to make db directory. newcoind should do this.
|
|
||||||
for SITE in $HOSTS
|
|
||||||
do
|
|
||||||
echo "Clearing db for:" $SITE
|
|
||||||
DB_DIR="/var/www/$SITE/db/"
|
|
||||||
mkdir -p "/var/www/$SITE/db/"
|
|
||||||
rm -rf "/var/www/$SITE/db/*"
|
|
||||||
done
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
#!/bin/sh -x
|
|
||||||
|
|
||||||
network-stop
|
|
||||||
sleep 1
|
|
||||||
network-start
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
for SITE in $HOSTS
|
|
||||||
do
|
|
||||||
(nx $SITE &)
|
|
||||||
done
|
|
||||||
@@ -1,6 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
for SITE in $HOSTS
|
|
||||||
do
|
|
||||||
(nx $SITE stop &)
|
|
||||||
done
|
|
||||||
@@ -1,5 +0,0 @@
|
|||||||
#!/bin/sh -x
|
|
||||||
echo "Building and restarting."
|
|
||||||
|
|
||||||
network-build
|
|
||||||
network-restart
|
|
||||||
7
bin/nx
7
bin/nx
@@ -1,7 +0,0 @@
|
|||||||
#!/bin/sh
|
|
||||||
|
|
||||||
SITE=$1
|
|
||||||
shift
|
|
||||||
COMMAND=$@
|
|
||||||
cd "/var/www/$SITE"
|
|
||||||
newcoind $COMMAND
|
|
||||||
4
bin/ply/__init__.py
Normal file
4
bin/ply/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
# PLY package
|
||||||
|
# Author: David Beazley (dave@dabeaz.com)
|
||||||
|
|
||||||
|
__all__ = ['lex','yacc']
|
||||||
898
bin/ply/cpp.py
Normal file
898
bin/ply/cpp.py
Normal file
@@ -0,0 +1,898 @@
|
|||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# cpp.py
|
||||||
|
#
|
||||||
|
# Author: David Beazley (http://www.dabeaz.com)
|
||||||
|
# Copyright (C) 2007
|
||||||
|
# All rights reserved
|
||||||
|
#
|
||||||
|
# This module implements an ANSI-C style lexical preprocessor for PLY.
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
from __future__ import generators
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# Default preprocessor lexer definitions. These tokens are enough to get
|
||||||
|
# a basic preprocessor working. Other modules may import these if they want
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
tokens = (
|
||||||
|
'CPP_ID','CPP_INTEGER', 'CPP_FLOAT', 'CPP_STRING', 'CPP_CHAR', 'CPP_WS', 'CPP_COMMENT', 'CPP_POUND','CPP_DPOUND'
|
||||||
|
)
|
||||||
|
|
||||||
|
literals = "+-*/%|&~^<>=!?()[]{}.,;:\\\'\""
|
||||||
|
|
||||||
|
# Whitespace
|
||||||
|
def t_CPP_WS(t):
|
||||||
|
r'\s+'
|
||||||
|
t.lexer.lineno += t.value.count("\n")
|
||||||
|
return t
|
||||||
|
|
||||||
|
t_CPP_POUND = r'\#'
|
||||||
|
t_CPP_DPOUND = r'\#\#'
|
||||||
|
|
||||||
|
# Identifier
|
||||||
|
t_CPP_ID = r'[A-Za-z_][\w_]*'
|
||||||
|
|
||||||
|
# Integer literal
|
||||||
|
def CPP_INTEGER(t):
|
||||||
|
r'(((((0x)|(0X))[0-9a-fA-F]+)|(\d+))([uU]|[lL]|[uU][lL]|[lL][uU])?)'
|
||||||
|
return t
|
||||||
|
|
||||||
|
t_CPP_INTEGER = CPP_INTEGER
|
||||||
|
|
||||||
|
# Floating literal
|
||||||
|
t_CPP_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
|
||||||
|
|
||||||
|
# String literal
|
||||||
|
def t_CPP_STRING(t):
|
||||||
|
r'\"([^\\\n]|(\\(.|\n)))*?\"'
|
||||||
|
t.lexer.lineno += t.value.count("\n")
|
||||||
|
return t
|
||||||
|
|
||||||
|
# Character constant 'c' or L'c'
|
||||||
|
def t_CPP_CHAR(t):
|
||||||
|
r'(L)?\'([^\\\n]|(\\(.|\n)))*?\''
|
||||||
|
t.lexer.lineno += t.value.count("\n")
|
||||||
|
return t
|
||||||
|
|
||||||
|
# Comment
|
||||||
|
def t_CPP_COMMENT(t):
|
||||||
|
r'(/\*(.|\n)*?\*/)|(//.*?\n)'
|
||||||
|
t.lexer.lineno += t.value.count("\n")
|
||||||
|
return t
|
||||||
|
|
||||||
|
def t_error(t):
|
||||||
|
t.type = t.value[0]
|
||||||
|
t.value = t.value[0]
|
||||||
|
t.lexer.skip(1)
|
||||||
|
return t
|
||||||
|
|
||||||
|
import re
|
||||||
|
import copy
|
||||||
|
import time
|
||||||
|
import os.path
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# trigraph()
|
||||||
|
#
|
||||||
|
# Given an input string, this function replaces all trigraph sequences.
|
||||||
|
# The following mapping is used:
|
||||||
|
#
|
||||||
|
# ??= #
|
||||||
|
# ??/ \
|
||||||
|
# ??' ^
|
||||||
|
# ??( [
|
||||||
|
# ??) ]
|
||||||
|
# ??! |
|
||||||
|
# ??< {
|
||||||
|
# ??> }
|
||||||
|
# ??- ~
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
_trigraph_pat = re.compile(r'''\?\?[=/\'\(\)\!<>\-]''')
|
||||||
|
_trigraph_rep = {
|
||||||
|
'=':'#',
|
||||||
|
'/':'\\',
|
||||||
|
"'":'^',
|
||||||
|
'(':'[',
|
||||||
|
')':']',
|
||||||
|
'!':'|',
|
||||||
|
'<':'{',
|
||||||
|
'>':'}',
|
||||||
|
'-':'~'
|
||||||
|
}
|
||||||
|
|
||||||
|
def trigraph(input):
|
||||||
|
return _trigraph_pat.sub(lambda g: _trigraph_rep[g.group()[-1]],input)
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Macro object
|
||||||
|
#
|
||||||
|
# This object holds information about preprocessor macros
|
||||||
|
#
|
||||||
|
# .name - Macro name (string)
|
||||||
|
# .value - Macro value (a list of tokens)
|
||||||
|
# .arglist - List of argument names
|
||||||
|
# .variadic - Boolean indicating whether or not variadic macro
|
||||||
|
# .vararg - Name of the variadic parameter
|
||||||
|
#
|
||||||
|
# When a macro is created, the macro replacement token sequence is
|
||||||
|
# pre-scanned and used to create patch lists that are later used
|
||||||
|
# during macro expansion
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
class Macro(object):
|
||||||
|
def __init__(self,name,value,arglist=None,variadic=False):
|
||||||
|
self.name = name
|
||||||
|
self.value = value
|
||||||
|
self.arglist = arglist
|
||||||
|
self.variadic = variadic
|
||||||
|
if variadic:
|
||||||
|
self.vararg = arglist[-1]
|
||||||
|
self.source = None
|
||||||
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Preprocessor object
|
||||||
|
#
|
||||||
|
# Object representing a preprocessor. Contains macro definitions,
|
||||||
|
# include directories, and other information
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
|
class Preprocessor(object):
|
||||||
|
def __init__(self,lexer=None):
|
||||||
|
if lexer is None:
|
||||||
|
lexer = lex.lexer
|
||||||
|
self.lexer = lexer
|
||||||
|
self.macros = { }
|
||||||
|
self.path = []
|
||||||
|
self.temp_path = []
|
||||||
|
|
||||||
|
# Probe the lexer for selected tokens
|
||||||
|
self.lexprobe()
|
||||||
|
|
||||||
|
tm = time.localtime()
|
||||||
|
self.define("__DATE__ \"%s\"" % time.strftime("%b %d %Y",tm))
|
||||||
|
self.define("__TIME__ \"%s\"" % time.strftime("%H:%M:%S",tm))
|
||||||
|
self.parser = None
|
||||||
|
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
# tokenize()
|
||||||
|
#
|
||||||
|
# Utility function. Given a string of text, tokenize into a list of tokens
|
||||||
|
# -----------------------------------------------------------------------------
|
||||||
|
|
||||||
|
def tokenize(self,text):
|
||||||
|
tokens = []
|
||||||
|
self.lexer.input(text)
|
||||||
|
while True:
|
||||||
|
tok = self.lexer.token()
|
||||||
|
if not tok: break
|
||||||
|
tokens.append(tok)
|
||||||
|
return tokens
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------
|
||||||
|
# error()
|
||||||
|
#
|
||||||
|
# Report a preprocessor error/warning of some kind
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def error(self,file,line,msg):
|
||||||
|
print("%s:%d %s" % (file,line,msg))
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# lexprobe()
|
||||||
|
#
|
||||||
|
# This method probes the preprocessor lexer object to discover
|
||||||
|
# the token types of symbols that are important to the preprocessor.
|
||||||
|
# If this works right, the preprocessor will simply "work"
|
||||||
|
# with any suitable lexer regardless of how tokens have been named.
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def lexprobe(self):
|
||||||
|
|
||||||
|
# Determine the token type for identifiers
|
||||||
|
self.lexer.input("identifier")
|
||||||
|
tok = self.lexer.token()
|
||||||
|
if not tok or tok.value != "identifier":
|
||||||
|
print("Couldn't determine identifier type")
|
||||||
|
else:
|
||||||
|
self.t_ID = tok.type
|
||||||
|
|
||||||
|
# Determine the token type for integers
|
||||||
|
self.lexer.input("12345")
|
||||||
|
tok = self.lexer.token()
|
||||||
|
if not tok or int(tok.value) != 12345:
|
||||||
|
print("Couldn't determine integer type")
|
||||||
|
else:
|
||||||
|
self.t_INTEGER = tok.type
|
||||||
|
self.t_INTEGER_TYPE = type(tok.value)
|
||||||
|
|
||||||
|
# Determine the token type for strings enclosed in double quotes
|
||||||
|
self.lexer.input("\"filename\"")
|
||||||
|
tok = self.lexer.token()
|
||||||
|
if not tok or tok.value != "\"filename\"":
|
||||||
|
print("Couldn't determine string type")
|
||||||
|
else:
|
||||||
|
self.t_STRING = tok.type
|
||||||
|
|
||||||
|
# Determine the token type for whitespace--if any
|
||||||
|
self.lexer.input(" ")
|
||||||
|
tok = self.lexer.token()
|
||||||
|
if not tok or tok.value != " ":
|
||||||
|
self.t_SPACE = None
|
||||||
|
else:
|
||||||
|
self.t_SPACE = tok.type
|
||||||
|
|
||||||
|
# Determine the token type for newlines
|
||||||
|
self.lexer.input("\n")
|
||||||
|
tok = self.lexer.token()
|
||||||
|
if not tok or tok.value != "\n":
|
||||||
|
self.t_NEWLINE = None
|
||||||
|
print("Couldn't determine token for newlines")
|
||||||
|
else:
|
||||||
|
self.t_NEWLINE = tok.type
|
||||||
|
|
||||||
|
self.t_WS = (self.t_SPACE, self.t_NEWLINE)
|
||||||
|
|
||||||
|
# Check for other characters used by the preprocessor
|
||||||
|
chars = [ '<','>','#','##','\\','(',')',',','.']
|
||||||
|
for c in chars:
|
||||||
|
self.lexer.input(c)
|
||||||
|
tok = self.lexer.token()
|
||||||
|
if not tok or tok.value != c:
|
||||||
|
print("Unable to lex '%s' required for preprocessor" % c)
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# add_path()
|
||||||
|
#
|
||||||
|
# Adds a search path to the preprocessor.
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def add_path(self,path):
|
||||||
|
self.path.append(path)
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# group_lines()
|
||||||
|
#
|
||||||
|
# Given an input string, this function splits it into lines. Trailing whitespace
|
||||||
|
# is removed. Any line ending with \ is grouped with the next line. This
|
||||||
|
# function forms the lowest level of the preprocessor---grouping into text into
|
||||||
|
# a line-by-line format.
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def group_lines(self,input):
|
||||||
|
lex = self.lexer.clone()
|
||||||
|
lines = [x.rstrip() for x in input.splitlines()]
|
||||||
|
for i in xrange(len(lines)):
|
||||||
|
j = i+1
|
||||||
|
while lines[i].endswith('\\') and (j < len(lines)):
|
||||||
|
lines[i] = lines[i][:-1]+lines[j]
|
||||||
|
lines[j] = ""
|
||||||
|
j += 1
|
||||||
|
|
||||||
|
input = "\n".join(lines)
|
||||||
|
lex.input(input)
|
||||||
|
lex.lineno = 1
|
||||||
|
|
||||||
|
current_line = []
|
||||||
|
while True:
|
||||||
|
tok = lex.token()
|
||||||
|
if not tok:
|
||||||
|
break
|
||||||
|
current_line.append(tok)
|
||||||
|
if tok.type in self.t_WS and '\n' in tok.value:
|
||||||
|
yield current_line
|
||||||
|
current_line = []
|
||||||
|
|
||||||
|
if current_line:
|
||||||
|
yield current_line
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# tokenstrip()
|
||||||
|
#
|
||||||
|
# Remove leading/trailing whitespace tokens from a token list
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def tokenstrip(self,tokens):
|
||||||
|
i = 0
|
||||||
|
while i < len(tokens) and tokens[i].type in self.t_WS:
|
||||||
|
i += 1
|
||||||
|
del tokens[:i]
|
||||||
|
i = len(tokens)-1
|
||||||
|
while i >= 0 and tokens[i].type in self.t_WS:
|
||||||
|
i -= 1
|
||||||
|
del tokens[i+1:]
|
||||||
|
return tokens
|
||||||
|
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# collect_args()
|
||||||
|
#
|
||||||
|
# Collects comma separated arguments from a list of tokens. The arguments
|
||||||
|
# must be enclosed in parenthesis. Returns a tuple (tokencount,args,positions)
|
||||||
|
# where tokencount is the number of tokens consumed, args is a list of arguments,
|
||||||
|
# and positions is a list of integers containing the starting index of each
|
||||||
|
# argument. Each argument is represented by a list of tokens.
|
||||||
|
#
|
||||||
|
# When collecting arguments, leading and trailing whitespace is removed
|
||||||
|
# from each argument.
|
||||||
|
#
|
||||||
|
# This function properly handles nested parenthesis and commas---these do not
|
||||||
|
# define new arguments.
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def collect_args(self,tokenlist):
|
||||||
|
args = []
|
||||||
|
positions = []
|
||||||
|
current_arg = []
|
||||||
|
nesting = 1
|
||||||
|
tokenlen = len(tokenlist)
|
||||||
|
|
||||||
|
# Search for the opening '('.
|
||||||
|
i = 0
|
||||||
|
while (i < tokenlen) and (tokenlist[i].type in self.t_WS):
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
if (i < tokenlen) and (tokenlist[i].value == '('):
|
||||||
|
positions.append(i+1)
|
||||||
|
else:
|
||||||
|
self.error(self.source,tokenlist[0].lineno,"Missing '(' in macro arguments")
|
||||||
|
return 0, [], []
|
||||||
|
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
while i < tokenlen:
|
||||||
|
t = tokenlist[i]
|
||||||
|
if t.value == '(':
|
||||||
|
current_arg.append(t)
|
||||||
|
nesting += 1
|
||||||
|
elif t.value == ')':
|
||||||
|
nesting -= 1
|
||||||
|
if nesting == 0:
|
||||||
|
if current_arg:
|
||||||
|
args.append(self.tokenstrip(current_arg))
|
||||||
|
positions.append(i)
|
||||||
|
return i+1,args,positions
|
||||||
|
current_arg.append(t)
|
||||||
|
elif t.value == ',' and nesting == 1:
|
||||||
|
args.append(self.tokenstrip(current_arg))
|
||||||
|
positions.append(i+1)
|
||||||
|
current_arg = []
|
||||||
|
else:
|
||||||
|
current_arg.append(t)
|
||||||
|
i += 1
|
||||||
|
|
||||||
|
# Missing end argument
|
||||||
|
self.error(self.source,tokenlist[-1].lineno,"Missing ')' in macro arguments")
|
||||||
|
return 0, [],[]
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# macro_prescan()
|
||||||
|
#
|
||||||
|
# Examine the macro value (token sequence) and identify patch points
|
||||||
|
# This is used to speed up macro expansion later on---we'll know
|
||||||
|
# right away where to apply patches to the value to form the expansion
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def macro_prescan(self,macro):
|
||||||
|
macro.patch = [] # Standard macro arguments
|
||||||
|
macro.str_patch = [] # String conversion expansion
|
||||||
|
macro.var_comma_patch = [] # Variadic macro comma patch
|
||||||
|
i = 0
|
||||||
|
while i < len(macro.value):
|
||||||
|
if macro.value[i].type == self.t_ID and macro.value[i].value in macro.arglist:
|
||||||
|
argnum = macro.arglist.index(macro.value[i].value)
|
||||||
|
# Conversion of argument to a string
|
||||||
|
if i > 0 and macro.value[i-1].value == '#':
|
||||||
|
macro.value[i] = copy.copy(macro.value[i])
|
||||||
|
macro.value[i].type = self.t_STRING
|
||||||
|
del macro.value[i-1]
|
||||||
|
macro.str_patch.append((argnum,i-1))
|
||||||
|
continue
|
||||||
|
# Concatenation
|
||||||
|
elif (i > 0 and macro.value[i-1].value == '##'):
|
||||||
|
macro.patch.append(('c',argnum,i-1))
|
||||||
|
del macro.value[i-1]
|
||||||
|
continue
|
||||||
|
elif ((i+1) < len(macro.value) and macro.value[i+1].value == '##'):
|
||||||
|
macro.patch.append(('c',argnum,i))
|
||||||
|
i += 1
|
||||||
|
continue
|
||||||
|
# Standard expansion
|
||||||
|
else:
|
||||||
|
macro.patch.append(('e',argnum,i))
|
||||||
|
elif macro.value[i].value == '##':
|
||||||
|
if macro.variadic and (i > 0) and (macro.value[i-1].value == ',') and \
|
||||||
|
((i+1) < len(macro.value)) and (macro.value[i+1].type == self.t_ID) and \
|
||||||
|
(macro.value[i+1].value == macro.vararg):
|
||||||
|
macro.var_comma_patch.append(i-1)
|
||||||
|
i += 1
|
||||||
|
macro.patch.sort(key=lambda x: x[2],reverse=True)
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# macro_expand_args()
|
||||||
|
#
|
||||||
|
# Given a Macro and list of arguments (each a token list), this method
|
||||||
|
# returns an expanded version of a macro. The return value is a token sequence
|
||||||
|
# representing the replacement macro tokens
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def macro_expand_args(self,macro,args):
|
||||||
|
# Make a copy of the macro token sequence
|
||||||
|
rep = [copy.copy(_x) for _x in macro.value]
|
||||||
|
|
||||||
|
# Make string expansion patches. These do not alter the length of the replacement sequence
|
||||||
|
|
||||||
|
str_expansion = {}
|
||||||
|
for argnum, i in macro.str_patch:
|
||||||
|
if argnum not in str_expansion:
|
||||||
|
str_expansion[argnum] = ('"%s"' % "".join([x.value for x in args[argnum]])).replace("\\","\\\\")
|
||||||
|
rep[i] = copy.copy(rep[i])
|
||||||
|
rep[i].value = str_expansion[argnum]
|
||||||
|
|
||||||
|
# Make the variadic macro comma patch. If the variadic macro argument is empty, we get rid
|
||||||
|
comma_patch = False
|
||||||
|
if macro.variadic and not args[-1]:
|
||||||
|
for i in macro.var_comma_patch:
|
||||||
|
rep[i] = None
|
||||||
|
comma_patch = True
|
||||||
|
|
||||||
|
# Make all other patches. The order of these matters. It is assumed that the patch list
|
||||||
|
# has been sorted in reverse order of patch location since replacements will cause the
|
||||||
|
# size of the replacement sequence to expand from the patch point.
|
||||||
|
|
||||||
|
expanded = { }
|
||||||
|
for ptype, argnum, i in macro.patch:
|
||||||
|
# Concatenation. Argument is left unexpanded
|
||||||
|
if ptype == 'c':
|
||||||
|
rep[i:i+1] = args[argnum]
|
||||||
|
# Normal expansion. Argument is macro expanded first
|
||||||
|
elif ptype == 'e':
|
||||||
|
if argnum not in expanded:
|
||||||
|
expanded[argnum] = self.expand_macros(args[argnum])
|
||||||
|
rep[i:i+1] = expanded[argnum]
|
||||||
|
|
||||||
|
# Get rid of removed comma if necessary
|
||||||
|
if comma_patch:
|
||||||
|
rep = [_i for _i in rep if _i]
|
||||||
|
|
||||||
|
return rep
|
||||||
|
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# expand_macros()
|
||||||
|
#
|
||||||
|
# Given a list of tokens, this function performs macro expansion.
|
||||||
|
# The expanded argument is a dictionary that contains macros already
|
||||||
|
# expanded. This is used to prevent infinite recursion.
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def expand_macros(self,tokens,expanded=None):
|
||||||
|
if expanded is None:
|
||||||
|
expanded = {}
|
||||||
|
i = 0
|
||||||
|
while i < len(tokens):
|
||||||
|
t = tokens[i]
|
||||||
|
if t.type == self.t_ID:
|
||||||
|
if t.value in self.macros and t.value not in expanded:
|
||||||
|
# Yes, we found a macro match
|
||||||
|
expanded[t.value] = True
|
||||||
|
|
||||||
|
m = self.macros[t.value]
|
||||||
|
if not m.arglist:
|
||||||
|
# A simple macro
|
||||||
|
ex = self.expand_macros([copy.copy(_x) for _x in m.value],expanded)
|
||||||
|
for e in ex:
|
||||||
|
e.lineno = t.lineno
|
||||||
|
tokens[i:i+1] = ex
|
||||||
|
i += len(ex)
|
||||||
|
else:
|
||||||
|
# A macro with arguments
|
||||||
|
j = i + 1
|
||||||
|
while j < len(tokens) and tokens[j].type in self.t_WS:
|
||||||
|
j += 1
|
||||||
|
if tokens[j].value == '(':
|
||||||
|
tokcount,args,positions = self.collect_args(tokens[j:])
|
||||||
|
if not m.variadic and len(args) != len(m.arglist):
|
||||||
|
self.error(self.source,t.lineno,"Macro %s requires %d arguments" % (t.value,len(m.arglist)))
|
||||||
|
i = j + tokcount
|
||||||
|
elif m.variadic and len(args) < len(m.arglist)-1:
|
||||||
|
if len(m.arglist) > 2:
|
||||||
|
self.error(self.source,t.lineno,"Macro %s must have at least %d arguments" % (t.value, len(m.arglist)-1))
|
||||||
|
else:
|
||||||
|
self.error(self.source,t.lineno,"Macro %s must have at least %d argument" % (t.value, len(m.arglist)-1))
|
||||||
|
i = j + tokcount
|
||||||
|
else:
|
||||||
|
if m.variadic:
|
||||||
|
if len(args) == len(m.arglist)-1:
|
||||||
|
args.append([])
|
||||||
|
else:
|
||||||
|
args[len(m.arglist)-1] = tokens[j+positions[len(m.arglist)-1]:j+tokcount-1]
|
||||||
|
del args[len(m.arglist):]
|
||||||
|
|
||||||
|
# Get macro replacement text
|
||||||
|
rep = self.macro_expand_args(m,args)
|
||||||
|
rep = self.expand_macros(rep,expanded)
|
||||||
|
for r in rep:
|
||||||
|
r.lineno = t.lineno
|
||||||
|
tokens[i:j+tokcount] = rep
|
||||||
|
i += len(rep)
|
||||||
|
del expanded[t.value]
|
||||||
|
continue
|
||||||
|
elif t.value == '__LINE__':
|
||||||
|
t.type = self.t_INTEGER
|
||||||
|
t.value = self.t_INTEGER_TYPE(t.lineno)
|
||||||
|
|
||||||
|
i += 1
|
||||||
|
return tokens
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# evalexpr()
|
||||||
|
#
|
||||||
|
# Evaluate an expression token sequence for the purposes of evaluating
|
||||||
|
# integral expressions.
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def evalexpr(self,tokens):
|
||||||
|
# tokens = tokenize(line)
|
||||||
|
# Search for defined macros
|
||||||
|
i = 0
|
||||||
|
while i < len(tokens):
|
||||||
|
if tokens[i].type == self.t_ID and tokens[i].value == 'defined':
|
||||||
|
j = i + 1
|
||||||
|
needparen = False
|
||||||
|
result = "0L"
|
||||||
|
while j < len(tokens):
|
||||||
|
if tokens[j].type in self.t_WS:
|
||||||
|
j += 1
|
||||||
|
continue
|
||||||
|
elif tokens[j].type == self.t_ID:
|
||||||
|
if tokens[j].value in self.macros:
|
||||||
|
result = "1L"
|
||||||
|
else:
|
||||||
|
result = "0L"
|
||||||
|
if not needparen: break
|
||||||
|
elif tokens[j].value == '(':
|
||||||
|
needparen = True
|
||||||
|
elif tokens[j].value == ')':
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
self.error(self.source,tokens[i].lineno,"Malformed defined()")
|
||||||
|
j += 1
|
||||||
|
tokens[i].type = self.t_INTEGER
|
||||||
|
tokens[i].value = self.t_INTEGER_TYPE(result)
|
||||||
|
del tokens[i+1:j+1]
|
||||||
|
i += 1
|
||||||
|
tokens = self.expand_macros(tokens)
|
||||||
|
for i,t in enumerate(tokens):
|
||||||
|
if t.type == self.t_ID:
|
||||||
|
tokens[i] = copy.copy(t)
|
||||||
|
tokens[i].type = self.t_INTEGER
|
||||||
|
tokens[i].value = self.t_INTEGER_TYPE("0L")
|
||||||
|
elif t.type == self.t_INTEGER:
|
||||||
|
tokens[i] = copy.copy(t)
|
||||||
|
# Strip off any trailing suffixes
|
||||||
|
tokens[i].value = str(tokens[i].value)
|
||||||
|
while tokens[i].value[-1] not in "0123456789abcdefABCDEF":
|
||||||
|
tokens[i].value = tokens[i].value[:-1]
|
||||||
|
|
||||||
|
expr = "".join([str(x.value) for x in tokens])
|
||||||
|
expr = expr.replace("&&"," and ")
|
||||||
|
expr = expr.replace("||"," or ")
|
||||||
|
expr = expr.replace("!"," not ")
|
||||||
|
try:
|
||||||
|
result = eval(expr)
|
||||||
|
except StandardError:
|
||||||
|
self.error(self.source,tokens[0].lineno,"Couldn't evaluate expression")
|
||||||
|
result = 0
|
||||||
|
return result
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# parsegen()
|
||||||
|
#
|
||||||
|
# Parse an input string/
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
def parsegen(self,input,source=None):
|
||||||
|
|
||||||
|
# Replace trigraph sequences
|
||||||
|
t = trigraph(input)
|
||||||
|
lines = self.group_lines(t)
|
||||||
|
|
||||||
|
if not source:
|
||||||
|
source = ""
|
||||||
|
|
||||||
|
self.define("__FILE__ \"%s\"" % source)
|
||||||
|
|
||||||
|
self.source = source
|
||||||
|
chunk = []
|
||||||
|
enable = True
|
||||||
|
iftrigger = False
|
||||||
|
ifstack = []
|
||||||
|
|
||||||
|
for x in lines:
|
||||||
|
for i,tok in enumerate(x):
|
||||||
|
if tok.type not in self.t_WS: break
|
||||||
|
if tok.value == '#':
|
||||||
|
# Preprocessor directive
|
||||||
|
|
||||||
|
for tok in x:
|
||||||
|
if tok in self.t_WS and '\n' in tok.value:
|
||||||
|
chunk.append(tok)
|
||||||
|
|
||||||
|
dirtokens = self.tokenstrip(x[i+1:])
|
||||||
|
if dirtokens:
|
||||||
|
name = dirtokens[0].value
|
||||||
|
args = self.tokenstrip(dirtokens[1:])
|
||||||
|
else:
|
||||||
|
name = ""
|
||||||
|
args = []
|
||||||
|
|
||||||
|
if name == 'define':
|
||||||
|
if enable:
|
||||||
|
for tok in self.expand_macros(chunk):
|
||||||
|
yield tok
|
||||||
|
chunk = []
|
||||||
|
self.define(args)
|
||||||
|
elif name == 'include':
|
||||||
|
if enable:
|
||||||
|
for tok in self.expand_macros(chunk):
|
||||||
|
yield tok
|
||||||
|
chunk = []
|
||||||
|
oldfile = self.macros['__FILE__']
|
||||||
|
for tok in self.include(args):
|
||||||
|
yield tok
|
||||||
|
self.macros['__FILE__'] = oldfile
|
||||||
|
self.source = source
|
||||||
|
elif name == 'undef':
|
||||||
|
if enable:
|
||||||
|
for tok in self.expand_macros(chunk):
|
||||||
|
yield tok
|
||||||
|
chunk = []
|
||||||
|
self.undef(args)
|
||||||
|
elif name == 'ifdef':
|
||||||
|
ifstack.append((enable,iftrigger))
|
||||||
|
if enable:
|
||||||
|
if not args[0].value in self.macros:
|
||||||
|
enable = False
|
||||||
|
iftrigger = False
|
||||||
|
else:
|
||||||
|
iftrigger = True
|
||||||
|
elif name == 'ifndef':
|
||||||
|
ifstack.append((enable,iftrigger))
|
||||||
|
if enable:
|
||||||
|
if args[0].value in self.macros:
|
||||||
|
enable = False
|
||||||
|
iftrigger = False
|
||||||
|
else:
|
||||||
|
iftrigger = True
|
||||||
|
elif name == 'if':
|
||||||
|
ifstack.append((enable,iftrigger))
|
||||||
|
if enable:
|
||||||
|
result = self.evalexpr(args)
|
||||||
|
if not result:
|
||||||
|
enable = False
|
||||||
|
iftrigger = False
|
||||||
|
else:
|
||||||
|
iftrigger = True
|
||||||
|
elif name == 'elif':
|
||||||
|
if ifstack:
|
||||||
|
if ifstack[-1][0]: # We only pay attention if outer "if" allows this
|
||||||
|
if enable: # If already true, we flip enable False
|
||||||
|
enable = False
|
||||||
|
elif not iftrigger: # If False, but not triggered yet, we'll check expression
|
||||||
|
result = self.evalexpr(args)
|
||||||
|
if result:
|
||||||
|
enable = True
|
||||||
|
iftrigger = True
|
||||||
|
else:
|
||||||
|
self.error(self.source,dirtokens[0].lineno,"Misplaced #elif")
|
||||||
|
|
||||||
|
elif name == 'else':
|
||||||
|
if ifstack:
|
||||||
|
if ifstack[-1][0]:
|
||||||
|
if enable:
|
||||||
|
enable = False
|
||||||
|
elif not iftrigger:
|
||||||
|
enable = True
|
||||||
|
iftrigger = True
|
||||||
|
else:
|
||||||
|
self.error(self.source,dirtokens[0].lineno,"Misplaced #else")
|
||||||
|
|
||||||
|
elif name == 'endif':
|
||||||
|
if ifstack:
|
||||||
|
enable,iftrigger = ifstack.pop()
|
||||||
|
else:
|
||||||
|
self.error(self.source,dirtokens[0].lineno,"Misplaced #endif")
|
||||||
|
else:
|
||||||
|
# Unknown preprocessor directive
|
||||||
|
pass
|
||||||
|
|
||||||
|
else:
|
||||||
|
# Normal text
|
||||||
|
if enable:
|
||||||
|
chunk.extend(x)
|
||||||
|
|
||||||
|
for tok in self.expand_macros(chunk):
|
||||||
|
yield tok
|
||||||
|
chunk = []
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# include()
|
||||||
|
#
|
||||||
|
# Implementation of file-inclusion
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def include(self,tokens):
|
||||||
|
# Try to extract the filename and then process an include file
|
||||||
|
if not tokens:
|
||||||
|
return
|
||||||
|
if tokens:
|
||||||
|
if tokens[0].value != '<' and tokens[0].type != self.t_STRING:
|
||||||
|
tokens = self.expand_macros(tokens)
|
||||||
|
|
||||||
|
if tokens[0].value == '<':
|
||||||
|
# Include <...>
|
||||||
|
i = 1
|
||||||
|
while i < len(tokens):
|
||||||
|
if tokens[i].value == '>':
|
||||||
|
break
|
||||||
|
i += 1
|
||||||
|
else:
|
||||||
|
print("Malformed #include <...>")
|
||||||
|
return
|
||||||
|
filename = "".join([x.value for x in tokens[1:i]])
|
||||||
|
path = self.path + [""] + self.temp_path
|
||||||
|
elif tokens[0].type == self.t_STRING:
|
||||||
|
filename = tokens[0].value[1:-1]
|
||||||
|
path = self.temp_path + [""] + self.path
|
||||||
|
else:
|
||||||
|
print("Malformed #include statement")
|
||||||
|
return
|
||||||
|
for p in path:
|
||||||
|
iname = os.path.join(p,filename)
|
||||||
|
try:
|
||||||
|
data = open(iname,"r").read()
|
||||||
|
dname = os.path.dirname(iname)
|
||||||
|
if dname:
|
||||||
|
self.temp_path.insert(0,dname)
|
||||||
|
for tok in self.parsegen(data,filename):
|
||||||
|
yield tok
|
||||||
|
if dname:
|
||||||
|
del self.temp_path[0]
|
||||||
|
break
|
||||||
|
except IOError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
print("Couldn't find '%s'" % filename)
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# define()
|
||||||
|
#
|
||||||
|
# Define a new macro
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def define(self,tokens):
|
||||||
|
if isinstance(tokens,(str,unicode)):
|
||||||
|
tokens = self.tokenize(tokens)
|
||||||
|
|
||||||
|
linetok = tokens
|
||||||
|
try:
|
||||||
|
name = linetok[0]
|
||||||
|
if len(linetok) > 1:
|
||||||
|
mtype = linetok[1]
|
||||||
|
else:
|
||||||
|
mtype = None
|
||||||
|
if not mtype:
|
||||||
|
m = Macro(name.value,[])
|
||||||
|
self.macros[name.value] = m
|
||||||
|
elif mtype.type in self.t_WS:
|
||||||
|
# A normal macro
|
||||||
|
m = Macro(name.value,self.tokenstrip(linetok[2:]))
|
||||||
|
self.macros[name.value] = m
|
||||||
|
elif mtype.value == '(':
|
||||||
|
# A macro with arguments
|
||||||
|
tokcount, args, positions = self.collect_args(linetok[1:])
|
||||||
|
variadic = False
|
||||||
|
for a in args:
|
||||||
|
if variadic:
|
||||||
|
print("No more arguments may follow a variadic argument")
|
||||||
|
break
|
||||||
|
astr = "".join([str(_i.value) for _i in a])
|
||||||
|
if astr == "...":
|
||||||
|
variadic = True
|
||||||
|
a[0].type = self.t_ID
|
||||||
|
a[0].value = '__VA_ARGS__'
|
||||||
|
variadic = True
|
||||||
|
del a[1:]
|
||||||
|
continue
|
||||||
|
elif astr[-3:] == "..." and a[0].type == self.t_ID:
|
||||||
|
variadic = True
|
||||||
|
del a[1:]
|
||||||
|
# If, for some reason, "." is part of the identifier, strip off the name for the purposes
|
||||||
|
# of macro expansion
|
||||||
|
if a[0].value[-3:] == '...':
|
||||||
|
a[0].value = a[0].value[:-3]
|
||||||
|
continue
|
||||||
|
if len(a) > 1 or a[0].type != self.t_ID:
|
||||||
|
print("Invalid macro argument")
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
mvalue = self.tokenstrip(linetok[1+tokcount:])
|
||||||
|
i = 0
|
||||||
|
while i < len(mvalue):
|
||||||
|
if i+1 < len(mvalue):
|
||||||
|
if mvalue[i].type in self.t_WS and mvalue[i+1].value == '##':
|
||||||
|
del mvalue[i]
|
||||||
|
continue
|
||||||
|
elif mvalue[i].value == '##' and mvalue[i+1].type in self.t_WS:
|
||||||
|
del mvalue[i+1]
|
||||||
|
i += 1
|
||||||
|
m = Macro(name.value,mvalue,[x[0].value for x in args],variadic)
|
||||||
|
self.macro_prescan(m)
|
||||||
|
self.macros[name.value] = m
|
||||||
|
else:
|
||||||
|
print("Bad macro definition")
|
||||||
|
except LookupError:
|
||||||
|
print("Bad macro definition")
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# undef()
|
||||||
|
#
|
||||||
|
# Undefine a macro
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
def undef(self,tokens):
|
||||||
|
id = tokens[0].value
|
||||||
|
try:
|
||||||
|
del self.macros[id]
|
||||||
|
except LookupError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# parse()
|
||||||
|
#
|
||||||
|
# Parse input text.
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
def parse(self,input,source=None,ignore={}):
|
||||||
|
self.ignore = ignore
|
||||||
|
self.parser = self.parsegen(input,source)
|
||||||
|
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# token()
|
||||||
|
#
|
||||||
|
# Method to return individual tokens
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
def token(self):
|
||||||
|
try:
|
||||||
|
while True:
|
||||||
|
tok = next(self.parser)
|
||||||
|
if tok.type not in self.ignore: return tok
|
||||||
|
except StopIteration:
|
||||||
|
self.parser = None
|
||||||
|
return None
|
||||||
|
|
||||||
|
if __name__ == '__main__':
|
||||||
|
import ply.lex as lex
|
||||||
|
lexer = lex.lex()
|
||||||
|
|
||||||
|
# Run a preprocessor
|
||||||
|
import sys
|
||||||
|
f = open(sys.argv[1])
|
||||||
|
input = f.read()
|
||||||
|
|
||||||
|
p = Preprocessor(lexer)
|
||||||
|
p.parse(input,sys.argv[1])
|
||||||
|
while True:
|
||||||
|
tok = p.token()
|
||||||
|
if not tok: break
|
||||||
|
print(p.source, tok)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
133
bin/ply/ctokens.py
Normal file
133
bin/ply/ctokens.py
Normal file
@@ -0,0 +1,133 @@
|
|||||||
|
# ----------------------------------------------------------------------
|
||||||
|
# ctokens.py
|
||||||
|
#
|
||||||
|
# Token specifications for symbols in ANSI C and C++. This file is
|
||||||
|
# meant to be used as a library in other tokenizers.
|
||||||
|
# ----------------------------------------------------------------------
|
||||||
|
|
||||||
|
# Reserved words
|
||||||
|
|
||||||
|
tokens = [
|
||||||
|
# Literals (identifier, integer constant, float constant, string constant, char const)
|
||||||
|
'ID', 'TYPEID', 'ICONST', 'FCONST', 'SCONST', 'CCONST',
|
||||||
|
|
||||||
|
# Operators (+,-,*,/,%,|,&,~,^,<<,>>, ||, &&, !, <, <=, >, >=, ==, !=)
|
||||||
|
'PLUS', 'MINUS', 'TIMES', 'DIVIDE', 'MOD',
|
||||||
|
'OR', 'AND', 'NOT', 'XOR', 'LSHIFT', 'RSHIFT',
|
||||||
|
'LOR', 'LAND', 'LNOT',
|
||||||
|
'LT', 'LE', 'GT', 'GE', 'EQ', 'NE',
|
||||||
|
|
||||||
|
# Assignment (=, *=, /=, %=, +=, -=, <<=, >>=, &=, ^=, |=)
|
||||||
|
'EQUALS', 'TIMESEQUAL', 'DIVEQUAL', 'MODEQUAL', 'PLUSEQUAL', 'MINUSEQUAL',
|
||||||
|
'LSHIFTEQUAL','RSHIFTEQUAL', 'ANDEQUAL', 'XOREQUAL', 'OREQUAL',
|
||||||
|
|
||||||
|
# Increment/decrement (++,--)
|
||||||
|
'PLUSPLUS', 'MINUSMINUS',
|
||||||
|
|
||||||
|
# Structure dereference (->)
|
||||||
|
'ARROW',
|
||||||
|
|
||||||
|
# Ternary operator (?)
|
||||||
|
'TERNARY',
|
||||||
|
|
||||||
|
# Delimeters ( ) [ ] { } , . ; :
|
||||||
|
'LPAREN', 'RPAREN',
|
||||||
|
'LBRACKET', 'RBRACKET',
|
||||||
|
'LBRACE', 'RBRACE',
|
||||||
|
'COMMA', 'PERIOD', 'SEMI', 'COLON',
|
||||||
|
|
||||||
|
# Ellipsis (...)
|
||||||
|
'ELLIPSIS',
|
||||||
|
]
|
||||||
|
|
||||||
|
# Operators
|
||||||
|
t_PLUS = r'\+'
|
||||||
|
t_MINUS = r'-'
|
||||||
|
t_TIMES = r'\*'
|
||||||
|
t_DIVIDE = r'/'
|
||||||
|
t_MODULO = r'%'
|
||||||
|
t_OR = r'\|'
|
||||||
|
t_AND = r'&'
|
||||||
|
t_NOT = r'~'
|
||||||
|
t_XOR = r'\^'
|
||||||
|
t_LSHIFT = r'<<'
|
||||||
|
t_RSHIFT = r'>>'
|
||||||
|
t_LOR = r'\|\|'
|
||||||
|
t_LAND = r'&&'
|
||||||
|
t_LNOT = r'!'
|
||||||
|
t_LT = r'<'
|
||||||
|
t_GT = r'>'
|
||||||
|
t_LE = r'<='
|
||||||
|
t_GE = r'>='
|
||||||
|
t_EQ = r'=='
|
||||||
|
t_NE = r'!='
|
||||||
|
|
||||||
|
# Assignment operators
|
||||||
|
|
||||||
|
t_EQUALS = r'='
|
||||||
|
t_TIMESEQUAL = r'\*='
|
||||||
|
t_DIVEQUAL = r'/='
|
||||||
|
t_MODEQUAL = r'%='
|
||||||
|
t_PLUSEQUAL = r'\+='
|
||||||
|
t_MINUSEQUAL = r'-='
|
||||||
|
t_LSHIFTEQUAL = r'<<='
|
||||||
|
t_RSHIFTEQUAL = r'>>='
|
||||||
|
t_ANDEQUAL = r'&='
|
||||||
|
t_OREQUAL = r'\|='
|
||||||
|
t_XOREQUAL = r'^='
|
||||||
|
|
||||||
|
# Increment/decrement
|
||||||
|
t_INCREMENT = r'\+\+'
|
||||||
|
t_DECREMENT = r'--'
|
||||||
|
|
||||||
|
# ->
|
||||||
|
t_ARROW = r'->'
|
||||||
|
|
||||||
|
# ?
|
||||||
|
t_TERNARY = r'\?'
|
||||||
|
|
||||||
|
# Delimeters
|
||||||
|
t_LPAREN = r'\('
|
||||||
|
t_RPAREN = r'\)'
|
||||||
|
t_LBRACKET = r'\['
|
||||||
|
t_RBRACKET = r'\]'
|
||||||
|
t_LBRACE = r'\{'
|
||||||
|
t_RBRACE = r'\}'
|
||||||
|
t_COMMA = r','
|
||||||
|
t_PERIOD = r'\.'
|
||||||
|
t_SEMI = r';'
|
||||||
|
t_COLON = r':'
|
||||||
|
t_ELLIPSIS = r'\.\.\.'
|
||||||
|
|
||||||
|
# Identifiers
|
||||||
|
t_ID = r'[A-Za-z_][A-Za-z0-9_]*'
|
||||||
|
|
||||||
|
# Integer literal
|
||||||
|
t_INTEGER = r'\d+([uU]|[lL]|[uU][lL]|[lL][uU])?'
|
||||||
|
|
||||||
|
# Floating literal
|
||||||
|
t_FLOAT = r'((\d+)(\.\d+)(e(\+|-)?(\d+))? | (\d+)e(\+|-)?(\d+))([lL]|[fF])?'
|
||||||
|
|
||||||
|
# String literal
|
||||||
|
t_STRING = r'\"([^\\\n]|(\\.))*?\"'
|
||||||
|
|
||||||
|
# Character constant 'c' or L'c'
|
||||||
|
t_CHARACTER = r'(L)?\'([^\\\n]|(\\.))*?\''
|
||||||
|
|
||||||
|
# Comment (C-Style)
|
||||||
|
def t_COMMENT(t):
|
||||||
|
r'/\*(.|\n)*?\*/'
|
||||||
|
t.lexer.lineno += t.value.count('\n')
|
||||||
|
return t
|
||||||
|
|
||||||
|
# Comment (C++-Style)
|
||||||
|
def t_CPPCOMMENT(t):
|
||||||
|
r'//.*\n'
|
||||||
|
t.lexer.lineno += 1
|
||||||
|
return t
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
1058
bin/ply/lex.py
Normal file
1058
bin/ply/lex.py
Normal file
File diff suppressed because it is too large
Load Diff
3276
bin/ply/yacc.py
Normal file
3276
bin/ply/yacc.py
Normal file
File diff suppressed because it is too large
Load Diff
187
bin/ripple/ledger/Args.py
Normal file
187
bin/ripple/ledger/Args.py
Normal file
@@ -0,0 +1,187 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import argparse
|
||||||
|
import importlib
|
||||||
|
import os
|
||||||
|
|
||||||
|
from ripple.ledger import LedgerNumber
|
||||||
|
from ripple.util import File
|
||||||
|
from ripple.util import Log
|
||||||
|
from ripple.util import PrettyPrint
|
||||||
|
from ripple.util import Range
|
||||||
|
from ripple.util.Function import Function
|
||||||
|
|
||||||
|
NAME = 'LedgerTool'
|
||||||
|
VERSION = '0.1'
|
||||||
|
NONE = '(none)'
|
||||||
|
|
||||||
|
_parser = argparse.ArgumentParser(
|
||||||
|
prog=NAME,
|
||||||
|
description='Retrieve and process Ripple ledgers.',
|
||||||
|
epilog=LedgerNumber.HELP,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Positional arguments.
|
||||||
|
_parser.add_argument(
|
||||||
|
'command',
|
||||||
|
nargs='*',
|
||||||
|
help='Command to execute.'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Flag arguments.
|
||||||
|
_parser.add_argument(
|
||||||
|
'--binary',
|
||||||
|
action='store_true',
|
||||||
|
help='If true, searches are binary - by default linear search is used.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--cache',
|
||||||
|
default='~/.local/share/ripple/ledger',
|
||||||
|
help='The cache directory.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--complete',
|
||||||
|
action='store_true',
|
||||||
|
help='If set, only match complete ledgers.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--condition', '-c',
|
||||||
|
help='The name of a condition function used to match ledgers.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--config',
|
||||||
|
help='The rippled configuration file name.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--database', '-d',
|
||||||
|
nargs='*',
|
||||||
|
default=NONE,
|
||||||
|
help='Specify a database.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--display',
|
||||||
|
help='Specify a function to display ledgers.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--full', '-f',
|
||||||
|
action='store_true',
|
||||||
|
help='If true, request full ledgers.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--indent', '-i',
|
||||||
|
type=int,
|
||||||
|
default=2,
|
||||||
|
help='How many spaces to indent when display in JSON.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--offline', '-o',
|
||||||
|
action='store_true',
|
||||||
|
help='If true, work entirely from cache, do not try to contact the server.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--position', '-p',
|
||||||
|
choices=['all', 'first', 'last'],
|
||||||
|
default='last',
|
||||||
|
help='Select which ledgers to display.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--rippled', '-r',
|
||||||
|
help='The filename of a rippled binary for retrieving ledgers.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--server', '-s',
|
||||||
|
help='IP address of a rippled JSON server.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--utc', '-u',
|
||||||
|
action='store_true',
|
||||||
|
help='If true, display times in UTC rather than local time.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--validations',
|
||||||
|
default=3,
|
||||||
|
help='The number of validations needed before considering a ledger valid.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--version',
|
||||||
|
action='version',
|
||||||
|
version='%(prog)s ' + VERSION,
|
||||||
|
help='Print the current version of %(prog)s',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--verbose', '-v',
|
||||||
|
action='store_true',
|
||||||
|
help='If true, give status messages on stderr.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--window', '-w',
|
||||||
|
type=int,
|
||||||
|
default=0,
|
||||||
|
help='How many ledgers to display around the matching ledger.',
|
||||||
|
)
|
||||||
|
|
||||||
|
_parser.add_argument(
|
||||||
|
'--yes', '-y',
|
||||||
|
action='store_true',
|
||||||
|
help='If true, don\'t ask for confirmation on large commands.',
|
||||||
|
)
|
||||||
|
|
||||||
|
# Read the arguments from the command line.
|
||||||
|
ARGS = _parser.parse_args()
|
||||||
|
ARGS.NONE = NONE
|
||||||
|
|
||||||
|
Log.VERBOSE = ARGS.verbose
|
||||||
|
|
||||||
|
# Now remove any items that look like ledger numbers from the command line.
|
||||||
|
_command = ARGS.command
|
||||||
|
_parts = (ARGS.command, ARGS.ledgers) = ([], [])
|
||||||
|
|
||||||
|
for c in _command:
|
||||||
|
_parts[Range.is_range(c, *LedgerNumber.LEDGERS)].append(c)
|
||||||
|
|
||||||
|
ARGS.command = ARGS.command or ['print' if ARGS.ledgers else 'info']
|
||||||
|
|
||||||
|
ARGS.cache = File.normalize(ARGS.cache)
|
||||||
|
|
||||||
|
if not ARGS.ledgers:
|
||||||
|
if ARGS.condition:
|
||||||
|
Log.warn('--condition needs a range of ledgers')
|
||||||
|
if ARGS.display:
|
||||||
|
Log.warn('--display needs a range of ledgers')
|
||||||
|
|
||||||
|
ARGS.condition = Function(
|
||||||
|
ARGS.condition or 'all_ledgers', 'ripple.ledger.conditions')
|
||||||
|
ARGS.display = Function(
|
||||||
|
ARGS.display or 'ledger_number', 'ripple.ledger.displays')
|
||||||
|
|
||||||
|
if ARGS.window < 0:
|
||||||
|
raise ValueError('Window cannot be negative: --window=%d' %
|
||||||
|
ARGS.window)
|
||||||
|
|
||||||
|
PrettyPrint.INDENT = (ARGS.indent * ' ')
|
||||||
|
|
||||||
|
_loaders = (ARGS.database != NONE) + bool(ARGS.rippled) + bool(ARGS.server)
|
||||||
|
|
||||||
|
if not _loaders:
|
||||||
|
ARGS.rippled = 'rippled'
|
||||||
|
|
||||||
|
elif _loaders > 1:
|
||||||
|
raise ValueError('At most one of --database, --rippled and --server '
|
||||||
|
'may be specified')
|
||||||
78
bin/ripple/ledger/DatabaseReader.py
Normal file
78
bin/ripple/ledger/DatabaseReader.py
Normal file
@@ -0,0 +1,78 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util import ConfigFile
|
||||||
|
from ripple.util import Database
|
||||||
|
from ripple.util import File
|
||||||
|
from ripple.util import Log
|
||||||
|
from ripple.util import Range
|
||||||
|
|
||||||
|
LEDGER_QUERY = """
|
||||||
|
SELECT
|
||||||
|
L.*, count(1) validations
|
||||||
|
FROM
|
||||||
|
(select LedgerHash, LedgerSeq from Ledgers ORDER BY LedgerSeq DESC) L
|
||||||
|
JOIN Validations V
|
||||||
|
ON (V.LedgerHash = L.LedgerHash)
|
||||||
|
GROUP BY L.LedgerHash
|
||||||
|
HAVING validations >= {validation_quorum}
|
||||||
|
ORDER BY 2;
|
||||||
|
"""
|
||||||
|
|
||||||
|
COMPLETE_QUERY = """
|
||||||
|
SELECT
|
||||||
|
L.LedgerSeq, count(*) validations
|
||||||
|
FROM
|
||||||
|
(select LedgerHash, LedgerSeq from Ledgers ORDER BY LedgerSeq) L
|
||||||
|
JOIN Validations V
|
||||||
|
ON (V.LedgerHash = L.LedgerHash)
|
||||||
|
GROUP BY L.LedgerHash
|
||||||
|
HAVING validations >= :validation_quorum
|
||||||
|
ORDER BY 2;
|
||||||
|
"""
|
||||||
|
|
||||||
|
_DATABASE_NAME = 'ledger.db'
|
||||||
|
|
||||||
|
USE_PLACEHOLDERS = False
|
||||||
|
|
||||||
|
class DatabaseReader(object):
|
||||||
|
def __init__(self, config):
|
||||||
|
assert ARGS.database != ARGS.NONE
|
||||||
|
database = ARGS.database or config['database_path']
|
||||||
|
if not database.endswith(_DATABASE_NAME):
|
||||||
|
database = os.path.join(database, _DATABASE_NAME)
|
||||||
|
if USE_PLACEHOLDERS:
|
||||||
|
cursor = Database.fetchall(
|
||||||
|
database, COMPLETE_QUERY, config)
|
||||||
|
else:
|
||||||
|
cursor = Database.fetchall(
|
||||||
|
database, LEDGER_QUERY.format(**config), {})
|
||||||
|
self.complete = [c[1] for c in cursor]
|
||||||
|
|
||||||
|
def name_to_ledger_index(self, ledger_name, is_full=False):
|
||||||
|
if not self.complete:
|
||||||
|
return None
|
||||||
|
if ledger_name == 'closed':
|
||||||
|
return self.complete[-1]
|
||||||
|
if ledger_name == 'current':
|
||||||
|
return None
|
||||||
|
if ledger_name == 'validated':
|
||||||
|
return self.complete[-1]
|
||||||
|
|
||||||
|
def get_ledger(self, name, is_full=False):
|
||||||
|
cmd = ['ledger', str(name)]
|
||||||
|
if is_full:
|
||||||
|
cmd.append('full')
|
||||||
|
response = self._command(*cmd)
|
||||||
|
result = response.get('ledger')
|
||||||
|
if result:
|
||||||
|
return result
|
||||||
|
error = response['error']
|
||||||
|
etext = _ERROR_TEXT.get(error)
|
||||||
|
if etext:
|
||||||
|
error = '%s (%s)' % (etext, error)
|
||||||
|
Log.fatal(_ERROR_TEXT.get(error, error))
|
||||||
18
bin/ripple/ledger/LedgerNumber.py
Normal file
18
bin/ripple/ledger/LedgerNumber.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util import Range
|
||||||
|
|
||||||
|
FIRST_EVER = 32570
|
||||||
|
|
||||||
|
LEDGERS = {
|
||||||
|
'closed': 'the most recently closed ledger',
|
||||||
|
'current': 'the current ledger',
|
||||||
|
'first': 'the first complete ledger on this server',
|
||||||
|
'last': 'the last complete ledger on this server',
|
||||||
|
'validated': 'the most recently validated ledger',
|
||||||
|
}
|
||||||
|
|
||||||
|
HELP = """
|
||||||
|
Ledgers are either represented by a number, or one of the special ledgers;
|
||||||
|
""" + ',\n'.join('%s, %s' % (k, v) for k, v in sorted(LEDGERS.items())
|
||||||
|
)
|
||||||
68
bin/ripple/ledger/RippledReader.py
Normal file
68
bin/ripple/ledger/RippledReader.py
Normal file
@@ -0,0 +1,68 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util import File
|
||||||
|
from ripple.util import Log
|
||||||
|
from ripple.util import Range
|
||||||
|
|
||||||
|
_ERROR_CODE_REASON = {
|
||||||
|
62: 'No rippled server is running.',
|
||||||
|
}
|
||||||
|
|
||||||
|
_ERROR_TEXT = {
|
||||||
|
'lgrNotFound': 'The ledger you requested was not found.',
|
||||||
|
'noCurrent': 'The server has no current ledger.',
|
||||||
|
'noNetwork': 'The server did not respond to your request.',
|
||||||
|
}
|
||||||
|
|
||||||
|
_DEFAULT_ERROR_ = "Couldn't connect to server."
|
||||||
|
|
||||||
|
class RippledReader(object):
|
||||||
|
def __init__(self, config):
|
||||||
|
fname = File.normalize(ARGS.rippled)
|
||||||
|
if not os.path.exists(fname):
|
||||||
|
raise Exception('No rippled found at %s.' % fname)
|
||||||
|
self.cmd = [fname]
|
||||||
|
if ARGS.config:
|
||||||
|
self.cmd.extend(['--conf', File.normalize(ARGS.config)])
|
||||||
|
self.info = self._command('server_info')['info']
|
||||||
|
c = self.info.get('complete_ledgers')
|
||||||
|
if c == 'empty':
|
||||||
|
self.complete = []
|
||||||
|
else:
|
||||||
|
self.complete = sorted(Range.from_string(c))
|
||||||
|
|
||||||
|
def name_to_ledger_index(self, ledger_name, is_full=False):
|
||||||
|
return self.get_ledger(ledger_name, is_full)['ledger_index']
|
||||||
|
|
||||||
|
def get_ledger(self, name, is_full=False):
|
||||||
|
cmd = ['ledger', str(name)]
|
||||||
|
if is_full:
|
||||||
|
cmd.append('full')
|
||||||
|
response = self._command(*cmd)
|
||||||
|
result = response.get('ledger')
|
||||||
|
if result:
|
||||||
|
return result
|
||||||
|
error = response['error']
|
||||||
|
etext = _ERROR_TEXT.get(error)
|
||||||
|
if etext:
|
||||||
|
error = '%s (%s)' % (etext, error)
|
||||||
|
Log.fatal(_ERROR_TEXT.get(error, error))
|
||||||
|
|
||||||
|
def _command(self, *cmds):
|
||||||
|
cmd = self.cmd + list(cmds)
|
||||||
|
try:
|
||||||
|
data = subprocess.check_output(cmd, stderr=subprocess.PIPE)
|
||||||
|
except subprocess.CalledProcessError as e:
|
||||||
|
raise Exception(_ERROR_CODE_REASON.get(
|
||||||
|
e.returncode, _DEFAULT_ERROR_))
|
||||||
|
|
||||||
|
part = json.loads(data)
|
||||||
|
try:
|
||||||
|
return part['result']
|
||||||
|
except:
|
||||||
|
raise ValueError(part.get('error', 'unknown error'))
|
||||||
24
bin/ripple/ledger/SearchLedgers.py
Normal file
24
bin/ripple/ledger/SearchLedgers.py
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util import Log
|
||||||
|
from ripple.util import Range
|
||||||
|
from ripple.util import Search
|
||||||
|
|
||||||
|
def search(server):
|
||||||
|
"""Yields a stream of ledger numbers that match the given condition."""
|
||||||
|
condition = lambda number: ARGS.condition(server, number)
|
||||||
|
ledgers = server.ledgers
|
||||||
|
if ARGS.binary:
|
||||||
|
try:
|
||||||
|
position = Search.FIRST if ARGS.position == 'first' else Search.LAST
|
||||||
|
yield Search.binary_search(
|
||||||
|
ledgers[0], ledgers[-1], condition, position)
|
||||||
|
except:
|
||||||
|
Log.fatal('No ledgers matching condition "%s".' % condition,
|
||||||
|
file=sys.stderr)
|
||||||
|
else:
|
||||||
|
for x in Search.linear_search(ledgers, condition):
|
||||||
|
yield x
|
||||||
55
bin/ripple/ledger/Server.py
Normal file
55
bin/ripple/ledger/Server.py
Normal file
@@ -0,0 +1,55 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
|
||||||
|
from ripple.ledger import DatabaseReader, RippledReader
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util.FileCache import FileCache
|
||||||
|
from ripple.util import ConfigFile
|
||||||
|
from ripple.util import File
|
||||||
|
from ripple.util import Range
|
||||||
|
|
||||||
|
class Server(object):
|
||||||
|
def __init__(self):
|
||||||
|
cfg_file = File.normalize(ARGS.config or 'rippled.cfg')
|
||||||
|
self.config = ConfigFile.read(open(cfg_file))
|
||||||
|
if ARGS.database != ARGS.NONE:
|
||||||
|
reader = DatabaseReader.DatabaseReader(self.config)
|
||||||
|
else:
|
||||||
|
reader = RippledReader.RippledReader(self.config)
|
||||||
|
|
||||||
|
self.reader = reader
|
||||||
|
self.complete = reader.complete
|
||||||
|
|
||||||
|
names = {
|
||||||
|
'closed': reader.name_to_ledger_index('closed'),
|
||||||
|
'current': reader.name_to_ledger_index('current'),
|
||||||
|
'validated': reader.name_to_ledger_index('validated'),
|
||||||
|
'first': self.complete[0] if self.complete else None,
|
||||||
|
'last': self.complete[-1] if self.complete else None,
|
||||||
|
}
|
||||||
|
self.__dict__.update(names)
|
||||||
|
self.ledgers = sorted(Range.join_ranges(*ARGS.ledgers, **names))
|
||||||
|
|
||||||
|
def make_cache(is_full):
|
||||||
|
name = 'full' if is_full else 'summary'
|
||||||
|
filepath = os.path.join(ARGS.cache, name)
|
||||||
|
creator = lambda n: reader.get_ledger(n, is_full)
|
||||||
|
return FileCache(filepath, creator)
|
||||||
|
self._caches = [make_cache(False), make_cache(True)]
|
||||||
|
|
||||||
|
def info(self):
|
||||||
|
return self.reader.info
|
||||||
|
|
||||||
|
def cache(self, is_full):
|
||||||
|
return self._caches[is_full]
|
||||||
|
|
||||||
|
def get_ledger(self, number, is_full=False):
|
||||||
|
num = int(number)
|
||||||
|
save_in_cache = num in self.complete
|
||||||
|
can_create = (not ARGS.offline and
|
||||||
|
self.complete and
|
||||||
|
self.complete[0] <= num - 1)
|
||||||
|
cache = self.cache(is_full)
|
||||||
|
return cache.get_data(number, save_in_cache, can_create)
|
||||||
5
bin/ripple/ledger/ServerReader.py
Normal file
5
bin/ripple/ledger/ServerReader.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
class ServerReader(object):
|
||||||
|
def __init__(self, config):
|
||||||
|
raise ValueError('Direct server connections are not yet implemented.')
|
||||||
0
bin/ripple/ledger/__init__.py
Normal file
0
bin/ripple/ledger/__init__.py
Normal file
34
bin/ripple/ledger/commands/Cache.py
Normal file
34
bin/ripple/ledger/commands/Cache.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util import Log
|
||||||
|
from ripple.util import Range
|
||||||
|
from ripple.util.PrettyPrint import pretty_print
|
||||||
|
|
||||||
|
SAFE = True
|
||||||
|
|
||||||
|
HELP = """cache
|
||||||
|
return server_info"""
|
||||||
|
|
||||||
|
def cache(server, clear=False):
|
||||||
|
cache = server.cache(ARGS.full)
|
||||||
|
name = ['summary', 'full'][ARGS.full]
|
||||||
|
files = cache.file_count()
|
||||||
|
if not files:
|
||||||
|
Log.error('No files in %s cache.' % name)
|
||||||
|
|
||||||
|
elif clear:
|
||||||
|
if not clear.strip() == 'clear':
|
||||||
|
raise Exception("Don't understand 'clear %s'." % clear)
|
||||||
|
if not ARGS.yes:
|
||||||
|
yes = raw_input('OK to clear %s cache? (y/N) ' % name)
|
||||||
|
if not yes.lower().startswith('y'):
|
||||||
|
Log.out('Cancelled.')
|
||||||
|
return
|
||||||
|
cache.clear(ARGS.full)
|
||||||
|
Log.out('%s cache cleared - %d file%s deleted.' %
|
||||||
|
(name.capitalize(), files, '' if files == 1 else 's'))
|
||||||
|
|
||||||
|
else:
|
||||||
|
caches = (int(c) for c in cache.cache_list())
|
||||||
|
Log.out(Range.to_string(caches))
|
||||||
21
bin/ripple/ledger/commands/Info.py
Normal file
21
bin/ripple/ledger/commands/Info.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util import Log
|
||||||
|
from ripple.util import Range
|
||||||
|
from ripple.util.PrettyPrint import pretty_print
|
||||||
|
|
||||||
|
SAFE = True
|
||||||
|
|
||||||
|
HELP = 'info - return server_info'
|
||||||
|
|
||||||
|
def info(server):
|
||||||
|
Log.out('first =', server.first)
|
||||||
|
Log.out('last =', server.last)
|
||||||
|
Log.out('closed =', server.closed)
|
||||||
|
Log.out('current =', server.current)
|
||||||
|
Log.out('validated =', server.validated)
|
||||||
|
Log.out('complete =', Range.to_string(server.complete))
|
||||||
|
|
||||||
|
if ARGS.full:
|
||||||
|
Log.out(pretty_print(server.info()))
|
||||||
15
bin/ripple/ledger/commands/Print.py
Normal file
15
bin/ripple/ledger/commands/Print.py
Normal file
@@ -0,0 +1,15 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.ledger import SearchLedgers
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
SAFE = True
|
||||||
|
|
||||||
|
HELP = """print
|
||||||
|
|
||||||
|
Print the ledgers to stdout. The default command."""
|
||||||
|
|
||||||
|
def run_print(server):
|
||||||
|
ARGS.display(print, server, SearchLedgers.search(server))
|
||||||
0
bin/ripple/ledger/commands/__init__.py
Normal file
0
bin/ripple/ledger/commands/__init__.py
Normal file
4
bin/ripple/ledger/conditions/__init__.py
Normal file
4
bin/ripple/ledger/conditions/__init__.py
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
def all_ledgers(server, ledger_number):
|
||||||
|
return True
|
||||||
89
bin/ripple/ledger/displays/__init__.py
Normal file
89
bin/ripple/ledger/displays/__init__.py
Normal file
@@ -0,0 +1,89 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from functools import wraps
|
||||||
|
|
||||||
|
import jsonpath_rw
|
||||||
|
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util import Dict
|
||||||
|
from ripple.util import Log
|
||||||
|
from ripple.util import Range
|
||||||
|
from ripple.util.Decimal import Decimal
|
||||||
|
from ripple.util.PrettyPrint import pretty_print, Streamer
|
||||||
|
|
||||||
|
TRANSACT_FIELDS = (
|
||||||
|
'accepted',
|
||||||
|
'close_time_human',
|
||||||
|
'closed',
|
||||||
|
'ledger_index',
|
||||||
|
'total_coins',
|
||||||
|
'transactions',
|
||||||
|
)
|
||||||
|
|
||||||
|
LEDGER_FIELDS = (
|
||||||
|
'accepted',
|
||||||
|
'accountState',
|
||||||
|
'close_time_human',
|
||||||
|
'closed',
|
||||||
|
'ledger_index',
|
||||||
|
'total_coins',
|
||||||
|
'transactions',
|
||||||
|
)
|
||||||
|
|
||||||
|
def _dict_filter(d, keys):
|
||||||
|
return dict((k, v) for (k, v) in d.items() if k in keys)
|
||||||
|
|
||||||
|
def ledger_number(print, server, numbers):
|
||||||
|
print(Range.to_string(numbers))
|
||||||
|
|
||||||
|
def display(f):
|
||||||
|
@wraps(f)
|
||||||
|
def wrapper(printer, server, numbers, *args):
|
||||||
|
streamer = Streamer(printer=printer)
|
||||||
|
for number in numbers:
|
||||||
|
ledger = server.get_ledger(number, ARGS.full)
|
||||||
|
if ledger:
|
||||||
|
streamer.add(number, f(ledger, *args))
|
||||||
|
streamer.finish()
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
def extractor(f):
|
||||||
|
@wraps(f)
|
||||||
|
def wrapper(printer, server, numbers, *paths):
|
||||||
|
try:
|
||||||
|
find = jsonpath_rw.parse('|'.join(paths)).find
|
||||||
|
except:
|
||||||
|
raise ValueError("Can't understand jsonpath '%s'." % path)
|
||||||
|
def fn(ledger, *args):
|
||||||
|
return f(find(ledger), *args)
|
||||||
|
display(fn)(printer, server, numbers)
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
@display
|
||||||
|
def ledger(ledger, full=False):
|
||||||
|
if ARGS.full:
|
||||||
|
if full:
|
||||||
|
return ledger
|
||||||
|
|
||||||
|
ledger = Dict.prune(ledger, 1, False)
|
||||||
|
|
||||||
|
return _dict_filter(ledger, LEDGER_FIELDS)
|
||||||
|
|
||||||
|
@display
|
||||||
|
def prune(ledger, level=1):
|
||||||
|
return Dict.prune(ledger, level, False)
|
||||||
|
|
||||||
|
@display
|
||||||
|
def transact(ledger):
|
||||||
|
return _dict_filter(ledger, TRANSACT_FIELDS)
|
||||||
|
|
||||||
|
@extractor
|
||||||
|
def extract(finds):
|
||||||
|
return dict((str(f.full_path), str(f.value)) for f in finds)
|
||||||
|
|
||||||
|
@extractor
|
||||||
|
def sum(finds):
|
||||||
|
d = Decimal()
|
||||||
|
for f in finds:
|
||||||
|
d.accumulate(f.value)
|
||||||
|
return [str(d), len(finds)]
|
||||||
40
bin/ripple/util/Cache.py
Normal file
40
bin/ripple/util/Cache.py
Normal file
@@ -0,0 +1,40 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from collections import defaultdict
|
||||||
|
|
||||||
|
class Cache(object):
    """Bidirectional mapping between values and dense integer indices.

    Each new value is assigned the next free index; the keyword arguments
    given at first sight of the value are stored alongside it.
    """

    def __init__(self):
        self._value_to_index = {}
        self._index_to_value = []

    def value_to_index(self, value, **kwds):
        """Return the index for value, assigning a fresh one if unseen."""
        if value not in self._value_to_index:
            self._value_to_index[value] = len(self._index_to_value)
            self._index_to_value.append((value, kwds))
        return self._value_to_index[value]

    def index_to_value(self, index):
        """Return the (value, kwds) pair stored at index."""
        return self._index_to_value[index]
|
||||||
|
|
||||||
|
def NamedCache():
    """Return a defaultdict that makes a fresh Cache for each new name."""
    return defaultdict(lambda: Cache())
|
||||||
|
|
||||||
|
def cache_by_key(d, keyfunc=None, exclude=None):
    # NOTE(review): this function looks unfinished -- visit() is defined
    # but never called on d, keyfunc is never used, and
    # 'exclude = exclude or None' is a no-op, so the returned cache is
    # always empty.  Confirm the intent before relying on it.
    cache = defaultdict(Cache)
    exclude = exclude or None
    keyfunc = keyfunc or (lambda x: x)

    def visit(item):
        # Recursively rewrite lists and dicts in place (currently unused).
        if isinstance(item, list):
            for i, x in enumerate(item):
                item[i] = visit(x)

        elif isinstance(item, dict):
            for k, v in item.items():
                item[k] = visit(v)

        return item

    return cache
|
||||||
77
bin/ripple/util/CommandList.py
Normal file
77
bin/ripple/util/CommandList.py
Normal file
@@ -0,0 +1,77 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
# Code taken from github/rec/grit.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from collections import namedtuple
|
||||||
|
|
||||||
|
from ripple.ledger.Args import ARGS
|
||||||
|
from ripple.util import Log
|
||||||
|
|
||||||
|
Command = namedtuple('Command', 'function help safe')


def make_command(module):
    """Build a (name, Command) pair from a command module.

    The name is the lowercased last segment of the module's name; the
    callable is module.<name>, falling back to module.run_<name>.  HELP
    must be present; SAFE defaults to False.
    """
    name = module.__name__.rsplit('.', 1)[-1].lower()
    function = getattr(module, name, None) or getattr(module, 'run_' + name)
    return name, Command(
        function, getattr(module, 'HELP'), getattr(module, 'SAFE', False))
|
||||||
|
|
||||||
|
class CommandList(object):
    """Registry of named commands with prefix matching and safety prompts."""

    def __init__(self, *args, **kwds):
        # Positional args are command modules; keyword args are
        # name=function (or name=[function, help, safe]) entries.
        self.registry = {}
        self.register(*args, **kwds)

    def register(self, *modules, **kwds):
        """Register command modules and/or explicit name=value entries."""
        for module in modules:
            name, command = make_command(module)
            self.registry[name] = command

        for k, v in kwds.items():
            if not isinstance(v, (list, tuple)):
                v = [v]
            self.register_one(k, *v)

    def keys(self):
        # All registered command names.
        return self.registry.keys()

    def register_one(self, name, function, help='', safe=False):
        # Direct registration; a duplicate name is a programming error.
        assert name not in self.registry
        self.registry[name] = Command(function, help, safe)

    def _get(self, command):
        # Resolve a (possibly abbreviated) name to (full_name, Command).
        # Raises ValueError when unknown or when the prefix is ambiguous.
        command = command.lower()
        c = self.registry.get(command)
        if c:
            return command, c
        commands = [c for c in self.registry if c.startswith(command)]
        if len(commands) == 1:
            command = commands[0]
            return command, self.registry[command]
        if not commands:
            raise ValueError('No such command: %s.  Commands are %s.' %
                             (command, ', '.join(sorted(self.registry))))
        if len(commands) > 1:
            raise ValueError('Command %s was ambiguous: %s.' %
                             (command, ', '.join(commands)))

    def get(self, command):
        # Command record only, without the resolved name.
        return self._get(command)[1]

    def run(self, command, *args):
        # Run unconditionally, without any confirmation prompt.
        return self.get(command).function(*args)

    def run_safe(self, command, *args):
        # Commands not marked safe prompt for confirmation unless the
        # --yes flag was given.  NOTE(review): raw_input is Python 2 only.
        name, cmd = self._get(command)
        if not (ARGS.yes or cmd.safe):
            confirm = raw_input('OK to execute "rl %s %s"? (y/N) ' %
                                (name, ' '.join(args)))
            if not confirm.lower().startswith('y'):
                Log.error('Cancelled.')
                return
        cmd.function(*args)

    def help(self, command):
        # NOTE(review): this calls the stored help attribute; entries whose
        # help is a plain string (e.g. module HELP text) would raise
        # TypeError here -- confirm that help entries are callables.
        return self.get(command).help()
|
||||||
54
bin/ripple/util/ConfigFile.py
Normal file
54
bin/ripple/util/ConfigFile.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import json
|
||||||
|
|
||||||
|
"""Ripple has a proprietary format for their .cfg files, so we need a reader for
|
||||||
|
them."""
|
||||||
|
|
||||||
|
def read(lines):
    """Parse the lines of a rippled .cfg file into a dict.

    Each [section] maps to: a single string for a one-line section, a
    list for multi-line or multi-word sections, or a dict for key=value
    and inline-JSON sections.  Blank lines and '#' comments are ignored.
    Raises AssertionError on malformed input.
    """
    # Group the input into sections, each starting with its [name] line.
    sections = []
    section = []
    for line in lines:
        line = line.strip()
        if (not line) or line[0] == '#':
            continue
        if line.startswith('['):
            if section:
                sections.append(section)
            section = []
        section.append(line)
    if section:
        sections.append(section)

    result = {}
    for section in sections:
        option = section.pop(0)
        assert section, ('No value for option "%s".' % option)
        # BUG FIX: the original message referenced an undefined name
        # 'p', so a malformed header raised NameError instead of the
        # intended AssertionError.
        assert option.startswith('[') and option.endswith(']'), (
            'No option name in block "%s"' % option)
        option = option[1:-1]
        assert option not in result, 'Duplicate option "%s".' % option

        subdict = {}
        items = []
        for part in section:
            if '=' in part:
                assert not items, 'Dictionary mixed with list.'
                k, v = part.split('=', 1)
                assert k not in subdict, 'Repeated dictionary entry ' + k
                subdict[k] = v
            else:
                assert not subdict, 'List mixed with dictionary.'
                if part.startswith('{'):
                    # Inline JSON objects become dict values.
                    items.append(json.loads(part))
                else:
                    words = part.split()
                    if len(words) > 1:
                        items.append(words)
                    else:
                        items.append(part)
        if len(items) == 1:
            result[option] = items[0]
        else:
            result[option] = items or subdict
    return result
|
||||||
12
bin/ripple/util/Database.py
Normal file
12
bin/ripple/util/Database.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import sqlite3
|
||||||
|
|
||||||
|
def fetchall(database, query, kwds):
    """Run one parameterized query against a SQLite database file.

    Opens a fresh connection, executes `query` with the `kwds`
    parameters, and returns every row; the connection is always closed.
    """
    conn = sqlite3.connect(database)
    try:
        return conn.execute(query, kwds).fetchall()
    finally:
        conn.close()
|
||||||
46
bin/ripple/util/Decimal.py
Normal file
46
bin/ripple/util/Decimal.py
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
"""Fixed point numbers."""
|
||||||
|
|
||||||
|
POSITIONS = 10
POSITIONS_SHIFT = 10 ** POSITIONS


class Decimal(object):
    """A fixed-point number with POSITIONS decimal places, stored as an int."""

    def __init__(self, desc='0'):
        """Accept an int (already-scaled raw value) or a string like '-3.14'."""
        if isinstance(desc, int):
            self.value = desc
            return
        if desc.startswith('-'):
            sign = -1
            desc = desc[1:]
        else:
            sign = 1
        parts = desc.split('.')
        if len(parts) == 1:
            parts.append('0')
        elif len(parts) > 2:
            raise Exception('Too many decimals in "%s"' % desc)
        number, decimal = parts
        # Pad or truncate the fractional part to exactly POSITIONS digits.
        decimal = (decimal + POSITIONS * '0')[:POSITIONS]
        self.value = sign * int(number + decimal)

    def accumulate(self, item):
        """Add another Decimal (or anything Decimal() accepts) to this one."""
        if not isinstance(item, Decimal):
            item = Decimal(item)
        self.value += item.value

    def __str__(self):
        if self.value >= 0:
            sign = ''
            value = self.value
        else:
            sign = '-'
            value = -self.value
        number = value // POSITIONS_SHIFT
        # BUG FIX: the fractional part must be zero-padded to POSITIONS
        # digits before stripping trailing zeros.  The original multiplied
        # it by POSITIONS_SHIFT, which lost leading fractional zeros, so
        # e.g. Decimal('0.05') printed as '0.5'.
        decimal = str(value % POSITIONS_SHIFT).zfill(POSITIONS).rstrip('0')

        if decimal:
            return '%s%s.%s' % (sign, number, decimal)
        else:
            return '%s%s' % (sign, number)
|
||||||
33
bin/ripple/util/Dict.py
Normal file
33
bin/ripple/util/Dict.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
def count_all_subitems(x):
    """Count the subitems of a Python object, including the object itself.

    Lists and dicts contribute one for themselves plus the recursive
    count of their elements/values; everything else counts as one.
    """
    if isinstance(x, list):
        return 1 + sum(count_all_subitems(i) for i in x)
    if isinstance(x, dict):
        # PORTABILITY FIX: itervalues() is Python 2 only; values() has the
        # same semantics here and also runs on Python 3.
        return 1 + sum(count_all_subitems(i) for i in x.values())
    return 1
|
||||||
|
|
||||||
|
def prune(item, level, count_recursively=True):
    """Replace everything deeper than `level` levels with summary strings.

    count_recursively: if true, summaries count all nested subitems
    (via count_all_subitems); otherwise only the direct children.
    """
    def subitems(x):
        # Human-readable count with correct pluralization.
        i = count_all_subitems(x) - 1 if count_recursively else len(x)
        return '1 subitem' if i == 1 else '%d subitems' % i

    assert level >= 0
    if not item:
        return item

    if isinstance(item, list):
        if level:
            return [prune(i, level - 1, count_recursively) for i in item]
        else:
            return '[list with %s]' % subitems(item)

    if isinstance(item, dict):
        if level:
            # PORTABILITY FIX: iteritems() is Python 2 only; items() has
            # the same semantics here and also runs on Python 3.
            return dict((k, prune(v, level - 1, count_recursively))
                        for k, v in item.items())
        else:
            return '{dict with %s}' % subitems(item)

    return item
|
||||||
7
bin/ripple/util/File.py
Normal file
7
bin/ripple/util/File.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import os
|
||||||
|
|
||||||
|
def normalize(f):
    """Expand '~', convert '/' separators to native ones, and make f absolute."""
    native = os.path.join(*f.split('/'))  # For Windows users.
    return os.path.abspath(os.path.expanduser(native))
|
||||||
56
bin/ripple/util/FileCache.py
Normal file
56
bin/ripple/util/FileCache.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import gzip
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
|
||||||
|
_NONE = object()


class FileCache(object):
    """A two-level cache, which stores expensive results in memory and on disk.
    """
    def __init__(self, cache_directory, creator, open=gzip.open, suffix='.gz'):
        """
        cache_directory: directory for the on-disk level (created if needed).
        creator: function name -> data, called on a full cache miss.
        open/suffix: file opener and file extension (gzip by default).
        """
        self.cache_directory = cache_directory
        self.creator = creator
        self.open = open
        self.suffix = suffix
        self.cached_data = {}
        if not os.path.exists(self.cache_directory):
            os.makedirs(self.cache_directory)

    def _filename(self, name):
        # Full path of the cache file for name.
        return os.path.join(self.cache_directory, name) + self.suffix

    def get_file_data(self, name):
        """Return data for name from disk if present, else call the creator.

        Does not consult or update the in-memory level.
        BUG FIX: the original referenced an undefined local 'filename',
        raising NameError on every call.
        """
        filename = self._filename(name)
        if os.path.exists(filename):
            return json.load(self.open(filename))
        return self.creator(name)

    def get_data(self, name, save_in_cache, can_create, default=None):
        """Return data for name from memory, disk, or the creator.

        save_in_cache: if true, remember the result (memory always;
            disk only when the result was freshly created).
        can_create: if true, fall back to self.creator on a cache miss.
        default: returned when nothing is found and nothing may be created.
        """
        name = str(name)
        result = self.cached_data.get(name, _NONE)
        if result is _NONE:
            filename = self._filename(name)
            if os.path.exists(filename):
                result = json.load(self.open(filename)) or _NONE
            created = False
            if result is _NONE and can_create:
                result = self.creator(name)
                created = True
            if result is not _NONE and save_in_cache:
                # BUG FIX: cached_data was read but never written, which
                # defeated the in-memory level of this "two-level cache".
                self.cached_data[name] = result
                if created:
                    json.dump(result, self.open(filename, 'w'))
        return default if result is _NONE else result

    def _files(self):
        # Raw directory listing of the on-disk level.
        return os.listdir(self.cache_directory)

    def cache_list(self):
        """Yield the names of all entries currently cached on disk."""
        for f in self._files():
            if f.endswith(self.suffix):
                yield f[:-len(self.suffix)]

    def file_count(self):
        """Number of files in the cache directory."""
        return len(self._files())

    def clear(self):
        """Clears both local files and memory."""
        self.cached_data = {}
        for f in self._files():
            os.remove(os.path.join(self.cache_directory, f))
|
||||||
82
bin/ripple/util/Function.py
Normal file
82
bin/ripple/util/Function.py
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
"""A function that can be specified at the command line, with an argument."""
|
||||||
|
|
||||||
|
import importlib
|
||||||
|
import re
|
||||||
|
import tokenize
|
||||||
|
|
||||||
|
from StringIO import StringIO
|
||||||
|
|
||||||
|
# Splits 'dotted.name(arguments)' into the name and the argument tail.
MATCHER = re.compile(r'([\w.]+)(.*)')

# Bare names kept as keywords instead of being quoted; both the JSON and
# the Python spellings are accepted.
REMAPPINGS = {
    'false': False,
    'true': True,
    'null': None,
    'False': False,
    'True': True,
    'None': None,
}


def eval_arguments(args):
    # Evaluate an argument string like '(1, true, foo)' to a tuple.
    # Bare identifiers other than the REMAPPINGS keywords become strings.
    # NOTE(review): this ultimately calls eval() on command-line text --
    # fine for a trusted CLI, but never feed it untrusted input.
    args = args.strip()
    if not args or (args == '()'):
        return ()
    # Re-tokenize the expression, quoting bare identifiers so that eval
    # sees them as string literals.
    tokens = list(tokenize.generate_tokens(StringIO(args).readline))
    def remap():
        for type, name, _, _, _ in tokens:
            if type == tokenize.NAME and name not in REMAPPINGS:
                yield tokenize.STRING, '"%s"' % name
            else:
                yield type, name
    untok = tokenize.untokenize(remap())
    if untok[1:-1].strip():
        untok = untok[:-1] + ',)'  # Force a tuple.
    try:
        return eval(untok, REMAPPINGS)
    except Exception as e:
        raise ValueError('Couldn\'t evaluate expression "%s" (became "%s"), '
                         'error "%s"' % (args, untok, str(e)))
|
||||||
|
|
||||||
|
class Function(object):
    """A callable parsed from a description like 'module.path.fn(1, true)'.

    Arguments parsed from the description are appended after any
    call-time arguments.
    """

    def __init__(self, desc='', default_path=''):
        # desc: 'dotted.function(args)' or '' for a no-op function.
        # default_path: module prefix applied when desc contains no dots.
        self.desc = desc.strip()
        if not self.desc:
            # Make an empty function that does nothing.
            self.args = ()
            self.function = lambda *args, **kwds: None
            return

        m = MATCHER.match(desc)
        if not m:
            raise ValueError('"%s" is not a function' % desc)
        self.function, self.args = (g.strip() for g in m.groups())
        self.args = eval_arguments(self.args)

        if '.' not in self.function:
            if default_path and not default_path.endswith('.'):
                default_path += '.'
            self.function = default_path + self.function
        # Import the containing module, then resolve the attribute.
        p, m = self.function.rsplit('.', 1)
        mod = importlib.import_module(p)
        # Errors in modules are swallowed here.
        # except:
        #    raise ValueError('Can\'t find Python module "%s"' % p)

        try:
            self.function = getattr(mod, m)
        except:
            # getattr raises AttributeError when the name is missing.
            raise ValueError('No function "%s" in module "%s"' % (m, p))

    def __str__(self):
        return self.desc

    def __call__(self, *args, **kwds):
        # Call-time positional arguments come first, then the parsed ones.
        return self.function(*(args + self.args), **kwds)

    def __eq__(self, other):
        return self.function == other.function and self.args == other.args

    def __ne__(self, other):
        return not (self == other)
|
||||||
21
bin/ripple/util/Log.py
Normal file
21
bin/ripple/util/Log.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
VERBOSE = False
|
||||||
|
|
||||||
|
def out(*args, **kwds):
    """Print args via kwds['print'] (default: print), defaulting to stdout.

    Any remaining keyword arguments are forwarded to the printer.
    BUG FIX: the original used kwds.get(), leaving 'print' inside **kwds,
    and always forced file=sys.stdout -- so passing either a custom
    printer or a file keyword raised TypeError (duplicate keywords).
    """
    printer = kwds.pop('print', print)
    kwds.setdefault('file', sys.stdout)
    printer(*args, **kwds)
|
||||||
|
|
||||||
|
def info(*args, **kwds):
    """Print to stdout only when the module-level VERBOSE flag is set."""
    if not VERBOSE:
        return
    out(*args, **kwds)
|
||||||
|
|
||||||
|
def warn(*args, **kwds):
    # Print args to stdout with a 'WARNING:' prefix (see out()).
    out('WARNING:', *args, **kwds)
|
||||||
|
|
||||||
|
def error(*args, **kwds):
    # Print args to stdout with an 'ERROR:' prefix (see out()).
    out('ERROR:', *args, **kwds)
|
||||||
|
|
||||||
|
def fatal(*args, **kwds):
    """Raise an Exception whose message is 'FATAL: ' plus the joined args.

    Keyword arguments are accepted for signature symmetry but ignored.
    """
    message = ' '.join(str(a) for a in args)
    raise Exception('FATAL: ' + message)
|
||||||
42
bin/ripple/util/PrettyPrint.py
Normal file
42
bin/ripple/util/PrettyPrint.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from functools import wraps
|
||||||
|
import json
|
||||||
|
|
||||||
|
SEPARATORS = ',', ': '
|
||||||
|
INDENT = ' '
|
||||||
|
|
||||||
|
def pretty_print(item):
    """Render item as deterministic JSON: sorted keys, module-wide indent."""
    return json.dumps(
        item, sort_keys=True, indent=len(INDENT), separators=SEPARATORS)
|
||||||
|
|
||||||
|
class Streamer(object):
    """Incrementally print one JSON object as key/value pairs arrive."""

    def __init__(self, printer=print):
        # No automatic spacing or carriage returns.
        self.printer = lambda *args: printer(*args, end='', sep='')
        self.first_key = True

    def add(self, key, value):
        # Print one '"key": value' entry, opening the object on first use
        # and separating subsequent entries with commas.
        if self.first_key:
            self.first_key = False
            self.printer('{')
        else:
            self.printer(',')

        self.printer('\n', INDENT, '"', str(key), '": ')

        # Re-indent every continuation line of multi-line values so they
        # align under the key.
        pp = pretty_print(value).splitlines()
        if len(pp) > 1:
            for i, line in enumerate(pp):
                if i > 0:
                    self.printer('\n', INDENT)
                self.printer(line)
        else:
            self.printer(pp[0])

    def finish(self):
        # Close the object if anything was written, and reset for reuse.
        if not self.first_key:
            self.first_key = True
            self.printer('\n}')
|
||||||
53
bin/ripple/util/Range.py
Normal file
53
bin/ripple/util/Range.py
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
"""
|
||||||
|
Convert a discontiguous range of integers to and from a human-friendly form.
|
||||||
|
|
||||||
|
Real world example is the server_info.complete_ledgers:
|
||||||
|
8252899-8403772,8403824,8403827-8403830,8403834-8403876
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def from_string(desc, **aliases):
    """Parse a discontiguous range like '1-4,8' into a set of ints.

    aliases maps symbolic names (e.g. first='...') to values usable in
    place of numbers.  Raises ValueError on malformed input.
    """
    result = set()
    if not desc:
        # BUG FIX: the original returned a list here but a set otherwise.
        return result
    for d in desc.split(','):
        nums = [int(aliases.get(x) or x) for x in d.split('-')]
        if len(nums) == 1:
            result.add(nums[0])
        elif len(nums) == 2:
            result.update(range(nums[0], nums[1] + 1))
        else:
            # BUG FIX: chunks like '1-2-3' were silently ignored; raising
            # ValueError makes is_range() report them as invalid.
            raise ValueError('Bad range chunk "%s".' % d)
    return result
|
||||||
|
|
||||||
|
def to_string(r):
    """Render a collection of ints compactly, e.g. {1,2,3,5} -> '1-3,5'.

    Consecutive runs collapse to 'first-last'; singletons stay bare.
    """
    groups = []
    for x in sorted(r):
        # Start a new group when the gap from the previous value exceeds 1.
        if groups and x - groups[-1][-1] <= 1:
            groups[-1].append(x)
        else:
            groups.append([x])

    def show(group):
        if len(group) == 1:
            return str(group[0])
        return '%s-%s' % (group[0], group[-1])

    return ','.join(show(g) for g in groups)
|
||||||
|
|
||||||
|
def is_range(desc, *names):
    """Return True if desc parses as a range when names are valid aliases."""
    try:
        from_string(desc, **{name: 1 for name in names})
    except ValueError:
        return False
    return True
|
||||||
|
|
||||||
|
def join_ranges(*ranges, **aliases):
    """Return the union of from_string() over all the range descriptions."""
    result = set()
    for desc in ranges:
        result.update(from_string(desc, **aliases))
    return result
|
||||||
46
bin/ripple/util/Search.py
Normal file
46
bin/ripple/util/Search.py
Normal file
@@ -0,0 +1,46 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
FIRST, LAST = range(2)


def binary_search(begin, end, condition, location=FIRST):
    """Search for an i in the interval [begin, end] where condition(i) is true.

    If location is FIRST, return the first such i; if LAST, the last.
    Assumes the true values form one contiguous run touching an endpoint.
    Raises ValueError when no such i exists.
    """
    at_begin = condition(begin)
    at_end = condition(end)
    if at_begin and at_end:
        return begin if location == FIRST else end

    if not at_begin and not at_end:
        raise ValueError('%d/%d' % (begin, end))

    # One endpoint already satisfies the requested location.
    if at_begin and location is FIRST:
        return begin
    if at_end and location is LAST:
        return end

    count = end - begin + 1
    if count == 1:
        if not at_begin:
            raise ValueError('%d/%d' % (begin, end))
        return begin
    if count == 2:
        return begin if at_begin else end

    # Recurse into whichever half contains the true/false boundary.
    midpoint = (begin + end) // 2
    if condition(midpoint) == at_begin:
        return binary_search(midpoint, end, condition, location)
    return binary_search(begin, midpoint, condition, location)
|
||||||
|
|
||||||
|
def linear_search(items, condition):
    """Lazily yield every item in items for which condition(item) is true."""
    for candidate in items:
        if condition(candidate):
            yield candidate
|
||||||
21
bin/ripple/util/Time.py
Normal file
21
bin/ripple/util/Time.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
import datetime
|
||||||
|
|
||||||
|
# Format for human-readable dates in rippled.
_DATE_FORMAT = '%Y-%b-%d'
_TIME_FORMAT = '%H:%M:%S'
_DATETIME_FORMAT = '%s %s' % (_DATE_FORMAT, _TIME_FORMAT)

_FORMATS = _DATE_FORMAT, _TIME_FORMAT, _DATETIME_FORMAT


def parse_datetime(desc):
    """Parse desc with the first matching rippled format.

    Returns a datetime.datetime; raises ValueError if nothing matches.
    BUG FIX: the original called datetime.date.strptime (which does not
    exist, so every attempt raised AttributeError) and then referenced an
    undefined name 'date' in its error message -- it could never succeed.
    """
    for fmt in _FORMATS:
        try:
            return datetime.datetime.strptime(desc, fmt)
        except ValueError:
            pass
    raise ValueError("Can't understand date '%s'." % desc)
|
||||||
|
|
||||||
|
def format_datetime(dt):
    # Render dt in rippled's human-readable '%Y-%b-%d %H:%M:%S' form.
    return dt.strftime(_DATETIME_FORMAT)
|
||||||
0
bin/ripple/util/__init__.py
Normal file
0
bin/ripple/util/__init__.py
Normal file
12
bin/ripple/util/test_Cache.py
Normal file
12
bin/ripple/util/test_Cache.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util.Cache import NamedCache
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
|
||||||
|
class test_Cache(TestCase):
    # Smoke test: only verifies that a NamedCache can be constructed.
    def setUp(self):
        self.cache = NamedCache()

    def test_trivial(self):
        pass
|
||||||
163
bin/ripple/util/test_ConfigFile.py
Normal file
163
bin/ripple/util/test_ConfigFile.py
Normal file
@@ -0,0 +1,163 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util import ConfigFile
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
|
||||||
|
class test_ConfigFile(TestCase):
    """Tests for the rippled .cfg reader."""
    # assertEquals is a deprecated alias; assertEqual works on both
    # Python 2.7 and Python 3.

    def test_trivial(self):
        # Empty input yields an empty config.
        self.assertEqual(ConfigFile.read(''), {})

    def test_full(self):
        # A realistic config file parses into the expected structure.
        self.assertEqual(ConfigFile.read(FULL.splitlines()), RESULT)
|
||||||
|
|
||||||
|
RESULT = {
|
||||||
|
'websocket_port': '6206',
|
||||||
|
'database_path': '/development/alpha/db',
|
||||||
|
'sntp_servers':
|
||||||
|
['time.windows.com', 'time.apple.com', 'time.nist.gov', 'pool.ntp.org'],
|
||||||
|
'validation_seed': 'sh1T8T9yGuV7Jb6DPhqSzdU2s5LcV',
|
||||||
|
'node_size': 'medium',
|
||||||
|
'rpc_startup': {
|
||||||
|
'command': 'log_level',
|
||||||
|
'severity': 'debug'},
|
||||||
|
'ips': ['r.ripple.com', '51235'],
|
||||||
|
'node_db': {
|
||||||
|
'file_size_mult': '2',
|
||||||
|
'file_size_mb': '8',
|
||||||
|
'cache_mb': '256',
|
||||||
|
'path': '/development/alpha/db/rocksdb',
|
||||||
|
'open_files': '2000',
|
||||||
|
'type': 'RocksDB',
|
||||||
|
'filter_bits': '12'},
|
||||||
|
'peer_port': '53235',
|
||||||
|
'ledger_history': 'full',
|
||||||
|
'rpc_ip': '127.0.0.1',
|
||||||
|
'websocket_public_ip': '0.0.0.0',
|
||||||
|
'rpc_allow_remote': '0',
|
||||||
|
'validators':
|
||||||
|
[['n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7', 'RL1'],
|
||||||
|
['n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj', 'RL2'],
|
||||||
|
['n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C', 'RL3'],
|
||||||
|
['n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS', 'RL4'],
|
||||||
|
['n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA', 'RL5']],
|
||||||
|
'debug_logfile': '/development/alpha/debug.log',
|
||||||
|
'websocket_public_port': '5206',
|
||||||
|
'peer_ip': '0.0.0.0',
|
||||||
|
'rpc_port': '5205',
|
||||||
|
'validation_quorum': '3',
|
||||||
|
'websocket_ip': '127.0.0.1'}
|
||||||
|
|
||||||
|
FULL = """
|
||||||
|
[ledger_history]
|
||||||
|
full
|
||||||
|
|
||||||
|
# Allow other peers to connect to this server.
|
||||||
|
#
|
||||||
|
[peer_ip]
|
||||||
|
0.0.0.0
|
||||||
|
|
||||||
|
[peer_port]
|
||||||
|
53235
|
||||||
|
|
||||||
|
# Allow untrusted clients to connect to this server.
|
||||||
|
#
|
||||||
|
[websocket_public_ip]
|
||||||
|
0.0.0.0
|
||||||
|
|
||||||
|
[websocket_public_port]
|
||||||
|
5206
|
||||||
|
|
||||||
|
# Provide trusted websocket ADMIN access to the localhost.
|
||||||
|
#
|
||||||
|
[websocket_ip]
|
||||||
|
127.0.0.1
|
||||||
|
|
||||||
|
[websocket_port]
|
||||||
|
6206
|
||||||
|
|
||||||
|
# Provide trusted json-rpc ADMIN access to the localhost.
|
||||||
|
#
|
||||||
|
[rpc_ip]
|
||||||
|
127.0.0.1
|
||||||
|
|
||||||
|
[rpc_port]
|
||||||
|
5205
|
||||||
|
|
||||||
|
[rpc_allow_remote]
|
||||||
|
0
|
||||||
|
|
||||||
|
[node_size]
|
||||||
|
medium
|
||||||
|
|
||||||
|
# This is primary persistent datastore for rippled. This includes transaction
|
||||||
|
# metadata, account states, and ledger headers. Helpful information can be
|
||||||
|
# found here: https://ripple.com/wiki/NodeBackEnd
|
||||||
|
[node_db]
|
||||||
|
type=RocksDB
|
||||||
|
path=/development/alpha/db/rocksdb
|
||||||
|
open_files=2000
|
||||||
|
filter_bits=12
|
||||||
|
cache_mb=256
|
||||||
|
file_size_mb=8
|
||||||
|
file_size_mult=2
|
||||||
|
|
||||||
|
[database_path]
|
||||||
|
/development/alpha/db
|
||||||
|
|
||||||
|
# This needs to be an absolute directory reference, not a relative one.
|
||||||
|
# Modify this value as required.
|
||||||
|
[debug_logfile]
|
||||||
|
/development/alpha/debug.log
|
||||||
|
|
||||||
|
[sntp_servers]
|
||||||
|
time.windows.com
|
||||||
|
time.apple.com
|
||||||
|
time.nist.gov
|
||||||
|
pool.ntp.org
|
||||||
|
|
||||||
|
# Where to find some other servers speaking the Ripple protocol.
|
||||||
|
#
|
||||||
|
[ips]
|
||||||
|
r.ripple.com 51235
|
||||||
|
|
||||||
|
# The latest validators can be obtained from
|
||||||
|
# https://ripple.com/ripple.txt
|
||||||
|
#
|
||||||
|
[validators]
|
||||||
|
n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7 RL1
|
||||||
|
n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj RL2
|
||||||
|
n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C RL3
|
||||||
|
n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS RL4
|
||||||
|
n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA RL5
|
||||||
|
|
||||||
|
# Ditto.
|
||||||
|
[validation_quorum]
|
||||||
|
3
|
||||||
|
|
||||||
|
[validation_seed]
|
||||||
|
sh1T8T9yGuV7Jb6DPhqSzdU2s5LcV
|
||||||
|
|
||||||
|
# Turn down default logging to save disk space in the long run.
|
||||||
|
# Valid values here are trace, debug, info, warning, error, and fatal
|
||||||
|
[rpc_startup]
|
||||||
|
{ "command": "log_level", "severity": "debug" }
|
||||||
|
|
||||||
|
# Configure SSL for WebSockets. Not enabled by default because not everybody
|
||||||
|
# has an SSL cert on their server, but if you uncomment the following lines and
|
||||||
|
# set the path to the SSL certificate and private key the WebSockets protocol
|
||||||
|
# will be protected by SSL/TLS.
|
||||||
|
#[websocket_secure]
|
||||||
|
#1
|
||||||
|
|
||||||
|
#[websocket_ssl_cert]
|
||||||
|
#/etc/ssl/certs/server.crt
|
||||||
|
|
||||||
|
#[websocket_ssl_key]
|
||||||
|
#/etc/ssl/private/server.key
|
||||||
|
|
||||||
|
# Defaults to 0 ("no") so that you can use self-signed SSL certificates for
|
||||||
|
# development, or internally.
|
||||||
|
#[ssl_verify]
|
||||||
|
#0
|
||||||
|
""".strip()
|
||||||
20
bin/ripple/util/test_Decimal.py
Normal file
20
bin/ripple/util/test_Decimal.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util.Decimal import Decimal
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
|
||||||
|
class test_Decimal(TestCase):
    """Tests for the fixed-point Decimal class."""
    # assertEquals is a deprecated alias; assertEqual works on both
    # Python 2.7 and Python 3.

    def test_construct(self):
        self.assertEqual(str(Decimal('')), '0')
        self.assertEqual(str(Decimal('0')), '0')
        self.assertEqual(str(Decimal('0.2')), '0.2')
        self.assertEqual(str(Decimal('-0.2')), '-0.2')
        self.assertEqual(str(Decimal('3.1416')), '3.1416')

    def test_accumulate(self):
        d = Decimal()
        d.accumulate('0.5')
        d.accumulate('3.1416')
        d.accumulate('-23.34234')
        self.assertEqual(str(d), '-19.70074')
|
||||||
56
bin/ripple/util/test_Dict.py
Normal file
56
bin/ripple/util/test_Dict.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util import Dict
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
|
||||||
|
class test_Dict(TestCase):
    """Tests for Dict.count_all_subitems and Dict.prune."""
    # assertEquals is a deprecated alias; assertEqual works on both
    # Python 2.7 and Python 3.

    def test_count_all_subitems(self):
        self.assertEqual(Dict.count_all_subitems({}), 1)
        self.assertEqual(Dict.count_all_subitems({'a': {}}), 2)
        self.assertEqual(Dict.count_all_subitems([1]), 2)
        self.assertEqual(Dict.count_all_subitems([1, 2]), 3)
        self.assertEqual(Dict.count_all_subitems([1, {2: 3}]), 4)
        self.assertEqual(Dict.count_all_subitems([1, {2: [3]}]), 5)
        self.assertEqual(Dict.count_all_subitems([1, {2: [3, 4]}]), 6)

    def test_prune(self):
        # Recursive subitem counts (count_recursively defaults to True).
        self.assertEqual(Dict.prune({}, 0), {})
        self.assertEqual(Dict.prune({}, 1), {})

        self.assertEqual(Dict.prune({1: 2}, 0), '{dict with 1 subitem}')
        self.assertEqual(Dict.prune({1: 2}, 1), {1: 2})
        self.assertEqual(Dict.prune({1: 2}, 2), {1: 2})

        self.assertEqual(Dict.prune([1, 2, 3], 0), '[list with 3 subitems]')
        self.assertEqual(Dict.prune([1, 2, 3], 1), [1, 2, 3])

        self.assertEqual(Dict.prune([{1: [2, 3]}], 0),
                         '[list with 4 subitems]')
        self.assertEqual(Dict.prune([{1: [2, 3]}], 1),
                         ['{dict with 3 subitems}'])
        self.assertEqual(Dict.prune([{1: [2, 3]}], 2),
                         [{1: u'[list with 2 subitems]'}])
        self.assertEqual(Dict.prune([{1: [2, 3]}], 3),
                         [{1: [2, 3]}])

    def test_prune_nosub(self):
        # Direct-children counts only (count_recursively=False).
        self.assertEqual(Dict.prune({}, 0, False), {})
        self.assertEqual(Dict.prune({}, 1, False), {})

        self.assertEqual(Dict.prune({1: 2}, 0, False), '{dict with 1 subitem}')
        self.assertEqual(Dict.prune({1: 2}, 1, False), {1: 2})
        self.assertEqual(Dict.prune({1: 2}, 2, False), {1: 2})

        self.assertEqual(Dict.prune([1, 2, 3], 0, False),
                         '[list with 3 subitems]')
        self.assertEqual(Dict.prune([1, 2, 3], 1, False), [1, 2, 3])

        self.assertEqual(Dict.prune([{1: [2, 3]}], 0, False),
                         '[list with 1 subitem]')
        self.assertEqual(Dict.prune([{1: [2, 3]}], 1, False),
                         ['{dict with 1 subitem}'])
        self.assertEqual(Dict.prune([{1: [2, 3]}], 2, False),
                         [{1: u'[list with 2 subitems]'}])
        self.assertEqual(Dict.prune([{1: [2, 3]}], 3, False),
                         [{1: [2, 3]}])
|
||||||
37
bin/ripple/util/test_Function.py
Normal file
37
bin/ripple/util/test_Function.py
Normal file
@@ -0,0 +1,37 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util.Function import Function, MATCHER
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
|
||||||
|
def FN(*args, **kwds):
    # Test helper: echoes back its positional and keyword arguments.
    return args, kwds
|
||||||
|
|
||||||
|
class test_Function(TestCase):
    """Tests for the command-line Function parser."""
    # assertEquals is a deprecated alias; assertEqual works on both
    # Python 2.7 and Python 3.

    def match_test(self, item, *results):
        self.assertEqual(MATCHER.match(item).groups(), results)

    def test_simple(self):
        self.match_test('function', 'function', '')
        self.match_test('f(x)', 'f', '(x)')

    def test_empty_function(self):
        self.assertEqual(Function()(), None)

    def test_empty_args(self):
        f = Function('ripple.util.test_Function.FN()')
        self.assertEqual(f(), ((), {}))

    def test_function(self):
        f = Function('ripple.util.test_Function.FN(True, {1: 2}, None)')
        self.assertEqual(f(), ((True, {1: 2}, None), {}))
        self.assertEqual(f('hello', foo='bar'),
                         (('hello', True, {1: 2}, None), {'foo': 'bar'}))
        # JSON-style keywords parse to the same function.
        self.assertEqual(
            f, Function('ripple.util.test_Function.FN(true, {1: 2}, null)'))

    def test_quoting(self):
        # Bare identifiers are treated as strings.
        f = Function('ripple.util.test_Function.FN(testing)')
        self.assertEqual(f(), (('testing',), {}))
        f = Function('ripple.util.test_Function.FN(testing, true, false, null)')
        self.assertEqual(f(), (('testing', True, False, None), {}))
|
||||||
56
bin/ripple/util/test_PrettyPrint.py
Normal file
56
bin/ripple/util/test_PrettyPrint.py
Normal file
@@ -0,0 +1,56 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util import PrettyPrint
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
|
||||||
|
class test_PrettyPrint(TestCase):
|
||||||
|
def setUp(self):
|
||||||
|
self._results = []
|
||||||
|
self.printer = PrettyPrint.Streamer(printer=self.printer)
|
||||||
|
|
||||||
|
def printer(self, *args, **kwds):
|
||||||
|
self._results.extend(args)
|
||||||
|
|
||||||
|
def run_test(self, expected, *args):
|
||||||
|
for i in range(0, len(args), 2):
|
||||||
|
self.printer.add(args[i], args[i + 1])
|
||||||
|
self.printer.finish()
|
||||||
|
self.assertEquals(''.join(self._results), expected)
|
||||||
|
|
||||||
|
def test_simple_printer(self):
|
||||||
|
self.run_test(
|
||||||
|
'{\n "foo": "bar"\n}',
|
||||||
|
'foo', 'bar')
|
||||||
|
|
||||||
|
def test_multiple_lines(self):
|
||||||
|
self.run_test(
|
||||||
|
'{\n "foo": "bar",\n "baz": 5\n}',
|
||||||
|
'foo', 'bar', 'baz', 5)
|
||||||
|
|
||||||
|
def test_multiple_lines(self):
|
||||||
|
self.run_test(
|
||||||
|
"""
|
||||||
|
{
|
||||||
|
"foo": {
|
||||||
|
"bar": 1,
|
||||||
|
"baz": true
|
||||||
|
},
|
||||||
|
"bang": "bing"
|
||||||
|
}
|
||||||
|
""".strip(), 'foo', {'bar': 1, 'baz': True}, 'bang', 'bing')
|
||||||
|
|
||||||
|
def test_multiple_lines_with_list(self):
|
||||||
|
self.run_test(
|
||||||
|
"""
|
||||||
|
{
|
||||||
|
"foo": [
|
||||||
|
"bar",
|
||||||
|
1
|
||||||
|
],
|
||||||
|
"baz": [
|
||||||
|
23,
|
||||||
|
42
|
||||||
|
]
|
||||||
|
}
|
||||||
|
""".strip(), 'foo', ['bar', 1], 'baz', [23, 42])
|
||||||
28
bin/ripple/util/test_Range.py
Normal file
28
bin/ripple/util/test_Range.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util import Range
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
|
||||||
|
class test_Range(TestCase):
|
||||||
|
def round_trip(self, s, *items):
|
||||||
|
self.assertEquals(Range.from_string(s), set(items))
|
||||||
|
self.assertEquals(Range.to_string(items), s)
|
||||||
|
|
||||||
|
def test_complete(self):
|
||||||
|
self.round_trip('10,19', 10, 19)
|
||||||
|
self.round_trip('10', 10)
|
||||||
|
self.round_trip('10-12', 10, 11, 12)
|
||||||
|
self.round_trip('10,19,42-45', 10, 19, 42, 43, 44, 45)
|
||||||
|
|
||||||
|
def test_names(self):
|
||||||
|
self.assertEquals(
|
||||||
|
Range.from_string('first,last,current', first=1, last=3, current=5),
|
||||||
|
set([1, 3, 5]))
|
||||||
|
|
||||||
|
def test_is_range(self):
|
||||||
|
self.assertTrue(Range.is_range(''))
|
||||||
|
self.assertTrue(Range.is_range('10'))
|
||||||
|
self.assertTrue(Range.is_range('10,12'))
|
||||||
|
self.assertFalse(Range.is_range('10,12,fred'))
|
||||||
|
self.assertTrue(Range.is_range('10,12,fred', 'fred'))
|
||||||
44
bin/ripple/util/test_Search.py
Normal file
44
bin/ripple/util/test_Search.py
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
from __future__ import absolute_import, division, print_function, unicode_literals
|
||||||
|
|
||||||
|
from ripple.util.Search import binary_search, linear_search, FIRST, LAST
|
||||||
|
|
||||||
|
from unittest import TestCase
|
||||||
|
|
||||||
|
class test_Search(TestCase):
|
||||||
|
def condition(self, i):
|
||||||
|
return 10 <= i < 15;
|
||||||
|
|
||||||
|
def test_linear_full(self):
|
||||||
|
self.assertEquals(list(linear_search(range(21), self.condition)),
|
||||||
|
[10, 11, 12, 13, 14])
|
||||||
|
|
||||||
|
def test_linear_partial(self):
|
||||||
|
self.assertEquals(list(linear_search(range(8, 14), self.condition)),
|
||||||
|
[10, 11, 12, 13])
|
||||||
|
self.assertEquals(list(linear_search(range(11, 14), self.condition)),
|
||||||
|
[11, 12, 13])
|
||||||
|
self.assertEquals(list(linear_search(range(12, 18), self.condition)),
|
||||||
|
[12, 13, 14])
|
||||||
|
|
||||||
|
def test_linear_empty(self):
|
||||||
|
self.assertEquals(list(linear_search(range(1, 4), self.condition)), [])
|
||||||
|
|
||||||
|
def test_binary_first(self):
|
||||||
|
self.assertEquals(binary_search(0, 14, self.condition, FIRST), 10)
|
||||||
|
self.assertEquals(binary_search(10, 19, self.condition, FIRST), 10)
|
||||||
|
self.assertEquals(binary_search(14, 14, self.condition, FIRST), 14)
|
||||||
|
self.assertEquals(binary_search(14, 15, self.condition, FIRST), 14)
|
||||||
|
self.assertEquals(binary_search(13, 15, self.condition, FIRST), 13)
|
||||||
|
|
||||||
|
def test_binary_last(self):
|
||||||
|
self.assertEquals(binary_search(10, 20, self.condition, LAST), 14)
|
||||||
|
self.assertEquals(binary_search(0, 14, self.condition, LAST), 14)
|
||||||
|
self.assertEquals(binary_search(14, 14, self.condition, LAST), 14)
|
||||||
|
self.assertEquals(binary_search(14, 15, self.condition, LAST), 14)
|
||||||
|
self.assertEquals(binary_search(13, 15, self.condition, LAST), 14)
|
||||||
|
|
||||||
|
def test_binary_throws(self):
|
||||||
|
self.assertRaises(
|
||||||
|
ValueError, binary_search, 0, 20, self.condition, LAST)
|
||||||
|
self.assertRaises(
|
||||||
|
ValueError, binary_search, 0, 20, self.condition, FIRST)
|
||||||
747
bin/six.py
Normal file
747
bin/six.py
Normal file
@@ -0,0 +1,747 @@
|
|||||||
|
"""Utilities for writing code that runs on Python 2 and 3"""
|
||||||
|
|
||||||
|
# Copyright (c) 2010-2014 Benjamin Peterson
|
||||||
|
#
|
||||||
|
# Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
# of this software and associated documentation files (the "Software"), to deal
|
||||||
|
# in the Software without restriction, including without limitation the rights
|
||||||
|
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
# copies of the Software, and to permit persons to whom the Software is
|
||||||
|
# furnished to do so, subject to the following conditions:
|
||||||
|
#
|
||||||
|
# The above copyright notice and this permission notice shall be included in all
|
||||||
|
# copies or substantial portions of the Software.
|
||||||
|
#
|
||||||
|
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
# SOFTWARE.
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import operator
|
||||||
|
import sys
|
||||||
|
import types
|
||||||
|
|
||||||
|
__author__ = "Benjamin Peterson <benjamin@python.org>"
|
||||||
|
__version__ = "1.7.3"
|
||||||
|
|
||||||
|
|
||||||
|
# Useful for very coarse version differentiation.
|
||||||
|
PY2 = sys.version_info[0] == 2
|
||||||
|
PY3 = sys.version_info[0] == 3
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
string_types = str,
|
||||||
|
integer_types = int,
|
||||||
|
class_types = type,
|
||||||
|
text_type = str
|
||||||
|
binary_type = bytes
|
||||||
|
|
||||||
|
MAXSIZE = sys.maxsize
|
||||||
|
else:
|
||||||
|
string_types = basestring,
|
||||||
|
integer_types = (int, long)
|
||||||
|
class_types = (type, types.ClassType)
|
||||||
|
text_type = unicode
|
||||||
|
binary_type = str
|
||||||
|
|
||||||
|
if sys.platform.startswith("java"):
|
||||||
|
# Jython always uses 32 bits.
|
||||||
|
MAXSIZE = int((1 << 31) - 1)
|
||||||
|
else:
|
||||||
|
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
|
||||||
|
class X(object):
|
||||||
|
def __len__(self):
|
||||||
|
return 1 << 31
|
||||||
|
try:
|
||||||
|
len(X())
|
||||||
|
except OverflowError:
|
||||||
|
# 32-bit
|
||||||
|
MAXSIZE = int((1 << 31) - 1)
|
||||||
|
else:
|
||||||
|
# 64-bit
|
||||||
|
MAXSIZE = int((1 << 63) - 1)
|
||||||
|
del X
|
||||||
|
|
||||||
|
|
||||||
|
def _add_doc(func, doc):
|
||||||
|
"""Add documentation to a function."""
|
||||||
|
func.__doc__ = doc
|
||||||
|
|
||||||
|
|
||||||
|
def _import_module(name):
|
||||||
|
"""Import module, returning the module after the last dot."""
|
||||||
|
__import__(name)
|
||||||
|
return sys.modules[name]
|
||||||
|
|
||||||
|
|
||||||
|
class _LazyDescr(object):
|
||||||
|
|
||||||
|
def __init__(self, name):
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def __get__(self, obj, tp):
|
||||||
|
result = self._resolve()
|
||||||
|
setattr(obj, self.name, result) # Invokes __set__.
|
||||||
|
# This is a bit ugly, but it avoids running this again.
|
||||||
|
delattr(obj.__class__, self.name)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
|
class MovedModule(_LazyDescr):
|
||||||
|
|
||||||
|
def __init__(self, name, old, new=None):
|
||||||
|
super(MovedModule, self).__init__(name)
|
||||||
|
if PY3:
|
||||||
|
if new is None:
|
||||||
|
new = name
|
||||||
|
self.mod = new
|
||||||
|
else:
|
||||||
|
self.mod = old
|
||||||
|
|
||||||
|
def _resolve(self):
|
||||||
|
return _import_module(self.mod)
|
||||||
|
|
||||||
|
def __getattr__(self, attr):
|
||||||
|
_module = self._resolve()
|
||||||
|
value = getattr(_module, attr)
|
||||||
|
setattr(self, attr, value)
|
||||||
|
return value
|
||||||
|
|
||||||
|
|
||||||
|
class _LazyModule(types.ModuleType):
|
||||||
|
|
||||||
|
def __init__(self, name):
|
||||||
|
super(_LazyModule, self).__init__(name)
|
||||||
|
self.__doc__ = self.__class__.__doc__
|
||||||
|
|
||||||
|
def __dir__(self):
|
||||||
|
attrs = ["__doc__", "__name__"]
|
||||||
|
attrs += [attr.name for attr in self._moved_attributes]
|
||||||
|
return attrs
|
||||||
|
|
||||||
|
# Subclasses should override this
|
||||||
|
_moved_attributes = []
|
||||||
|
|
||||||
|
|
||||||
|
class MovedAttribute(_LazyDescr):
|
||||||
|
|
||||||
|
def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
|
||||||
|
super(MovedAttribute, self).__init__(name)
|
||||||
|
if PY3:
|
||||||
|
if new_mod is None:
|
||||||
|
new_mod = name
|
||||||
|
self.mod = new_mod
|
||||||
|
if new_attr is None:
|
||||||
|
if old_attr is None:
|
||||||
|
new_attr = name
|
||||||
|
else:
|
||||||
|
new_attr = old_attr
|
||||||
|
self.attr = new_attr
|
||||||
|
else:
|
||||||
|
self.mod = old_mod
|
||||||
|
if old_attr is None:
|
||||||
|
old_attr = name
|
||||||
|
self.attr = old_attr
|
||||||
|
|
||||||
|
def _resolve(self):
|
||||||
|
module = _import_module(self.mod)
|
||||||
|
return getattr(module, self.attr)
|
||||||
|
|
||||||
|
|
||||||
|
class _SixMetaPathImporter(object):
|
||||||
|
"""
|
||||||
|
A meta path importer to import six.moves and its submodules.
|
||||||
|
|
||||||
|
This class implements a PEP302 finder and loader. It should be compatible
|
||||||
|
with Python 2.5 and all existing versions of Python3
|
||||||
|
"""
|
||||||
|
def __init__(self, six_module_name):
|
||||||
|
self.name = six_module_name
|
||||||
|
self.known_modules = {}
|
||||||
|
|
||||||
|
def _add_module(self, mod, *fullnames):
|
||||||
|
for fullname in fullnames:
|
||||||
|
self.known_modules[self.name + "." + fullname] = mod
|
||||||
|
|
||||||
|
def _get_module(self, fullname):
|
||||||
|
return self.known_modules[self.name + "." + fullname]
|
||||||
|
|
||||||
|
def find_module(self, fullname, path=None):
|
||||||
|
if fullname in self.known_modules:
|
||||||
|
return self
|
||||||
|
return None
|
||||||
|
|
||||||
|
def __get_module(self, fullname):
|
||||||
|
try:
|
||||||
|
return self.known_modules[fullname]
|
||||||
|
except KeyError:
|
||||||
|
raise ImportError("This loader does not know module " + fullname)
|
||||||
|
|
||||||
|
def load_module(self, fullname):
|
||||||
|
try:
|
||||||
|
# in case of a reload
|
||||||
|
return sys.modules[fullname]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
mod = self.__get_module(fullname)
|
||||||
|
if isinstance(mod, MovedModule):
|
||||||
|
mod = mod._resolve()
|
||||||
|
else:
|
||||||
|
mod.__loader__ = self
|
||||||
|
sys.modules[fullname] = mod
|
||||||
|
return mod
|
||||||
|
|
||||||
|
def is_package(self, fullname):
|
||||||
|
"""
|
||||||
|
Return true, if the named module is a package.
|
||||||
|
|
||||||
|
We need this method to get correct spec objects with
|
||||||
|
Python 3.4 (see PEP451)
|
||||||
|
"""
|
||||||
|
return hasattr(self.__get_module(fullname), "__path__")
|
||||||
|
|
||||||
|
def get_code(self, fullname):
|
||||||
|
"""Return None
|
||||||
|
|
||||||
|
Required, if is_package is implemented"""
|
||||||
|
self.__get_module(fullname) # eventually raises ImportError
|
||||||
|
return None
|
||||||
|
get_source = get_code # same as get_code
|
||||||
|
|
||||||
|
_importer = _SixMetaPathImporter(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class _MovedItems(_LazyModule):
|
||||||
|
"""Lazy loading of moved objects"""
|
||||||
|
__path__ = [] # mark as package
|
||||||
|
|
||||||
|
|
||||||
|
_moved_attributes = [
|
||||||
|
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
|
||||||
|
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
|
||||||
|
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
|
||||||
|
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
|
||||||
|
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
|
||||||
|
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
|
||||||
|
MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
|
||||||
|
MovedAttribute("reduce", "__builtin__", "functools"),
|
||||||
|
MovedAttribute("StringIO", "StringIO", "io"),
|
||||||
|
MovedAttribute("UserDict", "UserDict", "collections"),
|
||||||
|
MovedAttribute("UserList", "UserList", "collections"),
|
||||||
|
MovedAttribute("UserString", "UserString", "collections"),
|
||||||
|
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
|
||||||
|
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
|
||||||
|
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
|
||||||
|
|
||||||
|
MovedModule("builtins", "__builtin__"),
|
||||||
|
MovedModule("configparser", "ConfigParser"),
|
||||||
|
MovedModule("copyreg", "copy_reg"),
|
||||||
|
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
|
||||||
|
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
|
||||||
|
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
|
||||||
|
MovedModule("http_cookies", "Cookie", "http.cookies"),
|
||||||
|
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
|
||||||
|
MovedModule("html_parser", "HTMLParser", "html.parser"),
|
||||||
|
MovedModule("http_client", "httplib", "http.client"),
|
||||||
|
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
|
||||||
|
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
|
||||||
|
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
|
||||||
|
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
|
||||||
|
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
|
||||||
|
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
|
||||||
|
MovedModule("cPickle", "cPickle", "pickle"),
|
||||||
|
MovedModule("queue", "Queue"),
|
||||||
|
MovedModule("reprlib", "repr"),
|
||||||
|
MovedModule("socketserver", "SocketServer"),
|
||||||
|
MovedModule("_thread", "thread", "_thread"),
|
||||||
|
MovedModule("tkinter", "Tkinter"),
|
||||||
|
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
|
||||||
|
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
|
||||||
|
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
|
||||||
|
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
|
||||||
|
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
|
||||||
|
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
|
||||||
|
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
|
||||||
|
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
|
||||||
|
MovedModule("tkinter_colorchooser", "tkColorChooser",
|
||||||
|
"tkinter.colorchooser"),
|
||||||
|
MovedModule("tkinter_commondialog", "tkCommonDialog",
|
||||||
|
"tkinter.commondialog"),
|
||||||
|
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
|
||||||
|
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
|
||||||
|
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
|
||||||
|
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
|
||||||
|
"tkinter.simpledialog"),
|
||||||
|
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
|
||||||
|
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
|
||||||
|
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
|
||||||
|
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
|
||||||
|
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
|
||||||
|
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
|
||||||
|
MovedModule("winreg", "_winreg"),
|
||||||
|
]
|
||||||
|
for attr in _moved_attributes:
|
||||||
|
setattr(_MovedItems, attr.name, attr)
|
||||||
|
if isinstance(attr, MovedModule):
|
||||||
|
_importer._add_module(attr, "moves." + attr.name)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
_MovedItems._moved_attributes = _moved_attributes
|
||||||
|
|
||||||
|
moves = _MovedItems(__name__ + ".moves")
|
||||||
|
_importer._add_module(moves, "moves")
|
||||||
|
|
||||||
|
|
||||||
|
class Module_six_moves_urllib_parse(_LazyModule):
|
||||||
|
"""Lazy loading of moved objects in six.moves.urllib_parse"""
|
||||||
|
|
||||||
|
|
||||||
|
_urllib_parse_moved_attributes = [
|
||||||
|
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
|
||||||
|
MovedAttribute("quote", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("unquote", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("urlencode", "urllib", "urllib.parse"),
|
||||||
|
MovedAttribute("splitquery", "urllib", "urllib.parse"),
|
||||||
|
]
|
||||||
|
for attr in _urllib_parse_moved_attributes:
|
||||||
|
setattr(Module_six_moves_urllib_parse, attr.name, attr)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
|
||||||
|
|
||||||
|
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
|
||||||
|
"moves.urllib_parse", "moves.urllib.parse")
|
||||||
|
|
||||||
|
|
||||||
|
class Module_six_moves_urllib_error(_LazyModule):
|
||||||
|
"""Lazy loading of moved objects in six.moves.urllib_error"""
|
||||||
|
|
||||||
|
|
||||||
|
_urllib_error_moved_attributes = [
|
||||||
|
MovedAttribute("URLError", "urllib2", "urllib.error"),
|
||||||
|
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
|
||||||
|
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
|
||||||
|
]
|
||||||
|
for attr in _urllib_error_moved_attributes:
|
||||||
|
setattr(Module_six_moves_urllib_error, attr.name, attr)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
|
||||||
|
|
||||||
|
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
|
||||||
|
"moves.urllib_error", "moves.urllib.error")
|
||||||
|
|
||||||
|
|
||||||
|
class Module_six_moves_urllib_request(_LazyModule):
|
||||||
|
"""Lazy loading of moved objects in six.moves.urllib_request"""
|
||||||
|
|
||||||
|
|
||||||
|
_urllib_request_moved_attributes = [
|
||||||
|
MovedAttribute("urlopen", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("install_opener", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("build_opener", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("pathname2url", "urllib", "urllib.request"),
|
||||||
|
MovedAttribute("url2pathname", "urllib", "urllib.request"),
|
||||||
|
MovedAttribute("getproxies", "urllib", "urllib.request"),
|
||||||
|
MovedAttribute("Request", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
|
||||||
|
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
|
||||||
|
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
|
||||||
|
MovedAttribute("URLopener", "urllib", "urllib.request"),
|
||||||
|
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
|
||||||
|
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
|
||||||
|
]
|
||||||
|
for attr in _urllib_request_moved_attributes:
|
||||||
|
setattr(Module_six_moves_urllib_request, attr.name, attr)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
|
||||||
|
|
||||||
|
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
|
||||||
|
"moves.urllib_request", "moves.urllib.request")
|
||||||
|
|
||||||
|
|
||||||
|
class Module_six_moves_urllib_response(_LazyModule):
|
||||||
|
"""Lazy loading of moved objects in six.moves.urllib_response"""
|
||||||
|
|
||||||
|
|
||||||
|
_urllib_response_moved_attributes = [
|
||||||
|
MovedAttribute("addbase", "urllib", "urllib.response"),
|
||||||
|
MovedAttribute("addclosehook", "urllib", "urllib.response"),
|
||||||
|
MovedAttribute("addinfo", "urllib", "urllib.response"),
|
||||||
|
MovedAttribute("addinfourl", "urllib", "urllib.response"),
|
||||||
|
]
|
||||||
|
for attr in _urllib_response_moved_attributes:
|
||||||
|
setattr(Module_six_moves_urllib_response, attr.name, attr)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
|
||||||
|
|
||||||
|
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
|
||||||
|
"moves.urllib_response", "moves.urllib.response")
|
||||||
|
|
||||||
|
|
||||||
|
class Module_six_moves_urllib_robotparser(_LazyModule):
|
||||||
|
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
|
||||||
|
|
||||||
|
|
||||||
|
_urllib_robotparser_moved_attributes = [
|
||||||
|
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
|
||||||
|
]
|
||||||
|
for attr in _urllib_robotparser_moved_attributes:
|
||||||
|
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
|
||||||
|
del attr
|
||||||
|
|
||||||
|
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
|
||||||
|
|
||||||
|
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
|
||||||
|
"moves.urllib_robotparser", "moves.urllib.robotparser")
|
||||||
|
|
||||||
|
|
||||||
|
class Module_six_moves_urllib(types.ModuleType):
|
||||||
|
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
|
||||||
|
__path__ = [] # mark as package
|
||||||
|
parse = _importer._get_module("moves.urllib_parse")
|
||||||
|
error = _importer._get_module("moves.urllib_error")
|
||||||
|
request = _importer._get_module("moves.urllib_request")
|
||||||
|
response = _importer._get_module("moves.urllib_response")
|
||||||
|
robotparser = _importer._get_module("moves.urllib_robotparser")
|
||||||
|
|
||||||
|
def __dir__(self):
|
||||||
|
return ['parse', 'error', 'request', 'response', 'robotparser']
|
||||||
|
|
||||||
|
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
|
||||||
|
"moves.urllib")
|
||||||
|
|
||||||
|
|
||||||
|
def add_move(move):
|
||||||
|
"""Add an item to six.moves."""
|
||||||
|
setattr(_MovedItems, move.name, move)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_move(name):
|
||||||
|
"""Remove item from six.moves."""
|
||||||
|
try:
|
||||||
|
delattr(_MovedItems, name)
|
||||||
|
except AttributeError:
|
||||||
|
try:
|
||||||
|
del moves.__dict__[name]
|
||||||
|
except KeyError:
|
||||||
|
raise AttributeError("no such move, %r" % (name,))
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
_meth_func = "__func__"
|
||||||
|
_meth_self = "__self__"
|
||||||
|
|
||||||
|
_func_closure = "__closure__"
|
||||||
|
_func_code = "__code__"
|
||||||
|
_func_defaults = "__defaults__"
|
||||||
|
_func_globals = "__globals__"
|
||||||
|
else:
|
||||||
|
_meth_func = "im_func"
|
||||||
|
_meth_self = "im_self"
|
||||||
|
|
||||||
|
_func_closure = "func_closure"
|
||||||
|
_func_code = "func_code"
|
||||||
|
_func_defaults = "func_defaults"
|
||||||
|
_func_globals = "func_globals"
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
advance_iterator = next
|
||||||
|
except NameError:
|
||||||
|
def advance_iterator(it):
|
||||||
|
return it.next()
|
||||||
|
next = advance_iterator
|
||||||
|
|
||||||
|
|
||||||
|
try:
|
||||||
|
callable = callable
|
||||||
|
except NameError:
|
||||||
|
def callable(obj):
|
||||||
|
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
def get_unbound_function(unbound):
|
||||||
|
return unbound
|
||||||
|
|
||||||
|
create_bound_method = types.MethodType
|
||||||
|
|
||||||
|
Iterator = object
|
||||||
|
else:
|
||||||
|
def get_unbound_function(unbound):
|
||||||
|
return unbound.im_func
|
||||||
|
|
||||||
|
def create_bound_method(func, obj):
|
||||||
|
return types.MethodType(func, obj, obj.__class__)
|
||||||
|
|
||||||
|
class Iterator(object):
|
||||||
|
|
||||||
|
def next(self):
|
||||||
|
return type(self).__next__(self)
|
||||||
|
|
||||||
|
callable = callable
|
||||||
|
_add_doc(get_unbound_function,
|
||||||
|
"""Get the function out of a possibly unbound function""")
|
||||||
|
|
||||||
|
|
||||||
|
get_method_function = operator.attrgetter(_meth_func)
|
||||||
|
get_method_self = operator.attrgetter(_meth_self)
|
||||||
|
get_function_closure = operator.attrgetter(_func_closure)
|
||||||
|
get_function_code = operator.attrgetter(_func_code)
|
||||||
|
get_function_defaults = operator.attrgetter(_func_defaults)
|
||||||
|
get_function_globals = operator.attrgetter(_func_globals)
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
def iterkeys(d, **kw):
|
||||||
|
return iter(d.keys(**kw))
|
||||||
|
|
||||||
|
def itervalues(d, **kw):
|
||||||
|
return iter(d.values(**kw))
|
||||||
|
|
||||||
|
def iteritems(d, **kw):
|
||||||
|
return iter(d.items(**kw))
|
||||||
|
|
||||||
|
def iterlists(d, **kw):
|
||||||
|
return iter(d.lists(**kw))
|
||||||
|
else:
|
||||||
|
def iterkeys(d, **kw):
|
||||||
|
return iter(d.iterkeys(**kw))
|
||||||
|
|
||||||
|
def itervalues(d, **kw):
|
||||||
|
return iter(d.itervalues(**kw))
|
||||||
|
|
||||||
|
def iteritems(d, **kw):
|
||||||
|
return iter(d.iteritems(**kw))
|
||||||
|
|
||||||
|
def iterlists(d, **kw):
|
||||||
|
return iter(d.iterlists(**kw))
|
||||||
|
|
||||||
|
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
|
||||||
|
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
|
||||||
|
_add_doc(iteritems,
|
||||||
|
"Return an iterator over the (key, value) pairs of a dictionary.")
|
||||||
|
_add_doc(iterlists,
|
||||||
|
"Return an iterator over the (key, [values]) pairs of a dictionary.")
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
def b(s):
|
||||||
|
return s.encode("latin-1")
|
||||||
|
def u(s):
|
||||||
|
return s
|
||||||
|
unichr = chr
|
||||||
|
if sys.version_info[1] <= 1:
|
||||||
|
def int2byte(i):
|
||||||
|
return bytes((i,))
|
||||||
|
else:
|
||||||
|
# This is about 2x faster than the implementation above on 3.2+
|
||||||
|
int2byte = operator.methodcaller("to_bytes", 1, "big")
|
||||||
|
byte2int = operator.itemgetter(0)
|
||||||
|
indexbytes = operator.getitem
|
||||||
|
iterbytes = iter
|
||||||
|
import io
|
||||||
|
StringIO = io.StringIO
|
||||||
|
BytesIO = io.BytesIO
|
||||||
|
else:
|
||||||
|
def b(s):
|
||||||
|
return s
|
||||||
|
# Workaround for standalone backslash
|
||||||
|
def u(s):
|
||||||
|
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
|
||||||
|
unichr = unichr
|
||||||
|
int2byte = chr
|
||||||
|
def byte2int(bs):
|
||||||
|
return ord(bs[0])
|
||||||
|
def indexbytes(buf, i):
|
||||||
|
return ord(buf[i])
|
||||||
|
def iterbytes(buf):
|
||||||
|
return (ord(byte) for byte in buf)
|
||||||
|
import StringIO
|
||||||
|
StringIO = BytesIO = StringIO.StringIO
|
||||||
|
_add_doc(b, """Byte literal""")
|
||||||
|
_add_doc(u, """Text literal""")
|
||||||
|
|
||||||
|
|
||||||
|
if PY3:
|
||||||
|
exec_ = getattr(moves.builtins, "exec")
|
||||||
|
|
||||||
|
|
||||||
|
def reraise(tp, value, tb=None):
|
||||||
|
if value.__traceback__ is not tb:
|
||||||
|
raise value.with_traceback(tb)
|
||||||
|
raise value
|
||||||
|
|
||||||
|
else:
|
||||||
|
def exec_(_code_, _globs_=None, _locs_=None):
|
||||||
|
"""Execute code in a namespace."""
|
||||||
|
if _globs_ is None:
|
||||||
|
frame = sys._getframe(1)
|
||||||
|
_globs_ = frame.f_globals
|
||||||
|
if _locs_ is None:
|
||||||
|
_locs_ = frame.f_locals
|
||||||
|
del frame
|
||||||
|
elif _locs_ is None:
|
||||||
|
_locs_ = _globs_
|
||||||
|
exec("""exec _code_ in _globs_, _locs_""")
|
||||||
|
|
||||||
|
|
||||||
|
exec_("""def reraise(tp, value, tb=None):
|
||||||
|
raise tp, value, tb
|
||||||
|
""")
|
||||||
|
|
||||||
|
|
||||||
|
print_ = getattr(moves.builtins, "print", None)
|
||||||
|
if print_ is None:
|
||||||
|
def print_(*args, **kwargs):
|
||||||
|
"""The new-style print function for Python 2.4 and 2.5."""
|
||||||
|
fp = kwargs.pop("file", sys.stdout)
|
||||||
|
if fp is None:
|
||||||
|
return
|
||||||
|
def write(data):
|
||||||
|
if not isinstance(data, basestring):
|
||||||
|
data = str(data)
|
||||||
|
# If the file has an encoding, encode unicode with it.
|
||||||
|
if (isinstance(fp, file) and
|
||||||
|
isinstance(data, unicode) and
|
||||||
|
fp.encoding is not None):
|
||||||
|
errors = getattr(fp, "errors", None)
|
||||||
|
if errors is None:
|
||||||
|
errors = "strict"
|
||||||
|
data = data.encode(fp.encoding, errors)
|
||||||
|
fp.write(data)
|
||||||
|
want_unicode = False
|
||||||
|
sep = kwargs.pop("sep", None)
|
||||||
|
if sep is not None:
|
||||||
|
if isinstance(sep, unicode):
|
||||||
|
want_unicode = True
|
||||||
|
elif not isinstance(sep, str):
|
||||||
|
raise TypeError("sep must be None or a string")
|
||||||
|
end = kwargs.pop("end", None)
|
||||||
|
if end is not None:
|
||||||
|
if isinstance(end, unicode):
|
||||||
|
want_unicode = True
|
||||||
|
elif not isinstance(end, str):
|
||||||
|
raise TypeError("end must be None or a string")
|
||||||
|
if kwargs:
|
||||||
|
raise TypeError("invalid keyword arguments to print()")
|
||||||
|
if not want_unicode:
|
||||||
|
for arg in args:
|
||||||
|
if isinstance(arg, unicode):
|
||||||
|
want_unicode = True
|
||||||
|
break
|
||||||
|
if want_unicode:
|
||||||
|
newline = unicode("\n")
|
||||||
|
space = unicode(" ")
|
||||||
|
else:
|
||||||
|
newline = "\n"
|
||||||
|
space = " "
|
||||||
|
if sep is None:
|
||||||
|
sep = space
|
||||||
|
if end is None:
|
||||||
|
end = newline
|
||||||
|
for i, arg in enumerate(args):
|
||||||
|
if i:
|
||||||
|
write(sep)
|
||||||
|
write(arg)
|
||||||
|
write(end)
|
||||||
|
|
||||||
|
_add_doc(reraise, """Reraise an exception.""")
|
||||||
|
|
||||||
|
if sys.version_info[0:2] < (3, 4):
|
||||||
|
def wraps(wrapped):
|
||||||
|
def wrapper(f):
|
||||||
|
f = functools.wraps(wrapped)(f)
|
||||||
|
f.__wrapped__ = wrapped
|
||||||
|
return f
|
||||||
|
return wrapper
|
||||||
|
else:
|
||||||
|
wraps = functools.wraps
|
||||||
|
|
||||||
|
def with_metaclass(meta, *bases):
|
||||||
|
"""Create a base class with a metaclass."""
|
||||||
|
# This requires a bit of explanation: the basic idea is to make a dummy
|
||||||
|
# metaclass for one level of class instantiation that replaces itself with
|
||||||
|
# the actual metaclass.
|
||||||
|
class metaclass(meta):
|
||||||
|
def __new__(cls, name, this_bases, d):
|
||||||
|
return meta(name, bases, d)
|
||||||
|
return type.__new__(metaclass, 'temporary_class', (), {})
|
||||||
|
|
||||||
|
|
||||||
|
def add_metaclass(metaclass):
|
||||||
|
"""Class decorator for creating a class with a metaclass."""
|
||||||
|
def wrapper(cls):
|
||||||
|
orig_vars = cls.__dict__.copy()
|
||||||
|
orig_vars.pop('__dict__', None)
|
||||||
|
orig_vars.pop('__weakref__', None)
|
||||||
|
slots = orig_vars.get('__slots__')
|
||||||
|
if slots is not None:
|
||||||
|
if isinstance(slots, str):
|
||||||
|
slots = [slots]
|
||||||
|
for slots_var in slots:
|
||||||
|
orig_vars.pop(slots_var)
|
||||||
|
return metaclass(cls.__name__, cls.__bases__, orig_vars)
|
||||||
|
return wrapper
|
||||||
|
|
||||||
|
# Complete the moves implementation.
|
||||||
|
# This code is at the end of this module to speed up module loading.
|
||||||
|
# Turn this module into a package.
|
||||||
|
__path__ = [] # required for PEP 302 and PEP 451
|
||||||
|
__package__ = __name__ # see PEP 366 @ReservedAssignment
|
||||||
|
if globals().get("__spec__") is not None:
|
||||||
|
__spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable
|
||||||
|
# Remove other six meta path importers, since they cause problems. This can
|
||||||
|
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
|
||||||
|
# this for some reason.)
|
||||||
|
if sys.meta_path:
|
||||||
|
for i, importer in enumerate(sys.meta_path):
|
||||||
|
# Here's some real nastiness: Another "instance" of the six module might
|
||||||
|
# be floating around. Therefore, we can't use isinstance() to check for
|
||||||
|
# the six meta path importer, since the other six instance will have
|
||||||
|
# inserted an importer with different class.
|
||||||
|
if (type(importer).__name__ == "_SixMetaPathImporter" and
|
||||||
|
importer.name == __name__):
|
||||||
|
del sys.meta_path[i]
|
||||||
|
break
|
||||||
|
del i, importer
|
||||||
|
# Finally, add the importer to the meta path import hook.
|
||||||
|
sys.meta_path.append(_importer)
|
||||||
@@ -2,14 +2,38 @@
|
|||||||
|
|
||||||
# Coding Standards
|
# Coding Standards
|
||||||
|
|
||||||
Coding standards used here are extreme strict and consistent. The style
|
Coding standards used here gradually evolve and propagate through
|
||||||
evolved gradually over the years, incorporating generally acknowledged
|
code reviews. Some aspects are enforced more strictly than others.
|
||||||
best-practice C++ advice, experience, and personal preference.
|
|
||||||
|
|
||||||
## Don't Repeat Yourself!
|
## Rules
|
||||||
|
|
||||||
The [Don't Repeat Yourself][1] principle summarises the essence of what it
|
These rules only apply to our own code. We can't enforce any sort of
|
||||||
means to write good code, in all languages, at all levels.
|
style on the external repositories and libraries we include. The best
|
||||||
|
guideline is to maintain the standards that are used in those libraries.
|
||||||
|
|
||||||
|
* Tab inserts 4 spaces. No tab characters.
|
||||||
|
* Braces are indented in the [Allman style][1].
|
||||||
|
* Modern C++ principles. No naked ```new``` or ```delete```.
|
||||||
|
* Line lengths limited to 80 characters. Exceptions limited to data and tables.
|
||||||
|
|
||||||
|
## Guidelines
|
||||||
|
|
||||||
|
If you want to do something contrary to these guidelines, understand
|
||||||
|
why you're doing it. Think, use common sense, and consider that this
|
||||||
|
your changes will probably need to be maintained long after you've
|
||||||
|
moved on to other projects.
|
||||||
|
|
||||||
|
* Use white space and blank lines to guide the eye and keep your intent clear.
|
||||||
|
* Put private data members at the top of a class, and the 6 public special
|
||||||
|
members immediately after, in the following order:
|
||||||
|
* Destructor
|
||||||
|
* Default constructor
|
||||||
|
* Copy constructor
|
||||||
|
* Copy assignment
|
||||||
|
* Move constructor
|
||||||
|
* Move assignment
|
||||||
|
* Don't over-inline by defining large functions within the class
|
||||||
|
declaration, not even for template classes.
|
||||||
|
|
||||||
## Formatting
|
## Formatting
|
||||||
|
|
||||||
@@ -17,9 +41,6 @@ The goal of source code formatting should always be to make things as easy to
|
|||||||
read as possible. White space is used to guide the eye so that details are not
|
read as possible. White space is used to guide the eye so that details are not
|
||||||
overlooked. Blank lines are used to separate code into "paragraphs."
|
overlooked. Blank lines are used to separate code into "paragraphs."
|
||||||
|
|
||||||
* No tab characters please.
|
|
||||||
* Tab stops are set to 4 spaces.
|
|
||||||
* Braces are indented in the [Allman style][2].
|
|
||||||
* Always place a space before and after all binary operators,
|
* Always place a space before and after all binary operators,
|
||||||
especially assignments (`operator=`).
|
especially assignments (`operator=`).
|
||||||
* The `!` operator should always be followed by a space.
|
* The `!` operator should always be followed by a space.
|
||||||
@@ -62,156 +83,4 @@ overlooked. Blank lines are used to separate code into "paragraphs."
|
|||||||
* Always place a space in between the template angle brackets and the type
|
* Always place a space in between the template angle brackets and the type
|
||||||
name. Template code is already hard enough to read!
|
name. Template code is already hard enough to read!
|
||||||
|
|
||||||
## Naming conventions
|
[1]: http://en.wikipedia.org/wiki/Indent_style#Allman_style
|
||||||
|
|
||||||
* Member variables and method names are written with camel-case, and never
|
|
||||||
begin with a capital letter.
|
|
||||||
* Class names are also written in camel-case, but always begin with a capital
|
|
||||||
letter.
|
|
||||||
* For global variables... well, you shouldn't have any, so it doesn't matter.
|
|
||||||
* Class data members begin with `m_`, static data members begin with `s_`.
|
|
||||||
Global variables begin with `g_`. This is so the scope of the corresponding
|
|
||||||
declaration can be easily determined.
|
|
||||||
* Avoid underscores in your names, especially leading or trailing underscores.
|
|
||||||
In particular, leading underscores should be avoided, as these are often used
|
|
||||||
in standard library code, so to use them in your own code looks quite jarring.
|
|
||||||
* If you really have to write a macro for some reason, then make it all caps,
|
|
||||||
with underscores to separate the words. And obviously make sure that its name
|
|
||||||
is unlikely to clash with symbols used in other libraries or 3rd party code.
|
|
||||||
|
|
||||||
## Types, const-correctness
|
|
||||||
|
|
||||||
* If a method can (and should!) be const, make it const!
|
|
||||||
* If a method definitely doesn't throw an exception (be careful!), mark it as
|
|
||||||
`noexcept`
|
|
||||||
* When returning a temporary object, e.g. a String, the returned object should
|
|
||||||
be non-const, so that if the class has a C++11 move operator, it can be used.
|
|
||||||
* If a local variable can be const, then make it const!
|
|
||||||
* Remember that pointers can be const as well as primitives; For example, if
|
|
||||||
you have a `char*` whose contents are going to be altered, you may still be
|
|
||||||
able to make the pointer itself const, e.g. `char* const foobar = getFoobar();`.
|
|
||||||
* Do not declare all your local variables at the top of a function or method
|
|
||||||
(i.e. in the old-fashioned C-style). Declare them at the last possible moment,
|
|
||||||
and give them as small a scope as possible.
|
|
||||||
* Object parameters should be passed as `const&` wherever possible. Only
|
|
||||||
pass a parameter as a copy-by-value object if you really need to mutate
|
|
||||||
a local copy inside the method, and if making a local copy inside the method
|
|
||||||
would be difficult.
|
|
||||||
* Use portable `for()` loop variable scoping (i.e. do not have multiple for
|
|
||||||
loops in the same scope that each re-declare the same variable name, as
|
|
||||||
this fails on older compilers)
|
|
||||||
* When you're testing a pointer to see if it's null, never write
|
|
||||||
`if (myPointer)`. Always avoid that implicit cast-to-bool by writing it more
|
|
||||||
fully: `if (myPointer != nullptr)`. And likewise, never ever write
|
|
||||||
`if (! myPointer)`, instead always write `if (myPointer == nullptr)`.
|
|
||||||
It is more readable that way.
|
|
||||||
* Avoid C-style casts except when converting between primitive numeric types.
|
|
||||||
Some people would say "avoid C-style casts altogether", but `static_cast` is
|
|
||||||
a bit unreadable when you just want to cast an `int` to a `float`. But
|
|
||||||
whenever a pointer is involved, or a non-primitive object, always use
|
|
||||||
`static_cast`. And when you're reinterpreting data, always use
|
|
||||||
`reinterpret_cast`.
|
|
||||||
* Until C++ gets a universal 64-bit primitive type (part of the C++11
|
|
||||||
standard), it's best to stick to the `int64` and `uint64` typedefs.
|
|
||||||
|
|
||||||
## Object lifetime and ownership
|
|
||||||
|
|
||||||
* Absolutely do NOT use `delete`, `deleteAndZero`, etc. There are very very few
|
|
||||||
situations where you can't use a `ScopedPointer` or some other automatic
|
|
||||||
lifetime management class.
|
|
||||||
* Do not use `new` unless there's no alternative. Whenever you type `new`, always
|
|
||||||
treat it as a failure to find a better solution. If a local variable can be
|
|
||||||
allocated on the stack rather than the heap, then always do so.
|
|
||||||
* Do not ever use `new` or `malloc` to allocate a C++ array. Always use a
|
|
||||||
`HeapBlock` instead.
|
|
||||||
* And just to make it doubly clear: Never use `malloc` or `calloc`.
|
|
||||||
* If a parent object needs to create and own some kind of child object, always
|
|
||||||
use composition as your first choice. If that's not possible (e.g. if the
|
|
||||||
child needs a pointer to the parent for its constructor), then use a
|
|
||||||
`ScopedPointer`.
|
|
||||||
* If possible, pass an object as a reference rather than a pointer. If possible,
|
|
||||||
make it a `const` reference.
|
|
||||||
* Obviously avoid static and global values. Sometimes there's no alternative,
|
|
||||||
but if there is an alternative, then use it, no matter how much effort it
|
|
||||||
involves.
|
|
||||||
* If allocating a local POD structure (e.g. an operating-system structure in
|
|
||||||
native code), and you need to initialise it with zeros, use the `= { 0 };`
|
|
||||||
syntax as your first choice for doing this. If for some reason that's not
|
|
||||||
appropriate, use the `zerostruct()` function, or in case that isn't suitable,
|
|
||||||
use `zeromem()`. Don't use `memset()`.
|
|
||||||
|
|
||||||
## Classes
|
|
||||||
|
|
||||||
* Declare a class's public section first, and put its constructors and
|
|
||||||
destructor first. Any protected items come next, and then private ones.
|
|
||||||
* Use the most restrictive access-specifier possible for each member. Prefer
|
|
||||||
`private` over `protected`, and `protected` over `public`. Don't expose
|
|
||||||
things unnecessarily.
|
|
||||||
* Preferred positioning for any inherited classes is to put them to the right
|
|
||||||
of the class name, vertically aligned, e.g.:
|
|
||||||
class Thing : public Foo,
|
|
||||||
private Bar
|
|
||||||
{
|
|
||||||
}
|
|
||||||
* Put a class's member variables (which should almost always be private, of course),
|
|
||||||
after all the public and protected method declarations.
|
|
||||||
* Any private methods can go towards the end of the class, after the member
|
|
||||||
variables.
|
|
||||||
* If your class does not have copy-by-value semantics, derive the class from
|
|
||||||
`Uncopyable`.
|
|
||||||
* If your class is likely to be leaked, then derive your class from
|
|
||||||
`LeakChecked<>`.
|
|
||||||
* Constructors that take a single parameter should be default be marked
|
|
||||||
`explicit`. Obviously there are cases where you do want implicit conversion,
|
|
||||||
but always think about it carefully before writing a non-explicit constructor.
|
|
||||||
* Do not use `NULL`, `null`, or 0 for a null-pointer. And especially never use
|
|
||||||
'0L', which is particulary burdensome. Use `nullptr` instead - this is the
|
|
||||||
C++2011 standard, so get used to it. There's a fallback definition for `nullptr`
|
|
||||||
in Beast, so it's always possible to use it even if your compiler isn't yet
|
|
||||||
C++2011 compliant.
|
|
||||||
* All the C++ 'guru' books and articles are full of excellent and detailed advice
|
|
||||||
on when it's best to use inheritance vs composition. If you're not already
|
|
||||||
familiar with the received wisdom in these matters, then do some reading!
|
|
||||||
|
|
||||||
## Miscellaneous
|
|
||||||
|
|
||||||
* `goto` statements should not be used at all, even if the alternative is
|
|
||||||
more verbose code. The only exception is when implementing an algorithm in
|
|
||||||
a function as a state machine.
|
|
||||||
* Don't use macros! OK, obviously there are many situations where they're the
|
|
||||||
right tool for the job, but treat them as a last resort. Certainly don't ever
|
|
||||||
use a macro just to hold a constant value or to perform any kind of function
|
|
||||||
that could have been done as a real inline function. And it goes without saying
|
|
||||||
that you should give them names which aren't going to clash with other code.
|
|
||||||
And `#undef` them after you've used them, if possible.
|
|
||||||
* When using the `++` or `--` operators, never use post-increment if
|
|
||||||
pre-increment could be used instead. Although it doesn't matter for
|
|
||||||
primitive types, it's good practice to pre-increment since this can be
|
|
||||||
much more efficient for more complex objects. In particular, if you're
|
|
||||||
writing a for loop, always use pre-increment,
|
|
||||||
e.g. `for (int = 0; i < 10; ++i)`
|
|
||||||
* Never put an "else" statement after a "return"! This is well-explained in the
|
|
||||||
LLVM coding standards...and a couple of other very good pieces of advice from
|
|
||||||
the LLVM standards are in there as well.
|
|
||||||
* When getting a possibly-null pointer and using it only if it's non-null, limit
|
|
||||||
the scope of the pointer as much as possible - e.g. Do NOT do this:
|
|
||||||
|
|
||||||
Foo* f = getFoo ();
|
|
||||||
if (f != nullptr)
|
|
||||||
f->doSomething ();
|
|
||||||
// other code
|
|
||||||
f->doSomething (); // oops! f may be null!
|
|
||||||
|
|
||||||
..instead, prefer to write it like this, which reduces the scope of the
|
|
||||||
pointer, making it impossible to write code that accidentally uses a null
|
|
||||||
pointer:
|
|
||||||
|
|
||||||
if (Foo* f = getFoo ())
|
|
||||||
f->doSomethingElse ();
|
|
||||||
|
|
||||||
// f is out-of-scope here, so impossible to use it if it's null
|
|
||||||
|
|
||||||
(This also results in smaller, cleaner code)
|
|
||||||
|
|
||||||
[1]: http://en.wikipedia.org/wiki/Don%27t_repeat_yourself
|
|
||||||
[2]: http://en.wikipedia.org/wiki/Indent_style#Allman_style
|
|
||||||
|
|||||||
2708
doc/Doxyfile
2708
doc/Doxyfile
File diff suppressed because it is too large
Load Diff
63
doc/HeapProfiling.md
Normal file
63
doc/HeapProfiling.md
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
## Heap profiling of rippled with jemalloc
|
||||||
|
|
||||||
|
The jemalloc library provides a good API for doing heap analysis,
|
||||||
|
including a mechanism to dump a description of the heap from within the
|
||||||
|
running application via a function call. Details on how to perform this
|
||||||
|
activity in general, as well as how to acquire the software, are available on
|
||||||
|
the jemalloc site:
|
||||||
|
[https://github.com/jemalloc/jemalloc/wiki/Use-Case:-Heap-Profiling](https://github.com/jemalloc/jemalloc/wiki/Use-Case:-Heap-Profiling)
|
||||||
|
|
||||||
|
jemalloc is acquired separately from rippled, and is not affiliated
|
||||||
|
with Ripple Labs. If you compile and install jemalloc from the
|
||||||
|
source release with default options, it will install the library and header
|
||||||
|
under `/usr/local/lib` and `/usr/local/include`, respectively. Heap
|
||||||
|
profiling has been tested with rippled on a Linux platform. It should
|
||||||
|
work on platforms on which both rippled and jemalloc are available.
|
||||||
|
|
||||||
|
To link rippled with jemalloc, the argument
|
||||||
|
`profile-jemalloc=<jemalloc_dir>` is provided after the optional target.
|
||||||
|
The `<jemalloc_dir>` argument should be the same as that of the
|
||||||
|
`--prefix` parameter passed to the jemalloc configure script when building.
|
||||||
|
|
||||||
|
## Examples:
|
||||||
|
|
||||||
|
Build rippled with jemalloc library under /usr/local/lib and
|
||||||
|
header under /usr/local/include:
|
||||||
|
|
||||||
|
$ scons profile-jemalloc=/usr/local
|
||||||
|
|
||||||
|
Build rippled using clang with the jemalloc library under /opt/local/lib
|
||||||
|
and header under /opt/local/include:
|
||||||
|
|
||||||
|
$ scons clang profile-jemalloc=/opt/local
|
||||||
|
|
||||||
|
----------------------
|
||||||
|
|
||||||
|
## Using the jemalloc library from within the code
|
||||||
|
|
||||||
|
The `profile-jemalloc` parameter enables a macro definition called
|
||||||
|
`PROFILE_JEMALLOC`. Include the jemalloc header file as
|
||||||
|
well as the api call(s) that you wish to make within preprocessor
|
||||||
|
conditional groups, such as:
|
||||||
|
|
||||||
|
In global scope:
|
||||||
|
|
||||||
|
#ifdef PROFILE_JEMALLOC
|
||||||
|
#include <jemalloc/jemalloc.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
And later, within a function scope:
|
||||||
|
|
||||||
|
#ifdef PROFILE_JEMALLOC
|
||||||
|
mallctl("prof.dump", NULL, NULL, NULL, 0);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
Fuller descriptions of how to acquire and use jemalloc's api to do memory
|
||||||
|
analysis are available at the [jemalloc
|
||||||
|
site.](http://www.canonware.com/jemalloc/)
|
||||||
|
|
||||||
|
Linking against the jemalloc library will override
|
||||||
|
the system's default `malloc()` and related functions with jemalloc's
|
||||||
|
implementation. This is the case even if the code is not instrumented
|
||||||
|
to use jemalloc's specific API.
|
||||||
|
|
||||||
@@ -22,17 +22,19 @@
|
|||||||
#
|
#
|
||||||
# 8. Diagnostics
|
# 8. Diagnostics
|
||||||
#
|
#
|
||||||
|
# 9. Voting
|
||||||
|
#
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
#
|
#
|
||||||
# Purpose
|
# Purpose
|
||||||
#
|
#
|
||||||
# This file documents and provides examples of all rippled server process
|
# This file documents and provides examples of all rippled server process
|
||||||
# configuration options. When the rippled server instance is lanched, it looks
|
# configuration options. When the rippled server instance is launched, it
|
||||||
# for a file with the following name:
|
# looks for a file with the following name:
|
||||||
#
|
#
|
||||||
# rippled.cfg
|
# rippled.cfg
|
||||||
#
|
#
|
||||||
# For more information on where the rippled serer instance searches for
|
# For more information on where the rippled server instance searches for
|
||||||
# the file please visit the Ripple wiki. Specifically, the section explaining
|
# the file please visit the Ripple wiki. Specifically, the section explaining
|
||||||
# the --conf command line option:
|
# the --conf command line option:
|
||||||
#
|
#
|
||||||
@@ -60,27 +62,36 @@
|
|||||||
#
|
#
|
||||||
# [ips]
|
# [ips]
|
||||||
#
|
#
|
||||||
# List of ips where the Ripple protocol is served. For a starter list,
|
# List of hostnames or ips where the Ripple protocol is served. For a starter
|
||||||
# you can copy entries from: https://ripple.com/ripple.txt
|
# list, you can either copy entries from: https://ripple.com/ripple.txt or if
|
||||||
|
# you prefer you can specify r.ripple.com 51235
|
||||||
#
|
#
|
||||||
# Domain names are not allowed. One ipv4 or ipv6 address per line. A port
|
# One IPv4 address or domain names per line is allowed. A port may optionally
|
||||||
# may optionally be specified after adding a space to the address. By
|
# be specified after adding a space to the address. By convention, if known,
|
||||||
# convention, if known, IPs are listed in from most to least trusted.
|
# IPs are listed in from most to least trusted.
|
||||||
#
|
#
|
||||||
# Examples:
|
# Examples:
|
||||||
# 192.168.0.1
|
# 192.168.0.1
|
||||||
# 192.168.0.1 3939
|
# 192.168.0.1 3939
|
||||||
# 2001:0db8:0100:f101:0210:a4ff:fee3:9566
|
# r.ripple.com 51235
|
||||||
#
|
#
|
||||||
# Here's the recent set of good, well known addresses:
|
# This will give you a good, up-to-date list of addresses:
|
||||||
#
|
#
|
||||||
# [ips]
|
# [ips]
|
||||||
# 54.225.112.220 51235
|
# r.ripple.com 51235
|
||||||
# 54.225.123.13 51235
|
#
|
||||||
# 54.227.239.106 51235
|
#
|
||||||
# 107.21.251.218 51235
|
#
|
||||||
# 184.73.226.101 51235
|
# [ips_fixed]
|
||||||
# 23.23.201.55 51235
|
#
|
||||||
|
# List of IP addresses or hostnames to which rippled should always attempt to
|
||||||
|
# maintain peer connections with. This is useful for manually forming private
|
||||||
|
# networks, for example to configure a validation server that connects to the
|
||||||
|
# Ripple network through a public-facing server, or for building a set
|
||||||
|
# of cluster peers.
|
||||||
|
#
|
||||||
|
# One IPv4 address or domain names per line is allowed. A port may optionally
|
||||||
|
# be specified after adding a space to the address.
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
@@ -116,6 +127,8 @@
|
|||||||
# to configure your server instance to receive PROXY Protocol handshakes,
|
# to configure your server instance to receive PROXY Protocol handshakes,
|
||||||
# and also to restrict access to your instance to the Elastic Load Balancer.
|
# and also to restrict access to your instance to the Elastic Load Balancer.
|
||||||
#
|
#
|
||||||
|
#
|
||||||
|
#
|
||||||
# [peer_private]
|
# [peer_private]
|
||||||
#
|
#
|
||||||
# 0 or 1.
|
# 0 or 1.
|
||||||
@@ -125,6 +138,15 @@
|
|||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
# [peers_max]
|
||||||
|
#
|
||||||
|
# The largest number of desired peer connections (incoming or outgoing).
|
||||||
|
# Cluster and fixed peers do not count towards this total. There are
|
||||||
|
# implementation-defined lower limits imposed on this value for security
|
||||||
|
# purposes.
|
||||||
|
#
|
||||||
|
#
|
||||||
|
#
|
||||||
# [peer_ssl_cipher_list]
|
# [peer_ssl_cipher_list]
|
||||||
#
|
#
|
||||||
# A colon delimited string with the allowed SSL cipher modes for peer. The
|
# A colon delimited string with the allowed SSL cipher modes for peer. The
|
||||||
@@ -222,7 +244,7 @@
|
|||||||
#
|
#
|
||||||
# The amount of time to wait in seconds, before sending a websocket 'ping'
|
# The amount of time to wait in seconds, before sending a websocket 'ping'
|
||||||
# message. Ping messages are used to determine if the remote end of the
|
# message. Ping messages are used to determine if the remote end of the
|
||||||
# connection is no longer availabile.
|
# connection is no longer available.
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
@@ -279,7 +301,7 @@
|
|||||||
#
|
#
|
||||||
# This group of settings configures security and access attributes of the
|
# This group of settings configures security and access attributes of the
|
||||||
# RPC server section of the rippled process, used to service both local
|
# RPC server section of the rippled process, used to service both local
|
||||||
# an optional remote clients.
|
# and optional remote clients.
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
@@ -294,7 +316,7 @@
|
|||||||
#
|
#
|
||||||
# [rpc_admin_allow]
|
# [rpc_admin_allow]
|
||||||
#
|
#
|
||||||
# Specify an list of IP addresses allowed to have admin access. One per line.
|
# Specify a list of IP addresses allowed to have admin access. One per line.
|
||||||
# If you want to test the output of non-admin commands add this section and
|
# If you want to test the output of non-admin commands add this section and
|
||||||
# just put an ip address not under your control.
|
# just put an ip address not under your control.
|
||||||
# Defaults to 127.0.0.1.
|
# Defaults to 127.0.0.1.
|
||||||
@@ -314,7 +336,7 @@
|
|||||||
#
|
#
|
||||||
# [rpc_admin_password]
|
# [rpc_admin_password]
|
||||||
#
|
#
|
||||||
# As a server, require this as the admin pasword to be specified. Also,
|
# As a server, require this as the admin password to be specified. Also,
|
||||||
# require rpc_admin_user and rpc_admin_password to be checked for RPC admin
|
# require rpc_admin_user and rpc_admin_password to be checked for RPC admin
|
||||||
# functions. The request must specify these as the admin_user and
|
# functions. The request must specify these as the admin_user and
|
||||||
# admin_password in the request object.
|
# admin_password in the request object.
|
||||||
@@ -338,7 +360,7 @@
|
|||||||
#
|
#
|
||||||
# [rpc_user]
|
# [rpc_user]
|
||||||
#
|
#
|
||||||
# As a server, require a this user to specified and require rpc_password to
|
# As a server, require this user to be specified and require rpc_password to
|
||||||
# be checked for RPC access via the rpc_ip and rpc_port. The user and password
|
# be checked for RPC access via the rpc_ip and rpc_port. The user and password
|
||||||
# must be specified via HTTP's basic authentication method.
|
# must be specified via HTTP's basic authentication method.
|
||||||
# As a client, supply this to the server via HTTP's basic authentication
|
# As a client, supply this to the server via HTTP's basic authentication
|
||||||
@@ -348,7 +370,7 @@
|
|||||||
#
|
#
|
||||||
# [rpc_password]
|
# [rpc_password]
|
||||||
#
|
#
|
||||||
# As a server, require a this password to specified and require rpc_user to
|
# As a server, require this password to be specified and require rpc_user to
|
||||||
# be checked for RPC access via the rpc_ip and rpc_port. The user and password
|
# be checked for RPC access via the rpc_ip and rpc_port. The user and password
|
||||||
# must be specified via HTTP's basic authentication method.
|
# must be specified via HTTP's basic authentication method.
|
||||||
# As a client, supply this to the server via HTTP's basic authentication
|
# As a client, supply this to the server via HTTP's basic authentication
|
||||||
@@ -373,8 +395,9 @@
|
|||||||
# 0: Server certificates are not provided for RPC clients using SSL [default]
|
# 0: Server certificates are not provided for RPC clients using SSL [default]
|
||||||
# 1: Client RPC connections wil be provided with SSL certificates.
|
# 1: Client RPC connections wil be provided with SSL certificates.
|
||||||
#
|
#
|
||||||
# Note that if rpc_secure is enabled, it will also be necessasry to configure the
|
# Note that if rpc_secure is enabled, it will also be necessary to configure
|
||||||
# certificate file settings located in rpc_ssl_cert, rpc_ssl_chain, and rpc_ssl_key
|
# the certificate file settings located in rpc_ssl_cert, rpc_ssl_chain, and
|
||||||
|
# rpc_ssl_key
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
@@ -382,8 +405,9 @@
|
|||||||
#
|
#
|
||||||
# <pathname>
|
# <pathname>
|
||||||
#
|
#
|
||||||
# A file system path leading to the SSL certificate file to use for secure RPC.
|
# A file system path leading to the SSL certificate file to use for secure
|
||||||
# The file is in PEM format. The file is not needed if the chain includes it.
|
# RPC. The file is in PEM format. The file is not needed if the chain
|
||||||
|
# includes it.
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
@@ -424,7 +448,7 @@
|
|||||||
#
|
#
|
||||||
# [sms_url]?from=[sms_from]&to=[sms_to]&api_key=[sms_key]&api_secret=[sms_secret]&text=['text']
|
# [sms_url]?from=[sms_from]&to=[sms_to]&api_key=[sms_key]&api_secret=[sms_secret]&text=['text']
|
||||||
#
|
#
|
||||||
# Where [...] are the corresponding valus from the configuration file, and
|
# Where [...] are the corresponding values from the configuration file, and
|
||||||
# ['test'] is the value of the JSON field with name 'text'.
|
# ['test'] is the value of the JSON field with name 'text'.
|
||||||
#
|
#
|
||||||
# [sms_url]
|
# [sms_url]
|
||||||
@@ -484,6 +508,20 @@
|
|||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
# [fetch_depth]
|
||||||
|
#
|
||||||
|
# The number of past ledgers to serve to other peers that request historical
|
||||||
|
# ledger data (or "full" for no limit).
|
||||||
|
#
|
||||||
|
# Servers that require low latency and high local performance may wish to
|
||||||
|
# restrict the historical ledgers they are willing to serve. Setting this
|
||||||
|
# below 32 can harm network stability as servers require easy access to
|
||||||
|
# recent history to stay in sync. Values below 128 are not recommended.
|
||||||
|
#
|
||||||
|
# The default is: full
|
||||||
|
#
|
||||||
|
#
|
||||||
|
#
|
||||||
# [validation_seed]
|
# [validation_seed]
|
||||||
#
|
#
|
||||||
# To perform validation, this section should contain either a validation seed
|
# To perform validation, this section should contain either a validation seed
|
||||||
@@ -504,8 +542,8 @@
|
|||||||
# For domains, rippled will probe for https web servers at the specified
|
# For domains, rippled will probe for https web servers at the specified
|
||||||
# domain in the following order: ripple.DOMAIN, www.DOMAIN, DOMAIN
|
# domain in the following order: ripple.DOMAIN, www.DOMAIN, DOMAIN
|
||||||
#
|
#
|
||||||
# For public key entries, a comment may optionally be spcified after adding a
|
# For public key entries, a comment may optionally be specified after adding
|
||||||
# space to the pulic key.
|
# a space to the public key.
|
||||||
#
|
#
|
||||||
# Examples:
|
# Examples:
|
||||||
# ripple.com
|
# ripple.com
|
||||||
@@ -553,14 +591,21 @@
|
|||||||
#
|
#
|
||||||
# [path_search_fast]
|
# [path_search_fast]
|
||||||
# [path_search_max]
|
# [path_search_max]
|
||||||
# When seaching for paths, the minimum and maximum search aggressiveness.
|
# When searching for paths, the minimum and maximum search aggressiveness.
|
||||||
#
|
#
|
||||||
# The default for 'path_search_fast' is 2. The default for 'path_search_max' is 10.
|
# The default for 'path_search_fast' is 2. The default for 'path_search_max' is 10.
|
||||||
#
|
#
|
||||||
# [path_search_old]
|
# [path_search_old]
|
||||||
#
|
#
|
||||||
# For clients that use the legacy path finding interfaces, the search
|
# For clients that use the legacy path finding interfaces, the search
|
||||||
# agressiveness to use. The default is 7.
|
# agressivness to use. The default is 7.
|
||||||
|
#
|
||||||
|
#
|
||||||
|
#
|
||||||
|
# [fee_default]
|
||||||
|
#
|
||||||
|
# Sets the base cost of a transaction in drops. Used when the server has
|
||||||
|
# no other source of fee information, such as signing transactions offline.
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
@@ -641,19 +686,20 @@
|
|||||||
# Examples:
|
# Examples:
|
||||||
# type=HyperLevelDB
|
# type=HyperLevelDB
|
||||||
# path=db/hyperldb
|
# path=db/hyperldb
|
||||||
|
# compression=0
|
||||||
#
|
#
|
||||||
# Choices for 'type' (not case-sensitive)
|
# Choices for 'type' (not case-sensitive)
|
||||||
# HyperLevelDB Use an improved version of LevelDB (preferred)
|
# RocksDB Use Facebook's RocksDB database (preferred)
|
||||||
# LevelDB Use Google's LevelDB database (deprecated)
|
# HyperLevelDB Use an improved version of LevelDB
|
||||||
# MDB Use MDB
|
|
||||||
# none Use no backend
|
|
||||||
# SQLite Use SQLite
|
# SQLite Use SQLite
|
||||||
|
# LevelDB Use Google's LevelDB database (deprecated)
|
||||||
|
# none Use no backend
|
||||||
#
|
#
|
||||||
# Required keys:
|
# Required keys:
|
||||||
# path Location to store the database (all types)
|
# path Location to store the database (all types)
|
||||||
#
|
#
|
||||||
# Optional keys:
|
# Optional keys:
|
||||||
# (none yet)
|
# compression 0 for none, 1 for Snappy compression
|
||||||
#
|
#
|
||||||
# Notes:
|
# Notes:
|
||||||
# The 'node_db' entry configures the primary, persistent storage.
|
# The 'node_db' entry configures the primary, persistent storage.
|
||||||
@@ -697,6 +743,92 @@
|
|||||||
#
|
#
|
||||||
#
|
#
|
||||||
#
|
#
|
||||||
|
# [insight]
|
||||||
|
#
|
||||||
|
# Configuration parameters for the Beast.Insight stats collection module.
|
||||||
|
#
|
||||||
|
# Insight is a module that collects information from the areas of rippled
|
||||||
|
# that have instrumentation. The configuration paramters control where the
|
||||||
|
# collection metrics are sent. The parameters are expressed as key = value
|
||||||
|
# pairs with no white space. The main parameter is the choice of server:
|
||||||
|
#
|
||||||
|
# "server"
|
||||||
|
#
|
||||||
|
# Choice of server to send metrics to. Currently the only choice is
|
||||||
|
# "statsd" which sends UDP packets to a StatsD daemon, which must be
|
||||||
|
# running while rippled is running. More information on StatsD is
|
||||||
|
# available here:
|
||||||
|
# https://github.com/b/statsd_spec
|
||||||
|
#
|
||||||
|
# When server=statsd, these additional keys are used:
|
||||||
|
#
|
||||||
|
# "address" The UDP address and port of the listening StatsD server,
|
||||||
|
# in the format, n.n.n.n:port.
|
||||||
|
#
|
||||||
|
# "prefix" A string prepended to each collected metric. This is used
|
||||||
|
# to distinguish between different running instances of rippled.
|
||||||
|
#
|
||||||
|
# If this section is missing, or the server type is unspecified or unknown,
|
||||||
|
# statistics are not collected or reported.
|
||||||
|
#
|
||||||
|
# Example:
|
||||||
|
#
|
||||||
|
# [insight]
|
||||||
|
# server=statsd
|
||||||
|
# address=192.168.0.95:4201
|
||||||
|
# prefix=my_validator
|
||||||
|
#
|
||||||
|
#-------------------------------------------------------------------------------
|
||||||
|
#
|
||||||
|
# 9. Voting
|
||||||
|
#
|
||||||
|
#----------
|
||||||
|
#
|
||||||
|
# The vote settings configure settings for the entire Ripple network.
|
||||||
|
# While a single instance of rippled cannot unilaterally enforce network-wide
|
||||||
|
# settings, these choices become part of the instance's vote during the
|
||||||
|
# consensus process for each voting ledger.
|
||||||
|
#
|
||||||
|
# [voting]
|
||||||
|
#
|
||||||
|
# A set of key/value pair parameters used during voting ledgers.
|
||||||
|
#
|
||||||
|
# reference_fee = <drops>
|
||||||
|
#
|
||||||
|
# The cost of the reference transaction fee, specified in drops.
|
||||||
|
# The reference transaction is the simplest form of transaction.
|
||||||
|
# It represents an XRP payment between two parties.
|
||||||
|
#
|
||||||
|
# If this parameter is unspecified, rippled will use an internal
|
||||||
|
# default. Don't change this without understanding the consequences.
|
||||||
|
#
|
||||||
|
# Example:
|
||||||
|
# reference_fee = 10 # 10 drops
|
||||||
|
#
|
||||||
|
# account_reserve = <drops>
|
||||||
|
#
|
||||||
|
# The account reserve requirement specified in drops. The portion of an
|
||||||
|
# account's XRP balance that is at or below the reserve may only be
|
||||||
|
# spent on transaction fees, and not transferred out of the account.
|
||||||
|
#
|
||||||
|
# If this parameter is unspecified, rippled will use an internal
|
||||||
|
# default. Don't change this without understanding the consequences.
|
||||||
|
#
|
||||||
|
# Example:
|
||||||
|
# account_reserve = 20000000 # 20 XRP
|
||||||
|
#
|
||||||
|
# owner_reserve = <drops>
|
||||||
|
#
|
||||||
|
# The owner reserve is the amount of XRP reserved in the account for
|
||||||
|
# each ledger item owned by the account. Ledger items an account may
|
||||||
|
# own include trust lines, open orders, and tickets.
|
||||||
|
#
|
||||||
|
# If this parameter is unspecified, rippled will use an internal
|
||||||
|
# default. Don't change this without understanding the consequences.
|
||||||
|
#
|
||||||
|
# Example:
|
||||||
|
# owner_reserve = 5000000 # 5 XRP
|
||||||
|
#
|
||||||
#-------------------------------------------------------------------------------
|
#-------------------------------------------------------------------------------
|
||||||
|
|
||||||
# Allow other peers to connect to this server.
|
# Allow other peers to connect to this server.
|
||||||
@@ -737,14 +869,25 @@
|
|||||||
[node_size]
|
[node_size]
|
||||||
medium
|
medium
|
||||||
|
|
||||||
# Note that HyperLevelDB is unavailable on Windows platforms
|
# This is primary persistent datastore for rippled. This includes transaction
|
||||||
#
|
# metadata, account states, and ledger headers. Helpful information can be
|
||||||
|
# found here: https://ripple.com/wiki/NodeBackEnd
|
||||||
[node_db]
|
[node_db]
|
||||||
type=HyperLevelDB
|
type=RocksDB
|
||||||
path=db/hyperldb
|
path=/var/lib/rippled/db/rocksdb
|
||||||
|
open_files=2000
|
||||||
|
filter_bits=12
|
||||||
|
cache_mb=256
|
||||||
|
file_size_mb=8
|
||||||
|
file_size_mult=2
|
||||||
|
|
||||||
|
[database_path]
|
||||||
|
/var/lib/rippled/db
|
||||||
|
|
||||||
|
# This needs to be an absolute directory reference, not a relative one.
|
||||||
|
# Modify this value as required.
|
||||||
[debug_logfile]
|
[debug_logfile]
|
||||||
log/debug.log
|
/var/log/rippled/debug.log
|
||||||
|
|
||||||
[sntp_servers]
|
[sntp_servers]
|
||||||
time.windows.com
|
time.windows.com
|
||||||
@@ -753,12 +896,45 @@ time.nist.gov
|
|||||||
pool.ntp.org
|
pool.ntp.org
|
||||||
|
|
||||||
# Where to find some other servers speaking the Ripple protocol.
|
# Where to find some other servers speaking the Ripple protocol.
|
||||||
# This set of addresses is recent as of September 5, 2013
|
|
||||||
#
|
#
|
||||||
[ips]
|
[ips]
|
||||||
54.225.112.220 51235
|
r.ripple.com 51235
|
||||||
54.225.123.13 51235
|
|
||||||
54.227.239.106 51235
|
# The latest validators can be obtained from
|
||||||
107.21.251.218 51235
|
# https://ripple.com/ripple.txt
|
||||||
184.73.226.101 51235
|
#
|
||||||
23.23.201.55 51235
|
[validators]
|
||||||
|
n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7 RL1
|
||||||
|
n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj RL2
|
||||||
|
n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C RL3
|
||||||
|
n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS RL4
|
||||||
|
n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA RL5
|
||||||
|
|
||||||
|
# Ditto.
|
||||||
|
[validation_quorum]
|
||||||
|
3
|
||||||
|
|
||||||
|
# Turn down default logging to save disk space in the long run.
|
||||||
|
# Valid values here are trace, debug, info, warning, error, and fatal
|
||||||
|
[rpc_startup]
|
||||||
|
{ "command": "log_level", "severity": "warning" }
|
||||||
|
|
||||||
|
# Configure SSL for WebSockets. Not enabled by default because not everybody
|
||||||
|
# has an SSL cert on their server, but if you uncomment the following lines and
|
||||||
|
# set the path to the SSL certificate and private key the WebSockets protocol
|
||||||
|
# will be protected by SSL/TLS.
|
||||||
|
#[websocket_secure]
|
||||||
|
#1
|
||||||
|
|
||||||
|
#[websocket_ssl_cert]
|
||||||
|
#/etc/ssl/certs/server.crt
|
||||||
|
|
||||||
|
#[websocket_ssl_key]
|
||||||
|
#/etc/ssl/private/server.key
|
||||||
|
|
||||||
|
# Defaults to 0 ("no") so that you can use self-signed SSL certificates for
|
||||||
|
# development, or internally.
|
||||||
|
#[ssl_verify]
|
||||||
|
#0
|
||||||
|
|
||||||
|
|
||||||
|
|||||||
10
doc/rippled-example.service
Normal file
10
doc/rippled-example.service
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
[Unit]
|
||||||
|
Description=Ripple Peer-to-Peer Network Daemon
|
||||||
|
|
||||||
|
[Service]
|
||||||
|
Type=simple
|
||||||
|
User=nobody
|
||||||
|
ExecStart=/usr/bin/rippled --conf=/etc/rippled/rippled.cfg
|
||||||
|
|
||||||
|
[Install]
|
||||||
|
WantedBy=multi-user.target
|
||||||
114
doc/rippled.init
Normal file
114
doc/rippled.init
Normal file
@@ -0,0 +1,114 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
|
||||||
|
### BEGIN INIT INFO
|
||||||
|
# Provides: ripple
|
||||||
|
# Required-Start: $local_fs $remote_fs $network $syslog
|
||||||
|
# Required-Stop: $local_fs $remote_fs $network $syslog
|
||||||
|
# Default-Start: 2 3 4 5
|
||||||
|
# Default-Stop: 0 1 6
|
||||||
|
# Short-Description: starts the ripple network node
|
||||||
|
# Description: starts rippled using start-stop-daemon
|
||||||
|
### END INIT INFO
|
||||||
|
|
||||||
|
set -e
|
||||||
|
|
||||||
|
NAME=rippled
|
||||||
|
USER="rippled"
|
||||||
|
GROUP="rippled"
|
||||||
|
PIDFILE=/var/run/$NAME.pid
|
||||||
|
DAEMON=/usr/local/sbin/rippled
|
||||||
|
DAEMON_OPTS="--conf /etc/ripple/rippled.cfg"
|
||||||
|
NET_OPTS="--net $DAEMON_OPTS"
|
||||||
|
LOGDIR="/var/log/rippled"
|
||||||
|
DBDIR="/var/db/rippled/db/hyperldb"
|
||||||
|
|
||||||
|
export PATH="${PATH:+$PATH:}/usr/sbin:/sbin"
|
||||||
|
|
||||||
|
# I wish it didn't come down to this, but this is the easiest way to ensure
|
||||||
|
# sanity of an install.
|
||||||
|
if [ ! -d $LOGDIR ]; then
|
||||||
|
mkdir -p $LOGDIR
|
||||||
|
chown $USER:$GROUP $LOGDIR
|
||||||
|
fi
|
||||||
|
if [ ! -d $DBDIR ]; then
|
||||||
|
mkdir -p $DBDIR
|
||||||
|
chown -R $USER:$GROUP $DBDIR
|
||||||
|
fi
|
||||||
|
|
||||||
|
case "$1" in
|
||||||
|
start)
|
||||||
|
echo -n "Starting daemon: "$NAME
|
||||||
|
start-stop-daemon --start --quiet --background -m --pidfile $PIDFILE \
|
||||||
|
--exec $DAEMON --chuid $USER --group $GROUP --verbose -- $NET_OPTS
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
stop)
|
||||||
|
echo -n "Stopping daemon: "$NAME
|
||||||
|
$DAEMON $DAEMON_OPTS stop
|
||||||
|
rm -f $PIDFILE
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
restart)
|
||||||
|
echo -n "Restarting daemon: "$NAME
|
||||||
|
$DAEMON $DAEMON_OPTS stop
|
||||||
|
rm -f $PIDFILE
|
||||||
|
start-stop-daemon --start --quiet --background -m --pidfile $PIDFILE \
|
||||||
|
--exec $DAEMON --chuid $USER --group $GROUP -- $NET_OPTS
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
status)
|
||||||
|
echo "Status of $NAME:"
|
||||||
|
echo -n "PID of $NAME: "
|
||||||
|
if [ -f "$PIDFILE" ]; then
|
||||||
|
cat $PIDFILE
|
||||||
|
$DAEMON $DAEMON_OPTS server_info
|
||||||
|
else
|
||||||
|
echo "$NAME not running."
|
||||||
|
fi
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
fetch)
|
||||||
|
echo "$NAME ledger fetching info:"
|
||||||
|
$DAEMON $DAEMON_OPTS fetch_info
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
uptime)
|
||||||
|
echo "$NAME uptime:"
|
||||||
|
$DAEMON $DAEMON_OPTS get_counts
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
startconfig)
|
||||||
|
echo "$NAME is being started with the following command line:"
|
||||||
|
echo "$DAEMON $NET_OPTS"
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
command)
|
||||||
|
# Truncate the script's argument vector by one position to get rid of
|
||||||
|
# this entry.
|
||||||
|
shift
|
||||||
|
|
||||||
|
# Pass the remainder of the argument vector to rippled.
|
||||||
|
$DAEMON $DAEMON_OPTS "$@"
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
test)
|
||||||
|
$DAEMON $DAEMON_OPTS ping
|
||||||
|
echo "."
|
||||||
|
;;
|
||||||
|
|
||||||
|
*)
|
||||||
|
echo "Usage: $0 {start|stop|restart|status|fetch|uptime|startconfig|"
|
||||||
|
echo " command|test}"
|
||||||
|
exit 1
|
||||||
|
esac
|
||||||
|
|
||||||
|
exit 0
|
||||||
|
|
||||||
@@ -1,330 +0,0 @@
|
|||||||
--------------------------------------------------------------------------------
|
|
||||||
RIPPLE TODO
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
Vinnie's List: Changes day to day, descending priority
|
|
||||||
- PeerFinder work
|
|
||||||
- Fix and tidy up broken beast classes
|
|
||||||
- Validators work
|
|
||||||
* Parse Validator line using cribbed code
|
|
||||||
- Parse ContentBodyBuffer from HTTPResponse
|
|
||||||
- HTTPMessage improvements
|
|
||||||
- HTTPClient improvements based on HTTPServer
|
|
||||||
- Ditch old HTTPClient so I can take the name
|
|
||||||
- Finish RPCAsyncServer, RPCService and RPCService::Manager
|
|
||||||
- Fix RPCDoor to respect config setting for [rpc_secure]
|
|
||||||
- Validators should delay the application of newly downloaded lists from
|
|
||||||
sources, to mitigate the effects of attacks. Unless there's no validators
|
|
||||||
in the list.
|
|
||||||
- Validators RPC options to immediately apply UNL,
|
|
||||||
manually revisit sources, etc...
|
|
||||||
- Clean up calculation of COnfig file location.
|
|
||||||
- Remove TESTNET and all related code and settings.
|
|
||||||
- Remove addRpcSub, findRpcSub, and notify the appropriate partner(s)
|
|
||||||
|
|
||||||
David Features:
|
|
||||||
- override config items from command line
|
|
||||||
- change config via RPC, this is for debugging
|
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
- Look into using CMake
|
|
||||||
|
|
||||||
- IPv6 support in IPEndpoint
|
|
||||||
|
|
||||||
- Configuration list for Jenkins
|
|
||||||
|
|
||||||
- Fix all crash and leaks on exit
|
|
||||||
Say there's a leak, a ledger that can never be accessed is locked in some
|
|
||||||
structure. If the organized teardown code frees that structure, the leak
|
|
||||||
will not be reported.
|
|
||||||
Yes, so you'll detect some small subset of leaks that way.
|
|
||||||
You'll still have to be vigilant for the leaks that won't detect.
|
|
||||||
The problem is ordering. There are lots of circular dependencies.
|
|
||||||
The biggest problem is the order of destruction of global objects. (I think)
|
|
||||||
Getting rid of global objects is a good solution to that.
|
|
||||||
Vinnie Falco: Those I can resolve with my ReferenceCountedSingleton. And
|
|
||||||
yeah thats a good approach, one that I am doing slowly anyway
|
|
||||||
Yeah, that's good for other reasons too, not just the unpredictability of
|
|
||||||
creation order that can hide bugs.
|
|
||||||
There may also just be some missing destructors.
|
|
||||||
Some of it may be things being shut down in the wrong order. Like if you shut
|
|
||||||
down the cache and then something that uses the cache, objects may get
|
|
||||||
put in the cache after it was shut down.
|
|
||||||
|
|
||||||
- Do something about the throw() reporting weaknesses:
|
|
||||||
* Make sure all Sconstruct and .pro builds have debug symbols in release
|
|
||||||
* Replace all throw with beast::Throw()
|
|
||||||
(Only in ripple sources, not in Subtrees/, protobuf, or websocket)
|
|
||||||
- Improved Beast exception object, provides __FILE__ and __LINE__
|
|
||||||
- Add file and line capabilities to beast::Throw()
|
|
||||||
- Allow beast::Throw to be hooked for logging
|
|
||||||
- Add stack trace capability to beast::Throw() diagnostics via the hook
|
|
||||||
(use the existing beast::SystemStats::getStackBacktrace())
|
|
||||||
- Implement getStackBacktrace for BEAST_BSD targets
|
|
||||||
- Add UnhandledExceptionCatcher to beast
|
|
||||||
- Return EXIT_FAILURE on unhandled exception
|
|
||||||
|
|
||||||
- Consolidate JSON code maybe use Beast
|
|
||||||
|
|
||||||
- Deeply create directories specified in config settings
|
|
||||||
|
|
||||||
- Refactor Section code into ConfigFile
|
|
||||||
|
|
||||||
- Supress useless gcc warnings
|
|
||||||
http://stackoverflow.com/questions/3378560/how-to-disable-gcc-warnings-for-a-few-lines-of-code
|
|
||||||
|
|
||||||
- Get rid of boost::filesystem
|
|
||||||
|
|
||||||
- What the heck is up with site_scons/site_tools/protoc.py?
|
|
||||||
|
|
||||||
- Add convenience variadic functions to JobQueue that do the bind for you
|
|
||||||
|
|
||||||
- Consolidate databases
|
|
||||||
|
|
||||||
- Figure out why we need WAL sqlite mode if we no longer use sqlite for the node store
|
|
||||||
|
|
||||||
- Add "skipped" field to beginTestCase() to disable a test but still record
|
|
||||||
that it was skipped in the output. Like for mdb import.
|
|
||||||
|
|
||||||
- use beast DeadlineTimer for sweep in Application
|
|
||||||
|
|
||||||
- Get rid of 'ref' typedefs that really mean const&
|
|
||||||
|
|
||||||
- Use secp256k1 from beast
|
|
||||||
|
|
||||||
- Fix xsd/dtd line in JUnit XML output
|
|
||||||
|
|
||||||
- Get rid of the WriteLog() stuff in the ripple tests and make it report the
|
|
||||||
message directly to the UnitTest object. Then update the JUnit XML output
|
|
||||||
routines to also write the auxiliary messages.
|
|
||||||
|
|
||||||
* Restyle all the macros in ConfigSection.h
|
|
||||||
|
|
||||||
- Move src/protobuf to Subtrees and deal with protobuf_core.cpp
|
|
||||||
|
|
||||||
- Replace home-made database wrappers with beast::sqdb
|
|
||||||
|
|
||||||
- Use static creation member functions instead of endless constructor
|
|
||||||
variations in base_uint, uint256, and family.
|
|
||||||
|
|
||||||
- Raise the warning level and fix everything
|
|
||||||
|
|
||||||
- Replace base_uint and uintXXX with UnsignedInteger
|
|
||||||
* Need to specialize UnsignedInteger to work efficiently with 4 and 8 byte
|
|
||||||
multiples of the size.
|
|
||||||
|
|
||||||
- Rewrite boost program_options in Beast
|
|
||||||
|
|
||||||
- Replace endian conversion calls with beast calls:
|
|
||||||
htobe32, be32toh, ntohl, etc...
|
|
||||||
Start by removing the system headers which provide these routines, if possible
|
|
||||||
|
|
||||||
- Rename RPCHandler to CallHandler
|
|
||||||
|
|
||||||
- Profile/VTune the application to identify hot spots
|
|
||||||
* Determine why rippled has a slow startup on Windows
|
|
||||||
* Improve the performance when running all unit tests on Windows
|
|
||||||
|
|
||||||
- Rename "fullBelow" to something like haveAllDescendants or haveAllChildren.
|
|
||||||
|
|
||||||
- Rewrite Sustain to use Beast and work on Windows as well
|
|
||||||
* Do not enable watchdog process if a debugger is attached
|
|
||||||
|
|
||||||
- Make sure the leak detector output appears on Linux and FreeBSD debug builds.
|
|
||||||
|
|
||||||
- Create SharedData <LoadState>, move all load related state variables currently
|
|
||||||
protected by separated mutexes in different classes into the LoadState, and
|
|
||||||
use read/write locking semantics to update the values. Later, use Listeners
|
|
||||||
to notify dependent code to resolve the dependency inversion.
|
|
||||||
|
|
||||||
- Rename LoadMonitor to LoadMeter, change LoadEvent to LoadMeter::ScopedSample
|
|
||||||
|
|
||||||
- Rename LedgerMaster to Ledgers, create ILedgers interface.
|
|
||||||
|
|
||||||
- Figure out where previous ledgers go after a call to LedgerMaster::pushLedger()
|
|
||||||
and see if it is possible to clean up the leaks on exit.
|
|
||||||
|
|
||||||
- Replace all NULL with nullptr
|
|
||||||
|
|
||||||
- Make TxFormats a member of ICore instead of a singleton.
|
|
||||||
PROBLEM: STObject derived classes like STInt16 make direct use of the
|
|
||||||
singleton. It might have to remain a singleton. At the very least,
|
|
||||||
it should be a SharedSingleton to resolve ordering issues.
|
|
||||||
|
|
||||||
- Rename include guards to boost style, e.g. RIPPLE_LOG_H_INCLUDED
|
|
||||||
|
|
||||||
- Replace C11X with BEAST_COMPILER_SUPPORTS_MOVE_SEMANTICS
|
|
||||||
|
|
||||||
- Remove "ENABLE_INSECURE" when the time is right.
|
|
||||||
|
|
||||||
- lift unique_ptr / auto_ptr into ripple namespace,
|
|
||||||
or replace with ScopedPointer (preferred)
|
|
||||||
|
|
||||||
- Make LevelDB and Ripple code work with both Unicode and non-Unicode Windows APIs
|
|
||||||
|
|
||||||
- Go searching through VFALCO notes and fix everything
|
|
||||||
|
|
||||||
- Deal with function-level statics used for SqliteDatabase (like in
|
|
||||||
HSBESQLite::visitAll)
|
|
||||||
|
|
||||||
- Document in order:
|
|
||||||
SerializedType
|
|
||||||
STObject
|
|
||||||
SerializedLedgerEntry
|
|
||||||
|
|
||||||
- Replace uint160, uint256 in argument lists, template parameter lists, and
|
|
||||||
data members with tyepdefs from ripple_ProtocolTypes.h
|
|
||||||
|
|
||||||
- Consolidate SQLite database classes: DatabaseCon, Database, SqliteDatabase.
|
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
HYPERLEVELDB TODO
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
- Port to Windows
|
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
LEVELDB TODO
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
- Add VisualStudio 2012 project file to our fork
|
|
||||||
|
|
||||||
- Add LevelDB unity .cpp and .h to our fork
|
|
||||||
|
|
||||||
- Replace Beast specific platform macros with universal macros so that the
|
|
||||||
unity doesn't require Beast
|
|
||||||
|
|
||||||
- Submit LevelDB fork changes to Bitcoin upstream
|
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
WEBSOCKET TODO
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
- Rewrite for sanity
|
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
PROTOCOL BUFFERS TODO
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
- Create/maintain the protobuf Git repo (original uses SVN)
|
|
||||||
|
|
||||||
- Update the subtree
|
|
||||||
|
|
||||||
- Make a Visual Studio 2012 Project for source browsing
|
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
NOTES
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
LoadEvent
|
|
||||||
|
|
||||||
Is referenced with both a shared pointer and an auto pointer.
|
|
||||||
Should be named LoadMeter::ScopedSample. Or possibly ScopedLoadSample
|
|
||||||
|
|
||||||
JobQueue
|
|
||||||
|
|
||||||
getLoadEvent and getLoadEventAP differ only in the style of pointer
|
|
||||||
container which is returned. Unnecessary complexity.
|
|
||||||
|
|
||||||
Naming: Some names don't make sense.
|
|
||||||
|
|
||||||
Index
|
|
||||||
Stop using Index to refer to keys in tables. Replace with "Key" ?
|
|
||||||
Index implies a small integer, or a data structure.
|
|
||||||
|
|
||||||
This is all over the place in the Ledger API, "Index" of this and
|
|
||||||
"Index" of that, the terminology is imprecise and helps neither
|
|
||||||
understanding nor recall.
|
|
||||||
|
|
||||||
Inconsistent names
|
|
||||||
|
|
||||||
We have full names like SerializedType and then acronyms like STObject
|
|
||||||
Two names for some things, e.g. SerializedLedgerEntry and SLE
|
|
||||||
|
|
||||||
Shared/Smart pointer typedefs in classes have a variety of different names
|
|
||||||
for the same thing. e.g. "pointer", "ptr", "ptr_t", "wptr"
|
|
||||||
|
|
||||||
Verbose names
|
|
||||||
|
|
||||||
The prefix "Flat" is more appealing than "Serialized" because its shorter and
|
|
||||||
easier to pronounce.
|
|
||||||
|
|
||||||
Ledger "Skip List"
|
|
||||||
|
|
||||||
Is not really a skip list data structure. This is more appropriately
|
|
||||||
called an "index" although that name is currently used to identify hashes
|
|
||||||
used as keys.
|
|
||||||
|
|
||||||
Interfaces
|
|
||||||
|
|
||||||
Serializer
|
|
||||||
|
|
||||||
Upon analysis this class does two incompatible things. Flattening, and
|
|
||||||
unflattening. The interface should be reimplemented as two distinct
|
|
||||||
abstract classes, InputStream and OutputStream with suitable implementations
|
|
||||||
such as to and from a block of memory or dynamically allocated buffer.
|
|
||||||
|
|
||||||
The name and conflation of dual roles serves to confuse code at the point
|
|
||||||
of call. Does set(Serializer& s) flatten or unflatten the data? This
|
|
||||||
would be more clear:
|
|
||||||
bool write (OutputStream& stream);
|
|
||||||
|
|
||||||
We have beast for InputStream and OutputStream, we can use those now.
|
|
||||||
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
Davidisms
|
|
||||||
--------------------------------------------------------------------------------
|
|
||||||
|
|
||||||
(Figure out a good place to record information like this permanently)
|
|
||||||
|
|
||||||
Regarding a defect where a failing transaction was being submitted over and over
|
|
||||||
again on the network (July 3, 2013)
|
|
||||||
|
|
||||||
The core problem was an interaction between two bits of logic.
|
|
||||||
1) Normally, we won't relay a transaction again if we already recently relayed
|
|
||||||
it. But this is bypassed if the transaction failed in a way that could
|
|
||||||
allow it to succeed later. This way, if one server discovers a transaction
|
|
||||||
can now work, it can get all servers to retry it.
|
|
||||||
2) Normally, we won't relay a transaction if we think it can't claim a fee.
|
|
||||||
But if we're not sure it can't claim a fee because we're in an unhealthy
|
|
||||||
state, we propagate the transaction to let other servers decide if they
|
|
||||||
think it can claim a fee.
|
|
||||||
With these two bits of logic, two unhealthy servers could infinitely propagate
|
|
||||||
a transaction back and forth between each other.
|
|
||||||
|
|
||||||
A node is "full below" if we believe we have (either in the database or
|
|
||||||
scheduled to be stored in the database) the contents of every node below that
|
|
||||||
node in a hash tree. When trying to acquire a hash tree/map, if a node is
|
|
||||||
full below, we know not to bother with anything below that node.
|
|
||||||
|
|
||||||
The fullBelowCache is a cache of hashes of nodes that are full below. Which means
|
|
||||||
there are no missing children
|
|
||||||
|
|
||||||
|
|
||||||
What we want from the unique node list:
|
|
||||||
- Some number of trusted roots (known by domain)
|
|
||||||
probably organizations whose job is to provide a list of validators
|
|
||||||
- We imagine the IRGA for example would establish some group whose job is to
|
|
||||||
maintain a list of validators. There would be a public list of criteria
|
|
||||||
that they would use to vet the validator. Things like:
|
|
||||||
* Not anonymous
|
|
||||||
* registered business
|
|
||||||
* Physical location
|
|
||||||
* Agree not to cease operations without notice / arbitrarily
|
|
||||||
* Responsive to complaints
|
|
||||||
- Identifiable jurisdiction
|
|
||||||
* Homogeneity in the jurisdiction is a business risk
|
|
||||||
* If all validators are in the same jurisdiction this is a business risk
|
|
||||||
- OpenCoin sets criteria for the organizations
|
|
||||||
- Rippled will ship with a list of trusted root "certificates"
|
|
||||||
In other words this is a list of trusted domains from which the software
|
|
||||||
can contact each trusted root and retrieve a list of "good" validators
|
|
||||||
and then do something with that information
|
|
||||||
- All the validation information would be public, including the broadcast
|
|
||||||
messages.
|
|
||||||
- The goal is to easily identify bad actors and assess network health
|
|
||||||
* Malicious intent
|
|
||||||
* Or, just hardware problems (faulty drive or memory)
|
|
||||||
|
|
||||||
|
|
||||||
@@ -20,6 +20,8 @@
|
|||||||
#
|
#
|
||||||
|
|
||||||
[validators]
|
[validators]
|
||||||
n9KPnVLn7ewVzHvn218DcEYsnWLzKerTDwhpofhk4Ym1RUq4TeGw first
|
n949f75evCHwgyP4fPVgaHqNHxUVN15PsJEZ3B3HnXPcPjcZAoy7 RL1
|
||||||
n9LFzWuhKNvXStHAuemfRKFVECLApowncMAM5chSCL9R5ECHGN4V second
|
n9MD5h24qrQqiyBC8aeqqCWvpiBiYQ3jxSr91uiDvmrkyHRdYLUj RL2
|
||||||
n94rSdgTyBNGvYg8pZXGuNt59Y5bGAZGxbxyvjDaqD9ceRAgD85P third
|
n9L81uNCaPgtUJfaHh89gmdvXKAmSt5Gdsw2g1iPWaPkAHW5Nm4C RL3
|
||||||
|
n9KiYM9CgngLvtRCQHZwgC2gjpdaZcCcbt3VboxiNFcKuwFVujzS RL4
|
||||||
|
n9LdgEtkmGB9E2h3K4Vp7iGUaKuq23Zr32ehxiU8FWY7xoxbWTSA RL5
|
||||||
|
|||||||
@@ -10,17 +10,19 @@
|
|||||||
},
|
},
|
||||||
|
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"ripple-lib": "0.7.25",
|
"ripple-lib": "0.8.2",
|
||||||
"async": "~0.2.9",
|
"async": "~0.2.9",
|
||||||
"extend": "~1.2.0",
|
"extend": "~1.2.0",
|
||||||
"simple-jsonrpc": "~0.0.2"
|
"simple-jsonrpc": "~0.0.2",
|
||||||
|
"deep-equal": "0.0.0"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
|
"coffee-script": "~1.6.3",
|
||||||
"mocha": "~1.13.0"
|
"mocha": "~1.13.0"
|
||||||
},
|
},
|
||||||
|
|
||||||
"scripts": {
|
"scripts": {
|
||||||
"test": "mocha --reporter spec --ui tdd --timeout 10000 --slow 600 test/*-test.js"
|
"test": "mocha test/websocket-test.js test/server-test.js test/*-test.{js,coffee}"
|
||||||
},
|
},
|
||||||
|
|
||||||
"repository": {
|
"repository": {
|
||||||
|
|||||||
@@ -1,88 +0,0 @@
|
|||||||
#!python
|
|
||||||
"""
|
|
||||||
protoc.py: Protoc Builder for SCons
|
|
||||||
|
|
||||||
This Builder invokes protoc to generate C++ and Python
|
|
||||||
from a .proto file.
|
|
||||||
|
|
||||||
NOTE: Java is not currently supported.
|
|
||||||
|
|
||||||
From: http://www.scons.org/wiki/ProtocBuilder
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = "Scott Stafford"
|
|
||||||
|
|
||||||
import SCons.Action
|
|
||||||
import SCons.Builder
|
|
||||||
import SCons.Defaults
|
|
||||||
import SCons.Node.FS
|
|
||||||
import SCons.Util
|
|
||||||
|
|
||||||
from SCons.Script import File, Dir
|
|
||||||
|
|
||||||
import os.path
|
|
||||||
|
|
||||||
protocs = 'protoc'
|
|
||||||
|
|
||||||
ProtocAction = SCons.Action.Action('$PROTOCCOM', '$PROTOCCOMSTR')
|
|
||||||
def ProtocEmitter(target, source, env):
|
|
||||||
dirOfCallingSConscript = Dir('.').srcnode()
|
|
||||||
env.Prepend(PROTOCPROTOPATH = dirOfCallingSConscript.path)
|
|
||||||
|
|
||||||
source_with_corrected_path = []
|
|
||||||
for src in source:
|
|
||||||
commonprefix = os.path.commonprefix([dirOfCallingSConscript.path, src.srcnode().path])
|
|
||||||
if len(commonprefix)>0:
|
|
||||||
source_with_corrected_path.append( src.srcnode().path[len(commonprefix + os.sep):] )
|
|
||||||
else:
|
|
||||||
source_with_corrected_path.append( src.srcnode().path )
|
|
||||||
|
|
||||||
source = source_with_corrected_path
|
|
||||||
|
|
||||||
for src in source:
|
|
||||||
modulename = os.path.splitext(os.path.basename(src))[0]
|
|
||||||
|
|
||||||
if env['PROTOCOUTDIR']:
|
|
||||||
base = os.path.join(env['PROTOCOUTDIR'] , modulename)
|
|
||||||
target.extend( [ base + '.pb.cc', base + '.pb.h' ] )
|
|
||||||
|
|
||||||
if env['PROTOCPYTHONOUTDIR']:
|
|
||||||
base = os.path.join(env['PROTOCPYTHONOUTDIR'] , modulename)
|
|
||||||
target.append( base + '_pb2.py' )
|
|
||||||
|
|
||||||
try:
|
|
||||||
target.append(env['PROTOCFDSOUT'])
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
# XXX KLUDGE: Force things to be right.
|
|
||||||
env['PROTOCOUTDIR'] = 'build/proto'
|
|
||||||
env['PROTOCPROTOPATH'] = ['src/ripple_data/protocol']
|
|
||||||
|
|
||||||
#~ print "PROTOC SOURCE:", [str(s) for s in source]
|
|
||||||
#~ print "PROTOC TARGET:", [str(s) for s in target]
|
|
||||||
|
|
||||||
return target, source
|
|
||||||
|
|
||||||
ProtocBuilder = SCons.Builder.Builder(action = ProtocAction,
|
|
||||||
emitter = ProtocEmitter,
|
|
||||||
srcsuffix = '$PROTOCSRCSUFFIX')
|
|
||||||
|
|
||||||
def generate(env):
|
|
||||||
"""Add Builders and construction variables for protoc to an Environment."""
|
|
||||||
try:
|
|
||||||
bld = env['BUILDERS']['Protoc']
|
|
||||||
except KeyError:
|
|
||||||
bld = ProtocBuilder
|
|
||||||
env['BUILDERS']['Protoc'] = bld
|
|
||||||
|
|
||||||
env['PROTOC'] = env.Detect(protocs) or 'protoc'
|
|
||||||
env['PROTOCFLAGS'] = SCons.Util.CLVar('')
|
|
||||||
env['PROTOCPROTOPATH'] = SCons.Util.CLVar('')
|
|
||||||
env['PROTOCCOM'] = '$PROTOC ${["-I%s"%x for x in PROTOCPROTOPATH]} $PROTOCFLAGS --cpp_out=$PROTOCCPPOUTFLAGS$PROTOCOUTDIR ${PROTOCPYTHONOUTDIR and ("--python_out="+PROTOCPYTHONOUTDIR) or ""} ${PROTOCFDSOUT and ("-o"+PROTOCFDSOUT) or ""} ${SOURCES}'
|
|
||||||
env['PROTOCOUTDIR'] = '${SOURCE.dir}'
|
|
||||||
env['PROTOCPYTHONOUTDIR'] = "python"
|
|
||||||
env['PROTOCSRCSUFFIX'] = '.proto'
|
|
||||||
|
|
||||||
def exists(env):
|
|
||||||
return env.Detect(protocs)
|
|
||||||
2
src/.gitignore
vendored
2
src/.gitignore
vendored
@@ -1,2 +0,0 @@
|
|||||||
# boost subtree
|
|
||||||
/boost
|
|
||||||
@@ -27,6 +27,21 @@
|
|||||||
@file BeastConfig.h
|
@file BeastConfig.h
|
||||||
*/
|
*/
|
||||||
|
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
//
|
||||||
|
// Unit Tests
|
||||||
|
//
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Config: BEAST_NO_UNIT_TEST_INLINE
|
||||||
|
Prevents unit test definitions from being inserted into a global table.
|
||||||
|
The default is to include inline unit test definitions.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#ifndef BEAST_NO_UNIT_TEST_INLINE
|
||||||
|
//#define BEAST_NO_UNIT_TEST_INLINE 1
|
||||||
|
#endif
|
||||||
|
|
||||||
//------------------------------------------------------------------------------
|
//------------------------------------------------------------------------------
|
||||||
//
|
//
|
||||||
// Diagnostics
|
// Diagnostics
|
||||||
@@ -42,18 +57,6 @@
|
|||||||
//#define BEAST_FORCE_DEBUG 1
|
//#define BEAST_FORCE_DEBUG 1
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
/** Config: BEAST_LOG_ASSERTIONS
|
|
||||||
If this flag is enabled, the the bassert and bassertfalse macros will always
|
|
||||||
use Logger::writeToLog() to write a message when an assertion happens.
|
|
||||||
Enabling it will also leave this turned on in release builds. When it's
|
|
||||||
disabled, however, the bassert and bassertfalse macros will not be compiled
|
|
||||||
in a release build.
|
|
||||||
@see bassert, bassertfalse, Logger
|
|
||||||
*/
|
|
||||||
#ifndef BEAST_LOG_ASSERTIONS
|
|
||||||
//#define BEAST_LOG_ASSERTIONS 1
|
|
||||||
#endif
|
|
||||||
|
|
||||||
/** Config: BEAST_CHECK_MEMORY_LEAKS
|
/** Config: BEAST_CHECK_MEMORY_LEAKS
|
||||||
Enables a memory-leak check for certain objects when the app terminates.
|
Enables a memory-leak check for certain objects when the app terminates.
|
||||||
See the LeakChecked class for more details about enabling leak checking for
|
See the LeakChecked class for more details about enabling leak checking for
|
||||||
@@ -107,6 +110,13 @@
|
|||||||
#define BEAST_ZLIB_INCLUDE_PATH <zlib.h>
|
#define BEAST_ZLIB_INCLUDE_PATH <zlib.h>
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
|
/** Config: BEAST_SQLITE_FORCE_NDEBUG
|
||||||
|
Setting this option forces sqlite into release mode even if NDEBUG is not set
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_SQLITE_FORCE_NDEBUG
|
||||||
|
#define BEAST_SQLITE_FORCE_NDEBUG 1
|
||||||
|
#endif
|
||||||
|
|
||||||
//------------------------------------------------------------------------------
|
//------------------------------------------------------------------------------
|
||||||
//
|
//
|
||||||
// Boost
|
// Boost
|
||||||
@@ -144,14 +154,18 @@
|
|||||||
#define RIPPLE_DUMP_LEAKS_ON_EXIT 1
|
#define RIPPLE_DUMP_LEAKS_ON_EXIT 1
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
/** Config: RIPPLE_TRACK_MUTEXES
|
//------------------------------------------------------------------------------
|
||||||
Turns on a feature that enables tracking and diagnostics for mutex
|
|
||||||
and recursive mutex objects. This affects the type of lock used
|
// These control whether or not certain functionality gets
|
||||||
by RippleMutex and RippleRecursiveMutex
|
// compiled into the resulting rippled executable
|
||||||
@note This can slow down performance considerably.
|
|
||||||
|
/** Config: RIPPLE_ROCKSDB_AVAILABLE
|
||||||
|
Controls whether or not the RocksDB database back-end is compiled into
|
||||||
|
rippled. RocksDB requires a relatively modern C++ compiler (tested with
|
||||||
|
gcc versions 4.8.1 and later) that supports some C++11 features.
|
||||||
*/
|
*/
|
||||||
#ifndef RIPPLE_TRACK_MUTEXES
|
#ifndef RIPPLE_ROCKSDB_AVAILABLE
|
||||||
#define RIPPLE_TRACK_MUTEXES 0
|
//#define RIPPLE_ROCKSDB_AVAILABLE 0
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
//------------------------------------------------------------------------------
|
//------------------------------------------------------------------------------
|
||||||
@@ -159,23 +173,52 @@
|
|||||||
// Here temporarily to turn off new Validations code while it
|
// Here temporarily to turn off new Validations code while it
|
||||||
// is being written.
|
// is being written.
|
||||||
//
|
//
|
||||||
#ifndef RIPPLE_USE_NEW_VALIDATORS
|
#ifndef RIPPLE_USE_VALIDATORS
|
||||||
#define RIPPLE_USE_NEW_VALIDATORS 0
|
#define RIPPLE_USE_VALIDATORS 0
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
// Turning this on will use the new PeerFinder logic to establish connections
|
/** Config: BEAST_USE_BOOST_FEATURES
|
||||||
// to other peers. Even with this off, PeerFinder will still send mtENDPOINTS
|
This activates boost specific features and improvements. If this is
|
||||||
// messages as needed, and collect legacy IP endpoint information.
|
turned on, the include paths for your build environment must be set
|
||||||
//
|
correctly to find the boost headers.
|
||||||
#ifndef RIPPLE_USE_PEERFINDER
|
*/
|
||||||
#define RIPPLE_USE_PEERFINDER 0
|
#ifndef BEAST_USE_BOOST_FEATURES
|
||||||
|
//#define BEAST_USE_BOOST_FEATURES 1
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
// Here temporarily
|
/** Config: RIPPLE_PROPOSE_FEATURES
|
||||||
// Controls whether or not the new RPC::Manager logic will be
|
This determines whether to add any features to the proposed transaction set.
|
||||||
// used to invoke RPC commands before they pass to the original code.
|
*/
|
||||||
#ifndef RIPPLE_USE_RPC_SERVICE_MANAGER
|
#ifndef RIPPLE_PROPOSE_AMENDMENTS
|
||||||
#define RIPPLE_USE_RPC_SERVICE_MANAGER 0
|
#define RIPPLE_PROPOSE_AMENDMENTS 0
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: RIPPLE_ENABLE_AUTOBRIDGING
|
||||||
|
This determines whether ripple implements offer autobridging via XRP.
|
||||||
|
*/
|
||||||
|
#ifndef RIPPLE_ENABLE_AUTOBRIDGING
|
||||||
|
#define RIPPLE_ENABLE_AUTOBRIDGING 0
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: RIPPLE_SINGLE_IO_SERVICE_THREAD
|
||||||
|
When set, restricts the number of threads calling io_service::run to one.
|
||||||
|
This is useful when debugging.
|
||||||
|
*/
|
||||||
|
#ifndef RIPPLE_SINGLE_IO_SERVICE_THREAD
|
||||||
|
#define RIPPLE_SINGLE_IO_SERVICE_THREAD 0
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: RIPPLE_STRUCTURED_OVERLAY_CLIENT
|
||||||
|
RIPPLE_STRUCTURED_OVERLAY_SERVER
|
||||||
|
Enables Structured Overlay support for the client or server roles.
|
||||||
|
This feature is currently in development:
|
||||||
|
https://ripplelabs.atlassian.net/browse/RIPD-157
|
||||||
|
*/
|
||||||
|
#ifndef RIPPLE_STRUCTURED_OVERLAY_CLIENT
|
||||||
|
#define RIPPLE_STRUCTURED_OVERLAY_CLIENT 0
|
||||||
|
#endif
|
||||||
|
#ifndef RIPPLE_STRUCTURED_OVERLAY_SERVER
|
||||||
|
#define RIPPLE_STRUCTURED_OVERLAY_SERVER 1
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#endif
|
#endif
|
||||||
|
|||||||
@@ -1,9 +1,9 @@
|
|||||||
# src
|
# src
|
||||||
|
|
||||||
Some of these directories come from entire outside repositories
|
Some of these directories come from entire outside repositories brought in
|
||||||
brought in using git-subtree. This means that the source files are
|
using git-subtree. This means that the source files are inserted directly
|
||||||
inserted directly into the rippled repository. They can be edited
|
into the rippled repository. They can be edited and committed just as if they
|
||||||
and committed just as if they were normal files.
|
were normal files.
|
||||||
|
|
||||||
However, if you create a commit that contains files both from a
|
However, if you create a commit that contains files both from a
|
||||||
subtree, and from the ripple source tree please use care when designing
|
subtree, and from the ripple source tree please use care when designing
|
||||||
@@ -21,6 +21,16 @@ About git-subtree:
|
|||||||
https://github.com/apenwarr/git-subtree <br>
|
https://github.com/apenwarr/git-subtree <br>
|
||||||
http://blogs.atlassian.com/2013/05/alternatives-to-git-submodule-git-subtree/ <br>
|
http://blogs.atlassian.com/2013/05/alternatives-to-git-submodule-git-subtree/ <br>
|
||||||
|
|
||||||
|
<table align=left><tr>
|
||||||
|
<th>dir</th>
|
||||||
|
<th>What</th>
|
||||||
|
</tr><tr>
|
||||||
|
<td>beast</td>
|
||||||
|
<td>Beast, the amazing cross-platform library.<br>
|
||||||
|
git@github.com:vinniefalco/Beast.git
|
||||||
|
</td>
|
||||||
|
</tr></table>
|
||||||
|
|
||||||
## ./beast
|
## ./beast
|
||||||
|
|
||||||
Beast, the amazing cross-platform library.
|
Beast, the amazing cross-platform library.
|
||||||
@@ -89,7 +99,7 @@ ripple-fork
|
|||||||
## protobuf
|
## protobuf
|
||||||
|
|
||||||
Ripple's fork of protobuf. We've changed some names in order to support the
|
Ripple's fork of protobuf. We've changed some names in order to support the
|
||||||
unity-style of build (a single .cpp addded to the project, instead of
|
unity-style of build (a single .cpp added to the project, instead of
|
||||||
linking to a separately built static library).
|
linking to a separately built static library).
|
||||||
|
|
||||||
Repository
|
Repository
|
||||||
|
|||||||
4
src/beast/.gitignore
vendored
4
src/beast/.gitignore
vendored
@@ -15,6 +15,7 @@ Docs
|
|||||||
*.manifest
|
*.manifest
|
||||||
*.manifest.res
|
*.manifest.res
|
||||||
*.o
|
*.o
|
||||||
|
*.opensdf
|
||||||
*.d
|
*.d
|
||||||
*.sdf
|
*.sdf
|
||||||
xcuserdata
|
xcuserdata
|
||||||
@@ -24,5 +25,6 @@ contents.xcworkspacedata
|
|||||||
profile
|
profile
|
||||||
Builds/VisualStudio2012/Debug
|
Builds/VisualStudio2012/Debug
|
||||||
Builds/VisualStudio2012/Release
|
Builds/VisualStudio2012/Release
|
||||||
|
project.xcworkspace
|
||||||
modules/beast_cryptopp
|
modules/beast_cryptopp
|
||||||
|
bin/
|
||||||
|
|||||||
26
src/beast/.travis.yml
Normal file
26
src/beast/.travis.yml
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
language: cpp
|
||||||
|
|
||||||
|
compiler:
|
||||||
|
- gcc
|
||||||
|
- clang
|
||||||
|
before_install:
|
||||||
|
- sudo add-apt-repository -y ppa:ubuntu-toolchain-r/test
|
||||||
|
- sudo add-apt-repository -y ppa:boost-latest/ppa
|
||||||
|
- sudo apt-get update -qq
|
||||||
|
- sudo apt-get install -qq python-software-properties
|
||||||
|
- sudo apt-get install -qq g++-4.8
|
||||||
|
- sudo apt-get install -qq libboost1.55-all-dev
|
||||||
|
- sudo apt-get install -qq libssl-dev
|
||||||
|
- sudo apt-get install -qq gcc-4.8
|
||||||
|
- sudo update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-4.8 40 --slave /usr/bin/g++ g++ /usr/bin/g++-4.8
|
||||||
|
- sudo update-alternatives --set gcc /usr/bin/gcc-4.8
|
||||||
|
# - sudo apt-get -y install binutils-gold
|
||||||
|
- g++ -v
|
||||||
|
- clang -v
|
||||||
|
script:
|
||||||
|
# Abort build on failure
|
||||||
|
- set -e
|
||||||
|
- scons
|
||||||
|
notifications:
|
||||||
|
email:
|
||||||
|
false
|
||||||
150
src/beast/BeastConfig.h
Normal file
150
src/beast/BeastConfig.h
Normal file
@@ -0,0 +1,150 @@
|
|||||||
|
//------------------------------------------------------------------------------
|
||||||
|
/*
|
||||||
|
This file is part of Beast: https://github.com/vinniefalco/Beast
|
||||||
|
Copyright 2013, Vinnie Falco <vinnie.falco@gmail.com>
|
||||||
|
|
||||||
|
Permission to use, copy, modify, and/or distribute this software for any
|
||||||
|
purpose with or without fee is hereby granted, provided that the above
|
||||||
|
copyright notice and this permission notice appear in all copies.
|
||||||
|
|
||||||
|
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||||
|
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||||
|
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||||
|
ANY SPECIAL , DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||||
|
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||||
|
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||||
|
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||||
|
*/
|
||||||
|
//==============================================================================
|
||||||
|
|
||||||
|
#ifndef BEAST_BEASTCONFIG_H_INCLUDED
|
||||||
|
#define BEAST_BEASTCONFIG_H_INCLUDED
|
||||||
|
|
||||||
|
/** Configuration file for Beast.
|
||||||
|
This sets various configurable options for Beast. In order to compile you
|
||||||
|
must place a copy of this file in a location where your build environment
|
||||||
|
can find it, and then customize its contents to suit your needs.
|
||||||
|
@file BeastConfig.h
|
||||||
|
*/
|
||||||
|
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
//
|
||||||
|
// Unit Tests
|
||||||
|
//
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Config: BEAST_NO_UNIT_TEST_INLINE
|
||||||
|
Prevents unit test definitions from being inserted into a global table.
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_NO_UNIT_TEST_INLINE
|
||||||
|
#define BEAST_NO_UNIT_TEST_INLINE 0
|
||||||
|
#endif
|
||||||
|
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
//
|
||||||
|
// Diagnostics
|
||||||
|
//
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Config: BEAST_FORCE_DEBUG
|
||||||
|
Normally, BEAST_DEBUG is set to 1 or 0 based on compiler and project
|
||||||
|
settings, but if you define this value, you can override this to force it
|
||||||
|
to be true or false.
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_FORCE_DEBUG
|
||||||
|
//#define BEAST_FORCE_DEBUG 1
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: BEAST_CHECK_MEMORY_LEAKS
|
||||||
|
Enables a memory-leak check for certain objects when the app terminates.
|
||||||
|
See the LeakChecked class for more details about enabling leak checking for
|
||||||
|
specific classes.
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_CHECK_MEMORY_LEAKS
|
||||||
|
//#define BEAST_CHECK_MEMORY_LEAKS 0
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: BEAST_COMPILER_CHECKS_SOCKET_OVERRIDES
|
||||||
|
Setting this option makes Socket-derived classes generate compile errors
|
||||||
|
if they forget any of the virtual overrides As some Socket-derived classes
|
||||||
|
intentionally omit member functions that are not applicable, this macro
|
||||||
|
should only be enabled temporarily when writing your own Socket-derived
|
||||||
|
class, to make sure that the function signatures match as expected.
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_COMPILER_CHECKS_SOCKET_OVERRIDES
|
||||||
|
//#define BEAST_COMPILER_CHECKS_SOCKET_OVERRIDES 1
|
||||||
|
#endif
|
||||||
|
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
//
|
||||||
|
// Libraries
|
||||||
|
//
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Config: BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES
|
||||||
|
In a Visual C++ build, this can be used to stop the required system libs
|
||||||
|
being automatically added to the link stage.
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES
|
||||||
|
//#define BEAST_DONT_AUTOLINK_TO_WIN32_LIBRARIES 1
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: BEAST_INCLUDE_ZLIB_CODE
|
||||||
|
This can be used to disable Beast's embedded 3rd-party zlib code.
|
||||||
|
You might need to tweak this if you're linking to an external zlib library
|
||||||
|
in your app, but for normal apps, this option should be left alone.
|
||||||
|
|
||||||
|
If you disable this, you might also want to set a value for
|
||||||
|
BEAST_ZLIB_INCLUDE_PATH, to specify the path where your zlib headers live.
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_INCLUDE_ZLIB_CODE
|
||||||
|
//#define BEAST_INCLUDE_ZLIB_CODE 1
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: BEAST_ZLIB_INCLUDE_PATH
|
||||||
|
This is included when BEAST_INCLUDE_ZLIB_CODE is set to zero.
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_ZLIB_INCLUDE_PATH
|
||||||
|
#define BEAST_ZLIB_INCLUDE_PATH <zlib.h>
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: BEAST_SQLITE_FORCE_NDEBUG
|
||||||
|
Setting this option forces sqlite into release mode even if NDEBUG is not set
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_SQLITE_FORCE_NDEBUG
|
||||||
|
//#define BEAST_SQLITE_FORCE_NDEBUG 1
|
||||||
|
#endif
|
||||||
|
|
||||||
|
/** Config: BEAST_FUNCTIONAL_USES_###
|
||||||
|
<functional> source configuration.
|
||||||
|
Set one of these to manually force a particular implementation of bind().
|
||||||
|
If nothing is chosen then beast will use whatever is appropriate for your
|
||||||
|
environment based on what is available.
|
||||||
|
If you override these, set ONE to 1 and the rest to 0
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_FUNCTIONAL_USES_STD
|
||||||
|
//#define BEAST_FUNCTIONAL_USES_STD 0
|
||||||
|
#endif
|
||||||
|
#ifndef BEAST_FUNCTIONAL_USES_TR1
|
||||||
|
//#define BEAST_FUNCTIONAL_USES_TR1 0
|
||||||
|
#endif
|
||||||
|
#ifndef BEAST_FUNCTIONAL_USES_BOOST
|
||||||
|
//#define BEAST_FUNCTIONAL_USES_BOOST 0
|
||||||
|
#endif
|
||||||
|
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
//
|
||||||
|
// Boost
|
||||||
|
//
|
||||||
|
//------------------------------------------------------------------------------
|
||||||
|
|
||||||
|
/** Config: BEAST_USE_BOOST_FEATURES
|
||||||
|
This activates boost specific features and improvements. If this is
|
||||||
|
turned on, the include paths for your build environment must be set
|
||||||
|
correctly to find the boost headers.
|
||||||
|
*/
|
||||||
|
#ifndef BEAST_USE_BOOST_FEATURES
|
||||||
|
//#define BEAST_USE_BOOST_FEATURES 1
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#endif
|
||||||
@@ -1,22 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="utf-8"?>
|
|
||||||
<Project ToolsVersion="4.0" xmlns="http://schemas.microsoft.com/developer/msbuild/2003">
|
|
||||||
<ImportGroup Label="PropertySheets" />
|
|
||||||
<PropertyGroup Label="UserMacros">
|
|
||||||
<RepoDir>..\..</RepoDir>
|
|
||||||
</PropertyGroup>
|
|
||||||
<PropertyGroup />
|
|
||||||
<ItemDefinitionGroup>
|
|
||||||
<ClCompile>
|
|
||||||
<WarningLevel>Level4</WarningLevel>
|
|
||||||
<PreprocessorDefinitions>BEAST_COMPILING_STATIC_LIBARARY=1;_CRTDBG_MAP_ALLOC;_WIN32_WINNT=0x0600;%(PreprocessorDefinitions)</PreprocessorDefinitions>
|
|
||||||
<MultiProcessorCompilation>true</MultiProcessorCompilation>
|
|
||||||
<MinimalRebuild>false</MinimalRebuild>
|
|
||||||
<AdditionalIncludeDirectories>$(RepoDir)\config;%(AdditionalIncludeDirectories)</AdditionalIncludeDirectories>
|
|
||||||
</ClCompile>
|
|
||||||
</ItemDefinitionGroup>
|
|
||||||
<ItemGroup>
|
|
||||||
<BuildMacro Include="RepoDir">
|
|
||||||
<Value>$(RepoDir)</Value>
|
|
||||||
</BuildMacro>
|
|
||||||
</ItemGroup>
|
|
||||||
</Project>
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user