mirror of
https://gerrit.googlesource.com/git-repo
synced 2025-06-26 20:17:52 +00:00
Compare commits
1120 Commits
Author | SHA1 | Date | |
---|---|---|---|
07392ed326 | |||
3285e4b436 | |||
ae62541005 | |||
83a3227b62 | |||
09dd9bda38 | |||
f914edca53 | |||
e7c91889a6 | |||
1b117db767 | |||
563f1a6512 | |||
b4687ad862 | |||
ded477dbb9 | |||
93293ca47f | |||
dbd277ce50 | |||
5a03308c5c | |||
3ba716f382 | |||
655aedd7f3 | |||
cc960971f4 | |||
66098f707a | |||
f7b64e3350 | |||
bd0aae95f5 | |||
e6a202f790 | |||
04122b7261 | |||
f5525fb310 | |||
ee451f035d | |||
91d9587e45 | |||
0bcc2d28d4 | |||
ec0ba2777f | |||
9da67feecf | |||
b0b164a87f | |||
b71d61d34e | |||
8f997b38cb | |||
0eb2d3c8a0 | |||
e4d20372b2 | |||
1e01a74445 | |||
7c321f1bf6 | |||
7ac12a9b22 | |||
0b304c06ff | |||
4997d1c838 | |||
5b3a57c3ff | |||
6f8c85ce2a | |||
6856f98467 | |||
34bc5712eb | |||
70c54dc255 | |||
6da17751ca | |||
2ba5a1e963 | |||
3538dd224d | |||
b610b850ac | |||
dff919493a | |||
3164d40e22 | |||
f454512619 | |||
b466854bed | |||
d1e93dd58a | |||
e778e57f11 | |||
f1c5dd8a0f | |||
2058c63641 | |||
c8290ad49e | |||
9775a3d5d2 | |||
9bfdfbe117 | |||
2f0951b216 | |||
72ab852ca5 | |||
0a9265e2d6 | |||
dc1b59d2c0 | |||
71b0f312b1 | |||
369814b4a7 | |||
e37aa5f331 | |||
4a07798c82 | |||
fb527e3f52 | |||
6be76337a0 | |||
a2cd6aeae8 | |||
70d861fa29 | |||
9100f7fadd | |||
01d6c3c0c5 | |||
4c263b52e7 | |||
60fdc5cad1 | |||
46702eddc7 | |||
ae6cb08ae5 | |||
3fc157285c | |||
8a11f6f24c | |||
898f4e6217 | |||
d9e5cf0ee7 | |||
3069be2684 | |||
d5c306b404 | |||
a850ca2712 | |||
a34186e481 | |||
600f49278a | |||
1f2462e0d2 | |||
50d27639b5 | |||
c5b172ad6f | |||
87deaefd86 | |||
5fbd1c6053 | |||
1126c4ed86 | |||
f7c51606f0 | |||
745be2ede1 | |||
87fb5a1894 | |||
ab85fe7c53 | |||
4f42a97067 | |||
2b7daff8cb | |||
242fcdd93b | |||
ca540aed19 | |||
f88b2fe569 | |||
6db1b9e282 | |||
490e16385d | |||
ec558df074 | |||
81f5c59671 | |||
1b9adab75a | |||
3698ab7c92 | |||
0c0e934b69 | |||
9e71842fbf | |||
61b2d41f26 | |||
da9e200f1d | |||
c92ce5c7dc | |||
f601376e13 | |||
31067c0ac5 | |||
35159abbeb | |||
24ee29e468 | |||
1b291fc2e7 | |||
a26c49ead4 | |||
c745350ab9 | |||
025704e946 | |||
c5b0e23490 | |||
d92464e8ef | |||
0968570df2 | |||
f25a370a14 | |||
b554838ce8 | |||
2d095da4f1 | |||
266f74c888 | |||
1f1596b473 | |||
0d9b16d1d8 | |||
a84df06160 | |||
e57f1146de | |||
01019d94af | |||
834d308a2b | |||
c18ee35da6 | |||
d3c0f5914f | |||
41a26837d0 | |||
e7379dc5f7 | |||
13f323b2c2 | |||
12ee5446e9 | |||
e158e3802d | |||
3bbbcaf99d | |||
d4b13c280b | |||
6e53844f1e | |||
d26146de7f | |||
bd8f658823 | |||
713c5872fb | |||
36391bf5ca | |||
bed8b62345 | |||
09f0abb0ef | |||
b3133a3164 | |||
3b24e7b557 | |||
b8f7bb04d0 | |||
3891b7519d | |||
2b42d288c0 | |||
e469a0c741 | |||
65b0ba5aa0 | |||
a6515fb952 | |||
993dcacd17 | |||
a9399846fa | |||
b10f0e5b9a | |||
da40341a3e | |||
8d4b106642 | |||
ed429c9f6f | |||
0f2e45a3a6 | |||
cf7c0834cf | |||
4ea1f0cabd | |||
7d52585ec4 | |||
1f365701b3 | |||
ce7e02601c | |||
a32c92c206 | |||
5f0e57d2ca | |||
baa0009355 | |||
685320b000 | |||
02c0ee6ae6 | |||
1dc36600ef | |||
cbe8aeb52b | |||
305a2d029f | |||
84e7e16d35 | |||
f46902a800 | |||
c00d28b767 | |||
788e9626cc | |||
cd892a38a6 | |||
010fed7711 | |||
e8595e9df7 | |||
227ad2ef42 | |||
2a4be94878 | |||
9d743397bf | |||
2c57d619bc | |||
d1ebc89a08 | |||
2ec2a5d64c | |||
9ead97bb51 | |||
e43322625a | |||
bed59cec5e | |||
c94d6eb902 | |||
d88f53e2b9 | |||
87984c6db4 | |||
ffc1401327 | |||
8a6eeed7f5 | |||
7be072efa6 | |||
7482a96443 | |||
3bcd30545e | |||
224a31a765 | |||
b54343d9fd | |||
259f16520a | |||
8419ab22d6 | |||
913327f10c | |||
ad1abcb556 | |||
a65adf74f9 | |||
d5cec5e752 | |||
2e70291162 | |||
35d22217a5 | |||
a24671f661 | |||
e0684addee | |||
fef9f21b28 | |||
6a470be220 | |||
169d8ae93c | |||
c79d3b8fd1 | |||
aa90021fbc | |||
fddfa6fbac | |||
997a92bd58 | |||
fbcbcabe98 | |||
eec726c6d8 | |||
666debc518 | |||
c354a9b922 | |||
06848b2415 | |||
e4e94d26ae | |||
c9439facdd | |||
3d7bbc9edf | |||
ffb4b89099 | |||
04071c1c72 | |||
7de8c5db78 | |||
bb9c42cf1d | |||
f4dda9a1be | |||
b881d227f3 | |||
8e2d1d521e | |||
27226e742d | |||
6c5944606a | |||
ae81c964b6 | |||
e02c17c9ea | |||
6e31079033 | |||
ec287902e6 | |||
4d5bb68d58 | |||
2e14792a94 | |||
82f67987a3 | |||
699bcd40be | |||
7f1ccfbb7b | |||
eceeb1b1f5 | |||
16889ba43d | |||
40d3952270 | |||
4350791e0d | |||
d648045366 | |||
628456833a | |||
2aa61d0bc8 | |||
4aed6f8c7d | |||
01b7d758d5 | |||
267ac57361 | |||
bb5b1a076b | |||
e01ee026e6 | |||
e4433653db | |||
d9de945d8a | |||
2ff302929c | |||
e5c0ea0a95 | |||
163a3be18b | |||
7a77c16d37 | |||
488bf092d5 | |||
05dc46b0e3 | |||
39252ba028 | |||
71e4cea6de | |||
c4c2b066d1 | |||
6a0a3648f1 | |||
6118faa118 | |||
183c52ab02 | |||
58f85f9a30 | |||
40252c20f7 | |||
76a4a9df86 | |||
befaec1e56 | |||
9711a98d6c | |||
438eade413 | |||
69297c1b77 | |||
8016f60a46 | |||
631d0ec708 | |||
f97e72e5dd | |||
8ac0c96537 | |||
faaddc9b4e | |||
a36af0767b | |||
037040f73e | |||
2598ed06f1 | |||
01952e6634 | |||
9d2b14d2ec | |||
6685106306 | |||
d64e8eee51 | |||
8b39fb4bc0 | |||
96c2d65489 | |||
7ecccf6225 | |||
cee5c77166 | |||
79fba68e40 | |||
e868841782 | |||
f9fe3e14d2 | |||
bdb866ea76 | |||
e121ad558d | |||
1f0564406b | |||
936d6185eb | |||
9322964d14 | |||
4aa4b211c6 | |||
8ccfa74d12 | |||
30b0f4e022 | |||
203153e7bb | |||
4cfb6d7167 | |||
b29e61133e | |||
4088eb434b | |||
5553628601 | |||
5ed805a98e | |||
985ac6b946 | |||
ecf0a6c92b | |||
04197a5144 | |||
0b4cb325c6 | |||
1a799d14b7 | |||
827e547d9e | |||
e9becc079c | |||
466b8c4ea2 | |||
e1e0bd1f75 | |||
74cfd2709b | |||
c2a64ddffd | |||
745b4ad660 | |||
4c5f74e452 | |||
b1ad2190a2 | |||
f231db11a2 | |||
79360640f4 | |||
7b01b2fd01 | |||
aad84232ca | |||
3c03580607 | |||
54527e7e30 | |||
5ea32d1359 | |||
5cc384034d | |||
0375523331 | |||
c32ba1961e | |||
250303b437 | |||
029eaf3bac | |||
ba72d8301e | |||
fee390eea2 | |||
9ff2ece6ab | |||
2487cb7b2c | |||
8ce5041596 | |||
f7a51898d3 | |||
b9a1b73425 | |||
dc2545cad6 | |||
f33929d014 | |||
3010e5ba64 | |||
ba7bc738c1 | |||
f4599a2a3d | |||
022a1d4e6e | |||
41d1baac31 | |||
46496d8761 | |||
7c9263bce0 | |||
dab9e99f0f | |||
c5f15bf7c0 | |||
6d35d676db | |||
0745bb2657 | |||
25857b8988 | |||
bdb5271de3 | |||
884092225d | |||
5d0c3a614e | |||
1efc2b4a01 | |||
d3ddcdbd8a | |||
2635c0e3b6 | |||
43322283dc | |||
f9b7683a3b | |||
eeab6860f1 | |||
7e59de2bcc | |||
163fdbf2fd | |||
555be54790 | |||
c5cd433daf | |||
2a3e15217a | |||
0369a069ad | |||
abaa7f312f | |||
7cccfb2cf0 | |||
57f43f4944 | |||
17af578d72 | |||
b1a07b8276 | |||
4e16c24981 | |||
b3d6e67196 | |||
503d66d8af | |||
679bac4bf3 | |||
97836cf09f | |||
80e3a37ab5 | |||
bb4a1b5274 | |||
551dfecea9 | |||
6944cdb8d1 | |||
59b417493e | |||
30d13eea86 | |||
727cc3e324 | |||
c5ceeb1625 | |||
db75704bfc | |||
87ea5913f2 | |||
185307d1dd | |||
c116f94261 | |||
7993f3cdda | |||
b1d1fd778d | |||
be4456cf24 | |||
cf738ed4a1 | |||
6cfc68e1e6 | |||
4c426ef1d4 | |||
472ce9f5fa | |||
0184dcc510 | |||
c4b301f988 | |||
31a7be561e | |||
384b3c5948 | |||
35de228f33 | |||
ace097c36e | |||
b155354034 | |||
382582728e | |||
b4d43b9f66 | |||
4ccad7554b | |||
403b64edf4 | |||
a38769cda8 | |||
44859d0267 | |||
6ad6dbefe7 | |||
33fe4e99f9 | |||
4214585073 | |||
b51f07cd06 | |||
04f2f0e186 | |||
cb07ba7e3d | |||
23ff7df6a7 | |||
cc1b1a703d | |||
bdf7ed2301 | |||
9c76f67f13 | |||
52b99aa91d | |||
9371979628 | |||
2086004261 | |||
2338788050 | |||
0402cd882a | |||
936183a492 | |||
85e8267031 | |||
e30f46b957 | |||
e4978cfbe3 | |||
126e298214 | |||
38e4387f8e | |||
24245e0094 | |||
db6f1b0884 | |||
f2fad61bde | |||
ee69084421 | |||
d37d43f036 | |||
7bdac71087 | |||
f97e8383a3 | |||
3000cdad22 | |||
b9d9efd394 | |||
497bde4de5 | |||
4abf8e6ef8 | |||
137d0131bf | |||
42e679b9f6 | |||
902665bce6 | |||
c8d882ae2a | |||
3eb87cec5c | |||
5fb8ed217c | |||
7e12e0a2fa | |||
7893b85509 | |||
b4e50e67e8 | |||
0936aeab2c | |||
14e134da02 | |||
04e52d6166 | |||
909d58b2e2 | |||
5cf16607d3 | |||
c190b98ed5 | |||
4863307299 | |||
f75870beac | |||
bf0b0cbc2f | |||
3a10968a70 | |||
c46de6932a | |||
303a82f33a | |||
7a91d51dcf | |||
a8d539189e | |||
588142dfcb | |||
a6d258b84d | |||
a769498568 | |||
884a387eca | |||
80b87fe6c1 | |||
e9f75b1782 | |||
a35e402161 | |||
dd7aea6c11 | |||
5196805fa2 | |||
85b24acd6a | |||
36ea2fb6ee | |||
2cd1f0452e | |||
65e3a78a9e | |||
d792f7928d | |||
6efdde9f6e | |||
7446c5954a | |||
d58bfe5a58 | |||
70f6890352 | |||
666d534636 | |||
f2af756425 | |||
544e7b0a97 | |||
e0df232da7 | |||
5a7c3afa73 | |||
9bc422f130 | |||
e81bc030bb | |||
eb5acc9ae9 | |||
26c45a7958 | |||
68425f4da8 | |||
53e902a19b | |||
4e4d40f7c0 | |||
093fdb6587 | |||
2fb6466f79 | |||
724aafb52d | |||
ccd218cd8f | |||
dd6542268a | |||
baca5f7e88 | |||
89ece429fb | |||
565480588d | |||
1829101e28 | |||
1966133f8e | |||
f1027e23b4 | |||
2cd38a0bf8 | |||
1b46cc9b6d | |||
1242e60bdd | |||
2d0f508648 | |||
143d8a7249 | |||
5db69f3f66 | |||
ff0a3c8f80 | |||
094cdbe090 | |||
148a84de0c | |||
1c5da49e6c | |||
b8433dfd2f | |||
f2fe2d9b86 | |||
c9877c7cf6 | |||
69e04d8953 | |||
f1f1137d61 | |||
f77ef2edb0 | |||
e695338e21 | |||
bd80f7eedd | |||
bf79c6618e | |||
f045d49a71 | |||
719757d6a8 | |||
011d4f426c | |||
53d6a7b895 | |||
335f5ef4ad | |||
672cc499b9 | |||
61df418c59 | |||
4534120628 | |||
cbc0798f67 | |||
d5a5b19efd | |||
5d6cb80b8f | |||
0eb35cbe50 | |||
ce201a5311 | |||
12fd10c201 | |||
a17d7af4d9 | |||
fbd3f2a10b | |||
37128b6f70 | |||
143b4cc992 | |||
8d20116038 | |||
53263d873d | |||
7487992bd3 | |||
b25ea555c3 | |||
3bfd72158c | |||
59b31cb6e0 | |||
1e7ab2a63f | |||
e76efdd7b3 | |||
730ce4c3c2 | |||
745a39ba3d | |||
efc986c508 | |||
edd0151a26 | |||
5e0ee14575 | |||
70df18944a | |||
0836a22d38 | |||
b6a16e6390 | |||
351fe2c793 | |||
fb99c71939 | |||
3a2a59eb87 | |||
bc0308478b | |||
610d3c4e46 | |||
033a7e91de | |||
854f2b6ef4 | |||
a892b1006b | |||
db2ad9dfce | |||
ef668c92c2 | |||
65b162b32f | |||
cd51f17c64 | |||
53a6c5d93a | |||
c2791e85f3 | |||
5bca9fcdd9 | |||
74c1f3d5e6 | |||
91f3ba5a3f | |||
691a75936d | |||
710d4b0391 | |||
a1f77d92c6 | |||
ecf8f2b7c8 | |||
f609f91b72 | |||
59bbb580e3 | |||
da45e5d884 | |||
0826c0749f | |||
de50d81c91 | |||
2b30e3aaba | |||
793f90cdc0 | |||
d503352b14 | |||
2f992cba32 | |||
b5267f9ad2 | |||
45401230cf | |||
56f4eea26c | |||
f385d0ca09 | |||
84c4d3c345 | |||
a8864fba9f | |||
275e4b727a | |||
c4c01f914c | |||
9d5bf60d3c | |||
217ea7d274 | |||
51813dfed1 | |||
fef4ae74e2 | |||
db83b1b5ab | |||
ede7f12d4a | |||
04d84a23fd | |||
0a1c6a1c16 | |||
33e0456737 | |||
07669002cb | |||
a0444584cb | |||
3cba0b8613 | |||
a27852d0e7 | |||
61ac9ae090 | |||
3ee6ffd078 | |||
28db6ffef4 | |||
2f9e7e40c4 | |||
45d21685b9 | |||
597868b4c4 | |||
75b4c2deac | |||
b75415075c | |||
4eb285cf90 | |||
5f434ed723 | |||
606eab8043 | |||
cd07cfae1c | |||
55693aabe5 | |||
23bd3a1dd3 | |||
bbf71fe363 | |||
91f011ab0d | |||
87b9d9b4f2 | |||
57bd7b717b | |||
4e46520362 | |||
63d356ffce | |||
35765966bf | |||
254709804d | |||
e0b6de32f7 | |||
4baf87f92c | |||
84f7e137c2 | |||
26e2475a0f | |||
c59bafffb9 | |||
0290cad5db | |||
ed68d0e852 | |||
1a5c774cbf | |||
a9f11b3cb2 | |||
0c635bb427 | |||
7bdbde7af8 | |||
223bf963f0 | |||
b2bd91c99b | |||
3f5ea0b182 | |||
b148ac9d9a | |||
a67df63ef1 | |||
f91074881f | |||
75ee0570da | |||
88b86728a4 | |||
e66291f6d0 | |||
7ba25bedf9 | |||
3794a78b80 | |||
33949c34d2 | |||
8f62fb7bd3 | |||
98ffba1401 | |||
c1b86a2323 | |||
cecd1d864f | |||
fc241240d8 | |||
9f3406ea46 | |||
b1525bffae | |||
685f080d62 | |||
8898e2f26d | |||
52f1e5d911 | |||
8e3d355d44 | |||
4a4776e9ab | |||
2fa715f8b5 | |||
6287543e35 | |||
b0936b0e20 | |||
0b8df7be79 | |||
717ece9d81 | |||
5566ae5dde | |||
2d5a0df798 | |||
f7fc8a95be | |||
1ad7b555df | |||
7e6dd2dff0 | |||
8d070cfb25 | |||
a6053d54f1 | |||
e072a92a9b | |||
7601ee2608 | |||
1f7627fd3c | |||
b42b4746af | |||
e21526754b | |||
60798a32f6 | |||
1d947b3034 | |||
2d113f3546 | |||
de7eae4826 | |||
2fe99e8820 | |||
cd81dd6403 | |||
80d2ceb222 | |||
c5aa4d3528 | |||
bed45f9400 | |||
55e4d464a7 | |||
75cc353380 | |||
c9129d90de | |||
57365c98cc | |||
dc96476af3 | |||
2577cec095 | |||
e48d34659e | |||
ab8f911a67 | |||
608aff7f62 | |||
13657c407d | |||
e4ed8f65f3 | |||
fdb44479f8 | |||
188572170e | |||
d75c669fac | |||
091f893625 | |||
d947858325 | |||
67700e9b90 | |||
a5be53f9c8 | |||
9ed12c5d9c | |||
4f7bdea9d2 | |||
69998b0c6f | |||
5c6eeac8f0 | |||
e98607248e | |||
2f6ab7f5b8 | |||
3a6cd4200e | |||
25f17682ca | |||
8a68ff9605 | |||
297e7c6ee6 | |||
e3b1c45aeb | |||
7119f94aba | |||
01f443d75a | |||
b926116a14 | |||
3ff9decfd4 | |||
14a6674e32 | |||
9779565abf | |||
cf76b1bcec | |||
e00aa6b923 | |||
86d973d24e | |||
34acdd2534 | |||
d94aaef39e | |||
bd489c4eaa | |||
2dc810c2e4 | |||
bb1b5f5f86 | |||
e2126652a3 | |||
9a27d0111d | |||
918ff85c1e | |||
3d07da82ab | |||
e15c65abc2 | |||
daa851f6cd | |||
a43f42f9ff | |||
bb8337fe0f | |||
17f85eab24 | |||
b9477bc2dd | |||
5e7127d00b | |||
5d0efdb14a | |||
f35b2d9c31 | |||
e0904f721b | |||
9830553748 | |||
2bc7f5cb3a | |||
b292b98c3e | |||
2f127de752 | |||
7da1314e38 | |||
435370c6f0 | |||
e8f75fa368 | |||
87636f2ac2 | |||
337aee0a9c | |||
7cf1b36bcd | |||
5e57234ec6 | |||
5d016502eb | |||
475a47d531 | |||
62d0b10a7b | |||
d666e93ecc | |||
3f61950f01 | |||
4fd38ecc3a | |||
9fae805e04 | |||
6a927c5d19 | |||
eca119e5d6 | |||
6ba6ba0ef3 | |||
23acdd3f14 | |||
2644874d9d | |||
3d125940f6 | |||
a94f162b9f | |||
e5a2122e64 | |||
ccf86432b3 | |||
79770d269e | |||
c39864f5e1 | |||
5465727e53 | |||
d21720db31 | |||
971de8ea7b | |||
24c1308840 | |||
b962a1f5e0 | |||
5acde75e5d | |||
d67872d2f4 | |||
e9d6b611c5 | |||
c3d2f2b76f | |||
cd7c5deca0 | |||
e02ac0af2e | |||
898e12a2d9 | |||
ae0a36c9a5 | |||
76abcc1d1e | |||
d315382572 | |||
43bda84362 | |||
9b017dab46 | |||
e9dc3b3368 | |||
c9571423f8 | |||
34fb20f67c | |||
ecff4f17b0 | |||
cc14fa9820 | |||
3ce2a6b46b | |||
841be34968 | |||
ee1c2f5717 | |||
6a1f737380 | |||
e9311273dd | |||
605a9a487b | |||
2a32f6afa6 | |||
498fe90b45 | |||
53d6f4d17e | |||
9d8f914fe8 | |||
ceea368e88 | |||
b660539c4a | |||
752371d91b | |||
1a68dc58eb | |||
df5ee52050 | |||
fab96c68e3 | |||
bf1fbb20ab | |||
29472463ba | |||
c325dc35f6 | |||
f322b9abb4 | |||
db728cd866 | |||
c4657969eb | |||
7b947de1ee | |||
6392c87945 | |||
97d2b2f7a0 | |||
3a0e782790 | |||
490d09a314 | |||
13111b4e97 | |||
bd0312a484 | |||
334851e4b6 | |||
014d060989 | |||
44da16e8a0 | |||
65e0f35fda | |||
08c880db18 | |||
a101f1c167 | |||
49cd59bc86 | |||
30d452905f | |||
d6c93a28ca | |||
d572a13021 | |||
3ba5f95b46 | |||
2630dd9787 | |||
dafb1d68d3 | |||
4655e81a75 | |||
723c5dc3d6 | |||
e6a0eeb80d | |||
0960b5b53d | |||
fc06ced9f9 | |||
fce89f218a | |||
37282b4b9c | |||
835cd6888f | |||
8ced8641c8 | |||
2536f80625 | |||
e7a3bcbbb8 | |||
0ce6ca9c7b | |||
25b51d8cb7 | |||
0fc3a39829 | |||
cef005c3e8 | |||
c7c57e34db | |||
0d2b61f11d | |||
2bf9db0d3b | |||
f00e0ce556 | |||
1b5a4a0c5d | |||
de8b2c4276 | |||
727ee98a40 | |||
df14a70c45 | |||
71cab95b4c | |||
f18cb76173 | |||
d3fd537ea5 | |||
9275fd4329 | |||
0048b69c03 | |||
13f3da50d4 | |||
3218c13205 | |||
b0f9a02394 | |||
2b8db3ce3e | |||
5df6de075e | |||
a0de6e8eab | |||
16614f86b3 | |||
88443387b1 | |||
99482ae58a | |||
ec1df9b7f6 | |||
06d029c1c8 | |||
b715b14807 | |||
60829ba72f | |||
a22f99ae41 | |||
3575b8f8bd | |||
a5ece0e050 | |||
cc50bac8c7 | |||
0cb1b3f687 | |||
9e426aa432 | |||
08a3f68d38 | |||
feb39d61ef | |||
7198572dd7 | |||
2daf66740b | |||
f4f04d9fa8 | |||
18afd7f679 | |||
6623b21e10 | |||
ca8c32cd7a | |||
f0a9a1a30e | |||
879a9a5cf0 | |||
ff6929dde8 | |||
1c85f4e43b | |||
719965af35 | |||
5732e47ebb | |||
f3fdf823cf | |||
a1bfd2cd72 | |||
6d7508b3d5 | |||
69b1e8aa65 | |||
9452e4ec09 | |||
4c50deea28 | |||
d63060fc95 | |||
b6ea3bfcc3 | |||
aa4982e4c9 | |||
9bb1816bdc | |||
840ed0fab7 | |||
c024912fb8 | |||
15f6579eb3 | |||
d4cd69bdef | |||
d2dfac81ad | |||
4719901067 | |||
a949fa5d20 | |||
0afac0856c | |||
4c0f670465 | |||
33f0e786bb | |||
57272ba82e | |||
0125ae2fda | |||
a7ce096047 | |||
87bda12e85 | |||
5f947bba69 | |||
b3d2c9214b | |||
7354d88914 | |||
ce86abbe8a | |||
75b87c8a51 | |||
abb7a3dfec | |||
cc6c79643e | |||
2095179bee | |||
b0ca41e19a | |||
1875ddd47c | |||
446c4e5556 | |||
67f4563acb | |||
050e4fd591 | |||
60e679209a | |||
f1a6b14fdc | |||
ca3d8ff4fc | |||
98ea26b8d8 | |||
c24c720b61 | |||
2d1a396897 | |||
1dcb58a7d0 | |||
37dbf2bf0f | |||
438c54713a | |||
e020ebee4e | |||
21c5c34ee2 | |||
54fccd71fb | |||
fb5c8fd948 | |||
26120ca18d | |||
7da73d6f3b | |||
f0d4c36701 | |||
2ec00b9272 | |||
2a3a81b51f | |||
7b4f43542a | |||
9fb29ce123 | |||
3a68bb4c7f | |||
cd1d7ff81e | |||
da88ff4411 | |||
8135cdc53c | |||
4f2517ff11 | |||
fe200eeb52 | |||
078a8b270f | |||
3c8dea1f8d | |||
8ad8a0e61d | |||
d1f70d9929 | |||
c8a300f639 | |||
1b34c9118e | |||
366ad214b8 | |||
242b52690d | |||
4cc70ce501 | |||
498a0e8a79 | |||
bc7ef67d9b | |||
2f968c943b | |||
2b5b4ac292 | |||
6f6cd77a50 | |||
896d5dffd3 | |||
9360966bd2 | |||
ef9ce1d0a5 | |||
05f66b6836 | |||
eb7af87bcf | |||
938d608c9c | |||
d63bbf44dc | |||
a8421a128a | |||
fb2316146f | |||
8bd5e60b16 | |||
3d2cdd0ea5 | |||
4e3d6739a1 | |||
552ac89929 | |||
89e717d948 | |||
0f0dfa3930 | |||
76ca9f8145 | |||
accc56d82b | |||
db45da1208 | |||
50fa1ac6db | |||
5da554f294 | |||
77bb4af241 | |||
fd89b67f5c | |||
a490f03dc2 | |||
deec0536d6 | |||
06e556d202 | |||
8225cdc56b | |||
337fb9c7e9 | |||
9bb9617858 | |||
f690687671 | |||
336f7bd0ed | |||
2810cbc778 | |||
6ed4e28346 | |||
ad3193a0e5 | |||
b81ac9e654 | |||
0f3dd233ec | |||
c12c360f89 | |||
fbcde472ca | |||
d237b69865 | |||
5b23f24881 | |||
66bdd46871 | |||
a608fb024b | |||
f8e3273dec | |||
006734b798 | |||
350cde4c4b | |||
48244781c2 | |||
19a83d8085 | |||
b1168ffada | |||
4c5c7aa74b | |||
ff84fea0bb | |||
d33f43a754 | |||
e756c412e3 | |||
b812a36236 | |||
161f445a4d | |||
68194f42b0 | |||
b1562faee0 | |||
3e768c9dc7 | |||
96fdcef9e3 | |||
2a1ccb2b0c | |||
0a389e94de | |||
2675c3f8b5 | |||
27b07327bc | |||
02d7945eb8 | |||
8f82a4f828 | |||
146fe902b7 | |||
722acefdc4 | |||
13cc3844d7 | |||
feabbdb440 | |||
8630f39dba | |||
df01883f9b | |||
1fc99f4e47 | |||
1775dbe176 | |||
521cd3ce67 | |||
5470df6219 | |||
0ed2bd1d95 | |||
c7a4eefa7e | |||
43c3d9ea17 | |||
4259b8a2ac | |||
2816d4f387 | |||
44469464d2 | |||
c95583bf4f | |||
6a5644d392 | |||
fe08675956 | |||
be0e8ac232 | |||
47c1a63a07 | |||
559b846b17 | |||
7c6c64d463 | |||
3778f9d47e | |||
993eedf9fa | |||
02e0cdf359 | |||
a8e98a6962 | |||
5ab508cbcc | |||
370e3fa666 | |||
b54a392c9a | |||
21f7385400 | |||
24d8dfbc34 | |||
a6df7d284c | |||
67092448c2 | |||
e92ceebde0 | |||
03eaf07ec6 | |||
2896a79120 | |||
8c6eef4713 | |||
34d237fbfb | |||
c99883fee9 | |||
ec18b4bac4 | |||
35f2596c27 | |||
5d40e26201 | |||
70939e2f73 | |||
ae6e0949d1 | |||
339ba9f6f7 | |||
70cd4ab270 | |||
e284ad1d1a | |||
3e5481999d | |||
d3c388391e | |||
2450a2987a | |||
f5c25a68d8 | |||
9fa44db94b | |||
c9ef744c7b | |||
438ee1cad9 | |||
23d7781c0b | |||
a54c527ae9 | |||
df830f1238 | |||
90be5c0839 | |||
7965f9fed0 | |||
de646819b8 | |||
bd4edc9a69 | |||
ce03a401c6 | |||
45476c40c7 | |||
1619134720 | |||
7efd1a5b23 | |||
329c31da7d | |||
5cc6679fb8 | |||
632768bc65 | |||
0758d2f1d6 | |||
bb0ee80571 |
4
.gitattributes
vendored
Normal file
4
.gitattributes
vendored
Normal file
@ -0,0 +1,4 @@
|
|||||||
|
# Prevent /bin/sh scripts from being clobbered by autocrlf=true
|
||||||
|
git_ssh text eol=lf
|
||||||
|
repo text eol=lf
|
||||||
|
hooks/* text eol=lf
|
10
.gitignore
vendored
10
.gitignore
vendored
@ -1 +1,11 @@
|
|||||||
|
*.egg-info/
|
||||||
|
*.log
|
||||||
*.pyc
|
*.pyc
|
||||||
|
__pycache__
|
||||||
|
/dist
|
||||||
|
.repopickle_*
|
||||||
|
/repoc
|
||||||
|
/.tox
|
||||||
|
|
||||||
|
# PyCharm related
|
||||||
|
/.idea/
|
||||||
|
12
.mailmap
Normal file
12
.mailmap
Normal file
@ -0,0 +1,12 @@
|
|||||||
|
Anthony Newnam <anthony.newnam@garmin.com> Anthony <anthony@bnovc.com>
|
||||||
|
He Ping <tdihp@hotmail.com> heping <tdihp@hotmail.com>
|
||||||
|
Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu xiuyun <xiuyun.hu@hisilicon.com>
|
||||||
|
Hu Xiuyun <xiuyun.hu@hisilicon.com> Hu Xiuyun <clouds08@qq.com>
|
||||||
|
Jelly Chen <chenguodong@huawei.com> chenguodong <chenguodong@huawei.com>
|
||||||
|
Jia Bi <bijia@xiaomi.com> bijia <bijia@xiaomi.com>
|
||||||
|
JoonCheol Park <jooncheol@gmail.com> Jooncheol Park <jooncheol@gmail.com>
|
||||||
|
Sergii Pylypenko <x.pelya.x@gmail.com> pelya <x.pelya.x@gmail.com>
|
||||||
|
Shawn Pearce <sop@google.com> Shawn O. Pearce <sop@google.com>
|
||||||
|
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjolin <ulrik.sjolin@gmail.com>
|
||||||
|
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjolin <ulrik.sjolin@sonyericsson.com>
|
||||||
|
Ulrik Sjölin <ulrik.sjolin@sonyericsson.com> Ulrik Sjölin <ulrik.sjolin@sonyericsson.com>
|
17
.project
Normal file
17
.project
Normal file
@ -0,0 +1,17 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<projectDescription>
|
||||||
|
<name>git-repo</name>
|
||||||
|
<comment></comment>
|
||||||
|
<projects>
|
||||||
|
</projects>
|
||||||
|
<buildSpec>
|
||||||
|
<buildCommand>
|
||||||
|
<name>org.python.pydev.PyDevBuilder</name>
|
||||||
|
<arguments>
|
||||||
|
</arguments>
|
||||||
|
</buildCommand>
|
||||||
|
</buildSpec>
|
||||||
|
<natures>
|
||||||
|
<nature>org.python.pydev.pythonNature</nature>
|
||||||
|
</natures>
|
||||||
|
</projectDescription>
|
10
.pydevproject
Normal file
10
.pydevproject
Normal file
@ -0,0 +1,10 @@
|
|||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="no"?>
|
||||||
|
<?eclipse-pydev version="1.0"?>
|
||||||
|
|
||||||
|
<pydev_project>
|
||||||
|
<pydev_pathproperty name="org.python.pydev.PROJECT_SOURCE_PATH">
|
||||||
|
<path>/git-repo</path>
|
||||||
|
</pydev_pathproperty>
|
||||||
|
<pydev_property name="org.python.pydev.PYTHON_PROJECT_VERSION">python 2.7</pydev_property>
|
||||||
|
<pydev_property name="org.python.pydev.PYTHON_PROJECT_INTERPRETER">Default</pydev_property>
|
||||||
|
</pydev_project>
|
6
MANIFEST.in
Normal file
6
MANIFEST.in
Normal file
@ -0,0 +1,6 @@
|
|||||||
|
graft docs hooks tests
|
||||||
|
include *.py
|
||||||
|
include LICENSE
|
||||||
|
include git_ssh
|
||||||
|
include repo
|
||||||
|
include run_tests
|
36
README.md
Normal file
36
README.md
Normal file
@ -0,0 +1,36 @@
|
|||||||
|
# repo
|
||||||
|
|
||||||
|
Repo is a tool built on top of Git. Repo helps manage many Git repositories,
|
||||||
|
does the uploads to revision control systems, and automates parts of the
|
||||||
|
development workflow. Repo is not meant to replace Git, only to make it
|
||||||
|
easier to work with Git. The repo command is an executable Python script
|
||||||
|
that you can put anywhere in your path.
|
||||||
|
|
||||||
|
* Homepage: https://gerrit.googlesource.com/git-repo/
|
||||||
|
* Bug reports: https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo
|
||||||
|
* Source: https://gerrit.googlesource.com/git-repo/
|
||||||
|
* Overview: https://source.android.com/source/developing.html
|
||||||
|
* Docs: https://source.android.com/source/using-repo.html
|
||||||
|
* [repo Manifest Format](./docs/manifest-format.md)
|
||||||
|
* [repo Hooks](./docs/repo-hooks.md)
|
||||||
|
* [Submitting patches](./SUBMITTING_PATCHES.md)
|
||||||
|
* Running Repo in [Microsoft Windows](./docs/windows.md)
|
||||||
|
|
||||||
|
## Install
|
||||||
|
|
||||||
|
Many distros include repo, so you might be able to install from there.
|
||||||
|
```sh
|
||||||
|
# Debian/Ubuntu.
|
||||||
|
$ sudo apt-get install repo
|
||||||
|
|
||||||
|
# Gentoo.
|
||||||
|
$ sudo emerge dev-vcs/repo
|
||||||
|
```
|
||||||
|
|
||||||
|
You can install it manually as well as it's a single script.
|
||||||
|
```sh
|
||||||
|
$ mkdir -p ~/.bin
|
||||||
|
$ PATH="${HOME}/.bin:${PATH}"
|
||||||
|
$ curl https://storage.googleapis.com/git-repo-downloads/repo > ~/.bin/repo
|
||||||
|
$ chmod a+rx ~/.bin/repo
|
||||||
|
```
|
177
SUBMITTING_PATCHES.md
Normal file
177
SUBMITTING_PATCHES.md
Normal file
@ -0,0 +1,177 @@
|
|||||||
|
[TOC]
|
||||||
|
|
||||||
|
# Short Version
|
||||||
|
|
||||||
|
- Make small logical changes.
|
||||||
|
- Provide a meaningful commit message.
|
||||||
|
- Check for coding errors and style nits with pyflakes and flake8
|
||||||
|
- Make sure all code is under the Apache License, 2.0.
|
||||||
|
- Publish your changes for review.
|
||||||
|
- Make corrections if requested.
|
||||||
|
- Verify your changes on gerrit so they can be submitted.
|
||||||
|
|
||||||
|
`git push https://gerrit-review.googlesource.com/git-repo HEAD:refs/for/master`
|
||||||
|
|
||||||
|
|
||||||
|
# Long Version
|
||||||
|
|
||||||
|
I wanted a file describing how to submit patches for repo,
|
||||||
|
so I started with the one found in the core Git distribution
|
||||||
|
(Documentation/SubmittingPatches), which itself was based on the
|
||||||
|
patch submission guidelines for the Linux kernel.
|
||||||
|
|
||||||
|
However there are some differences, so please review and familiarize
|
||||||
|
yourself with the following relevant bits.
|
||||||
|
|
||||||
|
|
||||||
|
## Make separate commits for logically separate changes.
|
||||||
|
|
||||||
|
Unless your patch is really trivial, you should not be sending
|
||||||
|
out a patch that was generated between your working tree and your
|
||||||
|
commit head. Instead, always make a commit with complete commit
|
||||||
|
message and generate a series of patches from your repository.
|
||||||
|
It is a good discipline.
|
||||||
|
|
||||||
|
Describe the technical detail of the change(s).
|
||||||
|
|
||||||
|
If your description starts to get too long, that's a sign that you
|
||||||
|
probably need to split up your commit to finer grained pieces.
|
||||||
|
|
||||||
|
|
||||||
|
## Check for coding errors and style nits with pyflakes and flake8
|
||||||
|
|
||||||
|
### Coding errors
|
||||||
|
|
||||||
|
Run `pyflakes` on changed modules:
|
||||||
|
|
||||||
|
pyflakes file.py
|
||||||
|
|
||||||
|
Ideally there should be no new errors or warnings introduced.
|
||||||
|
|
||||||
|
### Style violations
|
||||||
|
|
||||||
|
Run `flake8` on changes modules:
|
||||||
|
|
||||||
|
flake8 file.py
|
||||||
|
|
||||||
|
Note that repo generally follows [Google's python style guide] rather than
|
||||||
|
[PEP 8], so it's possible that the output of `flake8` will be quite noisy.
|
||||||
|
It's not mandatory to avoid all warnings, but at least the maximum line
|
||||||
|
length should be followed.
|
||||||
|
|
||||||
|
If there are many occurrences of the same warning that cannot be
|
||||||
|
avoided without going against the Google style guide, these may be
|
||||||
|
suppressed in the included `.flake8` file.
|
||||||
|
|
||||||
|
[Google's python style guide]: https://google.github.io/styleguide/pyguide.html
|
||||||
|
[PEP 8]: https://www.python.org/dev/peps/pep-0008/
|
||||||
|
|
||||||
|
|
||||||
|
## Running tests
|
||||||
|
|
||||||
|
We use [pytest](https://pytest.org/) and [tox](https://tox.readthedocs.io/) for
|
||||||
|
running tests. You should make sure to install those first.
|
||||||
|
|
||||||
|
To run the full suite against all supported Python versions, simply execute:
|
||||||
|
```sh
|
||||||
|
$ tox -p auto
|
||||||
|
```
|
||||||
|
|
||||||
|
We have [`./run_tests`](./run_tests) which is a simple wrapper around `pytest`:
|
||||||
|
```sh
|
||||||
|
# Run the full suite against the default Python version.
|
||||||
|
$ ./run_tests
|
||||||
|
# List each test as it runs.
|
||||||
|
$ ./run_tests -v
|
||||||
|
|
||||||
|
# Run a specific unittest module (and all tests in it).
|
||||||
|
$ ./run_tests tests/test_git_command.py
|
||||||
|
|
||||||
|
# Run a specific testsuite in a specific unittest module.
|
||||||
|
$ ./run_tests tests/test_editor.py::EditString
|
||||||
|
|
||||||
|
# Run a single test.
|
||||||
|
$ ./run_tests tests/test_editor.py::EditString::test_cat_editor
|
||||||
|
|
||||||
|
# List all available tests.
|
||||||
|
$ ./run_tests --collect-only
|
||||||
|
|
||||||
|
# Run a single test using substring match.
|
||||||
|
$ ./run_tests -k test_cat_editor
|
||||||
|
```
|
||||||
|
|
||||||
|
The coverage isn't great currently, but it should still be run for all commits.
|
||||||
|
Adding more unittests for changes you make would be greatly appreciated :).
|
||||||
|
Check out the [tests/](./tests/) subdirectory for more details.
|
||||||
|
|
||||||
|
|
||||||
|
## Check the license
|
||||||
|
|
||||||
|
repo is licensed under the Apache License, 2.0.
|
||||||
|
|
||||||
|
Because of this licensing model *every* file within the project
|
||||||
|
*must* list the license that covers it in the header of the file.
|
||||||
|
Any new contributions to an existing file *must* be submitted under
|
||||||
|
the current license of that file. Any new files *must* clearly
|
||||||
|
indicate which license they are provided under in the file header.
|
||||||
|
|
||||||
|
Please verify that you are legally allowed and willing to submit your
|
||||||
|
changes under the license covering each file *prior* to submitting
|
||||||
|
your patch. It is virtually impossible to remove a patch once it
|
||||||
|
has been applied and pushed out.
|
||||||
|
|
||||||
|
|
||||||
|
## Sending your patches.
|
||||||
|
|
||||||
|
Do not email your patches to anyone.
|
||||||
|
|
||||||
|
Instead, login to the Gerrit Code Review tool at:
|
||||||
|
|
||||||
|
https://gerrit-review.googlesource.com/
|
||||||
|
|
||||||
|
Ensure you have completed one of the necessary contributor
|
||||||
|
agreements, providing documentation to the project maintainers that
|
||||||
|
they have right to redistribute your work under the Apache License:
|
||||||
|
|
||||||
|
https://gerrit-review.googlesource.com/#/settings/agreements
|
||||||
|
|
||||||
|
Ensure you have obtained an HTTP password to authenticate:
|
||||||
|
|
||||||
|
https://gerrit-review.googlesource.com/new-password
|
||||||
|
|
||||||
|
Ensure that you have the local commit hook installed to automatically
|
||||||
|
add a ChangeId to your commits:
|
||||||
|
|
||||||
|
curl -Lo `git rev-parse --git-dir`/hooks/commit-msg https://gerrit-review.googlesource.com/tools/hooks/commit-msg
|
||||||
|
chmod +x `git rev-parse --git-dir`/hooks/commit-msg
|
||||||
|
|
||||||
|
If you have already committed your changes you will need to amend the commit
|
||||||
|
to get the ChangeId added.
|
||||||
|
|
||||||
|
git commit --amend
|
||||||
|
|
||||||
|
Push your patches over HTTPS to the review server, possibly through
|
||||||
|
a remembered remote to make this easier in the future:
|
||||||
|
|
||||||
|
git config remote.review.url https://gerrit-review.googlesource.com/git-repo
|
||||||
|
git config remote.review.push HEAD:refs/for/master
|
||||||
|
|
||||||
|
git push review
|
||||||
|
|
||||||
|
You will be automatically emailed a copy of your commits, and any
|
||||||
|
comments made by the project maintainers.
|
||||||
|
|
||||||
|
|
||||||
|
## Make changes if requested
|
||||||
|
|
||||||
|
The project maintainer who reviews your changes might request changes to your
|
||||||
|
commit. If you make the requested changes you will need to amend your commit
|
||||||
|
and push it to the review server again.
|
||||||
|
|
||||||
|
|
||||||
|
## Verify your changes on gerrit
|
||||||
|
|
||||||
|
After you receive a Code-Review+2 from the maintainer, select the Verified
|
||||||
|
button on the gerrit page for the change. This verifies that you have tested
|
||||||
|
your changes and notifies the maintainer that they are ready to be submitted.
|
||||||
|
The maintainer will then submit your changes to the repository.
|
@ -1 +0,0 @@
|
|||||||
__version__ = 'v1.0'
|
|
@ -1,32 +0,0 @@
|
|||||||
#!/usr/bin/python2.4
|
|
||||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
||||||
|
|
||||||
from froofle.protobuf import descriptor
|
|
||||||
from froofle.protobuf import message
|
|
||||||
from froofle.protobuf import reflection
|
|
||||||
from froofle.protobuf import service
|
|
||||||
from froofle.protobuf import service_reflection
|
|
||||||
from froofle.protobuf import descriptor_pb2
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
_RETRYREQUESTLATERRESPONSE = descriptor.Descriptor(
|
|
||||||
name='RetryRequestLaterResponse',
|
|
||||||
full_name='codereview.RetryRequestLaterResponse',
|
|
||||||
filename='need_retry.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class RetryRequestLaterResponse(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _RETRYREQUESTLATERRESPONSE
|
|
||||||
|
|
@ -1,349 +0,0 @@
|
|||||||
# Copyright 2007, 2008 Google Inc.
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import base64
|
|
||||||
import cookielib
|
|
||||||
import getpass
|
|
||||||
import logging
|
|
||||||
import md5
|
|
||||||
import os
|
|
||||||
import random
|
|
||||||
import socket
|
|
||||||
import time
|
|
||||||
import urllib
|
|
||||||
import urllib2
|
|
||||||
import urlparse
|
|
||||||
|
|
||||||
from froofle.protobuf.service import RpcChannel
|
|
||||||
from froofle.protobuf.service import RpcController
|
|
||||||
from need_retry_pb2 import RetryRequestLaterResponse;
|
|
||||||
|
|
||||||
class ClientLoginError(urllib2.HTTPError):
|
|
||||||
"""Raised to indicate an error authenticating with ClientLogin."""
|
|
||||||
|
|
||||||
def __init__(self, url, code, msg, headers, args):
|
|
||||||
urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
|
|
||||||
self.args = args
|
|
||||||
self.reason = args["Error"]
|
|
||||||
|
|
||||||
|
|
||||||
class Proxy(object):
|
|
||||||
class _ResultHolder(object):
|
|
||||||
def __call__(self, result):
|
|
||||||
self._result = result
|
|
||||||
|
|
||||||
class _RemoteController(RpcController):
|
|
||||||
def Reset(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def Failed(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def ErrorText(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def StartCancel(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def SetFailed(self, reason):
|
|
||||||
raise RuntimeError, reason
|
|
||||||
|
|
||||||
def IsCancelled(self):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def NotifyOnCancel(self, callback):
|
|
||||||
pass
|
|
||||||
|
|
||||||
def __init__(self, stub):
|
|
||||||
self._stub = stub
|
|
||||||
|
|
||||||
def __getattr__(self, key):
|
|
||||||
method = getattr(self._stub, key)
|
|
||||||
|
|
||||||
def call(request):
|
|
||||||
done = self._ResultHolder()
|
|
||||||
method(self._RemoteController(), request, done)
|
|
||||||
return done._result
|
|
||||||
|
|
||||||
return call
|
|
||||||
|
|
||||||
|
|
||||||
class HttpRpc(RpcChannel):
|
|
||||||
"""Simple protobuf over HTTP POST implementation."""
|
|
||||||
|
|
||||||
def __init__(self, host, auth_function,
|
|
||||||
host_override=None,
|
|
||||||
extra_headers={},
|
|
||||||
cookie_file=None):
|
|
||||||
"""Creates a new HttpRpc.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
host: The host to send requests to.
|
|
||||||
auth_function: A function that takes no arguments and returns an
|
|
||||||
(email, password) tuple when called. Will be called if authentication
|
|
||||||
is required.
|
|
||||||
host_override: The host header to send to the server (defaults to host).
|
|
||||||
extra_headers: A dict of extra headers to append to every request.
|
|
||||||
cookie_file: If not None, name of the file in ~/ to save the
|
|
||||||
cookie jar into. Applications are encouraged to set this to
|
|
||||||
'.$appname_cookies' or some otherwise unique name.
|
|
||||||
"""
|
|
||||||
self.host = host.lower()
|
|
||||||
self.host_override = host_override
|
|
||||||
self.auth_function = auth_function
|
|
||||||
self.authenticated = False
|
|
||||||
self.extra_headers = extra_headers
|
|
||||||
self.xsrf_token = None
|
|
||||||
if cookie_file is None:
|
|
||||||
self.cookie_file = None
|
|
||||||
else:
|
|
||||||
self.cookie_file = os.path.expanduser("~/%s" % cookie_file)
|
|
||||||
self.opener = self._GetOpener()
|
|
||||||
if self.host_override:
|
|
||||||
logging.info("Server: %s; Host: %s", self.host, self.host_override)
|
|
||||||
else:
|
|
||||||
logging.info("Server: %s", self.host)
|
|
||||||
|
|
||||||
def CallMethod(self, method, controller, request, response_type, done):
|
|
||||||
pat = "application/x-google-protobuf; name=%s"
|
|
||||||
|
|
||||||
url = "/proto/%s/%s" % (method.containing_service.name, method.name)
|
|
||||||
reqbin = request.SerializeToString()
|
|
||||||
reqtyp = pat % request.DESCRIPTOR.full_name
|
|
||||||
reqmd5 = base64.b64encode(md5.new(reqbin).digest())
|
|
||||||
|
|
||||||
start = time.time()
|
|
||||||
while True:
|
|
||||||
t, b = self._Send(url, reqbin, reqtyp, reqmd5)
|
|
||||||
if t == (pat % RetryRequestLaterResponse.DESCRIPTOR.full_name):
|
|
||||||
if time.time() >= (start + 1800):
|
|
||||||
controller.SetFailed("timeout")
|
|
||||||
return
|
|
||||||
s = random.uniform(0.250, 2.000)
|
|
||||||
print "Busy, retrying in %.3f seconds ..." % s
|
|
||||||
time.sleep(s)
|
|
||||||
continue
|
|
||||||
|
|
||||||
if t == (pat % response_type.DESCRIPTOR.full_name):
|
|
||||||
response = response_type()
|
|
||||||
response.ParseFromString(b)
|
|
||||||
done(response)
|
|
||||||
else:
|
|
||||||
controller.SetFailed("Unexpected %s response" % t)
|
|
||||||
break
|
|
||||||
|
|
||||||
def _CreateRequest(self, url, data=None):
|
|
||||||
"""Creates a new urllib request."""
|
|
||||||
logging.debug("Creating request for: '%s' with payload:\n%s", url, data)
|
|
||||||
req = urllib2.Request(url, data=data)
|
|
||||||
if self.host_override:
|
|
||||||
req.add_header("Host", self.host_override)
|
|
||||||
for key, value in self.extra_headers.iteritems():
|
|
||||||
req.add_header(key, value)
|
|
||||||
return req
|
|
||||||
|
|
||||||
def _GetAuthToken(self, email, password):
|
|
||||||
"""Uses ClientLogin to authenticate the user, returning an auth token.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
email: The user's email address
|
|
||||||
password: The user's password
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
ClientLoginError: If there was an error authenticating with ClientLogin.
|
|
||||||
HTTPError: If there was some other form of HTTP error.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The authentication token returned by ClientLogin.
|
|
||||||
"""
|
|
||||||
req = self._CreateRequest(
|
|
||||||
url="https://www.google.com/accounts/ClientLogin",
|
|
||||||
data=urllib.urlencode({
|
|
||||||
"Email": email,
|
|
||||||
"Passwd": password,
|
|
||||||
"service": "ah",
|
|
||||||
"source": "gerrit-codereview-client",
|
|
||||||
"accountType": "HOSTED_OR_GOOGLE",
|
|
||||||
})
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
response = self.opener.open(req)
|
|
||||||
response_body = response.read()
|
|
||||||
response_dict = dict(x.split("=")
|
|
||||||
for x in response_body.split("\n") if x)
|
|
||||||
return response_dict["Auth"]
|
|
||||||
except urllib2.HTTPError, e:
|
|
||||||
if e.code == 403:
|
|
||||||
body = e.read()
|
|
||||||
response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
|
|
||||||
raise ClientLoginError(req.get_full_url(), e.code, e.msg,
|
|
||||||
e.headers, response_dict)
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
|
|
||||||
def _GetAuthCookie(self, auth_token):
|
|
||||||
"""Fetches authentication cookies for an authentication token.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
auth_token: The authentication token returned by ClientLogin.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
HTTPError: If there was an error fetching the authentication cookies.
|
|
||||||
"""
|
|
||||||
# This is a dummy value to allow us to identify when we're successful.
|
|
||||||
continue_location = "http://localhost/"
|
|
||||||
args = {"continue": continue_location, "auth": auth_token}
|
|
||||||
req = self._CreateRequest("http://%s/_ah/login?%s" %
|
|
||||||
(self.host, urllib.urlencode(args)))
|
|
||||||
try:
|
|
||||||
response = self.opener.open(req)
|
|
||||||
except urllib2.HTTPError, e:
|
|
||||||
response = e
|
|
||||||
if (response.code != 302 or
|
|
||||||
response.info()["location"] != continue_location):
|
|
||||||
raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
|
|
||||||
response.headers, response.fp)
|
|
||||||
self.authenticated = True
|
|
||||||
|
|
||||||
def _GetXsrfToken(self):
|
|
||||||
"""Fetches /proto/_token for use in X-XSRF-Token HTTP header.
|
|
||||||
|
|
||||||
Raises:
|
|
||||||
HTTPError: If there was an error fetching a new token.
|
|
||||||
"""
|
|
||||||
tries = 0
|
|
||||||
while True:
|
|
||||||
url = "http://%s/proto/_token" % self.host
|
|
||||||
req = self._CreateRequest(url)
|
|
||||||
try:
|
|
||||||
response = self.opener.open(req)
|
|
||||||
self.xsrf_token = response.read()
|
|
||||||
return
|
|
||||||
except urllib2.HTTPError, e:
|
|
||||||
if tries > 3:
|
|
||||||
raise
|
|
||||||
elif e.code == 401:
|
|
||||||
self._Authenticate()
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
|
|
||||||
def _Authenticate(self):
|
|
||||||
"""Authenticates the user.
|
|
||||||
|
|
||||||
The authentication process works as follows:
|
|
||||||
1) We get a username and password from the user
|
|
||||||
2) We use ClientLogin to obtain an AUTH token for the user
|
|
||||||
(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
|
|
||||||
3) We pass the auth token to /_ah/login on the server to obtain an
|
|
||||||
authentication cookie. If login was successful, it tries to redirect
|
|
||||||
us to the URL we provided.
|
|
||||||
|
|
||||||
If we attempt to access the upload API without first obtaining an
|
|
||||||
authentication cookie, it returns a 401 response and directs us to
|
|
||||||
authenticate ourselves with ClientLogin.
|
|
||||||
"""
|
|
||||||
for i in range(3):
|
|
||||||
credentials = self.auth_function()
|
|
||||||
auth_token = self._GetAuthToken(credentials[0], credentials[1])
|
|
||||||
self._GetAuthCookie(auth_token)
|
|
||||||
if self.cookie_file is not None:
|
|
||||||
self.cookie_jar.save()
|
|
||||||
return
|
|
||||||
|
|
||||||
def _Send(self, request_path, payload, content_type, content_md5):
|
|
||||||
"""Sends an RPC and returns the response.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
request_path: The path to send the request to, eg /api/appversion/create.
|
|
||||||
payload: The body of the request, or None to send an empty request.
|
|
||||||
content_type: The Content-Type header to use.
|
|
||||||
content_md5: The Content-MD5 header to use.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The content type, as a string.
|
|
||||||
The response body, as a string.
|
|
||||||
"""
|
|
||||||
if not self.authenticated:
|
|
||||||
self._Authenticate()
|
|
||||||
if not self.xsrf_token:
|
|
||||||
self._GetXsrfToken()
|
|
||||||
|
|
||||||
old_timeout = socket.getdefaulttimeout()
|
|
||||||
socket.setdefaulttimeout(None)
|
|
||||||
try:
|
|
||||||
tries = 0
|
|
||||||
while True:
|
|
||||||
tries += 1
|
|
||||||
url = "http://%s%s" % (self.host, request_path)
|
|
||||||
req = self._CreateRequest(url=url, data=payload)
|
|
||||||
req.add_header("Content-Type", content_type)
|
|
||||||
req.add_header("Content-MD5", content_md5)
|
|
||||||
req.add_header("X-XSRF-Token", self.xsrf_token)
|
|
||||||
try:
|
|
||||||
f = self.opener.open(req)
|
|
||||||
hdr = f.info()
|
|
||||||
type = hdr.getheader('Content-Type',
|
|
||||||
'application/octet-stream')
|
|
||||||
response = f.read()
|
|
||||||
f.close()
|
|
||||||
return type, response
|
|
||||||
except urllib2.HTTPError, e:
|
|
||||||
if tries > 3:
|
|
||||||
raise
|
|
||||||
elif e.code == 401:
|
|
||||||
self._Authenticate()
|
|
||||||
elif e.code == 403:
|
|
||||||
if not hasattr(e, 'read'):
|
|
||||||
e.read = lambda self: ''
|
|
||||||
raise RuntimeError, '403\nxsrf: %s\n%s' \
|
|
||||||
% (self.xsrf_token, e.read())
|
|
||||||
else:
|
|
||||||
raise
|
|
||||||
finally:
|
|
||||||
socket.setdefaulttimeout(old_timeout)
|
|
||||||
|
|
||||||
def _GetOpener(self):
|
|
||||||
"""Returns an OpenerDirector that supports cookies and ignores redirects.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A urllib2.OpenerDirector object.
|
|
||||||
"""
|
|
||||||
opener = urllib2.OpenerDirector()
|
|
||||||
opener.add_handler(urllib2.ProxyHandler())
|
|
||||||
opener.add_handler(urllib2.UnknownHandler())
|
|
||||||
opener.add_handler(urllib2.HTTPHandler())
|
|
||||||
opener.add_handler(urllib2.HTTPDefaultErrorHandler())
|
|
||||||
opener.add_handler(urllib2.HTTPSHandler())
|
|
||||||
opener.add_handler(urllib2.HTTPErrorProcessor())
|
|
||||||
if self.cookie_file is not None:
|
|
||||||
self.cookie_jar = cookielib.MozillaCookieJar(self.cookie_file)
|
|
||||||
if os.path.exists(self.cookie_file):
|
|
||||||
try:
|
|
||||||
self.cookie_jar.load()
|
|
||||||
self.authenticated = True
|
|
||||||
except (cookielib.LoadError, IOError):
|
|
||||||
# Failed to load cookies - just ignore them.
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
# Create an empty cookie file with mode 600
|
|
||||||
fd = os.open(self.cookie_file, os.O_CREAT, 0600)
|
|
||||||
os.close(fd)
|
|
||||||
# Always chmod the cookie file
|
|
||||||
os.chmod(self.cookie_file, 0600)
|
|
||||||
else:
|
|
||||||
# Don't save cookies across runs of update.py.
|
|
||||||
self.cookie_jar = cookielib.CookieJar()
|
|
||||||
opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
|
|
||||||
return opener
|
|
||||||
|
|
@ -1,48 +0,0 @@
|
|||||||
#!/usr/bin/python2.4
|
|
||||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
||||||
|
|
||||||
from froofle.protobuf import descriptor
|
|
||||||
from froofle.protobuf import message
|
|
||||||
from froofle.protobuf import reflection
|
|
||||||
from froofle.protobuf import service
|
|
||||||
from froofle.protobuf import service_reflection
|
|
||||||
from froofle.protobuf import descriptor_pb2
|
|
||||||
|
|
||||||
|
|
||||||
import upload_bundle_pb2
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
_REVIEWSERVICE = descriptor.ServiceDescriptor(
|
|
||||||
name='ReviewService',
|
|
||||||
full_name='codereview.ReviewService',
|
|
||||||
index=0,
|
|
||||||
options=None,
|
|
||||||
methods=[
|
|
||||||
descriptor.MethodDescriptor(
|
|
||||||
name='UploadBundle',
|
|
||||||
full_name='codereview.ReviewService.UploadBundle',
|
|
||||||
index=0,
|
|
||||||
containing_service=None,
|
|
||||||
input_type=upload_bundle_pb2._UPLOADBUNDLEREQUEST,
|
|
||||||
output_type=upload_bundle_pb2._UPLOADBUNDLERESPONSE,
|
|
||||||
options=None,
|
|
||||||
),
|
|
||||||
descriptor.MethodDescriptor(
|
|
||||||
name='ContinueBundle',
|
|
||||||
full_name='codereview.ReviewService.ContinueBundle',
|
|
||||||
index=1,
|
|
||||||
containing_service=None,
|
|
||||||
input_type=upload_bundle_pb2._UPLOADBUNDLECONTINUE,
|
|
||||||
output_type=upload_bundle_pb2._UPLOADBUNDLERESPONSE,
|
|
||||||
options=None,
|
|
||||||
),
|
|
||||||
])
|
|
||||||
|
|
||||||
class ReviewService(service.Service):
|
|
||||||
__metaclass__ = service_reflection.GeneratedServiceType
|
|
||||||
DESCRIPTOR = _REVIEWSERVICE
|
|
||||||
class ReviewService_Stub(ReviewService):
|
|
||||||
__metaclass__ = service_reflection.GeneratedServiceStubType
|
|
||||||
DESCRIPTOR = _REVIEWSERVICE
|
|
||||||
|
|
@ -1,190 +0,0 @@
|
|||||||
#!/usr/bin/python2.4
|
|
||||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
||||||
|
|
||||||
from froofle.protobuf import descriptor
|
|
||||||
from froofle.protobuf import message
|
|
||||||
from froofle.protobuf import reflection
|
|
||||||
from froofle.protobuf import service
|
|
||||||
from froofle.protobuf import service_reflection
|
|
||||||
from froofle.protobuf import descriptor_pb2
|
|
||||||
|
|
||||||
|
|
||||||
_UPLOADBUNDLERESPONSE_CODETYPE = descriptor.EnumDescriptor(
|
|
||||||
name='CodeType',
|
|
||||||
full_name='codereview.UploadBundleResponse.CodeType',
|
|
||||||
filename='CodeType',
|
|
||||||
values=[
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='RECEIVED', index=0, number=1,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='CONTINUE', index=1, number=4,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='UNAUTHORIZED_USER', index=2, number=7,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='UNKNOWN_PROJECT', index=3, number=2,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='UNKNOWN_BRANCH', index=4, number=3,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='UNKNOWN_BUNDLE', index=5, number=5,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='NOT_BUNDLE_OWNER', index=6, number=6,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='BUNDLE_CLOSED', index=7, number=8,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
],
|
|
||||||
options=None,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
_UPLOADBUNDLEREQUEST = descriptor.Descriptor(
|
|
||||||
name='UploadBundleRequest',
|
|
||||||
full_name='codereview.UploadBundleRequest',
|
|
||||||
filename='upload_bundle.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='dest_project', full_name='codereview.UploadBundleRequest.dest_project', index=0,
|
|
||||||
number=10, type=9, cpp_type=9, label=2,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='dest_branch', full_name='codereview.UploadBundleRequest.dest_branch', index=1,
|
|
||||||
number=11, type=9, cpp_type=9, label=2,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='partial_upload', full_name='codereview.UploadBundleRequest.partial_upload', index=2,
|
|
||||||
number=12, type=8, cpp_type=7, label=2,
|
|
||||||
default_value=False,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='bundle_data', full_name='codereview.UploadBundleRequest.bundle_data', index=3,
|
|
||||||
number=13, type=12, cpp_type=9, label=2,
|
|
||||||
default_value="",
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='contained_object', full_name='codereview.UploadBundleRequest.contained_object', index=4,
|
|
||||||
number=1, type=9, cpp_type=9, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_UPLOADBUNDLERESPONSE = descriptor.Descriptor(
|
|
||||||
name='UploadBundleResponse',
|
|
||||||
full_name='codereview.UploadBundleResponse',
|
|
||||||
filename='upload_bundle.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='status_code', full_name='codereview.UploadBundleResponse.status_code', index=0,
|
|
||||||
number=10, type=14, cpp_type=8, label=2,
|
|
||||||
default_value=1,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='bundle_id', full_name='codereview.UploadBundleResponse.bundle_id', index=1,
|
|
||||||
number=11, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
_UPLOADBUNDLERESPONSE_CODETYPE,
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_UPLOADBUNDLECONTINUE = descriptor.Descriptor(
|
|
||||||
name='UploadBundleContinue',
|
|
||||||
full_name='codereview.UploadBundleContinue',
|
|
||||||
filename='upload_bundle.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='bundle_id', full_name='codereview.UploadBundleContinue.bundle_id', index=0,
|
|
||||||
number=10, type=9, cpp_type=9, label=2,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='segment_id', full_name='codereview.UploadBundleContinue.segment_id', index=1,
|
|
||||||
number=11, type=5, cpp_type=1, label=2,
|
|
||||||
default_value=0,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='partial_upload', full_name='codereview.UploadBundleContinue.partial_upload', index=2,
|
|
||||||
number=12, type=8, cpp_type=7, label=2,
|
|
||||||
default_value=False,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='bundle_data', full_name='codereview.UploadBundleContinue.bundle_data', index=3,
|
|
||||||
number=13, type=12, cpp_type=9, label=1,
|
|
||||||
default_value="",
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_UPLOADBUNDLERESPONSE.fields_by_name['status_code'].enum_type = _UPLOADBUNDLERESPONSE_CODETYPE
|
|
||||||
|
|
||||||
class UploadBundleRequest(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _UPLOADBUNDLEREQUEST
|
|
||||||
|
|
||||||
class UploadBundleResponse(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _UPLOADBUNDLERESPONSE
|
|
||||||
|
|
||||||
class UploadBundleContinue(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _UPLOADBUNDLECONTINUE
|
|
||||||
|
|
171
color.py
171
color.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -17,78 +18,105 @@ import os
|
|||||||
import sys
|
import sys
|
||||||
|
|
||||||
import pager
|
import pager
|
||||||
from git_config import GitConfig
|
|
||||||
|
|
||||||
COLORS = {None :-1,
|
COLORS = {None: -1,
|
||||||
'normal' :-1,
|
'normal': -1,
|
||||||
'black' : 0,
|
'black': 0,
|
||||||
'red' : 1,
|
'red': 1,
|
||||||
'green' : 2,
|
'green': 2,
|
||||||
'yellow' : 3,
|
'yellow': 3,
|
||||||
'blue' : 4,
|
'blue': 4,
|
||||||
'magenta': 5,
|
'magenta': 5,
|
||||||
'cyan' : 6,
|
'cyan': 6,
|
||||||
'white' : 7}
|
'white': 7}
|
||||||
|
|
||||||
ATTRS = {None :-1,
|
ATTRS = {None: -1,
|
||||||
'bold' : 1,
|
'bold': 1,
|
||||||
'dim' : 2,
|
'dim': 2,
|
||||||
'ul' : 4,
|
'ul': 4,
|
||||||
'blink' : 5,
|
'blink': 5,
|
||||||
'reverse': 7}
|
'reverse': 7}
|
||||||
|
|
||||||
RESET = "\033[m"
|
RESET = "\033[m"
|
||||||
|
|
||||||
def is_color(s): return s in COLORS
|
|
||||||
def is_attr(s): return s in ATTRS
|
|
||||||
|
|
||||||
def _Color(fg = None, bg = None, attr = None):
|
def is_color(s):
|
||||||
fg = COLORS[fg]
|
return s in COLORS
|
||||||
bg = COLORS[bg]
|
|
||||||
attr = ATTRS[attr]
|
|
||||||
|
|
||||||
if attr >= 0 or fg >= 0 or bg >= 0:
|
|
||||||
need_sep = False
|
|
||||||
code = "\033["
|
|
||||||
|
|
||||||
if attr >= 0:
|
def is_attr(s):
|
||||||
code += chr(ord('0') + attr)
|
return s in ATTRS
|
||||||
need_sep = True
|
|
||||||
|
|
||||||
if fg >= 0:
|
|
||||||
if need_sep:
|
|
||||||
code += ';'
|
|
||||||
need_sep = True
|
|
||||||
|
|
||||||
if fg < 8:
|
def _Color(fg=None, bg=None, attr=None):
|
||||||
code += '3%c' % (ord('0') + fg)
|
fg = COLORS[fg]
|
||||||
else:
|
bg = COLORS[bg]
|
||||||
code += '38;5;%d' % fg
|
attr = ATTRS[attr]
|
||||||
|
|
||||||
if bg >= 0:
|
if attr >= 0 or fg >= 0 or bg >= 0:
|
||||||
if need_sep:
|
need_sep = False
|
||||||
code += ';'
|
code = "\033["
|
||||||
need_sep = True
|
|
||||||
|
|
||||||
if bg < 8:
|
if attr >= 0:
|
||||||
code += '4%c' % (ord('0') + bg)
|
code += chr(ord('0') + attr)
|
||||||
else:
|
need_sep = True
|
||||||
code += '48;5;%d' % bg
|
|
||||||
code += 'm'
|
if fg >= 0:
|
||||||
else:
|
if need_sep:
|
||||||
code = ''
|
code += ';'
|
||||||
return code
|
need_sep = True
|
||||||
|
|
||||||
|
if fg < 8:
|
||||||
|
code += '3%c' % (ord('0') + fg)
|
||||||
|
else:
|
||||||
|
code += '38;5;%d' % fg
|
||||||
|
|
||||||
|
if bg >= 0:
|
||||||
|
if need_sep:
|
||||||
|
code += ';'
|
||||||
|
|
||||||
|
if bg < 8:
|
||||||
|
code += '4%c' % (ord('0') + bg)
|
||||||
|
else:
|
||||||
|
code += '48;5;%d' % bg
|
||||||
|
code += 'm'
|
||||||
|
else:
|
||||||
|
code = ''
|
||||||
|
return code
|
||||||
|
|
||||||
|
DEFAULT = None
|
||||||
|
|
||||||
|
|
||||||
|
def SetDefaultColoring(state):
|
||||||
|
"""Set coloring behavior to |state|.
|
||||||
|
|
||||||
|
This is useful for overriding config options via the command line.
|
||||||
|
"""
|
||||||
|
if state is None:
|
||||||
|
# Leave it alone -- return quick!
|
||||||
|
return
|
||||||
|
|
||||||
|
global DEFAULT
|
||||||
|
state = state.lower()
|
||||||
|
if state in ('auto',):
|
||||||
|
DEFAULT = state
|
||||||
|
elif state in ('always', 'yes', 'true', True):
|
||||||
|
DEFAULT = 'always'
|
||||||
|
elif state in ('never', 'no', 'false', False):
|
||||||
|
DEFAULT = 'never'
|
||||||
|
|
||||||
|
|
||||||
class Coloring(object):
|
class Coloring(object):
|
||||||
def __init__(self, config, type):
|
def __init__(self, config, section_type):
|
||||||
self._section = 'color.%s' % type
|
self._section = 'color.%s' % section_type
|
||||||
self._config = config
|
self._config = config
|
||||||
self._out = sys.stdout
|
self._out = sys.stdout
|
||||||
|
|
||||||
on = self._config.GetString(self._section)
|
on = DEFAULT
|
||||||
if on is None:
|
if on is None:
|
||||||
on = self._config.GetString('color.ui')
|
on = self._config.GetString(self._section)
|
||||||
|
if on is None:
|
||||||
|
on = self._config.GetString('color.ui')
|
||||||
|
|
||||||
if on == 'auto':
|
if on == 'auto':
|
||||||
if pager.active or os.isatty(1):
|
if pager.active or os.isatty(1):
|
||||||
@ -100,6 +128,9 @@ class Coloring(object):
|
|||||||
else:
|
else:
|
||||||
self._on = False
|
self._on = False
|
||||||
|
|
||||||
|
def redirect(self, out):
|
||||||
|
self._out = out
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def is_on(self):
|
def is_on(self):
|
||||||
return self._on
|
return self._on
|
||||||
@ -107,28 +138,54 @@ class Coloring(object):
|
|||||||
def write(self, fmt, *args):
|
def write(self, fmt, *args):
|
||||||
self._out.write(fmt % args)
|
self._out.write(fmt % args)
|
||||||
|
|
||||||
|
def flush(self):
|
||||||
|
self._out.flush()
|
||||||
|
|
||||||
def nl(self):
|
def nl(self):
|
||||||
self._out.write('\n')
|
self._out.write('\n')
|
||||||
|
|
||||||
def printer(self, opt=None, fg=None, bg=None, attr=None):
|
def printer(self, opt=None, fg=None, bg=None, attr=None):
|
||||||
s = self
|
s = self
|
||||||
c = self.colorer(opt, fg, bg, attr)
|
c = self.colorer(opt, fg, bg, attr)
|
||||||
|
|
||||||
def f(fmt, *args):
|
def f(fmt, *args):
|
||||||
s._out.write(c(fmt, *args))
|
s._out.write(c(fmt, *args))
|
||||||
return f
|
return f
|
||||||
|
|
||||||
|
def nofmt_printer(self, opt=None, fg=None, bg=None, attr=None):
|
||||||
|
s = self
|
||||||
|
c = self.nofmt_colorer(opt, fg, bg, attr)
|
||||||
|
|
||||||
|
def f(fmt):
|
||||||
|
s._out.write(c(fmt))
|
||||||
|
return f
|
||||||
|
|
||||||
def colorer(self, opt=None, fg=None, bg=None, attr=None):
|
def colorer(self, opt=None, fg=None, bg=None, attr=None):
|
||||||
if self._on:
|
if self._on:
|
||||||
c = self._parse(opt, fg, bg, attr)
|
c = self._parse(opt, fg, bg, attr)
|
||||||
|
|
||||||
def f(fmt, *args):
|
def f(fmt, *args):
|
||||||
str = fmt % args
|
output = fmt % args
|
||||||
return ''.join([c, str, RESET])
|
return ''.join([c, output, RESET])
|
||||||
return f
|
return f
|
||||||
else:
|
else:
|
||||||
|
|
||||||
def f(fmt, *args):
|
def f(fmt, *args):
|
||||||
return fmt % args
|
return fmt % args
|
||||||
return f
|
return f
|
||||||
|
|
||||||
|
def nofmt_colorer(self, opt=None, fg=None, bg=None, attr=None):
|
||||||
|
if self._on:
|
||||||
|
c = self._parse(opt, fg, bg, attr)
|
||||||
|
|
||||||
|
def f(fmt):
|
||||||
|
return ''.join([c, fmt, RESET])
|
||||||
|
return f
|
||||||
|
else:
|
||||||
|
def f(fmt):
|
||||||
|
return fmt
|
||||||
|
return f
|
||||||
|
|
||||||
def _parse(self, opt, fg, bg, attr):
|
def _parse(self, opt, fg, bg, attr):
|
||||||
if not opt:
|
if not opt:
|
||||||
return _Color(fg, bg, attr)
|
return _Color(fg, bg, attr)
|
||||||
@ -137,7 +194,7 @@ class Coloring(object):
|
|||||||
if v is None:
|
if v is None:
|
||||||
return _Color(fg, bg, attr)
|
return _Color(fg, bg, attr)
|
||||||
|
|
||||||
v = v.trim().lowercase()
|
v = v.strip().lower()
|
||||||
if v == "reset":
|
if v == "reset":
|
||||||
return RESET
|
return RESET
|
||||||
elif v == '':
|
elif v == '':
|
||||||
@ -146,8 +203,10 @@ class Coloring(object):
|
|||||||
have_fg = False
|
have_fg = False
|
||||||
for a in v.split(' '):
|
for a in v.split(' '):
|
||||||
if is_color(a):
|
if is_color(a):
|
||||||
if have_fg: bg = a
|
if have_fg:
|
||||||
else: fg = a
|
bg = a
|
||||||
|
else:
|
||||||
|
fg = a
|
||||||
elif is_attr(a):
|
elif is_attr(a):
|
||||||
attr = a
|
attr = a
|
||||||
|
|
||||||
|
214
command.py
214
command.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -15,18 +16,48 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
import optparse
|
import optparse
|
||||||
|
import platform
|
||||||
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
from event_log import EventLog
|
||||||
from error import NoSuchProjectError
|
from error import NoSuchProjectError
|
||||||
|
from error import InvalidProjectGroupsError
|
||||||
|
|
||||||
|
|
||||||
class Command(object):
|
class Command(object):
|
||||||
"""Base class for any command line action in repo.
|
"""Base class for any command line action in repo.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
common = False
|
common = False
|
||||||
|
event_log = EventLog()
|
||||||
manifest = None
|
manifest = None
|
||||||
_optparse = None
|
_optparse = None
|
||||||
|
|
||||||
|
def WantPager(self, _opt):
|
||||||
|
return False
|
||||||
|
|
||||||
|
def ReadEnvironmentOptions(self, opts):
|
||||||
|
""" Set options from environment variables. """
|
||||||
|
|
||||||
|
env_options = self._RegisteredEnvironmentOptions()
|
||||||
|
|
||||||
|
for env_key, opt_key in env_options.items():
|
||||||
|
# Get the user-set option value if any
|
||||||
|
opt_value = getattr(opts, opt_key)
|
||||||
|
|
||||||
|
# If the value is set, it means the user has passed it as a command
|
||||||
|
# line option, and we should use that. Otherwise we can try to set it
|
||||||
|
# with the value from the corresponding environment variable.
|
||||||
|
if opt_value is not None:
|
||||||
|
continue
|
||||||
|
|
||||||
|
env_value = os.environ.get(env_key)
|
||||||
|
if env_value is not None:
|
||||||
|
setattr(opts, opt_key, env_value)
|
||||||
|
|
||||||
|
return opts
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def OptionParser(self):
|
def OptionParser(self):
|
||||||
if self._optparse is None:
|
if self._optparse is None:
|
||||||
@ -35,7 +66,7 @@ class Command(object):
|
|||||||
usage = self.helpUsage.strip().replace('%prog', me)
|
usage = self.helpUsage.strip().replace('%prog', me)
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
usage = 'repo %s' % self.NAME
|
usage = 'repo %s' % self.NAME
|
||||||
self._optparse = optparse.OptionParser(usage = usage)
|
self._optparse = optparse.OptionParser(usage=usage)
|
||||||
self._Options(self._optparse)
|
self._Options(self._optparse)
|
||||||
return self._optparse
|
return self._optparse
|
||||||
|
|
||||||
@ -43,74 +74,193 @@ class Command(object):
|
|||||||
"""Initialize the option parser.
|
"""Initialize the option parser.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
def _RegisteredEnvironmentOptions(self):
|
||||||
|
"""Get options that can be set from environment variables.
|
||||||
|
|
||||||
|
Return a dictionary mapping environment variable name
|
||||||
|
to option key name that it can override.
|
||||||
|
|
||||||
|
Example: {'REPO_MY_OPTION': 'my_option'}
|
||||||
|
|
||||||
|
Will allow the option with key value 'my_option' to be set
|
||||||
|
from the value in the environment variable named 'REPO_MY_OPTION'.
|
||||||
|
|
||||||
|
Note: This does not work properly for options that are explicitly
|
||||||
|
set to None by the user, or options that are defined with a
|
||||||
|
default value other than None.
|
||||||
|
|
||||||
|
"""
|
||||||
|
return {}
|
||||||
|
|
||||||
def Usage(self):
|
def Usage(self):
|
||||||
"""Display usage and terminate.
|
"""Display usage and terminate.
|
||||||
"""
|
"""
|
||||||
self.OptionParser.print_usage()
|
self.OptionParser.print_usage()
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
def ValidateOptions(self, opt, args):
|
||||||
|
"""Validate the user options & arguments before executing.
|
||||||
|
|
||||||
|
This is meant to help break the code up into logical steps. Some tips:
|
||||||
|
* Use self.OptionParser.error to display CLI related errors.
|
||||||
|
* Adjust opt member defaults as makes sense.
|
||||||
|
* Adjust the args list, but do so inplace so the caller sees updates.
|
||||||
|
* Try to avoid updating self state. Leave that to Execute.
|
||||||
|
"""
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
"""Perform the action, after option parsing is complete.
|
"""Perform the action, after option parsing is complete.
|
||||||
"""
|
"""
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def GetProjects(self, args, missing_ok=False):
|
def _ResetPathToProjectMap(self, projects):
|
||||||
|
self._by_path = dict((p.worktree, p) for p in projects)
|
||||||
|
|
||||||
|
def _UpdatePathToProjectMap(self, project):
|
||||||
|
self._by_path[project.worktree] = project
|
||||||
|
|
||||||
|
def _GetProjectByPath(self, manifest, path):
|
||||||
|
project = None
|
||||||
|
if os.path.exists(path):
|
||||||
|
oldpath = None
|
||||||
|
while path and \
|
||||||
|
path != oldpath and \
|
||||||
|
path != manifest.topdir:
|
||||||
|
try:
|
||||||
|
project = self._by_path[path]
|
||||||
|
break
|
||||||
|
except KeyError:
|
||||||
|
oldpath = path
|
||||||
|
path = os.path.dirname(path)
|
||||||
|
if not project and path == manifest.topdir:
|
||||||
|
try:
|
||||||
|
project = self._by_path[path]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
project = self._by_path[path]
|
||||||
|
except KeyError:
|
||||||
|
pass
|
||||||
|
return project
|
||||||
|
|
||||||
|
def GetProjects(self, args, manifest=None, groups='', missing_ok=False,
|
||||||
|
submodules_ok=False):
|
||||||
"""A list of projects that match the arguments.
|
"""A list of projects that match the arguments.
|
||||||
"""
|
"""
|
||||||
all = self.manifest.projects
|
if not manifest:
|
||||||
|
manifest = self.manifest
|
||||||
|
all_projects_list = manifest.projects
|
||||||
result = []
|
result = []
|
||||||
|
|
||||||
|
mp = manifest.manifestProject
|
||||||
|
|
||||||
|
if not groups:
|
||||||
|
groups = mp.config.GetString('manifest.groups')
|
||||||
|
if not groups:
|
||||||
|
groups = 'default,platform-' + platform.system().lower()
|
||||||
|
groups = [x for x in re.split(r'[,\s]+', groups) if x]
|
||||||
|
|
||||||
if not args:
|
if not args:
|
||||||
for project in all.values():
|
derived_projects = {}
|
||||||
if missing_ok or project.Exists:
|
for project in all_projects_list:
|
||||||
|
if submodules_ok or project.sync_s:
|
||||||
|
derived_projects.update((p.name, p)
|
||||||
|
for p in project.GetDerivedSubprojects())
|
||||||
|
all_projects_list.extend(derived_projects.values())
|
||||||
|
for project in all_projects_list:
|
||||||
|
if (missing_ok or project.Exists) and project.MatchesGroups(groups):
|
||||||
result.append(project)
|
result.append(project)
|
||||||
else:
|
else:
|
||||||
by_path = None
|
self._ResetPathToProjectMap(all_projects_list)
|
||||||
|
|
||||||
for arg in args:
|
for arg in args:
|
||||||
project = all.get(arg)
|
# We have to filter by manifest groups in case the requested project is
|
||||||
|
# checked out multiple times or differently based on them.
|
||||||
|
projects = [project for project in manifest.GetProjectsWithName(arg)
|
||||||
|
if project.MatchesGroups(groups)]
|
||||||
|
|
||||||
if not project:
|
if not projects:
|
||||||
path = os.path.abspath(arg)
|
path = os.path.abspath(arg).replace('\\', '/')
|
||||||
|
project = self._GetProjectByPath(manifest, path)
|
||||||
|
|
||||||
if not by_path:
|
# If it's not a derived project, update path->project mapping and
|
||||||
by_path = dict()
|
# search again, as arg might actually point to a derived subproject.
|
||||||
for p in all.values():
|
if (project and not project.Derived and (submodules_ok or
|
||||||
by_path[p.worktree] = p
|
project.sync_s)):
|
||||||
|
search_again = False
|
||||||
|
for subproject in project.GetDerivedSubprojects():
|
||||||
|
self._UpdatePathToProjectMap(subproject)
|
||||||
|
search_again = True
|
||||||
|
if search_again:
|
||||||
|
project = self._GetProjectByPath(manifest, path) or project
|
||||||
|
|
||||||
if os.path.exists(path):
|
if project:
|
||||||
while path \
|
projects = [project]
|
||||||
and path != '/' \
|
|
||||||
and path != self.manifest.topdir:
|
|
||||||
try:
|
|
||||||
project = by_path[path]
|
|
||||||
break
|
|
||||||
except KeyError:
|
|
||||||
path = os.path.dirname(path)
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
project = by_path[path]
|
|
||||||
except KeyError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if not project:
|
if not projects:
|
||||||
raise NoSuchProjectError(arg)
|
|
||||||
if not missing_ok and not project.Exists:
|
|
||||||
raise NoSuchProjectError(arg)
|
raise NoSuchProjectError(arg)
|
||||||
|
|
||||||
result.append(project)
|
for project in projects:
|
||||||
|
if not missing_ok and not project.Exists:
|
||||||
|
raise NoSuchProjectError('%s (%s)' % (arg, project.relpath))
|
||||||
|
if not project.MatchesGroups(groups):
|
||||||
|
raise InvalidProjectGroupsError(arg)
|
||||||
|
|
||||||
|
result.extend(projects)
|
||||||
|
|
||||||
def _getpath(x):
|
def _getpath(x):
|
||||||
return x.relpath
|
return x.relpath
|
||||||
result.sort(key=_getpath)
|
result.sort(key=_getpath)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
def FindProjects(self, args, inverse=False):
|
||||||
|
result = []
|
||||||
|
patterns = [re.compile(r'%s' % a, re.IGNORECASE) for a in args]
|
||||||
|
for project in self.GetProjects(''):
|
||||||
|
for pattern in patterns:
|
||||||
|
match = pattern.search(project.name) or pattern.search(project.relpath)
|
||||||
|
if not inverse and match:
|
||||||
|
result.append(project)
|
||||||
|
break
|
||||||
|
if inverse and match:
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
if inverse:
|
||||||
|
result.append(project)
|
||||||
|
result.sort(key=lambda project: project.relpath)
|
||||||
|
return result
|
||||||
|
|
||||||
|
|
||||||
class InteractiveCommand(Command):
|
class InteractiveCommand(Command):
|
||||||
"""Command which requires user interaction on the tty and
|
"""Command which requires user interaction on the tty and
|
||||||
must not run within a pager, even if the user asks to.
|
must not run within a pager, even if the user asks to.
|
||||||
"""
|
"""
|
||||||
|
def WantPager(self, _opt):
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
class PagedCommand(Command):
|
class PagedCommand(Command):
|
||||||
"""Command which defaults to output in a pager, as its
|
"""Command which defaults to output in a pager, as its
|
||||||
display tends to be larger than one screen full.
|
display tends to be larger than one screen full.
|
||||||
"""
|
"""
|
||||||
|
def WantPager(self, _opt):
|
||||||
|
return True
|
||||||
|
|
||||||
|
|
||||||
|
class MirrorSafeCommand(object):
|
||||||
|
"""Command permits itself to run within a mirror,
|
||||||
|
and does not require a working directory.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class GitcAvailableCommand(object):
|
||||||
|
"""Command that requires GITC to be available, but does
|
||||||
|
not require the local client to be a GITC client.
|
||||||
|
"""
|
||||||
|
|
||||||
|
|
||||||
|
class GitcClientCommand(object):
|
||||||
|
"""Command that requires the local client to be a GITC
|
||||||
|
client.
|
||||||
|
"""
|
||||||
|
145
docs/internal-fs-layout.md
Normal file
145
docs/internal-fs-layout.md
Normal file
@ -0,0 +1,145 @@
|
|||||||
|
# Repo internal filesystem layout
|
||||||
|
|
||||||
|
A reference to the `.repo/` tree in repo client checkouts.
|
||||||
|
Hopefully it's complete & up-to-date, but who knows!
|
||||||
|
|
||||||
|
*** note
|
||||||
|
**Warning**:
|
||||||
|
This is meant for developers of the repo project itself as a quick reference.
|
||||||
|
**Nothing** in here must be construed as ABI, or that repo itself will never
|
||||||
|
change its internals in backwards incompatible ways.
|
||||||
|
***
|
||||||
|
|
||||||
|
[TOC]
|
||||||
|
|
||||||
|
## .repo/ layout
|
||||||
|
|
||||||
|
All content under `.repo/` is managed by `repo` itself with few exceptions.
|
||||||
|
|
||||||
|
In general, you should not make manual changes in here.
|
||||||
|
If a setting was initialized using an option to `repo init`, you should use that
|
||||||
|
command to change the setting later on.
|
||||||
|
It is always safe to re-run `repo init` in existing repo client checkouts.
|
||||||
|
For example, if you want to change the manifest branch, you can simply run
|
||||||
|
`repo init --manifest-branch=<new name>` and repo will take care of the rest.
|
||||||
|
|
||||||
|
### repo/ state
|
||||||
|
|
||||||
|
* `repo/`: A git checkout of the repo project. This is how `repo` re-execs
|
||||||
|
itself to get the latest released version.
|
||||||
|
|
||||||
|
It tracks the git repository at `REPO_URL` using the `REPO_REV` branch.
|
||||||
|
Those are specified at `repo init` time using the `--repo-url=<REPO_URL>`
|
||||||
|
and `--repo-branch=<REPO_REV>` options.
|
||||||
|
|
||||||
|
Any changes made to this directory will usually be automatically discarded
|
||||||
|
by repo itself when it checks for updates. If you want to update to the
|
||||||
|
latest version of repo, use `repo selfupdate` instead. If you want to
|
||||||
|
change the git URL/branch that this tracks, re-run `repo init` with the new
|
||||||
|
settings.
|
||||||
|
|
||||||
|
* `.repo_fetchtimes.json`: Used by `repo sync` to record stats when syncing
|
||||||
|
the various projects.
|
||||||
|
|
||||||
|
### Manifests
|
||||||
|
|
||||||
|
For more documentation on the manifest format, including the local_manifests
|
||||||
|
support, see the [manifest-format.md] file.
|
||||||
|
|
||||||
|
* `manifests/`: A git checkout of the manifest project. Its `.git/` state
|
||||||
|
points to the `manifest.git` bare checkout (see below). It tracks the git
|
||||||
|
branch specified at `repo init` time via `--manifest-branch`.
|
||||||
|
|
||||||
|
The local branch name is always `default` regardless of the remote tracking
|
||||||
|
branch. Do not get confused if the remote branch is not `default`, or if
|
||||||
|
there is a remote `default` that is completely different!
|
||||||
|
|
||||||
|
No manual changes should be made in here as it will just confuse repo and
|
||||||
|
it won't automatically recover causing no new changes to be picked up.
|
||||||
|
|
||||||
|
* `manifests.git/`: A bare checkout of the manifest project. It tracks the
|
||||||
|
git repository specified at `repo init` time via `--manifest-url`.
|
||||||
|
|
||||||
|
No manual changes should be made in here as it will just confuse repo.
|
||||||
|
If you want to switch the tracking settings, re-run `repo init` with the
|
||||||
|
new settings.
|
||||||
|
|
||||||
|
* `manifest.xml -> manifests/<manifest-name>.xml`: A symlink to the manifest
|
||||||
|
that the user wishes to sync. It is specified at `repo init` time via
|
||||||
|
`--manifest-name`.
|
||||||
|
|
||||||
|
Do not try to repoint this symlink to other files as it will confuse repo.
|
||||||
|
If you want to switch manifest files, re-run `repo init` with the new
|
||||||
|
setting.
|
||||||
|
|
||||||
|
* `manifests.git/.repo_config.json`: JSON cache of the `manifests.git/config`
|
||||||
|
file for repo to read/process quickly.
|
||||||
|
|
||||||
|
* `local_manifest.xml` (*Deprecated*): User-authored tweaks to the manifest
|
||||||
|
used to sync. See [local manifests] for more details.
|
||||||
|
* `local_manifests/`: Directory of user-authored manifest fragments to tweak
|
||||||
|
the manifest used to sync. See [local manifests] for more details.
|
||||||
|
|
||||||
|
### Project objects
|
||||||
|
|
||||||
|
* `project.list`: Tracking file used by `repo sync` to determine when projects
|
||||||
|
are added or removed and need corresponding updates in the checkout.
|
||||||
|
* `projects/`: Bare checkouts of every project synced by the manifest. The
|
||||||
|
filesystem layout matches the `<project path=...` setting in the manifest
|
||||||
|
(i.e. where it's checked out in the repo client source tree). Those
|
||||||
|
checkouts will symlink their `.git/` state to paths under here.
|
||||||
|
|
||||||
|
Some git state is further split out under `project-objects/`.
|
||||||
|
* `project-objects/`: Git objects that are safe to share across multiple
|
||||||
|
git checkouts. The filesystem layout matches the `<project name=...`
|
||||||
|
setting in the manifest (i.e. the path on the remote server). This allows
|
||||||
|
for multiple checkouts of the same remote git repo to share their objects.
|
||||||
|
For example, you could have different branches of `foo/bar.git` checked
|
||||||
|
out to `foo/bar-master`, `foo/bar-release`, etc... There will be multiple
|
||||||
|
trees under `projects/` for each one, but only one under `project-objects/`.
|
||||||
|
|
||||||
|
This can run into problems if different remotes use the same path on their
|
||||||
|
respective servers ...
|
||||||
|
* `subprojects/`: Like `projects/`, but for git submodules.
|
||||||
|
* `subproject-objects/`: Like `project-objects/`, but for git submodules.
|
||||||
|
|
||||||
|
### Settings
|
||||||
|
|
||||||
|
The `.repo/manifests.git/config` file is used to track settings for the entire
|
||||||
|
repo client checkout.
|
||||||
|
Most settings use the `[repo]` section to avoid conflicts with git.
|
||||||
|
User controlled settings are initialized when running `repo init`.
|
||||||
|
|
||||||
|
| Setting | `repo init` Option | Use/Meaning |
|
||||||
|
|-------------------|---------------------------|-------------|
|
||||||
|
| manifest.groups | `--groups` & `--platform` | The manifest groups to sync |
|
||||||
|
| repo.archive | `--archive` | Use `git archive` for checkouts |
|
||||||
|
| repo.clonefilter | `--clone-filter` | Filter setting when using [partial git clones] |
|
||||||
|
| repo.depth | `--depth` | Create shallow checkouts when cloning |
|
||||||
|
| repo.dissociate | `--dissociate` | Dissociate from any reference/mirrors after initial clone |
|
||||||
|
| repo.mirror | `--mirror` | Checkout is a repo mirror |
|
||||||
|
| repo.partialclone | `--partial-clone` | Create [partial git clones] |
|
||||||
|
| repo.reference | `--reference` | Reference repo client checkout |
|
||||||
|
| repo.submodules | `--submodules` | Sync git submodules |
|
||||||
|
| user.email | `--config-name` | User's e-mail address; Copied into `.git/config` when checking out a new project |
|
||||||
|
| user.name | `--config-name` | User's name; Copied into `.git/config` when checking out a new project |
|
||||||
|
|
||||||
|
[partial git clones]: https://git-scm.com/docs/gitrepository-layout#_code_partialclone_code
|
||||||
|
|
||||||
|
## ~/ dotconfig layout
|
||||||
|
|
||||||
|
Repo will create & maintain a few files in the user's home directory.
|
||||||
|
|
||||||
|
* `.repoconfig/`: Repo's per-user directory for all random config files/state.
|
||||||
|
* `.repoconfig/keyring-version`: Cache file for checking if the gnupg subdir
|
||||||
|
has all the same keys as the repo launcher. Used to avoid running gpg
|
||||||
|
constantly as that can be quite slow.
|
||||||
|
* `.repoconfig/gnupg/`: GnuPG's internal state directory used when repo needs
|
||||||
|
to run `gpg`. This provides isolation from the user's normal `~/.gnupg/`.
|
||||||
|
|
||||||
|
* `.repo_.gitconfig.json`: JSON cache of the `.gitconfig` file for repo to
|
||||||
|
read/process quickly.
|
||||||
|
|
||||||
|
|
||||||
|
[manifest-format.md]: ./manifest-format.md
|
||||||
|
[local manifests]: ./manifest-format.md#Local-Manifests
|
405
docs/manifest-format.md
Normal file
405
docs/manifest-format.md
Normal file
@ -0,0 +1,405 @@
|
|||||||
|
# repo Manifest Format
|
||||||
|
|
||||||
|
A repo manifest describes the structure of a repo client; that is
|
||||||
|
the directories that are visible and where they should be obtained
|
||||||
|
from with git.
|
||||||
|
|
||||||
|
The basic structure of a manifest is a bare Git repository holding
|
||||||
|
a single `default.xml` XML file in the top level directory.
|
||||||
|
|
||||||
|
Manifests are inherently version controlled, since they are kept
|
||||||
|
within a Git repository. Updates to manifests are automatically
|
||||||
|
obtained by clients during `repo sync`.
|
||||||
|
|
||||||
|
[TOC]
|
||||||
|
|
||||||
|
|
||||||
|
## XML File Format
|
||||||
|
|
||||||
|
A manifest XML file (e.g. `default.xml`) roughly conforms to the
|
||||||
|
following DTD:
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<!DOCTYPE manifest [
|
||||||
|
<!ELEMENT manifest (notice?,
|
||||||
|
remote*,
|
||||||
|
default?,
|
||||||
|
manifest-server?,
|
||||||
|
remove-project*,
|
||||||
|
project*,
|
||||||
|
extend-project*,
|
||||||
|
repo-hooks?,
|
||||||
|
include*)>
|
||||||
|
|
||||||
|
<!ELEMENT notice (#PCDATA)>
|
||||||
|
|
||||||
|
<!ELEMENT remote EMPTY>
|
||||||
|
<!ATTLIST remote name ID #REQUIRED>
|
||||||
|
<!ATTLIST remote alias CDATA #IMPLIED>
|
||||||
|
<!ATTLIST remote fetch CDATA #REQUIRED>
|
||||||
|
<!ATTLIST remote pushurl CDATA #IMPLIED>
|
||||||
|
<!ATTLIST remote review CDATA #IMPLIED>
|
||||||
|
<!ATTLIST remote revision CDATA #IMPLIED>
|
||||||
|
|
||||||
|
<!ELEMENT default EMPTY>
|
||||||
|
<!ATTLIST default remote IDREF #IMPLIED>
|
||||||
|
<!ATTLIST default revision CDATA #IMPLIED>
|
||||||
|
<!ATTLIST default dest-branch CDATA #IMPLIED>
|
||||||
|
<!ATTLIST default upstream CDATA #IMPLIED>
|
||||||
|
<!ATTLIST default sync-j CDATA #IMPLIED>
|
||||||
|
<!ATTLIST default sync-c CDATA #IMPLIED>
|
||||||
|
<!ATTLIST default sync-s CDATA #IMPLIED>
|
||||||
|
<!ATTLIST default sync-tags CDATA #IMPLIED>
|
||||||
|
|
||||||
|
<!ELEMENT manifest-server EMPTY>
|
||||||
|
<!ATTLIST manifest-server url CDATA #REQUIRED>
|
||||||
|
|
||||||
|
<!ELEMENT project (annotation*,
|
||||||
|
project*,
|
||||||
|
copyfile*,
|
||||||
|
linkfile*)>
|
||||||
|
<!ATTLIST project name CDATA #REQUIRED>
|
||||||
|
<!ATTLIST project path CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project remote IDREF #IMPLIED>
|
||||||
|
<!ATTLIST project revision CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project dest-branch CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project groups CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project sync-c CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project sync-s CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project sync-tags CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project upstream CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project clone-depth CDATA #IMPLIED>
|
||||||
|
<!ATTLIST project force-path CDATA #IMPLIED>
|
||||||
|
|
||||||
|
<!ELEMENT annotation EMPTY>
|
||||||
|
<!ATTLIST annotation name CDATA #REQUIRED>
|
||||||
|
<!ATTLIST annotation value CDATA #REQUIRED>
|
||||||
|
<!ATTLIST annotation keep CDATA "true">
|
||||||
|
|
||||||
|
<!ELEMENT copyfile EMPTY>
|
||||||
|
<!ATTLIST copyfile src CDATA #REQUIRED>
|
||||||
|
<!ATTLIST copyfile dest CDATA #REQUIRED>
|
||||||
|
|
||||||
|
<!ELEMENT linkfile EMPTY>
|
||||||
|
<!ATTLIST linkfile src CDATA #REQUIRED>
|
||||||
|
<!ATTLIST linkfile dest CDATA #REQUIRED>
|
||||||
|
|
||||||
|
<!ELEMENT extend-project EMPTY>
|
||||||
|
<!ATTLIST extend-project name CDATA #REQUIRED>
|
||||||
|
<!ATTLIST extend-project path CDATA #IMPLIED>
|
||||||
|
<!ATTLIST extend-project groups CDATA #IMPLIED>
|
||||||
|
<!ATTLIST extend-project revision CDATA #IMPLIED>
|
||||||
|
<!ATTLIST extend-project remote CDATA #IMPLIED>
|
||||||
|
|
||||||
|
<!ELEMENT remove-project EMPTY>
|
||||||
|
<!ATTLIST remove-project name CDATA #REQUIRED>
|
||||||
|
|
||||||
|
<!ELEMENT repo-hooks EMPTY>
|
||||||
|
<!ATTLIST repo-hooks in-project CDATA #REQUIRED>
|
||||||
|
<!ATTLIST repo-hooks enabled-list CDATA #REQUIRED>
|
||||||
|
|
||||||
|
<!ELEMENT include EMPTY>
|
||||||
|
<!ATTLIST include name CDATA #REQUIRED>
|
||||||
|
]>
|
||||||
|
```
|
||||||
|
|
||||||
|
A description of the elements and their attributes follows.
|
||||||
|
|
||||||
|
|
||||||
|
### Element manifest
|
||||||
|
|
||||||
|
The root element of the file.
|
||||||
|
|
||||||
|
|
||||||
|
### Element remote
|
||||||
|
|
||||||
|
One or more remote elements may be specified. Each remote element
|
||||||
|
specifies a Git URL shared by one or more projects and (optionally)
|
||||||
|
the Gerrit review server those projects upload changes through.
|
||||||
|
|
||||||
|
Attribute `name`: A short name unique to this manifest file. The
|
||||||
|
name specified here is used as the remote name in each project's
|
||||||
|
.git/config, and is therefore automatically available to commands
|
||||||
|
like `git fetch`, `git remote`, `git pull` and `git push`.
|
||||||
|
|
||||||
|
Attribute `alias`: The alias, if specified, is used to override
|
||||||
|
`name` to be set as the remote name in each project's .git/config.
|
||||||
|
Its value can be duplicated while attribute `name` has to be unique
|
||||||
|
in the manifest file. This helps each project to be able to have
|
||||||
|
same remote name which actually points to different remote url.
|
||||||
|
|
||||||
|
Attribute `fetch`: The Git URL prefix for all projects which use
|
||||||
|
this remote. Each project's name is appended to this prefix to
|
||||||
|
form the actual URL used to clone the project.
|
||||||
|
|
||||||
|
Attribute `pushurl`: The Git "push" URL prefix for all projects
|
||||||
|
which use this remote. Each project's name is appended to this
|
||||||
|
prefix to form the actual URL used to "git push" the project.
|
||||||
|
This attribute is optional; if not specified then "git push"
|
||||||
|
will use the same URL as the `fetch` attribute.
|
||||||
|
|
||||||
|
Attribute `review`: Hostname of the Gerrit server where reviews
|
||||||
|
are uploaded to by `repo upload`. This attribute is optional;
|
||||||
|
if not specified then `repo upload` will not function.
|
||||||
|
|
||||||
|
Attribute `revision`: Name of a Git branch (e.g. `master` or
|
||||||
|
`refs/heads/master`). Remotes with their own revision will override
|
||||||
|
the default revision.
|
||||||
|
|
||||||
|
### Element default
|
||||||
|
|
||||||
|
At most one default element may be specified. Its remote and
|
||||||
|
revision attributes are used when a project element does not
|
||||||
|
specify its own remote or revision attribute.
|
||||||
|
|
||||||
|
Attribute `remote`: Name of a previously defined remote element.
|
||||||
|
Project elements lacking a remote attribute of their own will use
|
||||||
|
this remote.
|
||||||
|
|
||||||
|
Attribute `revision`: Name of a Git branch (e.g. `master` or
|
||||||
|
`refs/heads/master`). Project elements lacking their own
|
||||||
|
revision attribute will use this revision.
|
||||||
|
|
||||||
|
Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
|
||||||
|
Project elements not setting their own `dest-branch` will inherit
|
||||||
|
this value. If this value is not set, projects will use `revision`
|
||||||
|
by default instead.
|
||||||
|
|
||||||
|
Attribute `upstream`: Name of the Git ref in which a sha1
|
||||||
|
can be found. Used when syncing a revision locked manifest in
|
||||||
|
-c mode to avoid having to sync the entire ref space. Project elements
|
||||||
|
not setting their own `upstream` will inherit this value.
|
||||||
|
|
||||||
|
Attribute `sync-j`: Number of parallel jobs to use when synching.
|
||||||
|
|
||||||
|
Attribute `sync-c`: Set to true to only sync the given Git
|
||||||
|
branch (specified in the `revision` attribute) rather than the
|
||||||
|
whole ref space. Project elements lacking a sync-c element of
|
||||||
|
their own will use this value.
|
||||||
|
|
||||||
|
Attribute `sync-s`: Set to true to also sync sub-projects.
|
||||||
|
|
||||||
|
Attribute `sync-tags`: Set to false to only sync the given Git
|
||||||
|
branch (specified in the `revision` attribute) rather than
|
||||||
|
the other ref tags.
|
||||||
|
|
||||||
|
|
||||||
|
### Element manifest-server
|
||||||
|
|
||||||
|
At most one manifest-server may be specified. The url attribute
|
||||||
|
is used to specify the URL of a manifest server, which is an
|
||||||
|
XML RPC service.
|
||||||
|
|
||||||
|
The manifest server should implement the following RPC methods:
|
||||||
|
|
||||||
|
GetApprovedManifest(branch, target)
|
||||||
|
|
||||||
|
Return a manifest in which each project is pegged to a known good revision
|
||||||
|
for the current branch and target. This is used by repo sync when the
|
||||||
|
--smart-sync option is given.
|
||||||
|
|
||||||
|
The target to use is defined by environment variables TARGET_PRODUCT
|
||||||
|
and TARGET_BUILD_VARIANT. These variables are used to create a string
|
||||||
|
of the form $TARGET_PRODUCT-$TARGET_BUILD_VARIANT, e.g. passion-userdebug.
|
||||||
|
If one of those variables or both are not present, the program will call
|
||||||
|
GetApprovedManifest without the target parameter and the manifest server
|
||||||
|
should choose a reasonable default target.
|
||||||
|
|
||||||
|
GetManifest(tag)
|
||||||
|
|
||||||
|
Return a manifest in which each project is pegged to the revision at
|
||||||
|
the specified tag. This is used by repo sync when the --smart-tag option
|
||||||
|
is given.
|
||||||
|
|
||||||
|
|
||||||
|
### Element project
|
||||||
|
|
||||||
|
One or more project elements may be specified. Each element
|
||||||
|
describes a single Git repository to be cloned into the repo
|
||||||
|
client workspace. You may specify Git-submodules by creating a
|
||||||
|
nested project. Git-submodules will be automatically
|
||||||
|
recognized and inherit their parent's attributes, but those
|
||||||
|
may be overridden by an explicitly specified project element.
|
||||||
|
|
||||||
|
Attribute `name`: A unique name for this project. The project's
|
||||||
|
name is appended onto its remote's fetch URL to generate the actual
|
||||||
|
URL to configure the Git remote with. The URL gets formed as:
|
||||||
|
|
||||||
|
${remote_fetch}/${project_name}.git
|
||||||
|
|
||||||
|
where ${remote_fetch} is the remote's fetch attribute and
|
||||||
|
${project_name} is the project's name attribute. The suffix ".git"
|
||||||
|
is always appended as repo assumes the upstream is a forest of
|
||||||
|
bare Git repositories. If the project has a parent element, its
|
||||||
|
name will be prefixed by the parent's.
|
||||||
|
|
||||||
|
The project name must match the name Gerrit knows, if Gerrit is
|
||||||
|
being used for code reviews.
|
||||||
|
|
||||||
|
Attribute `path`: An optional path relative to the top directory
|
||||||
|
of the repo client where the Git working directory for this project
|
||||||
|
should be placed. If not supplied the project name is used.
|
||||||
|
If the project has a parent element, its path will be prefixed
|
||||||
|
by the parent's.
|
||||||
|
|
||||||
|
Attribute `remote`: Name of a previously defined remote element.
|
||||||
|
If not supplied the remote given by the default element is used.
|
||||||
|
|
||||||
|
Attribute `revision`: Name of the Git branch the manifest wants
|
||||||
|
to track for this project. Names can be relative to refs/heads
|
||||||
|
(e.g. just "master") or absolute (e.g. "refs/heads/master").
|
||||||
|
Tags and/or explicit SHA-1s should work in theory, but have not
|
||||||
|
been extensively tested. If not supplied the revision given by
|
||||||
|
the remote element is used if applicable, else the default
|
||||||
|
element is used.
|
||||||
|
|
||||||
|
Attribute `dest-branch`: Name of a Git branch (e.g. `master`).
|
||||||
|
When using `repo upload`, changes will be submitted for code
|
||||||
|
review on this branch. If unspecified both here and in the
|
||||||
|
default element, `revision` is used instead.
|
||||||
|
|
||||||
|
Attribute `groups`: List of groups to which this project belongs,
|
||||||
|
whitespace or comma separated. All projects belong to the group
|
||||||
|
"all", and each project automatically belongs to a group of
|
||||||
|
its name:`name` and path:`path`. E.g. for
|
||||||
|
<project name="monkeys" path="barrel-of"/>, that project
|
||||||
|
definition is implicitly in the following manifest groups:
|
||||||
|
default, name:monkeys, and path:barrel-of. If you place a project in the
|
||||||
|
group "notdefault", it will not be automatically downloaded by repo.
|
||||||
|
If the project has a parent element, the `name` and `path` here
|
||||||
|
are the prefixed ones.
|
||||||
|
|
||||||
|
Attribute `sync-c`: Set to true to only sync the given Git
|
||||||
|
branch (specified in the `revision` attribute) rather than the
|
||||||
|
whole ref space.
|
||||||
|
|
||||||
|
Attribute `sync-s`: Set to true to also sync sub-projects.
|
||||||
|
|
||||||
|
Attribute `upstream`: Name of the Git ref in which a sha1
|
||||||
|
can be found. Used when syncing a revision locked manifest in
|
||||||
|
-c mode to avoid having to sync the entire ref space.
|
||||||
|
|
||||||
|
Attribute `clone-depth`: Set the depth to use when fetching this
|
||||||
|
project. If specified, this value will override any value given
|
||||||
|
to repo init with the --depth option on the command line.
|
||||||
|
|
||||||
|
Attribute `force-path`: Set to true to force this project to create the
|
||||||
|
local mirror repository according to its `path` attribute (if supplied)
|
||||||
|
rather than the `name` attribute. This attribute only applies to the
|
||||||
|
local mirrors syncing; it will be ignored when syncing the projects in a
|
||||||
|
client working directory.
|
||||||
|
|
||||||
|
### Element extend-project
|
||||||
|
|
||||||
|
Modify the attributes of the named project.
|
||||||
|
|
||||||
|
This element is mostly useful in a local manifest file, to modify the
|
||||||
|
attributes of an existing project without completely replacing the
|
||||||
|
existing project definition. This makes the local manifest more robust
|
||||||
|
against changes to the original manifest.
|
||||||
|
|
||||||
|
Attribute `path`: If specified, limit the change to projects checked out
|
||||||
|
at the specified path, rather than all projects with the given name.
|
||||||
|
|
||||||
|
Attribute `groups`: List of additional groups to which this project
|
||||||
|
belongs. Same syntax as the corresponding element of `project`.
|
||||||
|
|
||||||
|
Attribute `revision`: If specified, overrides the revision of the original
|
||||||
|
project. Same syntax as the corresponding element of `project`.
|
||||||
|
|
||||||
|
Attribute `remote`: If specified, overrides the remote of the original
|
||||||
|
project. Same syntax as the corresponding element of `project`.
|
||||||
|
|
||||||
|
### Element annotation
|
||||||
|
|
||||||
|
Zero or more annotation elements may be specified as children of a
|
||||||
|
project element. Each element describes a name-value pair that will be
|
||||||
|
exported into each project's environment during a 'forall' command,
|
||||||
|
prefixed with REPO__. In addition, there is an optional attribute
|
||||||
|
"keep" which accepts the case-insensitive values "true" (default) or
|
||||||
|
"false". This attribute determines whether or not the annotation will
|
||||||
|
be kept when exported with the manifest subcommand.
|
||||||
|
|
||||||
|
### Element copyfile
|
||||||
|
|
||||||
|
Zero or more copyfile elements may be specified as children of a
|
||||||
|
project element. Each element describes a src-dest pair of files;
|
||||||
|
the "src" file will be copied to the "dest" place during `repo sync`
|
||||||
|
command.
|
||||||
|
|
||||||
|
"src" is project relative, "dest" is relative to the top of the tree.
|
||||||
|
Copying from paths outside of the project or to paths outside of the repo
|
||||||
|
client is not allowed.
|
||||||
|
|
||||||
|
"src" and "dest" must be files. Directories or symlinks are not allowed.
|
||||||
|
Intermediate paths must not be symlinks either.
|
||||||
|
|
||||||
|
Parent directories of "dest" will be automatically created if missing.
|
||||||
|
|
||||||
|
### Element linkfile
|
||||||
|
|
||||||
|
It's just like copyfile and runs at the same time as copyfile but
|
||||||
|
instead of copying it creates a symlink.
|
||||||
|
|
||||||
|
The symlink is created at "dest" (relative to the top of the tree) and
|
||||||
|
points to the path specified by "src" which is a path in the project.
|
||||||
|
|
||||||
|
Parent directories of "dest" will be automatically created if missing.
|
||||||
|
|
||||||
|
The symlink target may be a file or directory, but it may not point outside
|
||||||
|
of the repo client.
|
||||||
|
|
||||||
|
### Element remove-project
|
||||||
|
|
||||||
|
Deletes the named project from the internal manifest table, possibly
|
||||||
|
allowing a subsequent project element in the same manifest file to
|
||||||
|
replace the project with a different source.
|
||||||
|
|
||||||
|
This element is mostly useful in a local manifest file, where
|
||||||
|
the user can remove a project, and possibly replace it with their
|
||||||
|
own definition.
|
||||||
|
|
||||||
|
### Element include
|
||||||
|
|
||||||
|
This element provides the capability of including another manifest
|
||||||
|
file into the originating manifest. Normal rules apply for the
|
||||||
|
target manifest to include - it must be a usable manifest on its own.
|
||||||
|
|
||||||
|
Attribute `name`: the manifest to include, specified relative to
|
||||||
|
the manifest repository's root.
|
||||||
|
|
||||||
|
|
||||||
|
## Local Manifests
|
||||||
|
|
||||||
|
Additional remotes and projects may be added through local manifest
|
||||||
|
files stored in `$TOP_DIR/.repo/local_manifests/*.xml`.
|
||||||
|
|
||||||
|
For example:
|
||||||
|
|
||||||
|
$ ls .repo/local_manifests
|
||||||
|
local_manifest.xml
|
||||||
|
another_local_manifest.xml
|
||||||
|
|
||||||
|
$ cat .repo/local_manifests/local_manifest.xml
|
||||||
|
<?xml version="1.0" encoding="UTF-8"?>
|
||||||
|
<manifest>
|
||||||
|
<project path="manifest"
|
||||||
|
name="tools/manifest" />
|
||||||
|
<project path="platform-manifest"
|
||||||
|
name="platform/manifest" />
|
||||||
|
</manifest>
|
||||||
|
|
||||||
|
Users may add projects to the local manifest(s) prior to a `repo sync`
|
||||||
|
invocation, instructing repo to automatically download and manage
|
||||||
|
these extra projects.
|
||||||
|
|
||||||
|
Manifest files stored in `$TOP_DIR/.repo/local_manifests/*.xml` will
|
||||||
|
be loaded in alphabetical order.
|
||||||
|
|
||||||
|
Additional remotes and projects may also be added through a local
|
||||||
|
manifest, stored in `$TOP_DIR/.repo/local_manifest.xml`. This method
|
||||||
|
is deprecated in favor of using multiple manifest files as mentioned
|
||||||
|
above.
|
||||||
|
|
||||||
|
If `$TOP_DIR/.repo/local_manifest.xml` exists, it will be loaded before
|
||||||
|
any manifest files stored in `$TOP_DIR/.repo/local_manifests/*.xml`.
|
47
docs/python-support.md
Normal file
47
docs/python-support.md
Normal file
@ -0,0 +1,47 @@
|
|||||||
|
# Supported Python Versions
|
||||||
|
|
||||||
|
With Python 2.7 officially going EOL on [01 Jan 2020](https://pythonclock.org/),
|
||||||
|
we need a support plan for the repo project itself.
|
||||||
|
Inevitably, there will be a long tail of users who still want to use Python 2 on
|
||||||
|
their old LTS/corp systems and have little power to change the system.
|
||||||
|
|
||||||
|
## Summary
|
||||||
|
|
||||||
|
* Python 3.6 (released Dec 2016) is required by default starting with repo-2.x.
|
||||||
|
* Older versions of Python (e.g. v2.7) may use the legacy feature-frozen branch
|
||||||
|
based on repo-1.x.
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
We provide a branch for Python 2 users that is feature-frozen.
|
||||||
|
Bugfixes may be added on a best-effort basis or from the community, but largely
|
||||||
|
no new features will be added, nor is support guaranteed.
|
||||||
|
|
||||||
|
Users can select this during `repo init` time via the [repo launcher].
|
||||||
|
Otherwise the default branches (e.g. stable & master) will be used which will
|
||||||
|
require Python 3.
|
||||||
|
|
||||||
|
This means the [repo launcher] needs to support both Python 2 & Python 3, but
|
||||||
|
since it doesn't import any other repo code, this shouldn't be too problematic.
|
||||||
|
|
||||||
|
The master branch will require Python 3.6 at a minimum.
|
||||||
|
If the system has an older version of Python 3, then users will have to select
|
||||||
|
the legacy Python 2 branch instead.
|
||||||
|
|
||||||
|
### repo hooks
|
||||||
|
|
||||||
|
Projects that use [repo hooks] run on independent schedules.
|
||||||
|
They might migrate to Python 3 earlier or later than us.
|
||||||
|
To support them, we'll probe the shebang of the hook script and if we find an
|
||||||
|
interpreter in there that indicates a different version than repo is currently
|
||||||
|
running under, we'll attempt to reexec ourselves under that.
|
||||||
|
|
||||||
|
For example, a hook with a header like `#!/usr/bin/python2` will have repo
|
||||||
|
run `/usr/bin/python2` to execute the hook code specifically if repo is
|
||||||
|
itself currently running under Python 3.
|
||||||
|
|
||||||
|
For more details, consult the [repo hooks] documentation.
|
||||||
|
|
||||||
|
|
||||||
|
[repo hooks]: ./repo-hooks.md
|
||||||
|
[repo launcher]: ../repo
|
167
docs/release-process.md
Normal file
167
docs/release-process.md
Normal file
@ -0,0 +1,167 @@
|
|||||||
|
# repo release process
|
||||||
|
|
||||||
|
This is the process for creating a new release of repo, as well as all the
|
||||||
|
related topics and flows.
|
||||||
|
|
||||||
|
[TOC]
|
||||||
|
|
||||||
|
## Launcher script
|
||||||
|
|
||||||
|
The main repo script serves as a standalone program and is often referred to as
|
||||||
|
the "launcher script".
|
||||||
|
This makes it easy to copy around and install as you don't have to install any
|
||||||
|
other files from the git repo.
|
||||||
|
|
||||||
|
Whenever major changes are made to the launcher script, you should increment the
|
||||||
|
`VERSION` variable in the launcher itself.
|
||||||
|
At runtime, repo will check this to see if it needs to be updated (and notify
|
||||||
|
the user automatically).
|
||||||
|
|
||||||
|
## Key management
|
||||||
|
|
||||||
|
Every release has a git tag that is signed with a key that repo recognizes.
|
||||||
|
Those keys are hardcoded inside of the repo launcher itself -- look for the
|
||||||
|
`KEYRING_VERSION` and `MAINTAINER_KEYS` settings.
|
||||||
|
|
||||||
|
Adding new keys to the repo launcher will allow tags to be recognized by new
|
||||||
|
keys, but only people using that updated version will be able to.
|
||||||
|
Since the majority of users will be using an official launcher version, their
|
||||||
|
version will simply ignore any new signed tags.
|
||||||
|
|
||||||
|
If you want to add new keys, it's best to register them long ahead of time,
|
||||||
|
and then wait for that updated launcher to make its way out to everyone.
|
||||||
|
Even then, there will be a long tail of users with outdated launchers, so be
|
||||||
|
prepared for people asking questions.
|
||||||
|
|
||||||
|
### Registering a new key
|
||||||
|
|
||||||
|
The process of actually adding a new key is quite simple.
|
||||||
|
|
||||||
|
1. Add the public half of the key to `MAINTAINER_KEYS`.
|
||||||
|
2. Increment `KEYRING_VERSION` so repo knows it needs to update.
|
||||||
|
3. Wait a long time after that version is in a release (~months) before trying
|
||||||
|
to create a new release using those new keys.
|
||||||
|
|
||||||
|
## Self update algorithm
|
||||||
|
|
||||||
|
When creating a new repo checkout with `repo init`, there are a few options that
|
||||||
|
control how repo finds updates:
|
||||||
|
|
||||||
|
* `--repo-url`: This tells repo where to clone the full repo project itself.
|
||||||
|
It defaults to the official project (`REPO_URL` in the launcher script).
|
||||||
|
* `--repo-branch`: This tells repo which branch to use for the full project.
|
||||||
|
It defaults to the `stable` branch (`REPO_REV` in the launcher script).
|
||||||
|
|
||||||
|
Whenever `repo sync` is run, repo will check to see if an update is available.
|
||||||
|
It fetches the latest repo-branch from the repo-url.
|
||||||
|
Then it verifies that the latest commit in the branch has a valid signed tag
|
||||||
|
using `git tag -v` (which uses gpg).
|
||||||
|
If the tag is valid, then repo will update its internal checkout to it.
|
||||||
|
|
||||||
|
If the latest commit doesn't have a signed tag, repo will fall back to the
|
||||||
|
most recent tag it can find (via `git describe`).
|
||||||
|
If that tag is valid, then repo will warn and use that commit instead.
|
||||||
|
|
||||||
|
If that tag cannot be verified, it gives up and forces the user to resolve.
|
||||||
|
|
||||||
|
## Branch management
|
||||||
|
|
||||||
|
All development happens on the `master` branch and should generally be stable.
|
||||||
|
|
||||||
|
Since the repo launcher defaults to tracking the `stable` branch, it is not
|
||||||
|
normally updated until a new release is available.
|
||||||
|
If something goes wrong with a new release, an older release can be force pushed
|
||||||
|
and clients will automatically downgrade.
|
||||||
|
|
||||||
|
The `maint` branch is used to track the previous major release of repo.
|
||||||
|
It is not normally meant to be used by people as `stable` should be good enough.
|
||||||
|
Once a new major release is pushed to the `stable` branch, then the previous
|
||||||
|
major release can be pushed to `maint`.
|
||||||
|
For example, when `stable` moves from `v1.10.x` to `v1.11.x`, then the `maint`
|
||||||
|
branch will be updated from `v1.9.x` to `v1.10.x`.
|
||||||
|
|
||||||
|
We don't have parallel release branches/series.
|
||||||
|
Typically all tags are made against the `master` branch and then pushed to the
|
||||||
|
`stable` branch to make it available to the rest of the world.
|
||||||
|
Since repo doesn't typically see a lot of changes, this tends to be OK.
|
||||||
|
|
||||||
|
## Creating a new release
|
||||||
|
|
||||||
|
When you want to create a new release, you'll need to select a good version and
|
||||||
|
create a signed tag using a key registered in repo itself.
|
||||||
|
Typically we just tag the latest version of the `master` branch.
|
||||||
|
The tag could be pushed now, but it won't be used by clients normally (since the
|
||||||
|
default `repo-branch` setting is `stable`).
|
||||||
|
This would allow some early testing on systems who explicitly select `master`.
|
||||||
|
|
||||||
|
### Creating a signed tag
|
||||||
|
|
||||||
|
Lets assume your keys live in a dedicated directory, e.g. `~/.gnupg/repo/`.
|
||||||
|
|
||||||
|
*** note
|
||||||
|
If you need access to the official keys, check out the internal documentation
|
||||||
|
at [go/repo-release].
|
||||||
|
Note that only official maintainers of repo will have access as it describes
|
||||||
|
internal processes for accessing the restricted keys.
|
||||||
|
***
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# Set the gpg key directory.
|
||||||
|
$ export GNUPGHOME=~/.gnupg/repo/
|
||||||
|
|
||||||
|
# Verify the listed key is “Repo Maintainer”.
|
||||||
|
$ gpg -K
|
||||||
|
|
||||||
|
# Pick whatever branch or commit you want to tag.
|
||||||
|
$ r=master
|
||||||
|
|
||||||
|
# Pick the new version.
|
||||||
|
$ t=1.12.10
|
||||||
|
|
||||||
|
# Create the signed tag.
|
||||||
|
$ git tag -s v$t -u "Repo Maintainer <repo@android.kernel.org>" -m "repo $t" $r
|
||||||
|
|
||||||
|
# Verify the signed tag.
|
||||||
|
$ git show v$t
|
||||||
|
```
|
||||||
|
|
||||||
|
### Push the new release
|
||||||
|
|
||||||
|
Once you're ready to make the release available to everyone, push it to the
|
||||||
|
`stable` branch.
|
||||||
|
|
||||||
|
Make sure you never push the tag itself to the stable branch!
|
||||||
|
Only push the commit -- notice the use of `$t` and `$r` below.
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ git push https://gerrit-review.googlesource.com/git-repo v$t
|
||||||
|
$ git push https://gerrit-review.googlesource.com/git-repo $r:stable
|
||||||
|
```
|
||||||
|
|
||||||
|
If something goes horribly wrong, you can force push the previous version to the
|
||||||
|
`stable` branch and people should automatically recover.
|
||||||
|
Again, make sure you never push the tag itself!
|
||||||
|
|
||||||
|
```sh
|
||||||
|
$ oldrev="whatever-old-commit"
|
||||||
|
$ git push https://gerrit-review.googlesource.com/git-repo $oldrev:stable --force
|
||||||
|
```
|
||||||
|
|
||||||
|
### Announce the release
|
||||||
|
|
||||||
|
Once you do push a new release to `stable`, make sure to announce it on the
|
||||||
|
[repo-discuss@googlegroups.com] group.
|
||||||
|
Here is an [example announcement].
|
||||||
|
|
||||||
|
You can create a short changelog using the command:
|
||||||
|
|
||||||
|
```sh
|
||||||
|
# If you haven't pushed to the stable branch yet, you can use origin/stable.
|
||||||
|
# If you have pushed, change origin/stable to the previous release tag.
|
||||||
|
$ git log --format="%h (%aN) %s" --no-merges origin/stable..$r
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
[example announcement]: https://groups.google.com/d/topic/repo-discuss/UGBNismWo1M/discussion
|
||||||
|
[repo-discuss@googlegroups.com]: https://groups.google.com/forum/#!forum/repo-discuss
|
||||||
|
[go/repo-release]: https://goto.google.com/repo-release
|
135
docs/repo-hooks.md
Normal file
135
docs/repo-hooks.md
Normal file
@ -0,0 +1,135 @@
|
|||||||
|
# repo hooks
|
||||||
|
|
||||||
|
[TOC]
|
||||||
|
|
||||||
|
Repo provides a mechanism to hook specific stages of the runtime with custom
|
||||||
|
python modules. All the hooks live in one git project which is checked out by
|
||||||
|
the manifest (specified during `repo init`), and the manifest itself defines
|
||||||
|
which hooks are registered.
|
||||||
|
|
||||||
|
These are useful to run linters, check formatting, and run quick unittests
|
||||||
|
before allowing a step to proceed (e.g. before uploading a commit to Gerrit).
|
||||||
|
|
||||||
|
A complete example can be found in the Android project. It can be easily
|
||||||
|
re-used by any repo based project and is not specific to Android.<br>
|
||||||
|
https://android.googlesource.com/platform/tools/repohooks
|
||||||
|
|
||||||
|
## Approvals
|
||||||
|
|
||||||
|
When a hook is processed the first time, the user is prompted for approval.
|
||||||
|
We don't want to execute arbitrary code without explicit consent. For manifests
|
||||||
|
fetched via secure protocols (e.g. https://), the user is prompted once. For
|
||||||
|
insecure protocols (e.g. http://), the user is prompted whenever the registered
|
||||||
|
repohooks project is updated and a hook is triggered.
|
||||||
|
|
||||||
|
## Manifest Settings
|
||||||
|
|
||||||
|
For the full syntax, see the [repo manifest format](./manifest-format.md).
|
||||||
|
|
||||||
|
Here's a short example from
|
||||||
|
[Android](https://android.googlesource.com/platform/manifest/+/master/default.xml).
|
||||||
|
The `<project>` line checks out the repohooks git repo to the local
|
||||||
|
`tools/repohooks/` path. The `<repo-hooks>` line says to look in the project
|
||||||
|
with the name `platform/tools/repohooks` for hooks to run during the
|
||||||
|
`pre-upload` phase.
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<project path="tools/repohooks" name="platform/tools/repohooks" />
|
||||||
|
<repo-hooks in-project="platform/tools/repohooks" enabled-list="pre-upload" />
|
||||||
|
```
|
||||||
|
|
||||||
|
## Source Layout
|
||||||
|
|
||||||
|
The repohooks git repo should have a python file with the same name as the hook.
|
||||||
|
So if you want to support the `pre-upload` hook, you'll need to create a file
|
||||||
|
named `pre-upload.py`. Repo will dynamically load that module when processing
|
||||||
|
the hook and then call the `main` function in it.
|
||||||
|
|
||||||
|
Hooks should have their `main` accept `**kwargs` for future compatibility.
|
||||||
|
|
||||||
|
## Runtime
|
||||||
|
|
||||||
|
Hook return values are ignored.
|
||||||
|
|
||||||
|
Any uncaught exceptions from the hook will cause the step to fail. This is
|
||||||
|
intended as a fallback safety check though rather than the normal flow. If
|
||||||
|
you want your hook to trigger a failure, it should call `sys.exit()` (after
|
||||||
|
displaying relevant diagnostics).
|
||||||
|
|
||||||
|
Output (stdout & stderr) are not filtered in any way. Hooks should generally
|
||||||
|
not be too verbose. A short summary is nice, and some status information when
|
||||||
|
long running operations occur, but long/verbose output should be used only if
|
||||||
|
the hook ultimately fails.
|
||||||
|
|
||||||
|
The hook runs from the top level of the repo client where the operation is
|
||||||
|
started.
|
||||||
|
For example, if the repo client is under `~/tree/`, then that is where the hook
|
||||||
|
runs, even if you ran repo in a git repository at `~/tree/src/foo/`, or in a
|
||||||
|
subdirectory of that git repository in `~/tree/src/foo/bar/`.
|
||||||
|
Hooks frequently start off by doing a `os.chdir` to the specific project they're
|
||||||
|
called on (see below) and then changing back to the original dir when they're
|
||||||
|
finished.
|
||||||
|
|
||||||
|
Python's `sys.path` is modified so that the top of repohooks directory comes
|
||||||
|
first. This should help simplify the hook logic to easily allow importing of
|
||||||
|
local modules.
|
||||||
|
|
||||||
|
Repo does not modify the state of the git checkout. This means that the hooks
|
||||||
|
might be running in a dirty git repo with many commits and checked out to the
|
||||||
|
latest one. If the hook wants to operate on specific git commits, it needs to
|
||||||
|
manually discover the list of pending commits, extract the diff/commit, and
|
||||||
|
then check it directly. Hooks should not normally modify the active git repo
|
||||||
|
(such as checking out a specific commit to run checks) without first prompting
|
||||||
|
the user. Although user interaction is discouraged in the common case, it can
|
||||||
|
be useful when deploying automatic fixes.
|
||||||
|
|
||||||
|
### Shebang Handling
|
||||||
|
|
||||||
|
*** note
|
||||||
|
This is intended as a transitional feature. Hooks are expected to eventually
|
||||||
|
migrate to Python 3 only as Python 2 is EOL & deprecated.
|
||||||
|
***
|
||||||
|
|
||||||
|
If the hook is written against a specific version of Python (either 2 or 3),
|
||||||
|
the script can declare that explicitly. Repo will then attempt to execute it
|
||||||
|
under the right version of Python regardless of the version repo itself might
|
||||||
|
be executing under.
|
||||||
|
|
||||||
|
Here are the shebangs that are recognized.
|
||||||
|
|
||||||
|
* `#!/usr/bin/env python` & `#!/usr/bin/python`: The hook is compatible with
|
||||||
|
Python 2 & Python 3. For maximum compatibility, these are recommended.
|
||||||
|
* `#!/usr/bin/env python2` & `#!/usr/bin/python2`: The hook requires Python 2.
|
||||||
|
Version specific names like `python2.7` are also recognized.
|
||||||
|
* `#!/usr/bin/env python3` & `#!/usr/bin/python3`: The hook requires Python 3.
|
||||||
|
Version specific names like `python3.6` are also recognized.
|
||||||
|
|
||||||
|
If no shebang is detected, or does not match the forms above, we assume that the
|
||||||
|
hook is compatible with both Python 2 & Python 3 as if `#!/usr/bin/python` was
|
||||||
|
used.
|
||||||
|
|
||||||
|
## Hooks
|
||||||
|
|
||||||
|
Here are all the points available for hooking.
|
||||||
|
|
||||||
|
### pre-upload
|
||||||
|
|
||||||
|
This hook runs when people run `repo upload`.
|
||||||
|
|
||||||
|
The `pre-upload.py` file should be defined like:
|
||||||
|
|
||||||
|
```py
|
||||||
|
def main(project_list, worktree_list=None, **kwargs):
|
||||||
|
"""Main function invoked directly by repo.
|
||||||
|
|
||||||
|
We must use the name "main" as that is what repo requires.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project_list: List of projects to run on.
|
||||||
|
worktree_list: A list of directories. It should be the same length as
|
||||||
|
project_list, so that each entry in project_list matches with a
|
||||||
|
directory in worktree_list. If None, we will attempt to calculate
|
||||||
|
the directories automatically.
|
||||||
|
kwargs: Leave this here for forward-compatibility.
|
||||||
|
"""
|
||||||
|
```
|
144
docs/windows.md
Normal file
144
docs/windows.md
Normal file
@ -0,0 +1,144 @@
|
|||||||
|
# Microsoft Windows Details
|
||||||
|
|
||||||
|
Repo is primarily developed on Linux with a lot of users on macOS.
|
||||||
|
Windows is, unfortunately, not a common platform.
|
||||||
|
There is support in repo for Windows, but there might be some rough edges.
|
||||||
|
|
||||||
|
Keep in mind that Windows in general is "best effort" and "community supported".
|
||||||
|
That means we don't actively test or verify behavior, but rely heavily on users
|
||||||
|
to report problems back to us, and to contribute fixes as needed.
|
||||||
|
|
||||||
|
[TOC]
|
||||||
|
|
||||||
|
## Windows
|
||||||
|
|
||||||
|
We only support Windows 10 or newer.
|
||||||
|
This is largely due to symlinks not being available in older versions, but it's
|
||||||
|
also due to most developers not using Windows.
|
||||||
|
|
||||||
|
We will never add code specific to older versions of Windows.
|
||||||
|
It might work, but it most likely won't, so please don't bother asking.
|
||||||
|
|
||||||
|
## Symlinks
|
||||||
|
|
||||||
|
Repo will use symlinks heavily internally.
|
||||||
|
On *NIX platforms, this isn't an issue, but Windows makes it a bit difficult.
|
||||||
|
|
||||||
|
There are some documents out there for how to do this, but usually the easiest
|
||||||
|
answer is to run your shell as an Administrator and invoke repo/git in that.
|
||||||
|
|
||||||
|
This isn't a great solution, but Windows doesn't make this easy, so here we are.
|
||||||
|
|
||||||
|
### Launch Git Bash
|
||||||
|
|
||||||
|
If you install Git Bash (see below), you can launch that with appropriate
|
||||||
|
permissions so that all programs "just work".
|
||||||
|
|
||||||
|
* Open the Start Menu (i.e. press the ⊞ key).
|
||||||
|
* Find/search for "Git Bash".
|
||||||
|
* Right click it and select "Run as administrator".
|
||||||
|
|
||||||
|
*** note
|
||||||
|
**NB**: This environment is only needed when running `repo`, or any specific `git`
|
||||||
|
command that might involve symlinks (e.g. `pull` or `checkout`).
|
||||||
|
You do not need to run all your commands in here such as your editor.
|
||||||
|
***
|
||||||
|
|
||||||
|
### Symlinks with GNU tools
|
||||||
|
|
||||||
|
If you want to use `ln -s` inside of the default Git/bash shell, you might need
|
||||||
|
to export this environment variable:
|
||||||
|
```sh
|
||||||
|
$ export MSYS="winsymlinks:nativestrict"
|
||||||
|
```
|
||||||
|
|
||||||
|
Otherwise `ln -s` will copy files and not actually create a symlink.
|
||||||
|
This also helps `tar` unpack symlinks, so that's nice.
|
||||||
|
|
||||||
|
### References
|
||||||
|
|
||||||
|
* https://github.com/git-for-windows/git/wiki/Symbolic-Links
|
||||||
|
* https://blogs.windows.com/windowsdeveloper/2016/12/02/symlinks-windows-10/
|
||||||
|
|
||||||
|
## Python
|
||||||
|
|
||||||
|
You should make sure to be running Python 3.6 or newer under Windows.
|
||||||
|
Python 2 might work, but due to already limited platform testing, you should
|
||||||
|
only run newer Python versions.
|
||||||
|
See our [Python Support](./python-support.md) document for more details.
|
||||||
|
|
||||||
|
You can grab the latest Windows installer here:<br>
|
||||||
|
https://www.python.org/downloads/release/python-3
|
||||||
|
|
||||||
|
## Git
|
||||||
|
|
||||||
|
You should install the most recent version of Git for Windows:<br>
|
||||||
|
https://git-scm.com/download/win
|
||||||
|
|
||||||
|
When installing, make sure to turn on "Enable symbolic links" when prompted.
|
||||||
|
|
||||||
|
If you've already installed Git for Windows, you can simply download the latest
|
||||||
|
installer from above and run it again.
|
||||||
|
It should safely upgrade things in situ for you.
|
||||||
|
This is useful if you want to switch the symbolic link option after the fact.
|
||||||
|
|
||||||
|
## Shell
|
||||||
|
|
||||||
|
We don't have a specific requirement for shell environments when running repo.
|
||||||
|
Most developers use MinTTY/bash that's included with the Git for Windows install
|
||||||
|
(so see above for installing Git).
|
||||||
|
|
||||||
|
Command & Powershell & the Windows Terminal probably work.
|
||||||
|
Who knows!
|
||||||
|
|
||||||
|
## FAQ
|
||||||
|
|
||||||
|
### repo upload always complains about allowing hooks or using --no-verify!
|
||||||
|
|
||||||
|
When using `repo upload` in projects that have custom repohooks, you might get
|
||||||
|
an error like the following:
|
||||||
|
```sh
|
||||||
|
$ repo upload
|
||||||
|
ERROR: You must allow the pre-upload hook or use --no-verify.
|
||||||
|
```
|
||||||
|
|
||||||
|
This can be confusing as you never get prompted.
|
||||||
|
[MinTTY has a bug][mintty] that breaks isatty checking inside of repo which
|
||||||
|
causes repo to never interactively prompt the user which means the upload check
|
||||||
|
always fails.
|
||||||
|
|
||||||
|
You can workaround this by manually granting consent when uploading.
|
||||||
|
Simply add the `--verify` option whenever uploading:
|
||||||
|
```sh
|
||||||
|
$ repo upload --verify
|
||||||
|
```
|
||||||
|
|
||||||
|
You will have to specify this flag every time you upload.
|
||||||
|
|
||||||
|
[mintty]: https://github.com/mintty/mintty/issues/56
|
||||||
|
|
||||||
|
### repohooks always fail with a close_fds error.
|
||||||
|
|
||||||
|
When using the [reference repohooks project][repohooks] included in AOSP,
|
||||||
|
you might see errors like this when running `repo upload`:
|
||||||
|
```sh
|
||||||
|
$ repo upload
|
||||||
|
ERROR: Traceback (most recent call last):
|
||||||
|
...
|
||||||
|
File "C:\...\lib\subprocess.py", line 351, in __init__
|
||||||
|
raise ValueError("close_fds is not supported on Windows "
|
||||||
|
ValueError: close_fds is not supported on Windows platforms if you redirect stdin/stderr/stdout
|
||||||
|
|
||||||
|
Failed to run main() for pre-upload hook; see traceback above.
|
||||||
|
```
|
||||||
|
|
||||||
|
This error shows up when using Python 2.
|
||||||
|
You should upgrade to Python 3 instead (see above).
|
||||||
|
|
||||||
|
If you already have Python 3 installed, make sure it's the default version.
|
||||||
|
Running `python --version` should say `Python 3`, not `Python 2`.
|
||||||
|
If you didn't install the Python versions, or don't have permission to change
|
||||||
|
the default version, you can probably workaround this by changing `$PATH` in
|
||||||
|
your shell so the Python 3 version is found first.
|
||||||
|
|
||||||
|
[repohooks]: https://android.googlesource.com/platform/tools/repohooks
|
62
editor.py
62
editor.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,12 +14,15 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
import os
|
import os
|
||||||
|
import re
|
||||||
import sys
|
import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
import tempfile
|
import tempfile
|
||||||
|
|
||||||
from error import EditorError
|
from error import EditorError
|
||||||
|
import platform_utils
|
||||||
|
|
||||||
class Editor(object):
|
class Editor(object):
|
||||||
"""Manages the user's preferred text editor."""
|
"""Manages the user's preferred text editor."""
|
||||||
@ -38,9 +42,10 @@ class Editor(object):
|
|||||||
if e:
|
if e:
|
||||||
return e
|
return e
|
||||||
|
|
||||||
e = cls.globalConfig.GetString('core.editor')
|
if cls.globalConfig:
|
||||||
if e:
|
e = cls.globalConfig.GetString('core.editor')
|
||||||
return e
|
if e:
|
||||||
|
return e
|
||||||
|
|
||||||
e = os.getenv('VISUAL')
|
e = os.getenv('VISUAL')
|
||||||
if e:
|
if e:
|
||||||
@ -51,10 +56,10 @@ class Editor(object):
|
|||||||
return e
|
return e
|
||||||
|
|
||||||
if os.getenv('TERM') == 'dumb':
|
if os.getenv('TERM') == 'dumb':
|
||||||
print >>sys.stderr,\
|
print(
|
||||||
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
|
"""No editor specified in GIT_EDITOR, core.editor, VISUAL or EDITOR.
|
||||||
Tried to fall back to vi but terminal is dumb. Please configure at
|
Tried to fall back to vi but terminal is dumb. Please configure at
|
||||||
least one of these before using this command."""
|
least one of these before using this command.""", file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
return 'vi'
|
return 'vi'
|
||||||
@ -63,23 +68,50 @@ least one of these before using this command."""
|
|||||||
def EditString(cls, data):
|
def EditString(cls, data):
|
||||||
"""Opens an editor to edit the given content.
|
"""Opens an editor to edit the given content.
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
data : the text to edit
|
data: The text to edit.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
new value of edited text; None if editing did not succeed
|
New value of edited text.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
EditorError: The editor failed to run.
|
||||||
"""
|
"""
|
||||||
editor = cls._GetEditor()
|
editor = cls._GetEditor()
|
||||||
|
if editor == ':':
|
||||||
|
return data
|
||||||
|
|
||||||
fd, path = tempfile.mkstemp()
|
fd, path = tempfile.mkstemp()
|
||||||
try:
|
try:
|
||||||
os.write(fd, data)
|
os.write(fd, data.encode('utf-8'))
|
||||||
os.close(fd)
|
os.close(fd)
|
||||||
fd = None
|
fd = None
|
||||||
|
|
||||||
if subprocess.Popen([editor, path]).wait() != 0:
|
if platform_utils.isWindows():
|
||||||
raise EditorError()
|
# Split on spaces, respecting quoted strings
|
||||||
return open(path).read()
|
import shlex
|
||||||
|
args = shlex.split(editor)
|
||||||
|
shell = False
|
||||||
|
elif re.compile("^.*[$ \t'].*$").match(editor):
|
||||||
|
args = [editor + ' "$@"', 'sh']
|
||||||
|
shell = True
|
||||||
|
else:
|
||||||
|
args = [editor]
|
||||||
|
shell = False
|
||||||
|
args.append(path)
|
||||||
|
|
||||||
|
try:
|
||||||
|
rc = subprocess.Popen(args, shell=shell).wait()
|
||||||
|
except OSError as e:
|
||||||
|
raise EditorError('editor failed, %s: %s %s'
|
||||||
|
% (str(e), editor, path))
|
||||||
|
if rc != 0:
|
||||||
|
raise EditorError('editor failed with exit status %d: %s %s'
|
||||||
|
% (rc, editor, path))
|
||||||
|
|
||||||
|
with open(path, mode='rb') as fd2:
|
||||||
|
return fd2.read().decode('utf-8')
|
||||||
finally:
|
finally:
|
||||||
if fd:
|
if fd:
|
||||||
os.close(fd)
|
os.close(fd)
|
||||||
os.remove(path)
|
platform_utils.remove(path)
|
||||||
|
78
error.py
78
error.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -17,32 +18,60 @@ class ManifestParseError(Exception):
|
|||||||
"""Failed to parse the manifest file.
|
"""Failed to parse the manifest file.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
class EditorError(Exception):
|
class ManifestInvalidRevisionError(Exception):
|
||||||
"""Unspecified error from the user's text editor.
|
"""The revision value in a project is incorrect.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
class GitError(Exception):
|
class ManifestInvalidPathError(Exception):
|
||||||
"""Unspecified internal error from git.
|
"""A path used in <copyfile> or <linkfile> is incorrect.
|
||||||
"""
|
"""
|
||||||
def __init__(self, command):
|
|
||||||
self.command = command
|
|
||||||
|
|
||||||
def __str__(self):
|
class NoManifestException(Exception):
|
||||||
return self.command
|
"""The required manifest does not exist.
|
||||||
|
|
||||||
class ImportError(Exception):
|
|
||||||
"""An import from a non-Git format cannot be performed.
|
|
||||||
"""
|
"""
|
||||||
def __init__(self, reason):
|
def __init__(self, path, reason):
|
||||||
|
super(NoManifestException, self).__init__()
|
||||||
|
self.path = path
|
||||||
self.reason = reason
|
self.reason = reason
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
return self.reason
|
return self.reason
|
||||||
|
|
||||||
|
class EditorError(Exception):
|
||||||
|
"""Unspecified error from the user's text editor.
|
||||||
|
"""
|
||||||
|
def __init__(self, reason):
|
||||||
|
super(EditorError, self).__init__()
|
||||||
|
self.reason = reason
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.reason
|
||||||
|
|
||||||
|
class GitError(Exception):
|
||||||
|
"""Unspecified internal error from git.
|
||||||
|
"""
|
||||||
|
def __init__(self, command):
|
||||||
|
super(GitError, self).__init__()
|
||||||
|
self.command = command
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.command
|
||||||
|
|
||||||
class UploadError(Exception):
|
class UploadError(Exception):
|
||||||
"""A bundle upload to Gerrit did not succeed.
|
"""A bundle upload to Gerrit did not succeed.
|
||||||
"""
|
"""
|
||||||
def __init__(self, reason):
|
def __init__(self, reason):
|
||||||
|
super(UploadError, self).__init__()
|
||||||
|
self.reason = reason
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
return self.reason
|
||||||
|
|
||||||
|
class DownloadError(Exception):
|
||||||
|
"""Cannot download a repository.
|
||||||
|
"""
|
||||||
|
def __init__(self, reason):
|
||||||
|
super(DownloadError, self).__init__()
|
||||||
self.reason = reason
|
self.reason = reason
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
@ -52,10 +81,24 @@ class NoSuchProjectError(Exception):
|
|||||||
"""A specified project does not exist in the work tree.
|
"""A specified project does not exist in the work tree.
|
||||||
"""
|
"""
|
||||||
def __init__(self, name=None):
|
def __init__(self, name=None):
|
||||||
|
super(NoSuchProjectError, self).__init__()
|
||||||
self.name = name
|
self.name = name
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
if self.Name is None:
|
if self.name is None:
|
||||||
|
return 'in current directory'
|
||||||
|
return self.name
|
||||||
|
|
||||||
|
|
||||||
|
class InvalidProjectGroupsError(Exception):
|
||||||
|
"""A specified project is not suitable for the specified groups
|
||||||
|
"""
|
||||||
|
def __init__(self, name=None):
|
||||||
|
super(InvalidProjectGroupsError, self).__init__()
|
||||||
|
self.name = name
|
||||||
|
|
||||||
|
def __str__(self):
|
||||||
|
if self.name is None:
|
||||||
return 'in current directory'
|
return 'in current directory'
|
||||||
return self.name
|
return self.name
|
||||||
|
|
||||||
@ -64,3 +107,12 @@ class RepoChangedException(Exception):
|
|||||||
repo or manifest repositories. In this special case we must
|
repo or manifest repositories. In this special case we must
|
||||||
use exec to re-execute repo with the new code and manifest.
|
use exec to re-execute repo with the new code and manifest.
|
||||||
"""
|
"""
|
||||||
|
def __init__(self, extra_args=None):
|
||||||
|
super(RepoChangedException, self).__init__()
|
||||||
|
self.extra_args = extra_args or []
|
||||||
|
|
||||||
|
class HookError(Exception):
|
||||||
|
"""Thrown if a 'repo-hook' could not be run.
|
||||||
|
|
||||||
|
The common case is that the file wasn't present when we tried to run it.
|
||||||
|
"""
|
||||||
|
177
event_log.py
Normal file
177
event_log.py
Normal file
@ -0,0 +1,177 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2017 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import json
|
||||||
|
import multiprocessing
|
||||||
|
|
||||||
|
TASK_COMMAND = 'command'
|
||||||
|
TASK_SYNC_NETWORK = 'sync-network'
|
||||||
|
TASK_SYNC_LOCAL = 'sync-local'
|
||||||
|
|
||||||
|
class EventLog(object):
|
||||||
|
"""Event log that records events that occurred during a repo invocation.
|
||||||
|
|
||||||
|
Events are written to the log as a consecutive JSON entries, one per line.
|
||||||
|
Each entry contains the following keys:
|
||||||
|
- id: A ('RepoOp', ID) tuple, suitable for storing in a datastore.
|
||||||
|
The ID is only unique for the invocation of the repo command.
|
||||||
|
- name: Name of the object being operated upon.
|
||||||
|
- task_name: The task that was performed.
|
||||||
|
- start: Timestamp of when the operation started.
|
||||||
|
- finish: Timestamp of when the operation finished.
|
||||||
|
- success: Boolean indicating if the operation was successful.
|
||||||
|
- try_count: A counter indicating the try count of this task.
|
||||||
|
|
||||||
|
Optionally:
|
||||||
|
- parent: A ('RepoOp', ID) tuple indicating the parent event for nested
|
||||||
|
events.
|
||||||
|
|
||||||
|
Valid task_names include:
|
||||||
|
- command: The invocation of a subcommand.
|
||||||
|
- sync-network: The network component of a sync command.
|
||||||
|
- sync-local: The local component of a sync command.
|
||||||
|
|
||||||
|
Specific tasks may include additional informational properties.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initializes the event log."""
|
||||||
|
self._log = []
|
||||||
|
self._parent = None
|
||||||
|
|
||||||
|
def Add(self, name, task_name, start, finish=None, success=None,
|
||||||
|
try_count=1, kind='RepoOp'):
|
||||||
|
"""Add an event to the log.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: Name of the object being operated upon.
|
||||||
|
task_name: A sub-task that was performed for name.
|
||||||
|
start: Timestamp of when the operation started.
|
||||||
|
finish: Timestamp of when the operation finished.
|
||||||
|
success: Boolean indicating if the operation was successful.
|
||||||
|
try_count: A counter indicating the try count of this task.
|
||||||
|
kind: The kind of the object for the unique identifier.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A dictionary of the event added to the log.
|
||||||
|
"""
|
||||||
|
event = {
|
||||||
|
'id': (kind, _NextEventId()),
|
||||||
|
'name': name,
|
||||||
|
'task_name': task_name,
|
||||||
|
'start_time': start,
|
||||||
|
'try': try_count,
|
||||||
|
}
|
||||||
|
|
||||||
|
if self._parent:
|
||||||
|
event['parent'] = self._parent['id']
|
||||||
|
|
||||||
|
if success is not None or finish is not None:
|
||||||
|
self.FinishEvent(event, finish, success)
|
||||||
|
|
||||||
|
self._log.append(event)
|
||||||
|
return event
|
||||||
|
|
||||||
|
def AddSync(self, project, task_name, start, finish, success):
|
||||||
|
"""Add a event to the log for a sync command.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project: Project being synced.
|
||||||
|
task_name: A sub-task that was performed for name.
|
||||||
|
One of (TASK_SYNC_NETWORK, TASK_SYNC_LOCAL)
|
||||||
|
start: Timestamp of when the operation started.
|
||||||
|
finish: Timestamp of when the operation finished.
|
||||||
|
success: Boolean indicating if the operation was successful.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A dictionary of the event added to the log.
|
||||||
|
"""
|
||||||
|
event = self.Add(project.relpath, task_name, start, finish, success)
|
||||||
|
if event is not None:
|
||||||
|
event['project'] = project.name
|
||||||
|
if project.revisionExpr:
|
||||||
|
event['revision'] = project.revisionExpr
|
||||||
|
if project.remote.url:
|
||||||
|
event['project_url'] = project.remote.url
|
||||||
|
if project.remote.fetchUrl:
|
||||||
|
event['remote_url'] = project.remote.fetchUrl
|
||||||
|
try:
|
||||||
|
event['git_hash'] = project.GetCommitRevisionId()
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
return event
|
||||||
|
|
||||||
|
def GetStatusString(self, success):
|
||||||
|
"""Converst a boolean success to a status string.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
success: Boolean indicating if the operation was successful.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
status string.
|
||||||
|
"""
|
||||||
|
return 'pass' if success else 'fail'
|
||||||
|
|
||||||
|
def FinishEvent(self, event, finish, success):
|
||||||
|
"""Finishes an incomplete event.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
event: An event that has been added to the log.
|
||||||
|
finish: Timestamp of when the operation finished.
|
||||||
|
success: Boolean indicating if the operation was successful.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A dictionary of the event added to the log.
|
||||||
|
"""
|
||||||
|
event['status'] = self.GetStatusString(success)
|
||||||
|
event['finish_time'] = finish
|
||||||
|
return event
|
||||||
|
|
||||||
|
def SetParent(self, event):
|
||||||
|
"""Set a parent event for all new entities.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
event: The event to use as a parent.
|
||||||
|
"""
|
||||||
|
self._parent = event
|
||||||
|
|
||||||
|
def Write(self, filename):
|
||||||
|
"""Writes the log out to a file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filename: The file to write the log to.
|
||||||
|
"""
|
||||||
|
with open(filename, 'w+') as f:
|
||||||
|
for e in self._log:
|
||||||
|
json.dump(e, f, sort_keys=True)
|
||||||
|
f.write('\n')
|
||||||
|
|
||||||
|
|
||||||
|
# An integer id that is unique across this invocation of the program.
|
||||||
|
_EVENT_ID = multiprocessing.Value('i', 1)
|
||||||
|
|
||||||
|
def _NextEventId():
|
||||||
|
"""Helper function for grabbing the next unique id.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
A unique, to this invocation of the program, integer id.
|
||||||
|
"""
|
||||||
|
with _EVENT_ID.get_lock():
|
||||||
|
val = _EVENT_ID.value
|
||||||
|
_EVENT_ID.value += 1
|
||||||
|
return val
|
@ -1,433 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# TODO(robinson): We probably need to provide deep-copy methods for
|
|
||||||
# descriptor types. When a FieldDescriptor is passed into
|
|
||||||
# Descriptor.__init__(), we should make a deep copy and then set
|
|
||||||
# containing_type on it. Alternatively, we could just get
|
|
||||||
# rid of containing_type (iit's not needed for reflection.py, at least).
|
|
||||||
#
|
|
||||||
# TODO(robinson): Print method?
|
|
||||||
#
|
|
||||||
# TODO(robinson): Useful __repr__?
|
|
||||||
|
|
||||||
"""Descriptors essentially contain exactly the information found in a .proto
|
|
||||||
file, in types that make this information accessible in Python.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
class DescriptorBase(object):
|
|
||||||
|
|
||||||
"""Descriptors base class.
|
|
||||||
|
|
||||||
This class is the base of all descriptor classes. It provides common options
|
|
||||||
related functionaility.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, options, options_class_name):
|
|
||||||
"""Initialize the descriptor given its options message and the name of the
|
|
||||||
class of the options message. The name of the class is required in case
|
|
||||||
the options message is None and has to be created.
|
|
||||||
"""
|
|
||||||
self._options = options
|
|
||||||
self._options_class_name = options_class_name
|
|
||||||
|
|
||||||
def GetOptions(self):
|
|
||||||
"""Retrieves descriptor options.
|
|
||||||
|
|
||||||
This method returns the options set or creates the default options for the
|
|
||||||
descriptor.
|
|
||||||
"""
|
|
||||||
if self._options:
|
|
||||||
return self._options
|
|
||||||
from froofle.protobuf import descriptor_pb2
|
|
||||||
try:
|
|
||||||
options_class = getattr(descriptor_pb2, self._options_class_name)
|
|
||||||
except AttributeError:
|
|
||||||
raise RuntimeError('Unknown options class name %s!' %
|
|
||||||
(self._options_class_name))
|
|
||||||
self._options = options_class()
|
|
||||||
return self._options
|
|
||||||
|
|
||||||
|
|
||||||
class Descriptor(DescriptorBase):
|
|
||||||
|
|
||||||
"""Descriptor for a protocol message type.
|
|
||||||
|
|
||||||
A Descriptor instance has the following attributes:
|
|
||||||
|
|
||||||
name: (str) Name of this protocol message type.
|
|
||||||
full_name: (str) Fully-qualified name of this protocol message type,
|
|
||||||
which will include protocol "package" name and the name of any
|
|
||||||
enclosing types.
|
|
||||||
|
|
||||||
filename: (str) Name of the .proto file containing this message.
|
|
||||||
|
|
||||||
containing_type: (Descriptor) Reference to the descriptor of the
|
|
||||||
type containing us, or None if we have no containing type.
|
|
||||||
|
|
||||||
fields: (list of FieldDescriptors) Field descriptors for all
|
|
||||||
fields in this type.
|
|
||||||
fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
|
|
||||||
objects as in |fields|, but indexed by "number" attribute in each
|
|
||||||
FieldDescriptor.
|
|
||||||
fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
|
|
||||||
objects as in |fields|, but indexed by "name" attribute in each
|
|
||||||
FieldDescriptor.
|
|
||||||
|
|
||||||
nested_types: (list of Descriptors) Descriptor references
|
|
||||||
for all protocol message types nested within this one.
|
|
||||||
nested_types_by_name: (dict str -> Descriptor) Same Descriptor
|
|
||||||
objects as in |nested_types|, but indexed by "name" attribute
|
|
||||||
in each Descriptor.
|
|
||||||
|
|
||||||
enum_types: (list of EnumDescriptors) EnumDescriptor references
|
|
||||||
for all enums contained within this type.
|
|
||||||
enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor
|
|
||||||
objects as in |enum_types|, but indexed by "name" attribute
|
|
||||||
in each EnumDescriptor.
|
|
||||||
enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
|
|
||||||
from enum value name to EnumValueDescriptor for that value.
|
|
||||||
|
|
||||||
extensions: (list of FieldDescriptor) All extensions defined directly
|
|
||||||
within this message type (NOT within a nested type).
|
|
||||||
extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
|
|
||||||
objects as |extensions|, but indexed by "name" attribute of each
|
|
||||||
FieldDescriptor.
|
|
||||||
|
|
||||||
options: (descriptor_pb2.MessageOptions) Protocol message options or None
|
|
||||||
to use default message options.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, name, full_name, filename, containing_type,
|
|
||||||
fields, nested_types, enum_types, extensions, options=None):
|
|
||||||
"""Arguments to __init__() are as described in the description
|
|
||||||
of Descriptor fields above.
|
|
||||||
"""
|
|
||||||
super(Descriptor, self).__init__(options, 'MessageOptions')
|
|
||||||
self.name = name
|
|
||||||
self.full_name = full_name
|
|
||||||
self.filename = filename
|
|
||||||
self.containing_type = containing_type
|
|
||||||
|
|
||||||
# We have fields in addition to fields_by_name and fields_by_number,
|
|
||||||
# so that:
|
|
||||||
# 1. Clients can index fields by "order in which they're listed."
|
|
||||||
# 2. Clients can easily iterate over all fields with the terse
|
|
||||||
# syntax: for f in descriptor.fields: ...
|
|
||||||
self.fields = fields
|
|
||||||
for field in self.fields:
|
|
||||||
field.containing_type = self
|
|
||||||
self.fields_by_number = dict((f.number, f) for f in fields)
|
|
||||||
self.fields_by_name = dict((f.name, f) for f in fields)
|
|
||||||
|
|
||||||
self.nested_types = nested_types
|
|
||||||
self.nested_types_by_name = dict((t.name, t) for t in nested_types)
|
|
||||||
|
|
||||||
self.enum_types = enum_types
|
|
||||||
for enum_type in self.enum_types:
|
|
||||||
enum_type.containing_type = self
|
|
||||||
self.enum_types_by_name = dict((t.name, t) for t in enum_types)
|
|
||||||
self.enum_values_by_name = dict(
|
|
||||||
(v.name, v) for t in enum_types for v in t.values)
|
|
||||||
|
|
||||||
self.extensions = extensions
|
|
||||||
for extension in self.extensions:
|
|
||||||
extension.extension_scope = self
|
|
||||||
self.extensions_by_name = dict((f.name, f) for f in extensions)
|
|
||||||
|
|
||||||
|
|
||||||
# TODO(robinson): We should have aggressive checking here,
|
|
||||||
# for example:
|
|
||||||
# * If you specify a repeated field, you should not be allowed
|
|
||||||
# to specify a default value.
|
|
||||||
# * [Other examples here as needed].
|
|
||||||
#
|
|
||||||
# TODO(robinson): for this and other *Descriptor classes, we
|
|
||||||
# might also want to lock things down aggressively (e.g.,
|
|
||||||
# prevent clients from setting the attributes). Having
|
|
||||||
# stronger invariants here in general will reduce the number
|
|
||||||
# of runtime checks we must do in reflection.py...
|
|
||||||
class FieldDescriptor(DescriptorBase):
|
|
||||||
|
|
||||||
"""Descriptor for a single field in a .proto file.
|
|
||||||
|
|
||||||
A FieldDescriptor instance has the following attriubtes:
|
|
||||||
|
|
||||||
name: (str) Name of this field, exactly as it appears in .proto.
|
|
||||||
full_name: (str) Name of this field, including containing scope. This is
|
|
||||||
particularly relevant for extensions.
|
|
||||||
index: (int) Dense, 0-indexed index giving the order that this
|
|
||||||
field textually appears within its message in the .proto file.
|
|
||||||
number: (int) Tag number declared for this field in the .proto file.
|
|
||||||
|
|
||||||
type: (One of the TYPE_* constants below) Declared type.
|
|
||||||
cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
|
|
||||||
represent this field.
|
|
||||||
|
|
||||||
label: (One of the LABEL_* constants below) Tells whether this
|
|
||||||
field is optional, required, or repeated.
|
|
||||||
default_value: (Varies) Default value of this field. Only
|
|
||||||
meaningful for non-repeated scalar fields. Repeated fields
|
|
||||||
should always set this to [], and non-repeated composite
|
|
||||||
fields should always set this to None.
|
|
||||||
|
|
||||||
containing_type: (Descriptor) Descriptor of the protocol message
|
|
||||||
type that contains this field. Set by the Descriptor constructor
|
|
||||||
if we're passed into one.
|
|
||||||
Somewhat confusingly, for extension fields, this is the
|
|
||||||
descriptor of the EXTENDED message, not the descriptor
|
|
||||||
of the message containing this field. (See is_extension and
|
|
||||||
extension_scope below).
|
|
||||||
message_type: (Descriptor) If a composite field, a descriptor
|
|
||||||
of the message type contained in this field. Otherwise, this is None.
|
|
||||||
enum_type: (EnumDescriptor) If this field contains an enum, a
|
|
||||||
descriptor of that enum. Otherwise, this is None.
|
|
||||||
|
|
||||||
is_extension: True iff this describes an extension field.
|
|
||||||
extension_scope: (Descriptor) Only meaningful if is_extension is True.
|
|
||||||
Gives the message that immediately contains this extension field.
|
|
||||||
Will be None iff we're a top-level (file-level) extension field.
|
|
||||||
|
|
||||||
options: (descriptor_pb2.FieldOptions) Protocol message field options or
|
|
||||||
None to use default field options.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Must be consistent with C++ FieldDescriptor::Type enum in
|
|
||||||
# descriptor.h.
|
|
||||||
#
|
|
||||||
# TODO(robinson): Find a way to eliminate this repetition.
|
|
||||||
TYPE_DOUBLE = 1
|
|
||||||
TYPE_FLOAT = 2
|
|
||||||
TYPE_INT64 = 3
|
|
||||||
TYPE_UINT64 = 4
|
|
||||||
TYPE_INT32 = 5
|
|
||||||
TYPE_FIXED64 = 6
|
|
||||||
TYPE_FIXED32 = 7
|
|
||||||
TYPE_BOOL = 8
|
|
||||||
TYPE_STRING = 9
|
|
||||||
TYPE_GROUP = 10
|
|
||||||
TYPE_MESSAGE = 11
|
|
||||||
TYPE_BYTES = 12
|
|
||||||
TYPE_UINT32 = 13
|
|
||||||
TYPE_ENUM = 14
|
|
||||||
TYPE_SFIXED32 = 15
|
|
||||||
TYPE_SFIXED64 = 16
|
|
||||||
TYPE_SINT32 = 17
|
|
||||||
TYPE_SINT64 = 18
|
|
||||||
MAX_TYPE = 18
|
|
||||||
|
|
||||||
# Must be consistent with C++ FieldDescriptor::CppType enum in
|
|
||||||
# descriptor.h.
|
|
||||||
#
|
|
||||||
# TODO(robinson): Find a way to eliminate this repetition.
|
|
||||||
CPPTYPE_INT32 = 1
|
|
||||||
CPPTYPE_INT64 = 2
|
|
||||||
CPPTYPE_UINT32 = 3
|
|
||||||
CPPTYPE_UINT64 = 4
|
|
||||||
CPPTYPE_DOUBLE = 5
|
|
||||||
CPPTYPE_FLOAT = 6
|
|
||||||
CPPTYPE_BOOL = 7
|
|
||||||
CPPTYPE_ENUM = 8
|
|
||||||
CPPTYPE_STRING = 9
|
|
||||||
CPPTYPE_MESSAGE = 10
|
|
||||||
MAX_CPPTYPE = 10
|
|
||||||
|
|
||||||
# Must be consistent with C++ FieldDescriptor::Label enum in
|
|
||||||
# descriptor.h.
|
|
||||||
#
|
|
||||||
# TODO(robinson): Find a way to eliminate this repetition.
|
|
||||||
LABEL_OPTIONAL = 1
|
|
||||||
LABEL_REQUIRED = 2
|
|
||||||
LABEL_REPEATED = 3
|
|
||||||
MAX_LABEL = 3
|
|
||||||
|
|
||||||
def __init__(self, name, full_name, index, number, type, cpp_type, label,
|
|
||||||
default_value, message_type, enum_type, containing_type,
|
|
||||||
is_extension, extension_scope, options=None):
|
|
||||||
"""The arguments are as described in the description of FieldDescriptor
|
|
||||||
attributes above.
|
|
||||||
|
|
||||||
Note that containing_type may be None, and may be set later if necessary
|
|
||||||
(to deal with circular references between message types, for example).
|
|
||||||
Likewise for extension_scope.
|
|
||||||
"""
|
|
||||||
super(FieldDescriptor, self).__init__(options, 'FieldOptions')
|
|
||||||
self.name = name
|
|
||||||
self.full_name = full_name
|
|
||||||
self.index = index
|
|
||||||
self.number = number
|
|
||||||
self.type = type
|
|
||||||
self.cpp_type = cpp_type
|
|
||||||
self.label = label
|
|
||||||
self.default_value = default_value
|
|
||||||
self.containing_type = containing_type
|
|
||||||
self.message_type = message_type
|
|
||||||
self.enum_type = enum_type
|
|
||||||
self.is_extension = is_extension
|
|
||||||
self.extension_scope = extension_scope
|
|
||||||
|
|
||||||
|
|
||||||
class EnumDescriptor(DescriptorBase):
|
|
||||||
|
|
||||||
"""Descriptor for an enum defined in a .proto file.
|
|
||||||
|
|
||||||
An EnumDescriptor instance has the following attributes:
|
|
||||||
|
|
||||||
name: (str) Name of the enum type.
|
|
||||||
full_name: (str) Full name of the type, including package name
|
|
||||||
and any enclosing type(s).
|
|
||||||
filename: (str) Name of the .proto file in which this appears.
|
|
||||||
|
|
||||||
values: (list of EnumValueDescriptors) List of the values
|
|
||||||
in this enum.
|
|
||||||
values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
|
|
||||||
but indexed by the "name" field of each EnumValueDescriptor.
|
|
||||||
values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
|
|
||||||
but indexed by the "number" field of each EnumValueDescriptor.
|
|
||||||
containing_type: (Descriptor) Descriptor of the immediate containing
|
|
||||||
type of this enum, or None if this is an enum defined at the
|
|
||||||
top level in a .proto file. Set by Descriptor's constructor
|
|
||||||
if we're passed into one.
|
|
||||||
options: (descriptor_pb2.EnumOptions) Enum options message or
|
|
||||||
None to use default enum options.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, name, full_name, filename, values,
|
|
||||||
containing_type=None, options=None):
|
|
||||||
"""Arguments are as described in the attribute description above."""
|
|
||||||
super(EnumDescriptor, self).__init__(options, 'EnumOptions')
|
|
||||||
self.name = name
|
|
||||||
self.full_name = full_name
|
|
||||||
self.filename = filename
|
|
||||||
self.values = values
|
|
||||||
for value in self.values:
|
|
||||||
value.type = self
|
|
||||||
self.values_by_name = dict((v.name, v) for v in values)
|
|
||||||
self.values_by_number = dict((v.number, v) for v in values)
|
|
||||||
self.containing_type = containing_type
|
|
||||||
|
|
||||||
|
|
||||||
class EnumValueDescriptor(DescriptorBase):
|
|
||||||
|
|
||||||
"""Descriptor for a single value within an enum.
|
|
||||||
|
|
||||||
name: (str) Name of this value.
|
|
||||||
index: (int) Dense, 0-indexed index giving the order that this
|
|
||||||
value appears textually within its enum in the .proto file.
|
|
||||||
number: (int) Actual number assigned to this enum value.
|
|
||||||
type: (EnumDescriptor) EnumDescriptor to which this value
|
|
||||||
belongs. Set by EnumDescriptor's constructor if we're
|
|
||||||
passed into one.
|
|
||||||
options: (descriptor_pb2.EnumValueOptions) Enum value options message or
|
|
||||||
None to use default enum value options options.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, name, index, number, type=None, options=None):
|
|
||||||
"""Arguments are as described in the attribute description above."""
|
|
||||||
super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
|
|
||||||
self.name = name
|
|
||||||
self.index = index
|
|
||||||
self.number = number
|
|
||||||
self.type = type
|
|
||||||
|
|
||||||
|
|
||||||
class ServiceDescriptor(DescriptorBase):

  """Descriptor for a service.

  name: (str) Name of the service.
  full_name: (str) Full name of the service, including package name.
  index: (int) 0-indexed index giving the order that this services
    definition appears withing the .proto file.
  methods: (list of MethodDescriptor) List of methods provided by this
    service.
  options: (descriptor_pb2.ServiceOptions) Service options message or
    None to use default service options.
  """

  def __init__(self, name, full_name, index, methods, options=None):
    """Arguments are as described in the attribute description above."""
    super(ServiceDescriptor, self).__init__(options, 'ServiceOptions')
    self.name = name
    self.full_name = full_name
    self.index = index
    self.methods = methods
    # Each method gets a back-pointer to the service that owns it.
    for svc_method in self.methods:
      svc_method.containing_service = self

  def FindMethodByName(self, name):
    """Searches for the specified method, and returns its descriptor.

    Returns None when no method with that name exists in this service.
    """
    matches = [m for m in self.methods if m.name == name]
    if matches:
      return matches[0]
    return None
|
|
||||||
|
|
||||||
|
|
||||||
class MethodDescriptor(DescriptorBase):

  """Descriptor for a method in a service.

  name: (str) Name of the method within the service.
  full_name: (str) Full name of method.
  index: (int) 0-indexed index of the method inside the service.
  containing_service: (ServiceDescriptor) The service that contains this
    method.
  input_type: The descriptor of the message that this method accepts.
  output_type: The descriptor of the message that this method returns.
  options: (descriptor_pb2.MethodOptions) Method options message or
    None to use default method options.
  """

  def __init__(self, name, full_name, index, containing_service,
               input_type, output_type, options=None):
    """The arguments are as described in the description of MethodDescriptor
    attributes above.

    Note that containing_service may be None, and may be set later if necessary.
    """
    super(MethodDescriptor, self).__init__(options, 'MethodOptions')
    # Record the documented attributes verbatim; nothing here is computed.
    self.index = index
    self.name = name
    self.full_name = full_name
    self.input_type = input_type
    self.output_type = output_type
    self.containing_service = containing_service
|
|
||||||
|
|
||||||
|
|
||||||
def _ParseOptions(message, string):
|
|
||||||
"""Parses serialized options.
|
|
||||||
|
|
||||||
This helper function is used to parse serialized options in generated
|
|
||||||
proto2 files. It must not be used outside proto2.
|
|
||||||
"""
|
|
||||||
message.ParseFromString(string)
|
|
||||||
return message;
|
|
@ -1,950 +0,0 @@
|
|||||||
#!/usr/bin/python2.4
|
|
||||||
# Generated by the protocol buffer compiler. DO NOT EDIT!
|
|
||||||
|
|
||||||
from froofle.protobuf import descriptor
|
|
||||||
from froofle.protobuf import message
|
|
||||||
from froofle.protobuf import reflection
|
|
||||||
from froofle.protobuf import service
|
|
||||||
from froofle.protobuf import service_reflection
|
|
||||||
|
|
||||||
|
|
||||||
_FIELDDESCRIPTORPROTO_TYPE = descriptor.EnumDescriptor(
|
|
||||||
name='Type',
|
|
||||||
full_name='froofle.protobuf.FieldDescriptorProto.Type',
|
|
||||||
filename='Type',
|
|
||||||
values=[
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_DOUBLE', index=0, number=1,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_FLOAT', index=1, number=2,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_INT64', index=2, number=3,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_UINT64', index=3, number=4,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_INT32', index=4, number=5,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_FIXED64', index=5, number=6,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_FIXED32', index=6, number=7,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_BOOL', index=7, number=8,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_STRING', index=8, number=9,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_GROUP', index=9, number=10,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_MESSAGE', index=10, number=11,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_BYTES', index=11, number=12,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_UINT32', index=12, number=13,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_ENUM', index=13, number=14,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_SFIXED32', index=14, number=15,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_SFIXED64', index=15, number=16,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_SINT32', index=16, number=17,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='TYPE_SINT64', index=17, number=18,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
],
|
|
||||||
options=None,
|
|
||||||
)
|
|
||||||
|
|
||||||
_FIELDDESCRIPTORPROTO_LABEL = descriptor.EnumDescriptor(
|
|
||||||
name='Label',
|
|
||||||
full_name='froofle.protobuf.FieldDescriptorProto.Label',
|
|
||||||
filename='Label',
|
|
||||||
values=[
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='LABEL_OPTIONAL', index=0, number=1,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='LABEL_REQUIRED', index=1, number=2,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='LABEL_REPEATED', index=2, number=3,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
],
|
|
||||||
options=None,
|
|
||||||
)
|
|
||||||
|
|
||||||
_FILEOPTIONS_OPTIMIZEMODE = descriptor.EnumDescriptor(
|
|
||||||
name='OptimizeMode',
|
|
||||||
full_name='froofle.protobuf.FileOptions.OptimizeMode',
|
|
||||||
filename='OptimizeMode',
|
|
||||||
values=[
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='SPEED', index=0, number=1,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='CODE_SIZE', index=1, number=2,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
],
|
|
||||||
options=None,
|
|
||||||
)
|
|
||||||
|
|
||||||
_FIELDOPTIONS_CTYPE = descriptor.EnumDescriptor(
|
|
||||||
name='CType',
|
|
||||||
full_name='froofle.protobuf.FieldOptions.CType',
|
|
||||||
filename='CType',
|
|
||||||
values=[
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='CORD', index=0, number=1,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
descriptor.EnumValueDescriptor(
|
|
||||||
name='STRING_PIECE', index=1, number=2,
|
|
||||||
options=None,
|
|
||||||
type=None),
|
|
||||||
],
|
|
||||||
options=None,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
_FILEDESCRIPTORSET = descriptor.Descriptor(
|
|
||||||
name='FileDescriptorSet',
|
|
||||||
full_name='froofle.protobuf.FileDescriptorSet',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='file', full_name='froofle.protobuf.FileDescriptorSet.file', index=0,
|
|
||||||
number=1, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_FILEDESCRIPTORPROTO = descriptor.Descriptor(
|
|
||||||
name='FileDescriptorProto',
|
|
||||||
full_name='froofle.protobuf.FileDescriptorProto',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name', full_name='froofle.protobuf.FileDescriptorProto.name', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='package', full_name='froofle.protobuf.FileDescriptorProto.package', index=1,
|
|
||||||
number=2, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='dependency', full_name='froofle.protobuf.FileDescriptorProto.dependency', index=2,
|
|
||||||
number=3, type=9, cpp_type=9, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='message_type', full_name='froofle.protobuf.FileDescriptorProto.message_type', index=3,
|
|
||||||
number=4, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='enum_type', full_name='froofle.protobuf.FileDescriptorProto.enum_type', index=4,
|
|
||||||
number=5, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='service', full_name='froofle.protobuf.FileDescriptorProto.service', index=5,
|
|
||||||
number=6, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='extension', full_name='froofle.protobuf.FileDescriptorProto.extension', index=6,
|
|
||||||
number=7, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='options', full_name='froofle.protobuf.FileDescriptorProto.options', index=7,
|
|
||||||
number=8, type=11, cpp_type=10, label=1,
|
|
||||||
default_value=None,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_DESCRIPTORPROTO_EXTENSIONRANGE = descriptor.Descriptor(
|
|
||||||
name='ExtensionRange',
|
|
||||||
full_name='froofle.protobuf.DescriptorProto.ExtensionRange',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='start', full_name='froofle.protobuf.DescriptorProto.ExtensionRange.start', index=0,
|
|
||||||
number=1, type=5, cpp_type=1, label=1,
|
|
||||||
default_value=0,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='end', full_name='froofle.protobuf.DescriptorProto.ExtensionRange.end', index=1,
|
|
||||||
number=2, type=5, cpp_type=1, label=1,
|
|
||||||
default_value=0,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
_DESCRIPTORPROTO = descriptor.Descriptor(
|
|
||||||
name='DescriptorProto',
|
|
||||||
full_name='froofle.protobuf.DescriptorProto',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name', full_name='froofle.protobuf.DescriptorProto.name', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='field', full_name='froofle.protobuf.DescriptorProto.field', index=1,
|
|
||||||
number=2, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='extension', full_name='froofle.protobuf.DescriptorProto.extension', index=2,
|
|
||||||
number=6, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='nested_type', full_name='froofle.protobuf.DescriptorProto.nested_type', index=3,
|
|
||||||
number=3, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='enum_type', full_name='froofle.protobuf.DescriptorProto.enum_type', index=4,
|
|
||||||
number=4, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='extension_range', full_name='froofle.protobuf.DescriptorProto.extension_range', index=5,
|
|
||||||
number=5, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='options', full_name='froofle.protobuf.DescriptorProto.options', index=6,
|
|
||||||
number=7, type=11, cpp_type=10, label=1,
|
|
||||||
default_value=None,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_FIELDDESCRIPTORPROTO = descriptor.Descriptor(
|
|
||||||
name='FieldDescriptorProto',
|
|
||||||
full_name='froofle.protobuf.FieldDescriptorProto',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name', full_name='froofle.protobuf.FieldDescriptorProto.name', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='number', full_name='froofle.protobuf.FieldDescriptorProto.number', index=1,
|
|
||||||
number=3, type=5, cpp_type=1, label=1,
|
|
||||||
default_value=0,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='label', full_name='froofle.protobuf.FieldDescriptorProto.label', index=2,
|
|
||||||
number=4, type=14, cpp_type=8, label=1,
|
|
||||||
default_value=1,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='type', full_name='froofle.protobuf.FieldDescriptorProto.type', index=3,
|
|
||||||
number=5, type=14, cpp_type=8, label=1,
|
|
||||||
default_value=1,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='type_name', full_name='froofle.protobuf.FieldDescriptorProto.type_name', index=4,
|
|
||||||
number=6, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='extendee', full_name='froofle.protobuf.FieldDescriptorProto.extendee', index=5,
|
|
||||||
number=2, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='default_value', full_name='froofle.protobuf.FieldDescriptorProto.default_value', index=6,
|
|
||||||
number=7, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='options', full_name='froofle.protobuf.FieldDescriptorProto.options', index=7,
|
|
||||||
number=8, type=11, cpp_type=10, label=1,
|
|
||||||
default_value=None,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
_FIELDDESCRIPTORPROTO_TYPE,
|
|
||||||
_FIELDDESCRIPTORPROTO_LABEL,
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_ENUMDESCRIPTORPROTO = descriptor.Descriptor(
|
|
||||||
name='EnumDescriptorProto',
|
|
||||||
full_name='froofle.protobuf.EnumDescriptorProto',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name', full_name='froofle.protobuf.EnumDescriptorProto.name', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='value', full_name='froofle.protobuf.EnumDescriptorProto.value', index=1,
|
|
||||||
number=2, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='options', full_name='froofle.protobuf.EnumDescriptorProto.options', index=2,
|
|
||||||
number=3, type=11, cpp_type=10, label=1,
|
|
||||||
default_value=None,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_ENUMVALUEDESCRIPTORPROTO = descriptor.Descriptor(
|
|
||||||
name='EnumValueDescriptorProto',
|
|
||||||
full_name='froofle.protobuf.EnumValueDescriptorProto',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name', full_name='froofle.protobuf.EnumValueDescriptorProto.name', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='number', full_name='froofle.protobuf.EnumValueDescriptorProto.number', index=1,
|
|
||||||
number=2, type=5, cpp_type=1, label=1,
|
|
||||||
default_value=0,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='options', full_name='froofle.protobuf.EnumValueDescriptorProto.options', index=2,
|
|
||||||
number=3, type=11, cpp_type=10, label=1,
|
|
||||||
default_value=None,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_SERVICEDESCRIPTORPROTO = descriptor.Descriptor(
|
|
||||||
name='ServiceDescriptorProto',
|
|
||||||
full_name='froofle.protobuf.ServiceDescriptorProto',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name', full_name='froofle.protobuf.ServiceDescriptorProto.name', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='method', full_name='froofle.protobuf.ServiceDescriptorProto.method', index=1,
|
|
||||||
number=2, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='options', full_name='froofle.protobuf.ServiceDescriptorProto.options', index=2,
|
|
||||||
number=3, type=11, cpp_type=10, label=1,
|
|
||||||
default_value=None,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_METHODDESCRIPTORPROTO = descriptor.Descriptor(
|
|
||||||
name='MethodDescriptorProto',
|
|
||||||
full_name='froofle.protobuf.MethodDescriptorProto',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name', full_name='froofle.protobuf.MethodDescriptorProto.name', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='input_type', full_name='froofle.protobuf.MethodDescriptorProto.input_type', index=1,
|
|
||||||
number=2, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='output_type', full_name='froofle.protobuf.MethodDescriptorProto.output_type', index=2,
|
|
||||||
number=3, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='options', full_name='froofle.protobuf.MethodDescriptorProto.options', index=3,
|
|
||||||
number=4, type=11, cpp_type=10, label=1,
|
|
||||||
default_value=None,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_FILEOPTIONS = descriptor.Descriptor(
|
|
||||||
name='FileOptions',
|
|
||||||
full_name='froofle.protobuf.FileOptions',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='java_package', full_name='froofle.protobuf.FileOptions.java_package', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='java_outer_classname', full_name='froofle.protobuf.FileOptions.java_outer_classname', index=1,
|
|
||||||
number=8, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='java_multiple_files', full_name='froofle.protobuf.FileOptions.java_multiple_files', index=2,
|
|
||||||
number=10, type=8, cpp_type=7, label=1,
|
|
||||||
default_value=False,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='optimize_for', full_name='froofle.protobuf.FileOptions.optimize_for', index=3,
|
|
||||||
number=9, type=14, cpp_type=8, label=1,
|
|
||||||
default_value=2,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='uninterpreted_option', full_name='froofle.protobuf.FileOptions.uninterpreted_option', index=4,
|
|
||||||
number=999, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
_FILEOPTIONS_OPTIMIZEMODE,
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_MESSAGEOPTIONS = descriptor.Descriptor(
|
|
||||||
name='MessageOptions',
|
|
||||||
full_name='froofle.protobuf.MessageOptions',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='message_set_wire_format', full_name='froofle.protobuf.MessageOptions.message_set_wire_format', index=0,
|
|
||||||
number=1, type=8, cpp_type=7, label=1,
|
|
||||||
default_value=False,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='uninterpreted_option', full_name='froofle.protobuf.MessageOptions.uninterpreted_option', index=1,
|
|
||||||
number=999, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_FIELDOPTIONS = descriptor.Descriptor(
|
|
||||||
name='FieldOptions',
|
|
||||||
full_name='froofle.protobuf.FieldOptions',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='ctype', full_name='froofle.protobuf.FieldOptions.ctype', index=0,
|
|
||||||
number=1, type=14, cpp_type=8, label=1,
|
|
||||||
default_value=1,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='experimental_map_key', full_name='froofle.protobuf.FieldOptions.experimental_map_key', index=1,
|
|
||||||
number=9, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='uninterpreted_option', full_name='froofle.protobuf.FieldOptions.uninterpreted_option', index=2,
|
|
||||||
number=999, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
_FIELDOPTIONS_CTYPE,
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_ENUMOPTIONS = descriptor.Descriptor(
|
|
||||||
name='EnumOptions',
|
|
||||||
full_name='froofle.protobuf.EnumOptions',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='uninterpreted_option', full_name='froofle.protobuf.EnumOptions.uninterpreted_option', index=0,
|
|
||||||
number=999, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_ENUMVALUEOPTIONS = descriptor.Descriptor(
|
|
||||||
name='EnumValueOptions',
|
|
||||||
full_name='froofle.protobuf.EnumValueOptions',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='uninterpreted_option', full_name='froofle.protobuf.EnumValueOptions.uninterpreted_option', index=0,
|
|
||||||
number=999, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_SERVICEOPTIONS = descriptor.Descriptor(
|
|
||||||
name='ServiceOptions',
|
|
||||||
full_name='froofle.protobuf.ServiceOptions',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='uninterpreted_option', full_name='froofle.protobuf.ServiceOptions.uninterpreted_option', index=0,
|
|
||||||
number=999, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_METHODOPTIONS = descriptor.Descriptor(
|
|
||||||
name='MethodOptions',
|
|
||||||
full_name='froofle.protobuf.MethodOptions',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='uninterpreted_option', full_name='froofle.protobuf.MethodOptions.uninterpreted_option', index=0,
|
|
||||||
number=999, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_UNINTERPRETEDOPTION_NAMEPART = descriptor.Descriptor(
|
|
||||||
name='NamePart',
|
|
||||||
full_name='froofle.protobuf.UninterpretedOption.NamePart',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name_part', full_name='froofle.protobuf.UninterpretedOption.NamePart.name_part', index=0,
|
|
||||||
number=1, type=9, cpp_type=9, label=2,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='is_extension', full_name='froofle.protobuf.UninterpretedOption.NamePart.is_extension', index=1,
|
|
||||||
number=2, type=8, cpp_type=7, label=2,
|
|
||||||
default_value=False,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
_UNINTERPRETEDOPTION = descriptor.Descriptor(
|
|
||||||
name='UninterpretedOption',
|
|
||||||
full_name='froofle.protobuf.UninterpretedOption',
|
|
||||||
filename='froofle/protobuf/descriptor.proto',
|
|
||||||
containing_type=None,
|
|
||||||
fields=[
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='name', full_name='froofle.protobuf.UninterpretedOption.name', index=0,
|
|
||||||
number=2, type=11, cpp_type=10, label=3,
|
|
||||||
default_value=[],
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='identifier_value', full_name='froofle.protobuf.UninterpretedOption.identifier_value', index=1,
|
|
||||||
number=3, type=9, cpp_type=9, label=1,
|
|
||||||
default_value=unicode("", "utf-8"),
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='positive_int_value', full_name='froofle.protobuf.UninterpretedOption.positive_int_value', index=2,
|
|
||||||
number=4, type=4, cpp_type=4, label=1,
|
|
||||||
default_value=0,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='negative_int_value', full_name='froofle.protobuf.UninterpretedOption.negative_int_value', index=3,
|
|
||||||
number=5, type=3, cpp_type=2, label=1,
|
|
||||||
default_value=0,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='double_value', full_name='froofle.protobuf.UninterpretedOption.double_value', index=4,
|
|
||||||
number=6, type=1, cpp_type=5, label=1,
|
|
||||||
default_value=0,
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
descriptor.FieldDescriptor(
|
|
||||||
name='string_value', full_name='froofle.protobuf.UninterpretedOption.string_value', index=5,
|
|
||||||
number=7, type=12, cpp_type=9, label=1,
|
|
||||||
default_value="",
|
|
||||||
message_type=None, enum_type=None, containing_type=None,
|
|
||||||
is_extension=False, extension_scope=None,
|
|
||||||
options=None),
|
|
||||||
],
|
|
||||||
extensions=[
|
|
||||||
],
|
|
||||||
nested_types=[], # TODO(robinson): Implement.
|
|
||||||
enum_types=[
|
|
||||||
],
|
|
||||||
options=None)
|
|
||||||
|
|
||||||
|
|
||||||
_FILEDESCRIPTORSET.fields_by_name['file'].message_type = _FILEDESCRIPTORPROTO
|
|
||||||
_FILEDESCRIPTORPROTO.fields_by_name['message_type'].message_type = _DESCRIPTORPROTO
|
|
||||||
_FILEDESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
|
|
||||||
_FILEDESCRIPTORPROTO.fields_by_name['service'].message_type = _SERVICEDESCRIPTORPROTO
|
|
||||||
_FILEDESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
|
|
||||||
_FILEDESCRIPTORPROTO.fields_by_name['options'].message_type = _FILEOPTIONS
|
|
||||||
_DESCRIPTORPROTO.fields_by_name['field'].message_type = _FIELDDESCRIPTORPROTO
|
|
||||||
_DESCRIPTORPROTO.fields_by_name['extension'].message_type = _FIELDDESCRIPTORPROTO
|
|
||||||
_DESCRIPTORPROTO.fields_by_name['nested_type'].message_type = _DESCRIPTORPROTO
|
|
||||||
_DESCRIPTORPROTO.fields_by_name['enum_type'].message_type = _ENUMDESCRIPTORPROTO
|
|
||||||
_DESCRIPTORPROTO.fields_by_name['extension_range'].message_type = _DESCRIPTORPROTO_EXTENSIONRANGE
|
|
||||||
_DESCRIPTORPROTO.fields_by_name['options'].message_type = _MESSAGEOPTIONS
|
|
||||||
_FIELDDESCRIPTORPROTO.fields_by_name['label'].enum_type = _FIELDDESCRIPTORPROTO_LABEL
|
|
||||||
_FIELDDESCRIPTORPROTO.fields_by_name['type'].enum_type = _FIELDDESCRIPTORPROTO_TYPE
|
|
||||||
_FIELDDESCRIPTORPROTO.fields_by_name['options'].message_type = _FIELDOPTIONS
|
|
||||||
_ENUMDESCRIPTORPROTO.fields_by_name['value'].message_type = _ENUMVALUEDESCRIPTORPROTO
|
|
||||||
_ENUMDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMOPTIONS
|
|
||||||
_ENUMVALUEDESCRIPTORPROTO.fields_by_name['options'].message_type = _ENUMVALUEOPTIONS
|
|
||||||
_SERVICEDESCRIPTORPROTO.fields_by_name['method'].message_type = _METHODDESCRIPTORPROTO
|
|
||||||
_SERVICEDESCRIPTORPROTO.fields_by_name['options'].message_type = _SERVICEOPTIONS
|
|
||||||
_METHODDESCRIPTORPROTO.fields_by_name['options'].message_type = _METHODOPTIONS
|
|
||||||
_FILEOPTIONS.fields_by_name['optimize_for'].enum_type = _FILEOPTIONS_OPTIMIZEMODE
|
|
||||||
_FILEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
|
||||||
_MESSAGEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
|
||||||
_FIELDOPTIONS.fields_by_name['ctype'].enum_type = _FIELDOPTIONS_CTYPE
|
|
||||||
_FIELDOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
|
||||||
_ENUMOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
|
||||||
_ENUMVALUEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
|
||||||
_SERVICEOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
|
||||||
_METHODOPTIONS.fields_by_name['uninterpreted_option'].message_type = _UNINTERPRETEDOPTION
|
|
||||||
_UNINTERPRETEDOPTION.fields_by_name['name'].message_type = _UNINTERPRETEDOPTION_NAMEPART
|
|
||||||
|
|
||||||
class FileDescriptorSet(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _FILEDESCRIPTORSET
|
|
||||||
|
|
||||||
class FileDescriptorProto(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _FILEDESCRIPTORPROTO
|
|
||||||
|
|
||||||
class DescriptorProto(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
|
|
||||||
class ExtensionRange(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _DESCRIPTORPROTO_EXTENSIONRANGE
|
|
||||||
DESCRIPTOR = _DESCRIPTORPROTO
|
|
||||||
|
|
||||||
class FieldDescriptorProto(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _FIELDDESCRIPTORPROTO
|
|
||||||
|
|
||||||
class EnumDescriptorProto(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _ENUMDESCRIPTORPROTO
|
|
||||||
|
|
||||||
class EnumValueDescriptorProto(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _ENUMVALUEDESCRIPTORPROTO
|
|
||||||
|
|
||||||
class ServiceDescriptorProto(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _SERVICEDESCRIPTORPROTO
|
|
||||||
|
|
||||||
class MethodDescriptorProto(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _METHODDESCRIPTORPROTO
|
|
||||||
|
|
||||||
class FileOptions(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _FILEOPTIONS
|
|
||||||
|
|
||||||
class MessageOptions(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _MESSAGEOPTIONS
|
|
||||||
|
|
||||||
class FieldOptions(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _FIELDOPTIONS
|
|
||||||
|
|
||||||
class EnumOptions(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _ENUMOPTIONS
|
|
||||||
|
|
||||||
class EnumValueOptions(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _ENUMVALUEOPTIONS
|
|
||||||
|
|
||||||
class ServiceOptions(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _SERVICEOPTIONS
|
|
||||||
|
|
||||||
class MethodOptions(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _METHODOPTIONS
|
|
||||||
|
|
||||||
class UninterpretedOption(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
|
|
||||||
class NamePart(message.Message):
|
|
||||||
__metaclass__ = reflection.GeneratedProtocolMessageType
|
|
||||||
DESCRIPTOR = _UNINTERPRETEDOPTION_NAMEPART
|
|
||||||
DESCRIPTOR = _UNINTERPRETEDOPTION
|
|
||||||
|
|
@ -1,209 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""Class for decoding protocol buffer primitives.
|
|
||||||
|
|
||||||
Contains the logic for decoding every logical protocol field type
|
|
||||||
from one of the 5 physical wire types.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
import struct
|
|
||||||
from froofle.protobuf import message
|
|
||||||
from froofle.protobuf.internal import input_stream
|
|
||||||
from froofle.protobuf.internal import wire_format
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Note that much of this code is ported from //net/proto/ProtocolBuffer, and
|
|
||||||
# that the interface is strongly inspired by WireFormat from the C++ proto2
|
|
||||||
# implementation.
|
|
||||||
|
|
||||||
|
|
||||||
class Decoder(object):
|
|
||||||
|
|
||||||
"""Decodes logical protocol buffer fields from the wire."""
|
|
||||||
|
|
||||||
def __init__(self, s):
|
|
||||||
"""Initializes the decoder to read from s.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
s: An immutable sequence of bytes, which must be accessible
|
|
||||||
via the Python buffer() primitive (i.e., buffer(s)).
|
|
||||||
"""
|
|
||||||
self._stream = input_stream.InputStream(s)
|
|
||||||
|
|
||||||
def EndOfStream(self):
|
|
||||||
"""Returns true iff we've reached the end of the bytes we're reading."""
|
|
||||||
return self._stream.EndOfStream()
|
|
||||||
|
|
||||||
def Position(self):
|
|
||||||
"""Returns the 0-indexed position in |s|."""
|
|
||||||
return self._stream.Position()
|
|
||||||
|
|
||||||
def ReadFieldNumberAndWireType(self):
|
|
||||||
"""Reads a tag from the wire. Returns a (field_number, wire_type) pair."""
|
|
||||||
tag_and_type = self.ReadUInt32()
|
|
||||||
return wire_format.UnpackTag(tag_and_type)
|
|
||||||
|
|
||||||
def SkipBytes(self, bytes):
|
|
||||||
"""Skips the specified number of bytes on the wire."""
|
|
||||||
self._stream.SkipBytes(bytes)
|
|
||||||
|
|
||||||
# Note that the Read*() methods below are not exactly symmetrical with the
|
|
||||||
# corresponding Encoder.Append*() methods. Those Encoder methods first
|
|
||||||
# encode a tag, but the Read*() methods below assume that the tag has already
|
|
||||||
# been read, and that the client wishes to read a field of the specified type
|
|
||||||
# starting at the current position.
|
|
||||||
|
|
||||||
def ReadInt32(self):
|
|
||||||
"""Reads and returns a signed, varint-encoded, 32-bit integer."""
|
|
||||||
return self._stream.ReadVarint32()
|
|
||||||
|
|
||||||
def ReadInt64(self):
|
|
||||||
"""Reads and returns a signed, varint-encoded, 64-bit integer."""
|
|
||||||
return self._stream.ReadVarint64()
|
|
||||||
|
|
||||||
def ReadUInt32(self):
|
|
||||||
"""Reads and returns an signed, varint-encoded, 32-bit integer."""
|
|
||||||
return self._stream.ReadVarUInt32()
|
|
||||||
|
|
||||||
def ReadUInt64(self):
|
|
||||||
"""Reads and returns an signed, varint-encoded,64-bit integer."""
|
|
||||||
return self._stream.ReadVarUInt64()
|
|
||||||
|
|
||||||
def ReadSInt32(self):
|
|
||||||
"""Reads and returns a signed, zigzag-encoded, varint-encoded,
|
|
||||||
32-bit integer."""
|
|
||||||
return wire_format.ZigZagDecode(self._stream.ReadVarUInt32())
|
|
||||||
|
|
||||||
def ReadSInt64(self):
|
|
||||||
"""Reads and returns a signed, zigzag-encoded, varint-encoded,
|
|
||||||
64-bit integer."""
|
|
||||||
return wire_format.ZigZagDecode(self._stream.ReadVarUInt64())
|
|
||||||
|
|
||||||
def ReadFixed32(self):
|
|
||||||
"""Reads and returns an unsigned, fixed-width, 32-bit integer."""
|
|
||||||
return self._stream.ReadLittleEndian32()
|
|
||||||
|
|
||||||
def ReadFixed64(self):
|
|
||||||
"""Reads and returns an unsigned, fixed-width, 64-bit integer."""
|
|
||||||
return self._stream.ReadLittleEndian64()
|
|
||||||
|
|
||||||
def ReadSFixed32(self):
|
|
||||||
"""Reads and returns a signed, fixed-width, 32-bit integer."""
|
|
||||||
value = self._stream.ReadLittleEndian32()
|
|
||||||
if value >= (1 << 31):
|
|
||||||
value -= (1 << 32)
|
|
||||||
return value
|
|
||||||
|
|
||||||
def ReadSFixed64(self):
|
|
||||||
"""Reads and returns a signed, fixed-width, 64-bit integer."""
|
|
||||||
value = self._stream.ReadLittleEndian64()
|
|
||||||
if value >= (1 << 63):
|
|
||||||
value -= (1 << 64)
|
|
||||||
return value
|
|
||||||
|
|
||||||
def ReadFloat(self):
|
|
||||||
"""Reads and returns a 4-byte floating-point number."""
|
|
||||||
serialized = self._stream.ReadBytes(4)
|
|
||||||
return struct.unpack('f', serialized)[0]
|
|
||||||
|
|
||||||
def ReadDouble(self):
|
|
||||||
"""Reads and returns an 8-byte floating-point number."""
|
|
||||||
serialized = self._stream.ReadBytes(8)
|
|
||||||
return struct.unpack('d', serialized)[0]
|
|
||||||
|
|
||||||
def ReadBool(self):
|
|
||||||
"""Reads and returns a bool."""
|
|
||||||
i = self._stream.ReadVarUInt32()
|
|
||||||
return bool(i)
|
|
||||||
|
|
||||||
def ReadEnum(self):
|
|
||||||
"""Reads and returns an enum value."""
|
|
||||||
return self._stream.ReadVarUInt32()
|
|
||||||
|
|
||||||
def ReadString(self):
|
|
||||||
"""Reads and returns a length-delimited string."""
|
|
||||||
bytes = self.ReadBytes()
|
|
||||||
return unicode(bytes, 'utf-8')
|
|
||||||
|
|
||||||
def ReadBytes(self):
|
|
||||||
"""Reads and returns a length-delimited byte sequence."""
|
|
||||||
length = self._stream.ReadVarUInt32()
|
|
||||||
return self._stream.ReadBytes(length)
|
|
||||||
|
|
||||||
def ReadMessageInto(self, msg):
|
|
||||||
"""Calls msg.MergeFromString() to merge
|
|
||||||
length-delimited serialized message data into |msg|.
|
|
||||||
|
|
||||||
REQUIRES: The decoder must be positioned at the serialized "length"
|
|
||||||
prefix to a length-delmiited serialized message.
|
|
||||||
|
|
||||||
POSTCONDITION: The decoder is positioned just after the
|
|
||||||
serialized message, and we have merged those serialized
|
|
||||||
contents into |msg|.
|
|
||||||
"""
|
|
||||||
length = self._stream.ReadVarUInt32()
|
|
||||||
sub_buffer = self._stream.GetSubBuffer(length)
|
|
||||||
num_bytes_used = msg.MergeFromString(sub_buffer)
|
|
||||||
if num_bytes_used != length:
|
|
||||||
raise message.DecodeError(
|
|
||||||
'Submessage told to deserialize from %d-byte encoding, '
|
|
||||||
'but used only %d bytes' % (length, num_bytes_used))
|
|
||||||
self._stream.SkipBytes(num_bytes_used)
|
|
||||||
|
|
||||||
def ReadGroupInto(self, expected_field_number, group):
|
|
||||||
"""Calls group.MergeFromString() to merge
|
|
||||||
END_GROUP-delimited serialized message data into |group|.
|
|
||||||
We'll raise an exception if we don't find an END_GROUP
|
|
||||||
tag immediately after the serialized message contents.
|
|
||||||
|
|
||||||
REQUIRES: The decoder is positioned just after the START_GROUP
|
|
||||||
tag for this group.
|
|
||||||
|
|
||||||
POSTCONDITION: The decoder is positioned just after the
|
|
||||||
END_GROUP tag for this group, and we have merged
|
|
||||||
the contents of the group into |group|.
|
|
||||||
"""
|
|
||||||
sub_buffer = self._stream.GetSubBuffer() # No a priori length limit.
|
|
||||||
num_bytes_used = group.MergeFromString(sub_buffer)
|
|
||||||
if num_bytes_used < 0:
|
|
||||||
raise message.DecodeError('Group message reported negative bytes read.')
|
|
||||||
self._stream.SkipBytes(num_bytes_used)
|
|
||||||
field_number, field_type = self.ReadFieldNumberAndWireType()
|
|
||||||
if field_type != wire_format.WIRETYPE_END_GROUP:
|
|
||||||
raise message.DecodeError('Group message did not end with an END_GROUP.')
|
|
||||||
if field_number != expected_field_number:
|
|
||||||
raise message.DecodeError('END_GROUP tag had field '
|
|
||||||
'number %d, was expecting field number %d' % (
|
|
||||||
field_number, expected_field_number))
|
|
||||||
# We're now positioned just after the END_GROUP tag. Perfect.
|
|
@ -1,206 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""Class for encoding protocol message primitives.
|
|
||||||
|
|
||||||
Contains the logic for encoding every logical protocol field type
|
|
||||||
into one of the 5 physical wire types.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
import struct
|
|
||||||
from froofle.protobuf import message
|
|
||||||
from froofle.protobuf.internal import wire_format
|
|
||||||
from froofle.protobuf.internal import output_stream
|
|
||||||
|
|
||||||
|
|
||||||
# Note that much of this code is ported from //net/proto/ProtocolBuffer, and
|
|
||||||
# that the interface is strongly inspired by WireFormat from the C++ proto2
|
|
||||||
# implementation.
|
|
||||||
|
|
||||||
|
|
||||||
class Encoder(object):
|
|
||||||
|
|
||||||
"""Encodes logical protocol buffer fields to the wire format."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self._stream = output_stream.OutputStream()
|
|
||||||
|
|
||||||
def ToString(self):
|
|
||||||
"""Returns all values encoded in this object as a string."""
|
|
||||||
return self._stream.ToString()
|
|
||||||
|
|
||||||
# All the Append*() methods below first append a tag+type pair to the buffer
|
|
||||||
# before appending the specified value.
|
|
||||||
|
|
||||||
def AppendInt32(self, field_number, value):
|
|
||||||
"""Appends a 32-bit integer to our buffer, varint-encoded."""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
|
||||||
self._stream.AppendVarint32(value)
|
|
||||||
|
|
||||||
def AppendInt64(self, field_number, value):
|
|
||||||
"""Appends a 64-bit integer to our buffer, varint-encoded."""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
|
||||||
self._stream.AppendVarint64(value)
|
|
||||||
|
|
||||||
def AppendUInt32(self, field_number, unsigned_value):
|
|
||||||
"""Appends an unsigned 32-bit integer to our buffer, varint-encoded."""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
|
||||||
self._stream.AppendVarUInt32(unsigned_value)
|
|
||||||
|
|
||||||
def AppendUInt64(self, field_number, unsigned_value):
|
|
||||||
"""Appends an unsigned 64-bit integer to our buffer, varint-encoded."""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
|
||||||
self._stream.AppendVarUInt64(unsigned_value)
|
|
||||||
|
|
||||||
def AppendSInt32(self, field_number, value):
|
|
||||||
"""Appends a 32-bit integer to our buffer, zigzag-encoded and then
|
|
||||||
varint-encoded.
|
|
||||||
"""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
|
||||||
zigzag_value = wire_format.ZigZagEncode(value)
|
|
||||||
self._stream.AppendVarUInt32(zigzag_value)
|
|
||||||
|
|
||||||
def AppendSInt64(self, field_number, value):
|
|
||||||
"""Appends a 64-bit integer to our buffer, zigzag-encoded and then
|
|
||||||
varint-encoded.
|
|
||||||
"""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_VARINT)
|
|
||||||
zigzag_value = wire_format.ZigZagEncode(value)
|
|
||||||
self._stream.AppendVarUInt64(zigzag_value)
|
|
||||||
|
|
||||||
def AppendFixed32(self, field_number, unsigned_value):
|
|
||||||
"""Appends an unsigned 32-bit integer to our buffer, in little-endian
|
|
||||||
byte-order.
|
|
||||||
"""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32)
|
|
||||||
self._stream.AppendLittleEndian32(unsigned_value)
|
|
||||||
|
|
||||||
def AppendFixed64(self, field_number, unsigned_value):
|
|
||||||
"""Appends an unsigned 64-bit integer to our buffer, in little-endian
|
|
||||||
byte-order.
|
|
||||||
"""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64)
|
|
||||||
self._stream.AppendLittleEndian64(unsigned_value)
|
|
||||||
|
|
||||||
def AppendSFixed32(self, field_number, value):
|
|
||||||
"""Appends a signed 32-bit integer to our buffer, in little-endian
|
|
||||||
byte-order.
|
|
||||||
"""
|
|
||||||
sign = (value & 0x80000000) and -1 or 0
|
|
||||||
if value >> 32 != sign:
|
|
||||||
raise message.EncodeError('SFixed32 out of range: %d' % value)
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32)
|
|
||||||
self._stream.AppendLittleEndian32(value & 0xffffffff)
|
|
||||||
|
|
||||||
def AppendSFixed64(self, field_number, value):
|
|
||||||
"""Appends a signed 64-bit integer to our buffer, in little-endian
|
|
||||||
byte-order.
|
|
||||||
"""
|
|
||||||
sign = (value & 0x8000000000000000) and -1 or 0
|
|
||||||
if value >> 64 != sign:
|
|
||||||
raise message.EncodeError('SFixed64 out of range: %d' % value)
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64)
|
|
||||||
self._stream.AppendLittleEndian64(value & 0xffffffffffffffff)
|
|
||||||
|
|
||||||
def AppendFloat(self, field_number, value):
|
|
||||||
"""Appends a floating-point number to our buffer."""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED32)
|
|
||||||
self._stream.AppendRawBytes(struct.pack('f', value))
|
|
||||||
|
|
||||||
def AppendDouble(self, field_number, value):
|
|
||||||
"""Appends a double-precision floating-point number to our buffer."""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_FIXED64)
|
|
||||||
self._stream.AppendRawBytes(struct.pack('d', value))
|
|
||||||
|
|
||||||
def AppendBool(self, field_number, value):
|
|
||||||
"""Appends a boolean to our buffer."""
|
|
||||||
self.AppendInt32(field_number, value)
|
|
||||||
|
|
||||||
def AppendEnum(self, field_number, value):
|
|
||||||
"""Appends an enum value to our buffer."""
|
|
||||||
self.AppendInt32(field_number, value)
|
|
||||||
|
|
||||||
def AppendString(self, field_number, value):
|
|
||||||
"""Appends a length-prefixed unicode string, encoded as UTF-8 to our buffer,
|
|
||||||
with the length varint-encoded.
|
|
||||||
"""
|
|
||||||
self.AppendBytes(field_number, value.encode('utf-8'))
|
|
||||||
|
|
||||||
def AppendBytes(self, field_number, value):
|
|
||||||
"""Appends a length-prefixed sequence of bytes to our buffer, with the
|
|
||||||
length varint-encoded.
|
|
||||||
"""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
|
|
||||||
self._stream.AppendVarUInt32(len(value))
|
|
||||||
self._stream.AppendRawBytes(value)
|
|
||||||
|
|
||||||
# TODO(robinson): For AppendGroup() and AppendMessage(), we'd really like to
|
|
||||||
# avoid the extra string copy here. We can do so if we widen the Message
|
|
||||||
# interface to be able to serialize to a stream in addition to a string. The
|
|
||||||
# challenge when thinking ahead to the Python/C API implementation of Message
|
|
||||||
# is finding a stream-like Python thing to which we can write raw bytes
|
|
||||||
# from C. I'm not sure such a thing exists(?). (array.array is pretty much
|
|
||||||
# what we want, but it's not directly exposed in the Python/C API).
|
|
||||||
|
|
||||||
def AppendGroup(self, field_number, group):
|
|
||||||
"""Appends a group to our buffer.
|
|
||||||
"""
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_START_GROUP)
|
|
||||||
self._stream.AppendRawBytes(group.SerializeToString())
|
|
||||||
self._AppendTag(field_number, wire_format.WIRETYPE_END_GROUP)
|
|
||||||
|
|
||||||
def AppendMessage(self, field_number, msg):
  """Appends a nested message to our buffer.

  Wire layout: tag, msg's byte size as a varint, then msg's serialized
  bytes.
  """
  self._AppendTag(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  self._stream.AppendVarUInt32(msg.ByteSize())
  self._stream.AppendRawBytes(msg.SerializeToString())
def AppendMessageSetItem(self, field_number, msg):
  """Appends an item using the message set wire format.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # The caller's field number is written as the group's type_id (field 2),
  # and the payload message as field 3, inside an Item group (field 1).
  self._AppendTag(1, wire_format.WIRETYPE_START_GROUP)
  self.AppendInt32(2, field_number)
  self.AppendMessage(3, msg)
  self._AppendTag(1, wire_format.WIRETYPE_END_GROUP)
def _AppendTag(self, field_number, wire_type):
  """Appends a tag containing field number and wire type information."""
  # PackTag combines both values into the single varint the wire expects.
  self._stream.AppendVarUInt32(wire_format.PackTag(field_number, wire_type))
@ -1,326 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""InputStream is the primitive interface for reading bits from the wire.
|
|
||||||
|
|
||||||
All protocol buffer deserialization can be expressed in terms of
|
|
||||||
the InputStream primitives provided here.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
import struct
|
|
||||||
from array import array
|
|
||||||
from froofle.protobuf import message
|
|
||||||
from froofle.protobuf.internal import wire_format
|
|
||||||
|
|
||||||
|
|
||||||
# Note that much of this code is ported from //net/proto/ProtocolBuffer, and
|
|
||||||
# that the interface is strongly inspired by CodedInputStream from the C++
|
|
||||||
# proto2 implementation.
|
|
||||||
|
|
||||||
|
|
||||||
class InputStreamBuffer(object):

  """Contains all logic for reading bits, and dealing with stream position.

  If an InputStream method ever raises an exception, the stream is left
  in an indeterminate state and is not safe for further use.
  """

  def __init__(self, s):
    # What we really want is something like array('B', s), where elements we
    # read from the array are already given to us as one-byte integers.  BUT
    # using array() instead of buffer() would force full string copies to result
    # from each GetSubBuffer() call.
    #
    # So, if the N serialized bytes of a single protocol buffer object are
    # split evenly between 2 child messages, and so on recursively, using
    # array('B', s) instead of buffer() would incur an additional N*logN bytes
    # copied during deserialization.
    #
    # The higher constant overhead of having to ord() for every byte we read
    # from the buffer in _ReadVarintHelper() could definitely lead to worse
    # performance in many real-world scenarios, even if the asymptotic
    # complexity is better.  However, our real answer is that the mythical
    # Python/C extension module output mode for the protocol compiler will
    # be blazing-fast and will eliminate most use of this class anyway.
    self._buffer = buffer(s)
    # Current read offset into self._buffer, in bytes.
    self._pos = 0

  def EndOfStream(self):
    """Returns true iff we're at the end of the stream.
    If this returns true, then a call to any other InputStream method
    will raise an exception.
    """
    return self._pos >= len(self._buffer)

  def Position(self):
    """Returns the current position in the stream, or equivalently, the
    number of bytes read so far.
    """
    return self._pos

  def GetSubBuffer(self, size=None):
    """Returns a sequence-like object that represents a portion of our
    underlying sequence.

    Position 0 in the returned object corresponds to self.Position()
    in this stream.

    If size is specified, then the returned object ends after the
    next "size" bytes in this stream.  If size is not specified,
    then the returned object ends at the end of this stream.

    We guarantee that the returned object R supports the Python buffer
    interface (and thus that the call buffer(R) will work).

    Note that the returned buffer is read-only.

    The intended use for this method is for nested-message and nested-group
    deserialization, where we want to make a recursive MergeFromString()
    call on the portion of the original sequence that contains the serialized
    nested message.  (And we'd like to do so without making unnecessary string
    copies).

    REQUIRES: size is nonnegative.

    Raises:
      message.DecodeError: if size is negative.
    """
    # Note that buffer() doesn't perform any actual string copy.
    if size is None:
      return buffer(self._buffer, self._pos)
    else:
      if size < 0:
        raise message.DecodeError('Negative size %d' % size)
      return buffer(self._buffer, self._pos, size)

  def SkipBytes(self, num_bytes):
    """Skip num_bytes bytes ahead, or go to the end of the stream, whichever
    comes first.

    REQUIRES: num_bytes is nonnegative.

    Raises:
      message.DecodeError: if num_bytes is negative.
    """
    if num_bytes < 0:
      raise message.DecodeError('Negative num_bytes %d' % num_bytes)
    self._pos += num_bytes
    # Clamp so the position never points past the end of the buffer.
    self._pos = min(self._pos, len(self._buffer))

  def ReadBytes(self, size):
    """Reads up to 'size' bytes from the stream, stopping early
    only if we reach the end of the stream.  Returns the bytes read
    as a string.

    Raises:
      message.DecodeError: if size is negative.
    """
    if size < 0:
      raise message.DecodeError('Negative size %d' % size)
    s = (self._buffer[self._pos : self._pos + size])
    self._pos += len(s)  # Only advance by the number of bytes actually read.
    return s

  def ReadLittleEndian32(self):
    """Interprets the next 4 bytes of the stream as a little-endian
    encoded, unsigned 32-bit integer, and returns that integer.

    Raises:
      message.DecodeError: on struct unpacking failure (e.g. truncated
        input shorter than 4 bytes).
    """
    try:
      i = struct.unpack(wire_format.FORMAT_UINT32_LITTLE_ENDIAN,
                        self._buffer[self._pos : self._pos + 4])
      self._pos += 4
      return i[0]  # unpack() result is a 1-element tuple.
    except struct.error, e:
      raise message.DecodeError(e)

  def ReadLittleEndian64(self):
    """Interprets the next 8 bytes of the stream as a little-endian
    encoded, unsigned 64-bit integer, and returns that integer.

    Raises:
      message.DecodeError: on struct unpacking failure (e.g. truncated
        input shorter than 8 bytes).
    """
    try:
      i = struct.unpack(wire_format.FORMAT_UINT64_LITTLE_ENDIAN,
                        self._buffer[self._pos : self._pos + 8])
      self._pos += 8
      return i[0]  # unpack() result is a 1-element tuple.
    except struct.error, e:
      raise message.DecodeError(e)

  def ReadVarint32(self):
    """Reads a varint from the stream, interprets this varint
    as a signed, 32-bit integer, and returns the integer.

    Raises:
      message.DecodeError: if the decoded value is outside int32 range.
    """
    i = self.ReadVarint64()
    if not wire_format.INT32_MIN <= i <= wire_format.INT32_MAX:
      raise message.DecodeError('Value out of range for int32: %d' % i)
    return int(i)

  def ReadVarUInt32(self):
    """Reads a varint from the stream, interprets this varint
    as an unsigned, 32-bit integer, and returns the integer.

    Raises:
      message.DecodeError: if the decoded value is outside uint32 range.
    """
    i = self.ReadVarUInt64()
    if i > wire_format.UINT32_MAX:
      raise message.DecodeError('Value out of range for uint32: %d' % i)
    return i

  def ReadVarint64(self):
    """Reads a varint from the stream, interprets this varint
    as a signed, 64-bit integer, and returns the integer.
    """
    i = self.ReadVarUInt64()
    # Unsigned values above INT64_MAX are the two's-complement wire images
    # of negative numbers; map them back to the negative range.
    if i > wire_format.INT64_MAX:
      i -= (1 << 64)
    return i

  def ReadVarUInt64(self):
    """Reads a varint from the stream, interprets this varint
    as an unsigned, 64-bit integer, and returns the integer.

    Raises:
      message.DecodeError: if the decoded value is outside uint64 range.
    """
    i = self._ReadVarintHelper()
    if not 0 <= i <= wire_format.UINT64_MAX:
      raise message.DecodeError('Value out of range for uint64: %d' % i)
    return i

  def _ReadVarintHelper(self):
    """Helper for the various varint-reading methods above.
    Reads an unsigned, varint-encoded integer from the stream and
    returns this integer.

    Does no bounds checking except to ensure that we read at most as many bytes
    as could possibly be present in a varint-encoded 64-bit number.

    Raises:
      message.DecodeError: if the varint overruns 64 bits or the stream
        ends mid-varint.
    """
    result = 0
    shift = 0
    while 1:
      # A 64-bit varint carries at most ten 7-bit groups; shift >= 64
      # means we have already consumed all of them.
      if shift >= 64:
        raise message.DecodeError('Too many bytes when decoding varint.')
      try:
        # buffer() yields one-character strings, so ord() is needed here.
        b = ord(self._buffer[self._pos])
      except IndexError:
        raise message.DecodeError('Truncated varint.')
      self._pos += 1
      result |= ((b & 0x7f) << shift)
      shift += 7
      # The high bit of each byte signals that more bytes follow.
      if not (b & 0x80):
        return result
|
|
||||||
class InputStreamArray(object):

  """Fallback implementation of the InputStream interface, backed by
  array('B') instead of buffer().

  Mirrors InputStreamBuffer, but GetSubBuffer() and ReadBytes() return
  copied strings (via tostring()) rather than zero-copy buffer views.
  Used where buffer() is unavailable (e.g. Google App Engine; see the
  InputStream selection at the bottom of this module).
  """

  def __init__(self, s):
    self._buffer = array('B', s)
    # Current read offset into self._buffer, in bytes.
    self._pos = 0

  def EndOfStream(self):
    """Returns true iff we're at the end of the stream."""
    return self._pos >= len(self._buffer)

  def Position(self):
    """Returns the number of bytes read so far."""
    return self._pos

  def GetSubBuffer(self, size=None):
    """Returns a string copy of the rest of the stream, or of the next
    'size' bytes if size is given.  See InputStreamBuffer.GetSubBuffer().

    Raises:
      message.DecodeError: if size is negative.
    """
    if size is None:
      return self._buffer[self._pos : ].tostring()
    else:
      if size < 0:
        raise message.DecodeError('Negative size %d' % size)
      return self._buffer[self._pos : self._pos + size].tostring()

  def SkipBytes(self, num_bytes):
    """Skips num_bytes ahead, clamped to the end of the stream.

    Raises:
      message.DecodeError: if num_bytes is negative.
    """
    if num_bytes < 0:
      raise message.DecodeError('Negative num_bytes %d' % num_bytes)
    self._pos += num_bytes
    self._pos = min(self._pos, len(self._buffer))

  def ReadBytes(self, size):
    """Reads up to 'size' bytes, stopping early only at end of stream.

    Raises:
      message.DecodeError: if size is negative.
    """
    if size < 0:
      raise message.DecodeError('Negative size %d' % size)
    s = self._buffer[self._pos : self._pos + size].tostring()
    self._pos += len(s)  # Only advance by the number of bytes actually read.
    return s

  def ReadLittleEndian32(self):
    """Reads the next 4 bytes as a little-endian unsigned 32-bit integer.

    Raises:
      message.DecodeError: on struct unpacking failure.
    """
    try:
      i = struct.unpack(wire_format.FORMAT_UINT32_LITTLE_ENDIAN,
                        self._buffer[self._pos : self._pos + 4])
      self._pos += 4
      return i[0]  # unpack() result is a 1-element tuple.
    except struct.error, e:
      raise message.DecodeError(e)

  def ReadLittleEndian64(self):
    """Reads the next 8 bytes as a little-endian unsigned 64-bit integer.

    Raises:
      message.DecodeError: on struct unpacking failure.
    """
    try:
      i = struct.unpack(wire_format.FORMAT_UINT64_LITTLE_ENDIAN,
                        self._buffer[self._pos : self._pos + 8])
      self._pos += 8
      return i[0]  # unpack() result is a 1-element tuple.
    except struct.error, e:
      raise message.DecodeError(e)

  def ReadVarint32(self):
    """Reads a varint and interprets it as a signed 32-bit integer.

    Raises:
      message.DecodeError: if the decoded value is outside int32 range.
    """
    i = self.ReadVarint64()
    if not wire_format.INT32_MIN <= i <= wire_format.INT32_MAX:
      raise message.DecodeError('Value out of range for int32: %d' % i)
    return int(i)

  def ReadVarUInt32(self):
    """Reads a varint and interprets it as an unsigned 32-bit integer.

    Raises:
      message.DecodeError: if the decoded value is outside uint32 range.
    """
    i = self.ReadVarUInt64()
    if i > wire_format.UINT32_MAX:
      raise message.DecodeError('Value out of range for uint32: %d' % i)
    return i

  def ReadVarint64(self):
    """Reads a varint and interprets it as a signed 64-bit integer."""
    i = self.ReadVarUInt64()
    # Values above INT64_MAX are two's-complement images of negatives.
    if i > wire_format.INT64_MAX:
      i -= (1 << 64)
    return i

  def ReadVarUInt64(self):
    """Reads a varint and interprets it as an unsigned 64-bit integer.

    Raises:
      message.DecodeError: if the decoded value is outside uint64 range.
    """
    i = self._ReadVarintHelper()
    if not 0 <= i <= wire_format.UINT64_MAX:
      raise message.DecodeError('Value out of range for uint64: %d' % i)
    return i

  def _ReadVarintHelper(self):
    """Reads an unsigned varint-encoded integer from the stream.

    Raises:
      message.DecodeError: if the varint overruns 64 bits or the stream
        ends mid-varint.
    """
    result = 0
    shift = 0
    while 1:
      if shift >= 64:
        raise message.DecodeError('Too many bytes when decoding varint.')
      try:
        # array('B') elements are already ints, so no ord() is needed here.
        b = self._buffer[self._pos]
      except IndexError:
        raise message.DecodeError('Truncated varint.')
      self._pos += 1
      result |= ((b & 0x7f) << shift)
      shift += 7
      # The high bit of each byte signals that more bytes follow.
      if not (b & 0x80):
        return result
|
|
||||||
# Select the InputStream implementation for this runtime: prefer the
# buffer()-backed stream, falling back to the array-backed one where the
# sandbox disables buffer().
try:
  buffer("")
  InputStream = InputStreamBuffer
except NotImplementedError:
  # Google App Engine: dev_appserver.py
  InputStream = InputStreamArray
except RuntimeError:
  # Google App Engine: production
  InputStream = InputStreamArray
|
|
@ -1,69 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""Defines a listener interface for observing certain
|
|
||||||
state transitions on Message objects.
|
|
||||||
|
|
||||||
Also defines a null implementation of this interface.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
|
|
||||||
class MessageListener(object):

  """Listens for transitions to nonempty and for invalidations of cached
  byte sizes.  Meant to be registered via Message._SetListener().

  This is an abstract interface; both methods must be overridden by
  concrete listeners.
  """

  def TransitionToNonempty(self):
    """Called the *first* time that this message becomes nonempty.

    Implementations are free (but not required) to call this method
    multiple times after the message has become nonempty.
    """
    raise NotImplementedError

  def ByteSizeDirty(self):
    """Called *every* time the cached byte size value for this object
    is invalidated (transitions from being "clean" to "dirty").
    """
    raise NotImplementedError
|
|
||||||
class NullMessageListener(object):

  """No-op MessageListener implementation.

  Useful as a default when no observer is interested in a message's
  state transitions.
  """

  def TransitionToNonempty(self):
    """Ignores the nonempty transition."""
    pass

  def ByteSizeDirty(self):
    """Ignores the byte-size invalidation."""
    pass
|
@ -1,125 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""OutputStream is the primitive interface for sticking bits on the wire.
|
|
||||||
|
|
||||||
All protocol buffer serialization can be expressed in terms of
|
|
||||||
the OutputStream primitives provided here.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
import array
|
|
||||||
import struct
|
|
||||||
from froofle.protobuf import message
|
|
||||||
from froofle.protobuf.internal import wire_format
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# Note that much of this code is ported from //net/proto/ProtocolBuffer, and
|
|
||||||
# that the interface is strongly inspired by CodedOutputStream from the C++
|
|
||||||
# proto2 implementation.
|
|
||||||
|
|
||||||
|
|
||||||
class OutputStream(object):

  """Contains all logic for writing bits, and ToString() to get the result."""

  def __init__(self):
    # array('B') stores unsigned bytes compactly; fromstring() lets us
    # append an entire raw string at once.
    self._buffer = array.array('B')

  def AppendRawBytes(self, raw_bytes):
    """Appends raw_bytes verbatim to our internal buffer."""
    self._buffer.fromstring(raw_bytes)

  def AppendLittleEndian32(self, unsigned_value):
    """Appends an unsigned 32-bit integer to the internal buffer,
    in little-endian byte order.

    Raises:
      message.EncodeError: if unsigned_value is outside uint32 range.
    """
    if not 0 <= unsigned_value <= wire_format.UINT32_MAX:
      raise message.EncodeError(
          'Unsigned 32-bit out of range: %d' % unsigned_value)
    packed = struct.pack(
        wire_format.FORMAT_UINT32_LITTLE_ENDIAN, unsigned_value)
    self._buffer.fromstring(packed)

  def AppendLittleEndian64(self, unsigned_value):
    """Appends an unsigned 64-bit integer to the internal buffer,
    in little-endian byte order.

    Raises:
      message.EncodeError: if unsigned_value is outside uint64 range.
    """
    if not 0 <= unsigned_value <= wire_format.UINT64_MAX:
      raise message.EncodeError(
          'Unsigned 64-bit out of range: %d' % unsigned_value)
    packed = struct.pack(
        wire_format.FORMAT_UINT64_LITTLE_ENDIAN, unsigned_value)
    self._buffer.fromstring(packed)

  def AppendVarint32(self, value):
    """Appends a signed 32-bit integer to the internal buffer,
    encoded as a varint.  (Note that a negative varint32 will
    always require 10 bytes of space.)

    Raises:
      message.EncodeError: if value is outside int32 range.
    """
    if wire_format.INT32_MIN <= value <= wire_format.INT32_MAX:
      self.AppendVarint64(value)
    else:
      raise message.EncodeError('Value out of range: %d' % value)

  def AppendVarUInt32(self, value):
    """Appends an unsigned 32-bit integer to the internal buffer,
    encoded as a varint.

    Raises:
      message.EncodeError: if value is outside uint32 range.
    """
    if 0 <= value <= wire_format.UINT32_MAX:
      self.AppendVarUInt64(value)
    else:
      raise message.EncodeError('Value out of range: %d' % value)

  def AppendVarint64(self, value):
    """Appends a signed 64-bit integer to the internal buffer,
    encoded as a varint.

    Raises:
      message.EncodeError: if value is outside int64 range.
    """
    if not wire_format.INT64_MIN <= value <= wire_format.INT64_MAX:
      raise message.EncodeError('Value out of range: %d' % value)
    # Masking to 64 bits maps a negative value onto its two's-complement
    # image, which is how negative varints go on the wire; nonnegative
    # values are unchanged.
    self.AppendVarUInt64(value & 0xFFFFFFFFFFFFFFFF)

  def AppendVarUInt64(self, unsigned_value):
    """Appends an unsigned 64-bit integer to the internal buffer,
    encoded as a varint.

    Raises:
      message.EncodeError: if unsigned_value is outside uint64 range.
    """
    if not 0 <= unsigned_value <= wire_format.UINT64_MAX:
      raise message.EncodeError('Value out of range: %d' % unsigned_value)
    append = self._buffer.append
    # Emit 7 bits per byte, least-significant group first; the high bit
    # of each byte marks "more bytes follow".  The final byte (value
    # <= 0x7f) has the high bit clear.
    while unsigned_value > 0x7f:
      append(0x80 | (unsigned_value & 0x7f))
      unsigned_value >>= 7
    append(unsigned_value)

  def ToString(self):
    """Returns a string containing the bytes in our internal buffer."""
    return self._buffer.tostring()
|
@ -1,268 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""Provides type checking routines.
|
|
||||||
|
|
||||||
This module defines type checking utilities in the forms of dictionaries:
|
|
||||||
|
|
||||||
VALUE_CHECKERS: A dictionary of field types and a value validation object.
|
|
||||||
TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
|
|
||||||
function.
|
|
||||||
TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
|
|
||||||
function.
|
|
||||||
FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
corresponding wire types.
|
|
||||||
TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
|
|
||||||
function.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
from froofle.protobuf.internal import decoder
|
|
||||||
from froofle.protobuf.internal import encoder
|
|
||||||
from froofle.protobuf.internal import wire_format
|
|
||||||
from froofle.protobuf import descriptor
|
|
||||||
|
|
||||||
_FieldDescriptor = descriptor.FieldDescriptor
|
|
||||||
|
|
||||||
|
|
||||||
def GetTypeChecker(cpp_type, field_type):
  """Returns a type checker for a message field of the specified types.

  Args:
    cpp_type: C++ type of the field (see descriptor.py).
    field_type: Protocol message field type (see descriptor.py).

  Returns:
    An instance of TypeChecker which can be used to verify the types
    of values assigned to a field of the specified type.
  """
  # Fields declared as TYPE_STRING need the unicode-aware checker; every
  # other combination is served from the per-cpp_type lookup table.
  needs_unicode_checker = (cpp_type == _FieldDescriptor.CPPTYPE_STRING and
                           field_type == _FieldDescriptor.TYPE_STRING)
  if needs_unicode_checker:
    return UnicodeValueChecker()
  return _VALUE_CHECKERS[cpp_type]
|
|
||||||
|
|
||||||
# None of the typecheckers below make any attempt to guard against people
|
|
||||||
# subclassing builtin types and doing weird things. We're not trying to
|
|
||||||
# protect against malicious clients here, just people accidentally shooting
|
|
||||||
# themselves in the foot in obvious ways.
|
|
||||||
|
|
||||||
class TypeChecker(object):

  """Type checker used to catch type errors as early as possible
  when the client is setting scalar fields in protocol messages.
  """

  def __init__(self, *acceptable_types):
    # Tuple of types that CheckValue() will accept via isinstance().
    self._acceptable_types = acceptable_types

  def CheckValue(self, proposed_value):
    """Raises TypeError unless proposed_value is an acceptable type."""
    if isinstance(proposed_value, self._acceptable_types):
      return
    message = ('%.1024r has type %s, but expected one of: %s' %
               (proposed_value, type(proposed_value), self._acceptable_types))
    raise TypeError(message)
|
|
||||||
|
|
||||||
# IntValueChecker and its subclasses perform integer type-checks
|
|
||||||
# and bounds-checks.
|
|
||||||
class IntValueChecker(object):

  """Checker used for integer fields.  Performs type-check and range check.

  Subclasses supply the inclusive bounds as class attributes _MIN and _MAX.
  """

  def CheckValue(self, proposed_value):
    # TypeError for non-integers, ValueError for out-of-range integers.
    if not isinstance(proposed_value, (int, long)):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (int, long)))
      raise TypeError(message)
    if not self._MIN <= proposed_value <= self._MAX:
      raise ValueError('Value out of range: %d' % proposed_value)
|
|
||||||
|
|
||||||
class UnicodeValueChecker(object):

  """Checker used for string fields.

  Accepts unicode objects, and str objects only when they decode as
  7-bit ASCII.
  """

  def CheckValue(self, proposed_value):
    # TypeError for non-strings, ValueError for non-ASCII str values.
    if not isinstance(proposed_value, (str, unicode)):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (str, unicode)))
      raise TypeError(message)

    # If the value is of type 'str' make sure that it is in 7-bit ASCII
    # encoding.
    if isinstance(proposed_value, str):
      try:
        unicode(proposed_value, 'ascii')
      except UnicodeDecodeError:
        raise ValueError('%.1024r isn\'t in 7-bit ASCII encoding.'
                         % (proposed_value))
|
|
||||||
|
|
||||||
class Int32ValueChecker(IntValueChecker):
  """Range checker for int32 fields: [-2**31, 2**31 - 1]."""
  # We're sure to use ints instead of longs here since comparison may be more
  # efficient.
  _MIN = -2147483648
  _MAX = 2147483647
|
|
||||||
|
|
||||||
class Uint32ValueChecker(IntValueChecker):
  """Range checker for uint32 fields: [0, 2**32 - 1]."""
  _MIN = 0
  _MAX = (1 << 32) - 1
|
|
||||||
|
|
||||||
class Int64ValueChecker(IntValueChecker):
  """Range checker for int64 fields: [-2**63, 2**63 - 1]."""
  _MIN = -(1 << 63)
  _MAX = (1 << 63) - 1
|
|
||||||
|
|
||||||
class Uint64ValueChecker(IntValueChecker):
  # Unsigned 64-bit range: [0, 2**64 - 1].
  _MIN = 0
  _MAX = (1 << 64) - 1
|
|
||||||
|
|
||||||
|
|
||||||
# Type-checkers for all scalar CPPTYPEs.
# Maps each scalar C++-type enum value to the checker instance used to
# validate values assigned to fields of that type.
_VALUE_CHECKERS = {
    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
    # Floating-point fields accept any numeric value; no range check here.
    _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker(
        float, int, long),
    _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker(
        float, int, long),
    _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int),
    # Enum values are validated like int32 (enums travel as int32 on the wire).
    _FieldDescriptor.CPPTYPE_ENUM: Int32ValueChecker(),
    # NOTE(review): plain TypeChecker(str) rather than UnicodeValueChecker;
    # presumably intentional in this snapshot -- confirm before changing.
    _FieldDescriptor.CPPTYPE_STRING: TypeChecker(str),
    }
|
|
||||||
|
|
||||||
|
|
||||||
# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type.  This
# byte size includes tag information and any other additional space
# associated with serializing "value".
TYPE_TO_BYTE_SIZE_FN = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
    }
|
|
||||||
|
|
||||||
|
|
||||||
# Maps from field type to an unbound Encoder method F, such that
# F(encoder, field_number, value) will append the serialization
# of a value of this type to the encoder.
_Encoder = encoder.Encoder  # Local alias keeps the table entries short.
TYPE_TO_SERIALIZE_METHOD = {
    _FieldDescriptor.TYPE_DOUBLE: _Encoder.AppendDouble,
    _FieldDescriptor.TYPE_FLOAT: _Encoder.AppendFloat,
    _FieldDescriptor.TYPE_INT64: _Encoder.AppendInt64,
    _FieldDescriptor.TYPE_UINT64: _Encoder.AppendUInt64,
    _FieldDescriptor.TYPE_INT32: _Encoder.AppendInt32,
    _FieldDescriptor.TYPE_FIXED64: _Encoder.AppendFixed64,
    _FieldDescriptor.TYPE_FIXED32: _Encoder.AppendFixed32,
    _FieldDescriptor.TYPE_BOOL: _Encoder.AppendBool,
    _FieldDescriptor.TYPE_STRING: _Encoder.AppendString,
    _FieldDescriptor.TYPE_GROUP: _Encoder.AppendGroup,
    _FieldDescriptor.TYPE_MESSAGE: _Encoder.AppendMessage,
    _FieldDescriptor.TYPE_BYTES: _Encoder.AppendBytes,
    _FieldDescriptor.TYPE_UINT32: _Encoder.AppendUInt32,
    _FieldDescriptor.TYPE_ENUM: _Encoder.AppendEnum,
    _FieldDescriptor.TYPE_SFIXED32: _Encoder.AppendSFixed32,
    _FieldDescriptor.TYPE_SFIXED64: _Encoder.AppendSFixed64,
    _FieldDescriptor.TYPE_SINT32: _Encoder.AppendSInt32,
    _FieldDescriptor.TYPE_SINT64: _Encoder.AppendSInt64,
    }
|
|
||||||
|
|
||||||
|
|
||||||
# Maps from field type to expected wiretype.  Used during parsing to
# reject fields whose declared type disagrees with the tag's wire type.
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
    }
|
|
||||||
|
|
||||||
|
|
||||||
# Maps from field type to an unbound Decoder method F,
# such that F(decoder) will read a field of the requested type.
#
# Note that Message and Group are intentionally missing here.
# They're handled by _RecursivelyMerge().
_Decoder = decoder.Decoder  # Local alias keeps the table entries short.
TYPE_TO_DESERIALIZE_METHOD = {
    _FieldDescriptor.TYPE_DOUBLE: _Decoder.ReadDouble,
    _FieldDescriptor.TYPE_FLOAT: _Decoder.ReadFloat,
    _FieldDescriptor.TYPE_INT64: _Decoder.ReadInt64,
    _FieldDescriptor.TYPE_UINT64: _Decoder.ReadUInt64,
    _FieldDescriptor.TYPE_INT32: _Decoder.ReadInt32,
    _FieldDescriptor.TYPE_FIXED64: _Decoder.ReadFixed64,
    _FieldDescriptor.TYPE_FIXED32: _Decoder.ReadFixed32,
    _FieldDescriptor.TYPE_BOOL: _Decoder.ReadBool,
    _FieldDescriptor.TYPE_STRING: _Decoder.ReadString,
    _FieldDescriptor.TYPE_BYTES: _Decoder.ReadBytes,
    _FieldDescriptor.TYPE_UINT32: _Decoder.ReadUInt32,
    _FieldDescriptor.TYPE_ENUM: _Decoder.ReadEnum,
    _FieldDescriptor.TYPE_SFIXED32: _Decoder.ReadSFixed32,
    _FieldDescriptor.TYPE_SFIXED64: _Decoder.ReadSFixed64,
    _FieldDescriptor.TYPE_SINT32: _Decoder.ReadSInt32,
    _FieldDescriptor.TYPE_SINT64: _Decoder.ReadSInt64,
    }
|
|
@ -1,236 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""Constants and static functions to support protocol buffer wire format."""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
import struct
|
|
||||||
from froofle.protobuf import message
|
|
||||||
|
|
||||||
|
|
||||||
TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag.
|
|
||||||
_TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7
|
|
||||||
|
|
||||||
# These numbers identify the wire type of a protocol buffer value.
|
|
||||||
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
|
|
||||||
# tag-and-type to store one of these WIRETYPE_* constants.
|
|
||||||
# These values must match WireType enum in //net/proto2/public/wire_format.h.
|
|
||||||
WIRETYPE_VARINT = 0
|
|
||||||
WIRETYPE_FIXED64 = 1
|
|
||||||
WIRETYPE_LENGTH_DELIMITED = 2
|
|
||||||
WIRETYPE_START_GROUP = 3
|
|
||||||
WIRETYPE_END_GROUP = 4
|
|
||||||
WIRETYPE_FIXED32 = 5
|
|
||||||
_WIRETYPE_MAX = 5
|
|
||||||
|
|
||||||
|
|
||||||
# Bounds for various integer types.
|
|
||||||
INT32_MAX = int((1 << 31) - 1)
|
|
||||||
INT32_MIN = int(-(1 << 31))
|
|
||||||
UINT32_MAX = (1 << 32) - 1
|
|
||||||
|
|
||||||
INT64_MAX = (1 << 63) - 1
|
|
||||||
INT64_MIN = -(1 << 63)
|
|
||||||
UINT64_MAX = (1 << 64) - 1
|
|
||||||
|
|
||||||
# "struct" format strings that will encode/decode the specified formats.
|
|
||||||
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
|
|
||||||
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
|
|
||||||
|
|
||||||
|
|
||||||
# We'll have to provide alternate implementations of AppendLittleEndian*() on
|
|
||||||
# any architectures where these checks fail.
|
|
||||||
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
|
|
||||||
raise AssertionError('Format "I" is not a 32-bit number.')
|
|
||||||
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
|
|
||||||
raise AssertionError('Format "Q" is not a 64-bit number.')
|
|
||||||
|
|
||||||
|
|
||||||
def PackTag(field_number, wire_type):
  """Returns an unsigned 32-bit integer that encodes the field number and
  wire type information in standard protocol message wire format.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29)
    wire_type: One of the WIRETYPE_* constants.

  Raises:
    message.EncodeError: if wire_type is outside the valid range.
  """
  if wire_type < 0 or wire_type > _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  # The wire type occupies the low TAG_TYPE_BITS bits; the field number
  # fills the bits above it.
  return (field_number << TAG_TYPE_BITS) | wire_type
|
|
||||||
|
|
||||||
|
|
||||||
def UnpackTag(tag):
  """The inverse of PackTag().  Given an unsigned 32-bit number,
  returns a (field_number, wire_type) tuple.
  """
  field_number = tag >> TAG_TYPE_BITS
  wire_type = tag & _TAG_TYPE_MASK
  return field_number, wire_type
|
|
||||||
|
|
||||||
|
|
||||||
def ZigZagEncode(value):
  """ZigZag Transform:  Encodes signed integers so that they can be
  effectively used with varint encoding.  See wire_format.h for
  more details.
  """
  # Non-negative n maps to 2n; negative n maps to -2n - 1, so values
  # of small magnitude stay small after encoding.
  if value < 0:
    return (value << 1) ^ ~0
  return value << 1
|
|
||||||
|
|
||||||
|
|
||||||
def ZigZagDecode(value):
  """Inverse of ZigZagEncode()."""
  # Odd encoded values came from negatives; even ones from non-negatives.
  if value & 0x1:
    return (value >> 1) ^ ~0
  return value >> 1
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
# The *ByteSize() functions below return the number of bytes required to
|
|
||||||
# serialize "field number + type" information and then serialize the value.
|
|
||||||
|
|
||||||
|
|
||||||
def Int32ByteSize(field_number, int32):
  """Returns the byte size of an int32 field, including its tag."""
  # int32 is varint-encoded identically to int64 on the wire.
  return Int64ByteSize(field_number, int32)
|
|
||||||
|
|
||||||
|
|
||||||
def Int64ByteSize(field_number, int64):
  """Returns the byte size of an int64 field, including its tag."""
  # Have to convert to uint before calling UInt64ByteSize().
  # Negative values are masked into their 10-byte two's-complement varints.
  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
|
|
||||||
|
|
||||||
|
|
||||||
def UInt32ByteSize(field_number, uint32):
  """Returns the byte size of a uint32 field, including its tag."""
  return UInt64ByteSize(field_number, uint32)
|
|
||||||
|
|
||||||
|
|
||||||
def UInt64ByteSize(field_number, uint64):
  """Returns the byte size of a uint64 field: tag varint + value varint."""
  return _TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
|
|
||||||
|
|
||||||
|
|
||||||
def SInt32ByteSize(field_number, int32):
  """Returns the byte size of an sint32 field (zigzag-encoded varint)."""
  return UInt32ByteSize(field_number, ZigZagEncode(int32))
|
|
||||||
|
|
||||||
|
|
||||||
def SInt64ByteSize(field_number, int64):
  """Returns the byte size of an sint64 field (zigzag-encoded varint)."""
  return UInt64ByteSize(field_number, ZigZagEncode(int64))
|
|
||||||
|
|
||||||
|
|
||||||
def Fixed32ByteSize(field_number, fixed32):
  """Returns the byte size of a fixed32 field: tag + 4 payload bytes."""
  return _TagByteSize(field_number) + 4
|
|
||||||
|
|
||||||
|
|
||||||
def Fixed64ByteSize(field_number, fixed64):
  """Returns the byte size of a fixed64 field: tag + 8 payload bytes."""
  return _TagByteSize(field_number) + 8
|
|
||||||
|
|
||||||
|
|
||||||
def SFixed32ByteSize(field_number, sfixed32):
  """Returns the byte size of an sfixed32 field: tag + 4 payload bytes."""
  return _TagByteSize(field_number) + 4
|
|
||||||
|
|
||||||
|
|
||||||
def SFixed64ByteSize(field_number, sfixed64):
  """Returns the byte size of an sfixed64 field: tag + 8 payload bytes."""
  return _TagByteSize(field_number) + 8
|
|
||||||
|
|
||||||
|
|
||||||
def FloatByteSize(field_number, flt):
  """Returns the byte size of a float field: tag + 4 payload bytes."""
  return _TagByteSize(field_number) + 4
|
|
||||||
|
|
||||||
|
|
||||||
def DoubleByteSize(field_number, double):
  """Returns the byte size of a double field: tag + 8 payload bytes."""
  return _TagByteSize(field_number) + 8
|
|
||||||
|
|
||||||
|
|
||||||
def BoolByteSize(field_number, b):
  """Returns the byte size of a bool field: tag + one payload byte."""
  return _TagByteSize(field_number) + 1
|
|
||||||
|
|
||||||
|
|
||||||
def EnumByteSize(field_number, enum):
  """Returns the byte size of an enum field (varint-encoded like uint32)."""
  return UInt32ByteSize(field_number, enum)
|
|
||||||
|
|
||||||
|
|
||||||
def StringByteSize(field_number, string):
  """Returns the byte size of a string field, measured after UTF-8 encoding."""
  return BytesByteSize(field_number, string.encode('utf-8'))
|
|
||||||
|
|
||||||
|
|
||||||
def BytesByteSize(field_number, b):
  """Returns the byte size of a bytes field: tag + length varint + payload."""
  return (_TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(len(b))
          + len(b))
|
|
||||||
|
|
||||||
|
|
||||||
def GroupByteSize(field_number, message):
  """Returns the byte size of a group field: START/END tags + contents.

  NOTE(review): the 'message' parameter shadows the module-level 'message'
  import; harmless here, but kept as-is since the name is callable by keyword.
  """
  return (2 * _TagByteSize(field_number)  # START and END group.
          + message.ByteSize())
|
|
||||||
|
|
||||||
|
|
||||||
def MessageByteSize(field_number, message):
  """Returns the byte size of an embedded message field, including its tag.

  Wire layout: tag varint + payload-length varint + payload bytes.

  NOTE(review): the 'message' parameter shadows the module-level 'message'
  import; kept as-is since the name is callable by keyword.
  """
  # Compute ByteSize() once instead of twice: it recursively walks the whole
  # sub-message, so the duplicate call in the original version was pure waste.
  message_size = message.ByteSize()
  return (_TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(message_size)
          + message_size)
|
|
||||||
|
|
||||||
|
|
||||||
def MessageSetItemByteSize(field_number, msg):
  """Returns the byte size of one MessageSet item for field_number/msg.

  A MessageSet item is a group (field 1) containing a type_id (field 2)
  and the serialized message payload (field 3).
  """
  # First compute the sizes of the tags.
  # There are 2 tags for the beginning and ending of the repeated group, that
  # is field number 1, one with field number 2 (type_id) and one with field
  # number 3 (message).
  total_size = (2 * _TagByteSize(1) + _TagByteSize(2) + _TagByteSize(3))

  # Add the number of bytes for type_id.
  total_size += _VarUInt64ByteSizeNoTag(field_number)

  message_size = msg.ByteSize()

  # The number of bytes for encoding the length of the message.
  total_size += _VarUInt64ByteSizeNoTag(message_size)

  # The size of the message.
  total_size += message_size
  return total_size
|
|
||||||
|
|
||||||
|
|
||||||
# Private helper functions for the *ByteSize() functions above.
|
|
||||||
|
|
||||||
|
|
||||||
def _TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number."""
  # Just pass in type 0, since the type won't affect the tag+type size.
  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
|
|
||||||
|
|
||||||
|
|
||||||
def _VarUInt64ByteSizeNoTag(uint64):
  """Returns the bytes required to serialize a single varint.
  uint64 must be unsigned.

  Raises:
    message.EncodeError: if uint64 exceeds UINT64_MAX.
  """
  if uint64 > UINT64_MAX:
    raise message.EncodeError('Value out of range: %d' % uint64)
  # Each varint byte carries 7 payload bits; count how many are needed.
  # (Renamed from 'bytes', which shadowed the builtin of the same name.)
  byte_count = 1
  while uint64 > 0x7f:
    byte_count += 1
    uint64 >>= 7
  return byte_count
|
|
@ -1,246 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# TODO(robinson): We should just make these methods all "pure-virtual" and move
|
|
||||||
# all implementation out, into reflection.py for now.
|
|
||||||
|
|
||||||
|
|
||||||
"""Contains an abstract base class for protocol messages."""
|
|
||||||
|
|
||||||
__author__ = 'robinson@google.com (Will Robinson)'
|
|
||||||
|
|
||||||
from froofle.protobuf import text_format
|
|
||||||
|
|
||||||
class Error(Exception): pass         # Base class for all protobuf errors.
class DecodeError(Error): pass       # Raised on malformed serialized input.
class EncodeError(Error): pass       # Raised when a message cannot be encoded.
|
|
||||||
|
|
||||||
|
|
||||||
class Message(object):

  """Abstract base class for protocol messages.

  Protocol message classes are almost always generated by the protocol
  compiler.  These generated types subclass Message and implement the methods
  shown below.

  TODO(robinson): Link to an HTML document here.

  TODO(robinson): Document that instances of this class will also
  have an Extensions attribute with __getitem__ and __setitem__.
  Again, not sure how to best convey this.

  TODO(robinson): Document that the class must also have a static
  RegisterExtension(extension_field) method.
  Not sure how to best express at this point.
  """

  # TODO(robinson): Document these fields and methods.

  # No instance dict: subclasses declare their own slots.
  __slots__ = []

  # Overridden by generated subclasses with their Descriptor instance.
  DESCRIPTOR = None

  def __eq__(self, other_msg):
    raise NotImplementedError

  def __ne__(self, other_msg):
    # Can't just say self != other_msg, since that would infinitely recurse. :)
    return not self == other_msg

  def __str__(self):
    return text_format.MessageToString(self)

  def MergeFrom(self, other_msg):
    """Merges the contents of the specified message into current message.

    This method merges the contents of the specified message into the current
    message. Singular fields that are set in the specified message overwrite
    the corresponding fields in the current message. Repeated fields are
    appended. Singular sub-messages and groups are recursively merged.

    Args:
      other_msg: Message to merge into the current message.
    """
    raise NotImplementedError

  def CopyFrom(self, other_msg):
    """Copies the content of the specified message into the current message.

    The method clears the current message and then merges the specified
    message using MergeFrom.

    Args:
      other_msg: Message to copy into the current one.
    """
    # Copying a message onto itself (or an equal message) is a no-op.
    if self == other_msg:
      return
    self.Clear()
    self.MergeFrom(other_msg)

  def Clear(self):
    """Clears all data that was set in the message."""
    raise NotImplementedError

  def IsInitialized(self):
    """Checks if the message is initialized.

    Returns:
      The method returns True if the message is initialized (i.e. all of its
      required fields are set).
    """
    raise NotImplementedError

  # TODO(robinson): MergeFromString() should probably return None and be
  # implemented in terms of a helper that returns the # of bytes read.  Our
  # deserialization routines would use the helper when recursively
  # deserializing, but the end user would almost always just want the no-return
  # MergeFromString().

  def MergeFromString(self, serialized):
    """Merges serialized protocol buffer data into this message.

    When we find a field in |serialized| that is already present
    in this message:
      - If it's a "repeated" field, we append to the end of our list.
      - Else, if it's a scalar, we overwrite our field.
      - Else, (it's a nonrepeated composite), we recursively merge
        into the existing composite.

    TODO(robinson): Document handling of unknown fields.

    Args:
      serialized: Any object that allows us to call buffer(serialized)
        to access a string of bytes using the buffer interface.

    TODO(robinson): When we switch to a helper, this will return None.

    Returns:
      The number of bytes read from |serialized|.
      For non-group messages, this will always be len(serialized),
      but for messages which are actually groups, this will
      generally be less than len(serialized), since we must
      stop when we reach an END_GROUP tag.  Note that if
      we *do* stop because of an END_GROUP tag, the number
      of bytes returned does not include the bytes
      for the END_GROUP tag information.
    """
    raise NotImplementedError

  def ParseFromString(self, serialized):
    """Like MergeFromString(), except we clear the object first."""
    self.Clear()
    self.MergeFromString(serialized)

  def SerializeToString(self):
    """Serializes the protocol message to a binary string.

    Returns:
      A binary string representation of the message if all of the required
      fields in the message are set (i.e. the message is initialized).

    Raises:
      message.EncodeError if the message isn't initialized.
    """
    raise NotImplementedError

  def SerializePartialToString(self):
    """Serializes the protocol message to a binary string.

    This method is similar to SerializeToString but doesn't check if the
    message is initialized.

    Returns:
      A string representation of the partial message.
    """
    raise NotImplementedError

  # TODO(robinson): Decide whether we like these better
  # than auto-generated has_foo() and clear_foo() methods
  # on the instances themselves.  This way is less consistent
  # with C++, but it makes reflection-type access easier and
  # reduces the number of magically autogenerated things.
  #
  # TODO(robinson): Be sure to document (and test) exactly
  # which field names are accepted here.  Are we case-sensitive?
  # What do we do with fields that share names with Python keywords
  # like 'lambda' and 'yield'?
  #
  # nnorwitz says:
  # """
  # Typically (in python), an underscore is appended to names that are
  # keywords. So they would become lambda_ or yield_.
  # """
  def ListFields(self, field_name):
    """Returns a list of (FieldDescriptor, value) tuples for all
    fields in the message which are not empty.  A singular field is non-empty
    if HasField() would return true, and a repeated field is non-empty if
    it contains at least one element.  The fields are ordered by field
    number

    NOTE(review): 'field_name' appears unused per the docstring, which
    describes listing all fields -- confirm against implementations before
    relying on it."""
    raise NotImplementedError

  def HasField(self, field_name):
    raise NotImplementedError

  def ClearField(self, field_name):
    raise NotImplementedError

  def HasExtension(self, extension_handle):
    raise NotImplementedError

  def ClearExtension(self, extension_handle):
    raise NotImplementedError

  def ByteSize(self):
    """Returns the serialized size of this message.
    Recursively calls ByteSize() on all contained messages.
    """
    raise NotImplementedError

  def _SetListener(self, message_listener):
    """Internal method used by the protocol message implementation.
    Clients should not call this directly.

    Sets a listener that this message will call on certain state transitions.

    The purpose of this method is to register back-edges from children to
    parents at runtime, for the purpose of setting "has" bits and
    byte-size-dirty bits in the parent and ancestor objects whenever a child or
    descendant object is modified.

    If the client wants to disconnect this Message from the object tree, she
    explicitly sets callback to None.

    If message_listener is None, unregisters any existing listener.  Otherwise,
    message_listener must implement the MessageListener interface in
    internal/message_listener.py, and we discard any listener registered
    via a previous _SetListener() call.
    """
    raise NotImplementedError
|
|
File diff suppressed because it is too large
Load Diff
@ -1,208 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""Declares the RPC service interfaces.
|
|
||||||
|
|
||||||
This module declares the abstract interfaces underlying proto2 RPC
|
|
||||||
services. These are intented to be independent of any particular RPC
|
|
||||||
implementation, so that proto2 services can be used on top of a variety
|
|
||||||
of implementations.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'petar@google.com (Petar Petrov)'
|
|
||||||
|
|
||||||
|
|
||||||
class Service(object):

  """Abstract base interface for protocol-buffer-based RPC services.

  Services themselves are abstract classes (implemented either by servers or as
  stubs), but they subclass this base interface. The methods of this
  interface can be used to call the methods of the service without knowing
  its exact type at compile time (analogous to the Message interface).
  """

  def GetDescriptor(self):
    """Retrieves this service's descriptor."""
    raise NotImplementedError

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, done):
    """Calls a method of the service specified by method_descriptor.

    Preconditions:
    * method_descriptor.service == GetDescriptor
    * request is of the exact same classes as returned by
      GetRequestClass(method).
    * After the call has started, the request must not be modified.
    * "rpc_controller" is of the correct type for the RPC implementation being
      used by this Service.  For stubs, the "correct type" depends on the
      RpcChannel which the stub is using.

    Postconditions:
    * "done" will be called when the method is complete.  This may be
      before CallMethod() returns or it may be at some point in the future.
    """
    raise NotImplementedError

  def GetRequestClass(self, method_descriptor):
    """Returns the class of the request message for the specified method.

    CallMethod() requires that the request is of a particular subclass of
    Message. GetRequestClass() gets the default instance of this required
    type.

    Example:
      method = service.GetDescriptor().FindMethodByName("Foo")
      request = stub.GetRequestClass(method)()
      request.ParseFromString(input)
      service.CallMethod(method, request, callback)
    """
    raise NotImplementedError

  def GetResponseClass(self, method_descriptor):
    """Returns the class of the response message for the specified method.

    This method isn't really needed, as the RpcChannel's CallMethod constructs
    the response protocol message. It's provided anyway in case it is useful
    for the caller to know the response type in advance.
    """
    raise NotImplementedError
|
|
||||||
|
|
||||||
|
|
||||||
class RpcController(object):
|
|
||||||
|
|
||||||
"""An RpcController mediates a single method call.
|
|
||||||
|
|
||||||
The primary purpose of the controller is to provide a way to manipulate
|
|
||||||
settings specific to the RPC implementation and to find out about RPC-level
|
|
||||||
errors. The methods provided by the RpcController interface are intended
|
|
||||||
to be a "least common denominator" set of features which we expect all
|
|
||||||
implementations to support. Specific implementations may provide more
|
|
||||||
advanced features (e.g. deadline propagation).
|
|
||||||
"""
|
|
||||||
|
|
||||||
# Client-side methods below
|
|
||||||
|
|
||||||
def Reset(self):
|
|
||||||
"""Resets the RpcController to its initial state.
|
|
||||||
|
|
||||||
After the RpcController has been reset, it may be reused in
|
|
||||||
a new call. Must not be called while an RPC is in progress.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def Failed(self):
|
|
||||||
"""Returns true if the call failed.
|
|
||||||
|
|
||||||
After a call has finished, returns true if the call failed. The possible
|
|
||||||
reasons for failure depend on the RPC implementation. Failed() must not
|
|
||||||
be called before a call has finished. If Failed() returns true, the
|
|
||||||
contents of the response message are undefined.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def ErrorText(self):
|
|
||||||
"""If Failed is true, returns a human-readable description of the error."""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def StartCancel(self):
|
|
||||||
"""Initiate cancellation.
|
|
||||||
|
|
||||||
Advises the RPC system that the caller desires that the RPC call be
|
|
||||||
canceled. The RPC system may cancel it immediately, may wait awhile and
|
|
||||||
then cancel it, or may not even cancel the call at all. If the call is
|
|
||||||
canceled, the "done" callback will still be called and the RpcController
|
|
||||||
will indicate that the call failed at that time.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
# Server-side methods below
|
|
||||||
|
|
||||||
def SetFailed(self, reason):
|
|
||||||
"""Sets a failure reason.
|
|
||||||
|
|
||||||
Causes Failed() to return true on the client side. "reason" will be
|
|
||||||
incorporated into the message returned by ErrorText(). If you find
|
|
||||||
you need to return machine-readable information about failures, you
|
|
||||||
should incorporate it into your response protocol buffer and should
|
|
||||||
NOT call SetFailed().
|
|
||||||
"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def IsCanceled(self):
|
|
||||||
"""Checks if the client cancelled the RPC.
|
|
||||||
|
|
||||||
If true, indicates that the client canceled the RPC, so the server may
|
|
||||||
as well give up on replying to it. The server should still call the
|
|
||||||
final "done" callback.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def NotifyOnCancel(self, callback):
|
|
||||||
"""Sets a callback to invoke on cancel.
|
|
||||||
|
|
||||||
Asks that the given callback be called when the RPC is canceled. The
|
|
||||||
callback will always be called exactly once. If the RPC completes without
|
|
||||||
being canceled, the callback will be called after completion. If the RPC
|
|
||||||
has already been canceled when NotifyOnCancel() is called, the callback
|
|
||||||
will be called immediately.
|
|
||||||
|
|
||||||
NotifyOnCancel() must be called no more than once per request.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
|
|
||||||
class RpcChannel(object):
|
|
||||||
|
|
||||||
"""Abstract interface for an RPC channel.
|
|
||||||
|
|
||||||
An RpcChannel represents a communication line to a service which can be used
|
|
||||||
to call that service's methods. The service may be running on another
|
|
||||||
machine. Normally, you should not use an RpcChannel directly, but instead
|
|
||||||
construct a stub {@link Service} wrapping it. Example:
|
|
||||||
|
|
||||||
Example:
|
|
||||||
RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
|
|
||||||
RpcController controller = rpcImpl.Controller()
|
|
||||||
MyService service = MyService_Stub(channel)
|
|
||||||
service.MyMethod(controller, request, callback)
|
|
||||||
"""
|
|
||||||
|
|
||||||
def CallMethod(self, method_descriptor, rpc_controller,
|
|
||||||
request, response_class, done):
|
|
||||||
"""Calls the method identified by the descriptor.
|
|
||||||
|
|
||||||
Call the given method of the remote service. The signature of this
|
|
||||||
procedure looks the same as Service.CallMethod(), but the requirements
|
|
||||||
are less strict in one important way: the request object doesn't have to
|
|
||||||
be of any specific class as long as its descriptor is method.input_type.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError
|
|
@ -1,289 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""Contains metaclasses used to create protocol service and service stub
|
|
||||||
classes from ServiceDescriptor objects at runtime.
|
|
||||||
|
|
||||||
The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
|
|
||||||
inject all useful functionality into the classes output by the protocol
|
|
||||||
compiler at compile-time.
|
|
||||||
"""
|
|
||||||
|
|
||||||
__author__ = 'petar@google.com (Petar Petrov)'
|
|
||||||
|
|
||||||
|
|
||||||
class GeneratedServiceType(type):
|
|
||||||
|
|
||||||
"""Metaclass for service classes created at runtime from ServiceDescriptors.
|
|
||||||
|
|
||||||
Implementations for all methods described in the Service class are added here
|
|
||||||
by this class. We also create properties to allow getting/setting all fields
|
|
||||||
in the protocol message.
|
|
||||||
|
|
||||||
The protocol compiler currently uses this metaclass to create protocol service
|
|
||||||
classes at runtime. Clients can also manually create their own classes at
|
|
||||||
runtime, as in this example:
|
|
||||||
|
|
||||||
mydescriptor = ServiceDescriptor(.....)
|
|
||||||
class MyProtoService(service.Service):
|
|
||||||
__metaclass__ = GeneratedServiceType
|
|
||||||
DESCRIPTOR = mydescriptor
|
|
||||||
myservice_instance = MyProtoService()
|
|
||||||
...
|
|
||||||
"""
|
|
||||||
|
|
||||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
|
||||||
|
|
||||||
def __init__(cls, name, bases, dictionary):
|
|
||||||
"""Creates a message service class.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
name: Name of the class (ignored, but required by the metaclass
|
|
||||||
protocol).
|
|
||||||
bases: Base classes of the class being constructed.
|
|
||||||
dictionary: The class dictionary of the class being constructed.
|
|
||||||
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
|
|
||||||
describing this protocol service type.
|
|
||||||
"""
|
|
||||||
# Don't do anything if this class doesn't have a descriptor. This happens
|
|
||||||
# when a service class is subclassed.
|
|
||||||
if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
|
|
||||||
return
|
|
||||||
descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
|
|
||||||
service_builder = _ServiceBuilder(descriptor)
|
|
||||||
service_builder.BuildService(cls)
|
|
||||||
|
|
||||||
|
|
||||||
class GeneratedServiceStubType(GeneratedServiceType):
|
|
||||||
|
|
||||||
"""Metaclass for service stubs created at runtime from ServiceDescriptors.
|
|
||||||
|
|
||||||
This class has similar responsibilities as GeneratedServiceType, except that
|
|
||||||
it creates the service stub classes.
|
|
||||||
"""
|
|
||||||
|
|
||||||
_DESCRIPTOR_KEY = 'DESCRIPTOR'
|
|
||||||
|
|
||||||
def __init__(cls, name, bases, dictionary):
|
|
||||||
"""Creates a message service stub class.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
name: Name of the class (ignored, here).
|
|
||||||
bases: Base classes of the class being constructed.
|
|
||||||
dictionary: The class dictionary of the class being constructed.
|
|
||||||
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
|
|
||||||
describing this protocol service type.
|
|
||||||
"""
|
|
||||||
super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
|
|
||||||
# Don't do anything if this class doesn't have a descriptor. This happens
|
|
||||||
# when a service stub is subclassed.
|
|
||||||
if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
|
|
||||||
return
|
|
||||||
descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
|
|
||||||
service_stub_builder = _ServiceStubBuilder(descriptor)
|
|
||||||
service_stub_builder.BuildServiceStub(cls)
|
|
||||||
|
|
||||||
|
|
||||||
class _ServiceBuilder(object):
|
|
||||||
|
|
||||||
"""This class constructs a protocol service class using a service descriptor.
|
|
||||||
|
|
||||||
Given a service descriptor, this class constructs a class that represents
|
|
||||||
the specified service descriptor. One service builder instance constructs
|
|
||||||
exactly one service class. That means all instances of that class share the
|
|
||||||
same builder.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, service_descriptor):
|
|
||||||
"""Initializes an instance of the service class builder.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
service_descriptor: ServiceDescriptor to use when constructing the
|
|
||||||
service class.
|
|
||||||
"""
|
|
||||||
self.descriptor = service_descriptor
|
|
||||||
|
|
||||||
def BuildService(self, cls):
|
|
||||||
"""Constructs the service class.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
cls: The class that will be constructed.
|
|
||||||
"""
|
|
||||||
|
|
||||||
# CallMethod needs to operate with an instance of the Service class. This
|
|
||||||
# internal wrapper function exists only to be able to pass the service
|
|
||||||
# instance to the method that does the real CallMethod work.
|
|
||||||
def _WrapCallMethod(srvc, method_descriptor,
|
|
||||||
rpc_controller, request, callback):
|
|
||||||
self._CallMethod(srvc, method_descriptor,
|
|
||||||
rpc_controller, request, callback)
|
|
||||||
self.cls = cls
|
|
||||||
cls.CallMethod = _WrapCallMethod
|
|
||||||
cls.GetDescriptor = self._GetDescriptor
|
|
||||||
cls.GetRequestClass = self._GetRequestClass
|
|
||||||
cls.GetResponseClass = self._GetResponseClass
|
|
||||||
for method in self.descriptor.methods:
|
|
||||||
setattr(cls, method.name, self._GenerateNonImplementedMethod(method))
|
|
||||||
|
|
||||||
def _GetDescriptor(self):
|
|
||||||
"""Retrieves the service descriptor.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The descriptor of the service (of type ServiceDescriptor).
|
|
||||||
"""
|
|
||||||
return self.descriptor
|
|
||||||
|
|
||||||
def _CallMethod(self, srvc, method_descriptor,
|
|
||||||
rpc_controller, request, callback):
|
|
||||||
"""Calls the method described by a given method descriptor.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
srvc: Instance of the service for which this method is called.
|
|
||||||
method_descriptor: Descriptor that represent the method to call.
|
|
||||||
rpc_controller: RPC controller to use for this method's execution.
|
|
||||||
request: Request protocol message.
|
|
||||||
callback: A callback to invoke after the method has completed.
|
|
||||||
"""
|
|
||||||
if method_descriptor.containing_service != self.descriptor:
|
|
||||||
raise RuntimeError(
|
|
||||||
'CallMethod() given method descriptor for wrong service type.')
|
|
||||||
method = getattr(srvc, method_descriptor.name)
|
|
||||||
method(rpc_controller, request, callback)
|
|
||||||
|
|
||||||
def _GetRequestClass(self, method_descriptor):
|
|
||||||
"""Returns the class of the request protocol message.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
method_descriptor: Descriptor of the method for which to return the
|
|
||||||
request protocol message class.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A class that represents the input protocol message of the specified
|
|
||||||
method.
|
|
||||||
"""
|
|
||||||
if method_descriptor.containing_service != self.descriptor:
|
|
||||||
raise RuntimeError(
|
|
||||||
'GetRequestClass() given method descriptor for wrong service type.')
|
|
||||||
return method_descriptor.input_type._concrete_class
|
|
||||||
|
|
||||||
def _GetResponseClass(self, method_descriptor):
|
|
||||||
"""Returns the class of the response protocol message.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
method_descriptor: Descriptor of the method for which to return the
|
|
||||||
response protocol message class.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A class that represents the output protocol message of the specified
|
|
||||||
method.
|
|
||||||
"""
|
|
||||||
if method_descriptor.containing_service != self.descriptor:
|
|
||||||
raise RuntimeError(
|
|
||||||
'GetResponseClass() given method descriptor for wrong service type.')
|
|
||||||
return method_descriptor.output_type._concrete_class
|
|
||||||
|
|
||||||
def _GenerateNonImplementedMethod(self, method):
|
|
||||||
"""Generates and returns a method that can be set for a service methods.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
method: Descriptor of the service method for which a method is to be
|
|
||||||
generated.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A method that can be added to the service class.
|
|
||||||
"""
|
|
||||||
return lambda inst, rpc_controller, request, callback: (
|
|
||||||
self._NonImplementedMethod(method.name, rpc_controller, callback))
|
|
||||||
|
|
||||||
def _NonImplementedMethod(self, method_name, rpc_controller, callback):
|
|
||||||
"""The body of all methods in the generated service class.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
method_name: Name of the method being executed.
|
|
||||||
rpc_controller: RPC controller used to execute this method.
|
|
||||||
callback: A callback which will be invoked when the method finishes.
|
|
||||||
"""
|
|
||||||
rpc_controller.SetFailed('Method %s not implemented.' % method_name)
|
|
||||||
callback(None)
|
|
||||||
|
|
||||||
|
|
||||||
class _ServiceStubBuilder(object):
|
|
||||||
|
|
||||||
"""Constructs a protocol service stub class using a service descriptor.
|
|
||||||
|
|
||||||
Given a service descriptor, this class constructs a suitable stub class.
|
|
||||||
A stub is just a type-safe wrapper around an RpcChannel which emulates a
|
|
||||||
local implementation of the service.
|
|
||||||
|
|
||||||
One service stub builder instance constructs exactly one class. It means all
|
|
||||||
instances of that class share the same service stub builder.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, service_descriptor):
|
|
||||||
"""Initializes an instance of the service stub class builder.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
service_descriptor: ServiceDescriptor to use when constructing the
|
|
||||||
stub class.
|
|
||||||
"""
|
|
||||||
self.descriptor = service_descriptor
|
|
||||||
|
|
||||||
def BuildServiceStub(self, cls):
|
|
||||||
"""Constructs the stub class.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
cls: The class that will be constructed.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _ServiceStubInit(stub, rpc_channel):
|
|
||||||
stub.rpc_channel = rpc_channel
|
|
||||||
self.cls = cls
|
|
||||||
cls.__init__ = _ServiceStubInit
|
|
||||||
for method in self.descriptor.methods:
|
|
||||||
setattr(cls, method.name, self._GenerateStubMethod(method))
|
|
||||||
|
|
||||||
def _GenerateStubMethod(self, method):
|
|
||||||
return lambda inst, rpc_controller, request, callback: self._StubMethod(
|
|
||||||
inst, method, rpc_controller, request, callback)
|
|
||||||
|
|
||||||
def _StubMethod(self, stub, method_descriptor,
|
|
||||||
rpc_controller, request, callback):
|
|
||||||
"""The body of all service methods in the generated stub class.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
stub: Stub instance.
|
|
||||||
method_descriptor: Descriptor of the invoked method.
|
|
||||||
rpc_controller: Rpc controller to execute the method.
|
|
||||||
request: Request protocol message.
|
|
||||||
callback: A callback to execute when the method finishes.
|
|
||||||
"""
|
|
||||||
stub.rpc_channel.CallMethod(
|
|
||||||
method_descriptor, rpc_controller, request,
|
|
||||||
method_descriptor.output_type._concrete_class, callback)
|
|
@ -1,125 +0,0 @@
|
|||||||
# Protocol Buffers - Google's data interchange format
|
|
||||||
# Copyright 2008 Google Inc. All rights reserved.
|
|
||||||
# http://code.google.com/p/protobuf/
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are
|
|
||||||
# met:
|
|
||||||
#
|
|
||||||
# * Redistributions of source code must retain the above copyright
|
|
||||||
# notice, this list of conditions and the following disclaimer.
|
|
||||||
# * Redistributions in binary form must reproduce the above
|
|
||||||
# copyright notice, this list of conditions and the following disclaimer
|
|
||||||
# in the documentation and/or other materials provided with the
|
|
||||||
# distribution.
|
|
||||||
# * Neither the name of Google Inc. nor the names of its
|
|
||||||
# contributors may be used to endorse or promote products derived from
|
|
||||||
# this software without specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
|
||||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
|
||||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
|
||||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
|
||||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
|
||||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
|
||||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
|
||||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
|
||||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""Contains routines for printing protocol messages in text format."""
|
|
||||||
|
|
||||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
|
||||||
|
|
||||||
import cStringIO
|
|
||||||
|
|
||||||
from froofle.protobuf import descriptor
|
|
||||||
|
|
||||||
__all__ = [ 'MessageToString', 'PrintMessage', 'PrintField', 'PrintFieldValue' ]
|
|
||||||
|
|
||||||
def MessageToString(message):
|
|
||||||
out = cStringIO.StringIO()
|
|
||||||
PrintMessage(message, out)
|
|
||||||
result = out.getvalue()
|
|
||||||
out.close()
|
|
||||||
return result
|
|
||||||
|
|
||||||
def PrintMessage(message, out, indent = 0):
|
|
||||||
for field, value in message.ListFields():
|
|
||||||
if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
|
|
||||||
for element in value:
|
|
||||||
PrintField(field, element, out, indent)
|
|
||||||
else:
|
|
||||||
PrintField(field, value, out, indent)
|
|
||||||
|
|
||||||
def PrintField(field, value, out, indent = 0):
|
|
||||||
"""Print a single field name/value pair. For repeated fields, the value
|
|
||||||
should be a single element."""
|
|
||||||
|
|
||||||
out.write(' ' * indent);
|
|
||||||
if field.is_extension:
|
|
||||||
out.write('[')
|
|
||||||
if (field.containing_type.GetOptions().message_set_wire_format and
|
|
||||||
field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
|
|
||||||
field.message_type == field.extension_scope and
|
|
||||||
field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
|
|
||||||
out.write(field.message_type.full_name)
|
|
||||||
else:
|
|
||||||
out.write(field.full_name)
|
|
||||||
out.write(']')
|
|
||||||
elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
|
|
||||||
# For groups, use the capitalized name.
|
|
||||||
out.write(field.message_type.name)
|
|
||||||
else:
|
|
||||||
out.write(field.name)
|
|
||||||
|
|
||||||
if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
|
|
||||||
# The colon is optional in this case, but our cross-language golden files
|
|
||||||
# don't include it.
|
|
||||||
out.write(': ')
|
|
||||||
|
|
||||||
PrintFieldValue(field, value, out, indent)
|
|
||||||
out.write('\n')
|
|
||||||
|
|
||||||
def PrintFieldValue(field, value, out, indent = 0):
|
|
||||||
"""Print a single field value (not including name). For repeated fields,
|
|
||||||
the value should be a single element."""
|
|
||||||
|
|
||||||
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
|
|
||||||
out.write(' {\n')
|
|
||||||
PrintMessage(value, out, indent + 2)
|
|
||||||
out.write(' ' * indent + '}')
|
|
||||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
|
|
||||||
out.write(field.enum_type.values_by_number[value].name)
|
|
||||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
|
|
||||||
out.write('\"')
|
|
||||||
out.write(_CEscape(value))
|
|
||||||
out.write('\"')
|
|
||||||
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
|
|
||||||
if value:
|
|
||||||
out.write("true")
|
|
||||||
else:
|
|
||||||
out.write("false")
|
|
||||||
else:
|
|
||||||
out.write(str(value))
|
|
||||||
|
|
||||||
# text.encode('string_escape') does not seem to satisfy our needs as it
|
|
||||||
# encodes unprintable characters using two-digit hex escapes whereas our
|
|
||||||
# C++ unescaping function allows hex escapes to be any length. So,
|
|
||||||
# "\0011".encode('string_escape') ends up being "\\x011", which will be
|
|
||||||
# decoded in C++ as a single-character string with char code 0x11.
|
|
||||||
def _CEscape(text):
|
|
||||||
def escape(c):
|
|
||||||
o = ord(c)
|
|
||||||
if o == 10: return r"\n" # optional escape
|
|
||||||
if o == 13: return r"\r" # optional escape
|
|
||||||
if o == 9: return r"\t" # optional escape
|
|
||||||
if o == 39: return r"\'" # optional escape
|
|
||||||
|
|
||||||
if o == 34: return r'\"' # necessary escape
|
|
||||||
if o == 92: return r"\\" # necessary escape
|
|
||||||
|
|
||||||
if o >= 127 or o < 32: return "\\%03o" % o # necessary escapes
|
|
||||||
return c
|
|
||||||
return "".join([escape(c) for c in text])
|
|
156
gerrit_upload.py
156
gerrit_upload.py
@ -1,156 +0,0 @@
|
|||||||
#
|
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import getpass
|
|
||||||
import os
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
from tempfile import mkstemp
|
|
||||||
|
|
||||||
from codereview.proto_client import HttpRpc, Proxy
|
|
||||||
from codereview.review_pb2 import ReviewService_Stub
|
|
||||||
from codereview.upload_bundle_pb2 import *
|
|
||||||
from git_command import GitCommand
|
|
||||||
from error import UploadError
|
|
||||||
|
|
||||||
try:
|
|
||||||
import readline
|
|
||||||
except ImportError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
MAX_SEGMENT_SIZE = 1020 * 1024
|
|
||||||
|
|
||||||
def _GetRpcServer(email, server, save_cookies):
|
|
||||||
"""Returns an RpcServer.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
A new RpcServer, on which RPC calls can be made.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def GetUserCredentials():
|
|
||||||
"""Prompts the user for a username and password."""
|
|
||||||
e = email
|
|
||||||
if e is None:
|
|
||||||
e = raw_input("Email: ").strip()
|
|
||||||
password = getpass.getpass("Password for %s: " % e)
|
|
||||||
return (e, password)
|
|
||||||
|
|
||||||
# If this is the dev_appserver, use fake authentication.
|
|
||||||
lc_server = server.lower()
|
|
||||||
if lc_server == "localhost" or lc_server.startswith("localhost:"):
|
|
||||||
if email is None:
|
|
||||||
email = "test@example.com"
|
|
||||||
server = HttpRpc(
|
|
||||||
server,
|
|
||||||
lambda: (email, "password"),
|
|
||||||
extra_headers={"Cookie":
|
|
||||||
'dev_appserver_login="%s:False"' % email})
|
|
||||||
# Don't try to talk to ClientLogin.
|
|
||||||
server.authenticated = True
|
|
||||||
return server
|
|
||||||
|
|
||||||
if save_cookies:
|
|
||||||
cookie_file = ".gerrit_cookies"
|
|
||||||
else:
|
|
||||||
cookie_file = None
|
|
||||||
|
|
||||||
return HttpRpc(server, GetUserCredentials,
|
|
||||||
cookie_file=cookie_file)
|
|
||||||
|
|
||||||
def UploadBundle(project,
|
|
||||||
server,
|
|
||||||
email,
|
|
||||||
dest_project,
|
|
||||||
dest_branch,
|
|
||||||
src_branch,
|
|
||||||
bases,
|
|
||||||
save_cookies=True):
|
|
||||||
|
|
||||||
srv = _GetRpcServer(email, server, save_cookies)
|
|
||||||
review = Proxy(ReviewService_Stub(srv))
|
|
||||||
tmp_fd, tmp_bundle = mkstemp(".bundle", ".gpq")
|
|
||||||
os.close(tmp_fd)
|
|
||||||
|
|
||||||
srcid = project.bare_git.rev_parse(src_branch)
|
|
||||||
revlist = project._revlist(src_branch, *bases)
|
|
||||||
|
|
||||||
if srcid not in revlist:
|
|
||||||
# This can happen if src_branch is an annotated tag
|
|
||||||
#
|
|
||||||
revlist.append(srcid)
|
|
||||||
revlist_size = len(revlist) * 42
|
|
||||||
|
|
||||||
try:
|
|
||||||
cmd = ['bundle', 'create', tmp_bundle, src_branch]
|
|
||||||
cmd.extend(bases)
|
|
||||||
if GitCommand(project, cmd).Wait() != 0:
|
|
||||||
raise UploadError('cannot create bundle')
|
|
||||||
fd = open(tmp_bundle, "rb")
|
|
||||||
|
|
||||||
bundle_id = None
|
|
||||||
segment_id = 0
|
|
||||||
next_data = fd.read(MAX_SEGMENT_SIZE - revlist_size)
|
|
||||||
|
|
||||||
while True:
|
|
||||||
this_data = next_data
|
|
||||||
next_data = fd.read(MAX_SEGMENT_SIZE)
|
|
||||||
segment_id += 1
|
|
||||||
|
|
||||||
if bundle_id is None:
|
|
||||||
req = UploadBundleRequest()
|
|
||||||
req.dest_project = str(dest_project)
|
|
||||||
req.dest_branch = str(dest_branch)
|
|
||||||
for c in revlist:
|
|
||||||
req.contained_object.append(c)
|
|
||||||
else:
|
|
||||||
req = UploadBundleContinue()
|
|
||||||
req.bundle_id = bundle_id
|
|
||||||
req.segment_id = segment_id
|
|
||||||
|
|
||||||
req.bundle_data = this_data
|
|
||||||
if len(next_data) > 0:
|
|
||||||
req.partial_upload = True
|
|
||||||
else:
|
|
||||||
req.partial_upload = False
|
|
||||||
|
|
||||||
if bundle_id is None:
|
|
||||||
rsp = review.UploadBundle(req)
|
|
||||||
else:
|
|
||||||
rsp = review.ContinueBundle(req)
|
|
||||||
|
|
||||||
if rsp.status_code == UploadBundleResponse.CONTINUE:
|
|
||||||
bundle_id = rsp.bundle_id
|
|
||||||
elif rsp.status_code == UploadBundleResponse.RECEIVED:
|
|
||||||
bundle_id = rsp.bundle_id
|
|
||||||
return bundle_id
|
|
||||||
else:
|
|
||||||
if rsp.status_code == UploadBundleResponse.UNKNOWN_PROJECT:
|
|
||||||
reason = 'unknown project "%s"' % dest_project
|
|
||||||
elif rsp.status_code == UploadBundleResponse.UNKNOWN_BRANCH:
|
|
||||||
reason = 'unknown branch "%s"' % dest_branch
|
|
||||||
elif rsp.status_code == UploadBundleResponse.UNKNOWN_BUNDLE:
|
|
||||||
reason = 'unknown bundle'
|
|
||||||
elif rsp.status_code == UploadBundleResponse.NOT_BUNDLE_OWNER:
|
|
||||||
reason = 'not bundle owner'
|
|
||||||
elif rsp.status_code == UploadBundleResponse.BUNDLE_CLOSED:
|
|
||||||
reason = 'bundle closed'
|
|
||||||
elif rsp.status_code == UploadBundleResponse.UNAUTHORIZED_USER:
|
|
||||||
reason = ('Unauthorized user. Visit http://%s/hello to sign up.'
|
|
||||||
% server)
|
|
||||||
else:
|
|
||||||
reason = 'unknown error ' + str(rsp.status_code)
|
|
||||||
raise UploadError(reason)
|
|
||||||
finally:
|
|
||||||
os.unlink(tmp_bundle)
|
|
294
git_command.py
294
git_command.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,30 +14,82 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
import os
|
import os
|
||||||
import sys
|
import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
from signal import SIGTERM
|
||||||
|
|
||||||
from error import GitError
|
from error import GitError
|
||||||
|
from git_refs import HEAD
|
||||||
|
import platform_utils
|
||||||
|
from repo_trace import REPO_TRACE, IsTrace, Trace
|
||||||
|
from wrapper import Wrapper
|
||||||
|
|
||||||
GIT = 'git'
|
GIT = 'git'
|
||||||
MIN_GIT_VERSION = (1, 5, 4)
|
# Should keep in sync with the "repo" launcher file.
|
||||||
|
MIN_GIT_VERSION = (2, 10, 2)
|
||||||
GIT_DIR = 'GIT_DIR'
|
GIT_DIR = 'GIT_DIR'
|
||||||
REPO_TRACE = 'REPO_TRACE'
|
|
||||||
|
|
||||||
LAST_GITDIR = None
|
LAST_GITDIR = None
|
||||||
LAST_CWD = None
|
LAST_CWD = None
|
||||||
try:
|
|
||||||
TRACE = os.environ[REPO_TRACE] == '1'
|
|
||||||
except KeyError:
|
|
||||||
TRACE = False
|
|
||||||
|
|
||||||
|
_ssh_proxy_path = None
|
||||||
|
_ssh_sock_path = None
|
||||||
|
_ssh_clients = []
|
||||||
|
|
||||||
|
def ssh_sock(create=True):
|
||||||
|
global _ssh_sock_path
|
||||||
|
if _ssh_sock_path is None:
|
||||||
|
if not create:
|
||||||
|
return None
|
||||||
|
tmp_dir = '/tmp'
|
||||||
|
if not os.path.exists(tmp_dir):
|
||||||
|
tmp_dir = tempfile.gettempdir()
|
||||||
|
_ssh_sock_path = os.path.join(
|
||||||
|
tempfile.mkdtemp('', 'ssh-', tmp_dir),
|
||||||
|
'master-%r@%h:%p')
|
||||||
|
return _ssh_sock_path
|
||||||
|
|
||||||
|
def _ssh_proxy():
|
||||||
|
global _ssh_proxy_path
|
||||||
|
if _ssh_proxy_path is None:
|
||||||
|
_ssh_proxy_path = os.path.join(
|
||||||
|
os.path.dirname(__file__),
|
||||||
|
'git_ssh')
|
||||||
|
return _ssh_proxy_path
|
||||||
|
|
||||||
|
def _add_ssh_client(p):
|
||||||
|
_ssh_clients.append(p)
|
||||||
|
|
||||||
|
def _remove_ssh_client(p):
|
||||||
|
try:
|
||||||
|
_ssh_clients.remove(p)
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
def terminate_ssh_clients():
|
||||||
|
global _ssh_clients
|
||||||
|
for p in _ssh_clients:
|
||||||
|
try:
|
||||||
|
os.kill(p.pid, SIGTERM)
|
||||||
|
p.wait()
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
_ssh_clients = []
|
||||||
|
|
||||||
|
_git_version = None
|
||||||
|
|
||||||
class _GitCall(object):
|
class _GitCall(object):
|
||||||
def version(self):
|
def version_tuple(self):
|
||||||
p = GitCommand(None, ['--version'], capture_stdout=True)
|
global _git_version
|
||||||
if p.Wait() == 0:
|
if _git_version is None:
|
||||||
return p.stdout
|
_git_version = Wrapper().ParseGitVersion()
|
||||||
return None
|
if _git_version is None:
|
||||||
|
print('fatal: unable to detect git version', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
return _git_version
|
||||||
|
|
||||||
def __getattr__(self, name):
|
def __getattr__(self, name):
|
||||||
name = name.replace('_','-')
|
name = name.replace('_','-')
|
||||||
@ -47,6 +100,101 @@ class _GitCall(object):
|
|||||||
return fun
|
return fun
|
||||||
git = _GitCall()
|
git = _GitCall()
|
||||||
|
|
||||||
|
|
||||||
|
def RepoSourceVersion():
|
||||||
|
"""Return the version of the repo.git tree."""
|
||||||
|
ver = getattr(RepoSourceVersion, 'version', None)
|
||||||
|
|
||||||
|
# We avoid GitCommand so we don't run into circular deps -- GitCommand needs
|
||||||
|
# to initialize version info we provide.
|
||||||
|
if ver is None:
|
||||||
|
env = GitCommand._GetBasicEnv()
|
||||||
|
|
||||||
|
proj = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
env[GIT_DIR] = os.path.join(proj, '.git')
|
||||||
|
|
||||||
|
p = subprocess.Popen([GIT, 'describe', HEAD], stdout=subprocess.PIPE,
|
||||||
|
env=env)
|
||||||
|
if p.wait() == 0:
|
||||||
|
ver = p.stdout.read().strip().decode('utf-8')
|
||||||
|
if ver.startswith('v'):
|
||||||
|
ver = ver[1:]
|
||||||
|
else:
|
||||||
|
ver = 'unknown'
|
||||||
|
setattr(RepoSourceVersion, 'version', ver)
|
||||||
|
|
||||||
|
return ver
|
||||||
|
|
||||||
|
|
||||||
|
class UserAgent(object):
|
||||||
|
"""Mange User-Agent settings when talking to external services
|
||||||
|
|
||||||
|
We follow the style as documented here:
|
||||||
|
https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/User-Agent
|
||||||
|
"""
|
||||||
|
|
||||||
|
_os = None
|
||||||
|
_repo_ua = None
|
||||||
|
_git_ua = None
|
||||||
|
|
||||||
|
@property
|
||||||
|
def os(self):
|
||||||
|
"""The operating system name."""
|
||||||
|
if self._os is None:
|
||||||
|
os_name = sys.platform
|
||||||
|
if os_name.lower().startswith('linux'):
|
||||||
|
os_name = 'Linux'
|
||||||
|
elif os_name == 'win32':
|
||||||
|
os_name = 'Win32'
|
||||||
|
elif os_name == 'cygwin':
|
||||||
|
os_name = 'Cygwin'
|
||||||
|
elif os_name == 'darwin':
|
||||||
|
os_name = 'Darwin'
|
||||||
|
self._os = os_name
|
||||||
|
|
||||||
|
return self._os
|
||||||
|
|
||||||
|
@property
|
||||||
|
def repo(self):
|
||||||
|
"""The UA when connecting directly from repo."""
|
||||||
|
if self._repo_ua is None:
|
||||||
|
py_version = sys.version_info
|
||||||
|
self._repo_ua = 'git-repo/%s (%s) git/%s Python/%d.%d.%d' % (
|
||||||
|
RepoSourceVersion(),
|
||||||
|
self.os,
|
||||||
|
git.version_tuple().full,
|
||||||
|
py_version.major, py_version.minor, py_version.micro)
|
||||||
|
|
||||||
|
return self._repo_ua
|
||||||
|
|
||||||
|
@property
|
||||||
|
def git(self):
|
||||||
|
"""The UA when running git."""
|
||||||
|
if self._git_ua is None:
|
||||||
|
self._git_ua = 'git/%s (%s) git-repo/%s' % (
|
||||||
|
git.version_tuple().full,
|
||||||
|
self.os,
|
||||||
|
RepoSourceVersion())
|
||||||
|
|
||||||
|
return self._git_ua
|
||||||
|
|
||||||
|
user_agent = UserAgent()
|
||||||
|
|
||||||
|
def git_require(min_version, fail=False, msg=''):
|
||||||
|
git_version = git.version_tuple()
|
||||||
|
if min_version <= git_version:
|
||||||
|
return True
|
||||||
|
if fail:
|
||||||
|
need = '.'.join(map(str, min_version))
|
||||||
|
if msg:
|
||||||
|
msg = ' for ' + msg
|
||||||
|
print('fatal: git %s or later required%s' % (need, msg), file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _setenv(env, name, value):
|
||||||
|
env[name] = value.encode()
|
||||||
|
|
||||||
class GitCommand(object):
|
class GitCommand(object):
|
||||||
def __init__(self,
|
def __init__(self,
|
||||||
project,
|
project,
|
||||||
@ -56,22 +204,30 @@ class GitCommand(object):
|
|||||||
capture_stdout = False,
|
capture_stdout = False,
|
||||||
capture_stderr = False,
|
capture_stderr = False,
|
||||||
disable_editor = False,
|
disable_editor = False,
|
||||||
|
ssh_proxy = False,
|
||||||
cwd = None,
|
cwd = None,
|
||||||
gitdir = None):
|
gitdir = None):
|
||||||
env = dict(os.environ)
|
env = self._GetBasicEnv()
|
||||||
|
|
||||||
for e in [REPO_TRACE,
|
# If we are not capturing std* then need to print it.
|
||||||
GIT_DIR,
|
self.tee = {'stdout': not capture_stdout, 'stderr': not capture_stderr}
|
||||||
'GIT_ALTERNATE_OBJECT_DIRECTORIES',
|
|
||||||
'GIT_OBJECT_DIRECTORY',
|
|
||||||
'GIT_WORK_TREE',
|
|
||||||
'GIT_GRAFT_FILE',
|
|
||||||
'GIT_INDEX_FILE']:
|
|
||||||
if e in env:
|
|
||||||
del env[e]
|
|
||||||
|
|
||||||
if disable_editor:
|
if disable_editor:
|
||||||
env['GIT_EDITOR'] = ':'
|
_setenv(env, 'GIT_EDITOR', ':')
|
||||||
|
if ssh_proxy:
|
||||||
|
_setenv(env, 'REPO_SSH_SOCK', ssh_sock())
|
||||||
|
_setenv(env, 'GIT_SSH', _ssh_proxy())
|
||||||
|
_setenv(env, 'GIT_SSH_VARIANT', 'ssh')
|
||||||
|
if 'http_proxy' in env and 'darwin' == sys.platform:
|
||||||
|
s = "'http.proxy=%s'" % (env['http_proxy'],)
|
||||||
|
p = env.get('GIT_CONFIG_PARAMETERS')
|
||||||
|
if p is not None:
|
||||||
|
s = p + ' ' + s
|
||||||
|
_setenv(env, 'GIT_CONFIG_PARAMETERS', s)
|
||||||
|
if 'GIT_ALLOW_PROTOCOL' not in env:
|
||||||
|
_setenv(env, 'GIT_ALLOW_PROTOCOL',
|
||||||
|
'file:git:http:https:ssh:persistent-http:persistent-https:sso:rpc')
|
||||||
|
_setenv(env, 'GIT_HTTP_USER_AGENT', user_agent.git)
|
||||||
|
|
||||||
if project:
|
if project:
|
||||||
if not cwd:
|
if not cwd:
|
||||||
@ -82,26 +238,25 @@ class GitCommand(object):
|
|||||||
command = [GIT]
|
command = [GIT]
|
||||||
if bare:
|
if bare:
|
||||||
if gitdir:
|
if gitdir:
|
||||||
env[GIT_DIR] = gitdir
|
_setenv(env, GIT_DIR, gitdir)
|
||||||
cwd = None
|
cwd = None
|
||||||
command.extend(cmdv)
|
command.append(cmdv[0])
|
||||||
|
# Need to use the --progress flag for fetch/clone so output will be
|
||||||
|
# displayed as by default git only does progress output if stderr is a TTY.
|
||||||
|
if sys.stderr.isatty() and cmdv[0] in ('fetch', 'clone'):
|
||||||
|
if '--progress' not in cmdv and '--quiet' not in cmdv:
|
||||||
|
command.append('--progress')
|
||||||
|
command.extend(cmdv[1:])
|
||||||
|
|
||||||
if provide_stdin:
|
if provide_stdin:
|
||||||
stdin = subprocess.PIPE
|
stdin = subprocess.PIPE
|
||||||
else:
|
else:
|
||||||
stdin = None
|
stdin = None
|
||||||
|
|
||||||
if capture_stdout:
|
stdout = subprocess.PIPE
|
||||||
stdout = subprocess.PIPE
|
stderr = subprocess.PIPE
|
||||||
else:
|
|
||||||
stdout = None
|
|
||||||
|
|
||||||
if capture_stderr:
|
if IsTrace():
|
||||||
stderr = subprocess.PIPE
|
|
||||||
else:
|
|
||||||
stderr = None
|
|
||||||
|
|
||||||
if TRACE:
|
|
||||||
global LAST_CWD
|
global LAST_CWD
|
||||||
global LAST_GITDIR
|
global LAST_GITDIR
|
||||||
|
|
||||||
@ -127,7 +282,7 @@ class GitCommand(object):
|
|||||||
dbg += ' 1>|'
|
dbg += ' 1>|'
|
||||||
if stderr == subprocess.PIPE:
|
if stderr == subprocess.PIPE:
|
||||||
dbg += ' 2>|'
|
dbg += ' 2>|'
|
||||||
print >>sys.stderr, dbg
|
Trace('%s', dbg)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
p = subprocess.Popen(command,
|
p = subprocess.Popen(command,
|
||||||
@ -136,29 +291,62 @@ class GitCommand(object):
|
|||||||
stdin = stdin,
|
stdin = stdin,
|
||||||
stdout = stdout,
|
stdout = stdout,
|
||||||
stderr = stderr)
|
stderr = stderr)
|
||||||
except Exception, e:
|
except Exception as e:
|
||||||
raise GitError('%s: %s' % (command[1], e))
|
raise GitError('%s: %s' % (command[1], e))
|
||||||
|
|
||||||
|
if ssh_proxy:
|
||||||
|
_add_ssh_client(p)
|
||||||
|
|
||||||
self.process = p
|
self.process = p
|
||||||
self.stdin = p.stdin
|
self.stdin = p.stdin
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _GetBasicEnv():
|
||||||
|
"""Return a basic env for running git under.
|
||||||
|
|
||||||
|
This is guaranteed to be side-effect free.
|
||||||
|
"""
|
||||||
|
env = os.environ.copy()
|
||||||
|
for key in (REPO_TRACE,
|
||||||
|
GIT_DIR,
|
||||||
|
'GIT_ALTERNATE_OBJECT_DIRECTORIES',
|
||||||
|
'GIT_OBJECT_DIRECTORY',
|
||||||
|
'GIT_WORK_TREE',
|
||||||
|
'GIT_GRAFT_FILE',
|
||||||
|
'GIT_INDEX_FILE'):
|
||||||
|
env.pop(key, None)
|
||||||
|
return env
|
||||||
|
|
||||||
def Wait(self):
|
def Wait(self):
|
||||||
|
try:
|
||||||
|
p = self.process
|
||||||
|
rc = self._CaptureOutput()
|
||||||
|
finally:
|
||||||
|
_remove_ssh_client(p)
|
||||||
|
return rc
|
||||||
|
|
||||||
|
def _CaptureOutput(self):
|
||||||
p = self.process
|
p = self.process
|
||||||
|
s_in = platform_utils.FileDescriptorStreams.create()
|
||||||
|
s_in.add(p.stdout, sys.stdout, 'stdout')
|
||||||
|
s_in.add(p.stderr, sys.stderr, 'stderr')
|
||||||
|
self.stdout = ''
|
||||||
|
self.stderr = ''
|
||||||
|
|
||||||
if p.stdin:
|
while not s_in.is_done:
|
||||||
p.stdin.close()
|
in_ready = s_in.select()
|
||||||
self.stdin = None
|
for s in in_ready:
|
||||||
|
buf = s.read()
|
||||||
if p.stdout:
|
if not buf:
|
||||||
self.stdout = p.stdout.read()
|
s_in.remove(s)
|
||||||
p.stdout.close()
|
continue
|
||||||
else:
|
if not hasattr(buf, 'encode'):
|
||||||
p.stdout = None
|
buf = buf.decode()
|
||||||
|
if s.std_name == 'stdout':
|
||||||
if p.stderr:
|
self.stdout += buf
|
||||||
self.stderr = p.stderr.read()
|
else:
|
||||||
p.stderr.close()
|
self.stderr += buf
|
||||||
else:
|
if self.tee[s.std_name]:
|
||||||
p.stderr = None
|
s.dest.write(buf)
|
||||||
|
s.dest.flush()
|
||||||
return self.process.wait()
|
return p.wait()
|
||||||
|
543
git_config.py
543
git_config.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,42 +14,103 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
import errno
|
||||||
|
import json
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import ssl
|
||||||
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
from error import GitError
|
try:
|
||||||
from git_command import GitCommand
|
import threading as _threading
|
||||||
|
except ImportError:
|
||||||
|
import dummy_threading as _threading
|
||||||
|
import time
|
||||||
|
|
||||||
R_HEADS = 'refs/heads/'
|
from pyversion import is_python3
|
||||||
R_TAGS = 'refs/tags/'
|
if is_python3():
|
||||||
ID_RE = re.compile('^[0-9a-f]{40}$')
|
import urllib.request
|
||||||
|
import urllib.error
|
||||||
|
else:
|
||||||
|
import urllib2
|
||||||
|
import imp
|
||||||
|
urllib = imp.new_module('urllib')
|
||||||
|
urllib.request = urllib2
|
||||||
|
urllib.error = urllib2
|
||||||
|
|
||||||
|
from signal import SIGTERM
|
||||||
|
from error import GitError, UploadError
|
||||||
|
import platform_utils
|
||||||
|
from repo_trace import Trace
|
||||||
|
if is_python3():
|
||||||
|
from http.client import HTTPException
|
||||||
|
else:
|
||||||
|
from httplib import HTTPException
|
||||||
|
|
||||||
|
from git_command import GitCommand
|
||||||
|
from git_command import ssh_sock
|
||||||
|
from git_command import terminate_ssh_clients
|
||||||
|
from git_refs import R_CHANGES, R_HEADS, R_TAGS
|
||||||
|
|
||||||
|
ID_RE = re.compile(r'^[0-9a-f]{40}$')
|
||||||
|
|
||||||
|
REVIEW_CACHE = dict()
|
||||||
|
|
||||||
|
def IsChange(rev):
|
||||||
|
return rev.startswith(R_CHANGES)
|
||||||
|
|
||||||
def IsId(rev):
|
def IsId(rev):
|
||||||
return ID_RE.match(rev)
|
return ID_RE.match(rev)
|
||||||
|
|
||||||
|
def IsTag(rev):
|
||||||
|
return rev.startswith(R_TAGS)
|
||||||
|
|
||||||
|
def IsImmutable(rev):
|
||||||
|
return IsChange(rev) or IsId(rev) or IsTag(rev)
|
||||||
|
|
||||||
|
def _key(name):
|
||||||
|
parts = name.split('.')
|
||||||
|
if len(parts) < 2:
|
||||||
|
return name.lower()
|
||||||
|
parts[ 0] = parts[ 0].lower()
|
||||||
|
parts[-1] = parts[-1].lower()
|
||||||
|
return '.'.join(parts)
|
||||||
|
|
||||||
class GitConfig(object):
|
class GitConfig(object):
|
||||||
|
_ForUser = None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def ForUser(cls):
|
def ForUser(cls):
|
||||||
return cls(file = os.path.expanduser('~/.gitconfig'))
|
if cls._ForUser is None:
|
||||||
|
cls._ForUser = cls(configfile = os.path.expanduser('~/.gitconfig'))
|
||||||
|
return cls._ForUser
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def ForRepository(cls, gitdir, defaults=None):
|
def ForRepository(cls, gitdir, defaults=None):
|
||||||
return cls(file = os.path.join(gitdir, 'config'),
|
return cls(configfile = os.path.join(gitdir, 'config'),
|
||||||
defaults = defaults)
|
defaults = defaults)
|
||||||
|
|
||||||
def __init__(self, file, defaults=None):
|
def __init__(self, configfile, defaults=None, jsonFile=None):
|
||||||
self.file = file
|
self.file = configfile
|
||||||
self.defaults = defaults
|
self.defaults = defaults
|
||||||
self._cache_dict = None
|
self._cache_dict = None
|
||||||
|
self._section_dict = None
|
||||||
self._remotes = {}
|
self._remotes = {}
|
||||||
self._branches = {}
|
self._branches = {}
|
||||||
|
|
||||||
|
self._json = jsonFile
|
||||||
|
if self._json is None:
|
||||||
|
self._json = os.path.join(
|
||||||
|
os.path.dirname(self.file),
|
||||||
|
'.repo_' + os.path.basename(self.file) + '.json')
|
||||||
|
|
||||||
def Has(self, name, include_defaults = True):
|
def Has(self, name, include_defaults = True):
|
||||||
"""Return true if this configuration file has the key.
|
"""Return true if this configuration file has the key.
|
||||||
"""
|
"""
|
||||||
name = name.lower()
|
if _key(name) in self._cache:
|
||||||
if name in self._cache:
|
|
||||||
return True
|
return True
|
||||||
if include_defaults and self.defaults:
|
if include_defaults and self.defaults:
|
||||||
return self.defaults.Has(name, include_defaults = True)
|
return self.defaults.Has(name, include_defaults = True)
|
||||||
@ -70,22 +132,20 @@ class GitConfig(object):
|
|||||||
return False
|
return False
|
||||||
return None
|
return None
|
||||||
|
|
||||||
def GetString(self, name, all=False):
|
def GetString(self, name, all_keys=False):
|
||||||
"""Get the first value for a key, or None if it is not defined.
|
"""Get the first value for a key, or None if it is not defined.
|
||||||
|
|
||||||
This configuration file is used first, if the key is not
|
This configuration file is used first, if the key is not
|
||||||
defined or all = True then the defaults are also searched.
|
defined or all_keys = True then the defaults are also searched.
|
||||||
"""
|
"""
|
||||||
name = name.lower()
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
v = self._cache[name]
|
v = self._cache[_key(name)]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
if self.defaults:
|
if self.defaults:
|
||||||
return self.defaults.GetString(name, all = all)
|
return self.defaults.GetString(name, all_keys = all_keys)
|
||||||
v = []
|
v = []
|
||||||
|
|
||||||
if not all:
|
if not all_keys:
|
||||||
if v:
|
if v:
|
||||||
return v[0]
|
return v[0]
|
||||||
return None
|
return None
|
||||||
@ -93,7 +153,7 @@ class GitConfig(object):
|
|||||||
r = []
|
r = []
|
||||||
r.extend(v)
|
r.extend(v)
|
||||||
if self.defaults:
|
if self.defaults:
|
||||||
r.extend(self.defaults.GetString(name, all = True))
|
r.extend(self.defaults.GetString(name, all_keys = True))
|
||||||
return r
|
return r
|
||||||
|
|
||||||
def SetString(self, name, value):
|
def SetString(self, name, value):
|
||||||
@ -103,16 +163,16 @@ class GitConfig(object):
|
|||||||
The supplied value should be either a string,
|
The supplied value should be either a string,
|
||||||
or a list of strings (to store multiple values).
|
or a list of strings (to store multiple values).
|
||||||
"""
|
"""
|
||||||
name = name.lower()
|
key = _key(name)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
old = self._cache[name]
|
old = self._cache[key]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
old = []
|
old = []
|
||||||
|
|
||||||
if value is None:
|
if value is None:
|
||||||
if old:
|
if old:
|
||||||
del self._cache[name]
|
del self._cache[key]
|
||||||
self._do('--unset-all', name)
|
self._do('--unset-all', name)
|
||||||
|
|
||||||
elif isinstance(value, list):
|
elif isinstance(value, list):
|
||||||
@ -123,13 +183,13 @@ class GitConfig(object):
|
|||||||
self.SetString(name, value[0])
|
self.SetString(name, value[0])
|
||||||
|
|
||||||
elif old != value:
|
elif old != value:
|
||||||
self._cache[name] = list(value)
|
self._cache[key] = list(value)
|
||||||
self._do('--replace-all', name, value[0])
|
self._do('--replace-all', name, value[0])
|
||||||
for i in xrange(1, len(value)):
|
for i in range(1, len(value)):
|
||||||
self._do('--add', name, value[i])
|
self._do('--add', name, value[i])
|
||||||
|
|
||||||
elif len(old) != 1 or old[0] != value:
|
elif len(old) != 1 or old[0] != value:
|
||||||
self._cache[name] = [value]
|
self._cache[key] = [value]
|
||||||
self._do('--replace-all', name, value)
|
self._do('--replace-all', name, value)
|
||||||
|
|
||||||
def GetRemote(self, name):
|
def GetRemote(self, name):
|
||||||
@ -152,6 +212,47 @@ class GitConfig(object):
|
|||||||
self._branches[b.name] = b
|
self._branches[b.name] = b
|
||||||
return b
|
return b
|
||||||
|
|
||||||
|
def GetSubSections(self, section):
|
||||||
|
"""List all subsection names matching $section.*.*
|
||||||
|
"""
|
||||||
|
return self._sections.get(section, set())
|
||||||
|
|
||||||
|
def HasSection(self, section, subsection = ''):
|
||||||
|
"""Does at least one key in section.subsection exist?
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return subsection in self._sections[section]
|
||||||
|
except KeyError:
|
||||||
|
return False
|
||||||
|
|
||||||
|
def UrlInsteadOf(self, url):
|
||||||
|
"""Resolve any url.*.insteadof references.
|
||||||
|
"""
|
||||||
|
for new_url in self.GetSubSections('url'):
|
||||||
|
for old_url in self.GetString('url.%s.insteadof' % new_url, True):
|
||||||
|
if old_url is not None and url.startswith(old_url):
|
||||||
|
return new_url + url[len(old_url):]
|
||||||
|
return url
|
||||||
|
|
||||||
|
@property
|
||||||
|
def _sections(self):
|
||||||
|
d = self._section_dict
|
||||||
|
if d is None:
|
||||||
|
d = {}
|
||||||
|
for name in self._cache.keys():
|
||||||
|
p = name.split('.')
|
||||||
|
if 2 == len(p):
|
||||||
|
section = p[0]
|
||||||
|
subsect = ''
|
||||||
|
else:
|
||||||
|
section = p[0]
|
||||||
|
subsect = '.'.join(p[1:-1])
|
||||||
|
if section not in d:
|
||||||
|
d[section] = set()
|
||||||
|
d[section].add(subsect)
|
||||||
|
self._section_dict = d
|
||||||
|
return d
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def _cache(self):
|
def _cache(self):
|
||||||
if self._cache_dict is None:
|
if self._cache_dict is None:
|
||||||
@ -159,21 +260,61 @@ class GitConfig(object):
|
|||||||
return self._cache_dict
|
return self._cache_dict
|
||||||
|
|
||||||
def _Read(self):
|
def _Read(self):
|
||||||
d = self._do('--null', '--list')
|
d = self._ReadJson()
|
||||||
c = {}
|
if d is None:
|
||||||
while d:
|
d = self._ReadGit()
|
||||||
lf = d.index('\n')
|
self._SaveJson(d)
|
||||||
nul = d.index('\0', lf + 1)
|
return d
|
||||||
|
|
||||||
key = d[0:lf]
|
def _ReadJson(self):
|
||||||
val = d[lf + 1:nul]
|
try:
|
||||||
|
if os.path.getmtime(self._json) \
|
||||||
|
<= os.path.getmtime(self.file):
|
||||||
|
platform_utils.remove(self._json)
|
||||||
|
return None
|
||||||
|
except OSError:
|
||||||
|
return None
|
||||||
|
try:
|
||||||
|
Trace(': parsing %s', self.file)
|
||||||
|
with open(self._json) as fd:
|
||||||
|
return json.load(fd)
|
||||||
|
except (IOError, ValueError):
|
||||||
|
platform_utils.remove(self._json)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _SaveJson(self, cache):
|
||||||
|
try:
|
||||||
|
with open(self._json, 'w') as fd:
|
||||||
|
json.dump(cache, fd, indent=2)
|
||||||
|
except (IOError, TypeError):
|
||||||
|
if os.path.exists(self._json):
|
||||||
|
platform_utils.remove(self._json)
|
||||||
|
|
||||||
|
def _ReadGit(self):
|
||||||
|
"""
|
||||||
|
Read configuration data from git.
|
||||||
|
|
||||||
|
This internal method populates the GitConfig cache.
|
||||||
|
|
||||||
|
"""
|
||||||
|
c = {}
|
||||||
|
d = self._do('--null', '--list')
|
||||||
|
if d is None:
|
||||||
|
return c
|
||||||
|
if not is_python3():
|
||||||
|
d = d.decode('utf-8')
|
||||||
|
for line in d.rstrip('\0').split('\0'):
|
||||||
|
if '\n' in line:
|
||||||
|
key, val = line.split('\n', 1)
|
||||||
|
else:
|
||||||
|
key = line
|
||||||
|
val = None
|
||||||
|
|
||||||
if key in c:
|
if key in c:
|
||||||
c[key].append(val)
|
c[key].append(val)
|
||||||
else:
|
else:
|
||||||
c[key] = [val]
|
c[key] = [val]
|
||||||
|
|
||||||
d = d[nul + 1:]
|
|
||||||
return c
|
return c
|
||||||
|
|
||||||
def _do(self, *args):
|
def _do(self, *args):
|
||||||
@ -246,6 +387,193 @@ class RefSpec(object):
|
|||||||
return s
|
return s
|
||||||
|
|
||||||
|
|
||||||
|
_master_processes = []
|
||||||
|
_master_keys = set()
|
||||||
|
_ssh_master = True
|
||||||
|
_master_keys_lock = None
|
||||||
|
|
||||||
|
def init_ssh():
|
||||||
|
"""Should be called once at the start of repo to init ssh master handling.
|
||||||
|
|
||||||
|
At the moment, all we do is to create our lock.
|
||||||
|
"""
|
||||||
|
global _master_keys_lock
|
||||||
|
assert _master_keys_lock is None, "Should only call init_ssh once"
|
||||||
|
_master_keys_lock = _threading.Lock()
|
||||||
|
|
||||||
|
def _open_ssh(host, port=None):
|
||||||
|
global _ssh_master
|
||||||
|
|
||||||
|
# Acquire the lock. This is needed to prevent opening multiple masters for
|
||||||
|
# the same host when we're running "repo sync -jN" (for N > 1) _and_ the
|
||||||
|
# manifest <remote fetch="ssh://xyz"> specifies a different host from the
|
||||||
|
# one that was passed to repo init.
|
||||||
|
_master_keys_lock.acquire()
|
||||||
|
try:
|
||||||
|
|
||||||
|
# Check to see whether we already think that the master is running; if we
|
||||||
|
# think it's already running, return right away.
|
||||||
|
if port is not None:
|
||||||
|
key = '%s:%s' % (host, port)
|
||||||
|
else:
|
||||||
|
key = host
|
||||||
|
|
||||||
|
if key in _master_keys:
|
||||||
|
return True
|
||||||
|
|
||||||
|
if not _ssh_master \
|
||||||
|
or 'GIT_SSH' in os.environ \
|
||||||
|
or sys.platform in ('win32', 'cygwin'):
|
||||||
|
# failed earlier, or cygwin ssh can't do this
|
||||||
|
#
|
||||||
|
return False
|
||||||
|
|
||||||
|
# We will make two calls to ssh; this is the common part of both calls.
|
||||||
|
command_base = ['ssh',
|
||||||
|
'-o','ControlPath %s' % ssh_sock(),
|
||||||
|
host]
|
||||||
|
if port is not None:
|
||||||
|
command_base[1:1] = ['-p', str(port)]
|
||||||
|
|
||||||
|
# Since the key wasn't in _master_keys, we think that master isn't running.
|
||||||
|
# ...but before actually starting a master, we'll double-check. This can
|
||||||
|
# be important because we can't tell that that 'git@myhost.com' is the same
|
||||||
|
# as 'myhost.com' where "User git" is setup in the user's ~/.ssh/config file.
|
||||||
|
check_command = command_base + ['-O','check']
|
||||||
|
try:
|
||||||
|
Trace(': %s', ' '.join(check_command))
|
||||||
|
check_process = subprocess.Popen(check_command,
|
||||||
|
stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE)
|
||||||
|
check_process.communicate() # read output, but ignore it...
|
||||||
|
isnt_running = check_process.wait()
|
||||||
|
|
||||||
|
if not isnt_running:
|
||||||
|
# Our double-check found that the master _was_ infact running. Add to
|
||||||
|
# the list of keys.
|
||||||
|
_master_keys.add(key)
|
||||||
|
return True
|
||||||
|
except Exception:
|
||||||
|
# Ignore excpetions. We we will fall back to the normal command and print
|
||||||
|
# to the log there.
|
||||||
|
pass
|
||||||
|
|
||||||
|
command = command_base[:1] + \
|
||||||
|
['-M', '-N'] + \
|
||||||
|
command_base[1:]
|
||||||
|
try:
|
||||||
|
Trace(': %s', ' '.join(command))
|
||||||
|
p = subprocess.Popen(command)
|
||||||
|
except Exception as e:
|
||||||
|
_ssh_master = False
|
||||||
|
print('\nwarn: cannot enable ssh control master for %s:%s\n%s'
|
||||||
|
% (host,port, str(e)), file=sys.stderr)
|
||||||
|
return False
|
||||||
|
|
||||||
|
time.sleep(1)
|
||||||
|
ssh_died = (p.poll() is not None)
|
||||||
|
if ssh_died:
|
||||||
|
return False
|
||||||
|
|
||||||
|
_master_processes.append(p)
|
||||||
|
_master_keys.add(key)
|
||||||
|
return True
|
||||||
|
finally:
|
||||||
|
_master_keys_lock.release()
|
||||||
|
|
||||||
|
def close_ssh():
|
||||||
|
global _master_keys_lock
|
||||||
|
|
||||||
|
terminate_ssh_clients()
|
||||||
|
|
||||||
|
for p in _master_processes:
|
||||||
|
try:
|
||||||
|
os.kill(p.pid, SIGTERM)
|
||||||
|
p.wait()
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
del _master_processes[:]
|
||||||
|
_master_keys.clear()
|
||||||
|
|
||||||
|
d = ssh_sock(create=False)
|
||||||
|
if d:
|
||||||
|
try:
|
||||||
|
platform_utils.rmdir(os.path.dirname(d))
|
||||||
|
except OSError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
# We're done with the lock, so we can delete it.
|
||||||
|
_master_keys_lock = None
|
||||||
|
|
||||||
|
URI_SCP = re.compile(r'^([^@:]*@?[^:/]{1,}):')
|
||||||
|
URI_ALL = re.compile(r'^([a-z][a-z+-]*)://([^@/]*@?[^/]*)/')
|
||||||
|
|
||||||
|
def GetSchemeFromUrl(url):
|
||||||
|
m = URI_ALL.match(url)
|
||||||
|
if m:
|
||||||
|
return m.group(1)
|
||||||
|
return None
|
||||||
|
|
||||||
|
@contextlib.contextmanager
|
||||||
|
def GetUrlCookieFile(url, quiet):
|
||||||
|
if url.startswith('persistent-'):
|
||||||
|
try:
|
||||||
|
p = subprocess.Popen(
|
||||||
|
['git-remote-persistent-https', '-print_config', url],
|
||||||
|
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
|
||||||
|
stderr=subprocess.PIPE)
|
||||||
|
try:
|
||||||
|
cookieprefix = 'http.cookiefile='
|
||||||
|
proxyprefix = 'http.proxy='
|
||||||
|
cookiefile = None
|
||||||
|
proxy = None
|
||||||
|
for line in p.stdout:
|
||||||
|
line = line.strip().decode('utf-8')
|
||||||
|
if line.startswith(cookieprefix):
|
||||||
|
cookiefile = os.path.expanduser(line[len(cookieprefix):])
|
||||||
|
if line.startswith(proxyprefix):
|
||||||
|
proxy = line[len(proxyprefix):]
|
||||||
|
# Leave subprocess open, as cookie file may be transient.
|
||||||
|
if cookiefile or proxy:
|
||||||
|
yield cookiefile, proxy
|
||||||
|
return
|
||||||
|
finally:
|
||||||
|
p.stdin.close()
|
||||||
|
if p.wait():
|
||||||
|
err_msg = p.stderr.read().decode('utf-8')
|
||||||
|
if ' -print_config' in err_msg:
|
||||||
|
pass # Persistent proxy doesn't support -print_config.
|
||||||
|
elif not quiet:
|
||||||
|
print(err_msg, file=sys.stderr)
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno == errno.ENOENT:
|
||||||
|
pass # No persistent proxy.
|
||||||
|
raise
|
||||||
|
cookiefile = GitConfig.ForUser().GetString('http.cookiefile')
|
||||||
|
if cookiefile:
|
||||||
|
cookiefile = os.path.expanduser(cookiefile)
|
||||||
|
yield cookiefile, None
|
||||||
|
|
||||||
|
def _preconnect(url):
|
||||||
|
m = URI_ALL.match(url)
|
||||||
|
if m:
|
||||||
|
scheme = m.group(1)
|
||||||
|
host = m.group(2)
|
||||||
|
if ':' in host:
|
||||||
|
host, port = host.split(':')
|
||||||
|
else:
|
||||||
|
port = None
|
||||||
|
if scheme in ('ssh', 'git+ssh', 'ssh+git'):
|
||||||
|
return _open_ssh(host, port)
|
||||||
|
return False
|
||||||
|
|
||||||
|
m = URI_SCP.match(url)
|
||||||
|
if m:
|
||||||
|
host = m.group(1)
|
||||||
|
return _open_ssh(host)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
class Remote(object):
|
class Remote(object):
|
||||||
"""Configuration options related to a remote.
|
"""Configuration options related to a remote.
|
||||||
"""
|
"""
|
||||||
@ -253,16 +581,106 @@ class Remote(object):
|
|||||||
self._config = config
|
self._config = config
|
||||||
self.name = name
|
self.name = name
|
||||||
self.url = self._Get('url')
|
self.url = self._Get('url')
|
||||||
|
self.pushUrl = self._Get('pushurl')
|
||||||
self.review = self._Get('review')
|
self.review = self._Get('review')
|
||||||
self.fetch = map(lambda x: RefSpec.FromString(x),
|
self.projectname = self._Get('projectname')
|
||||||
self._Get('fetch', all=True))
|
self.fetch = list(map(RefSpec.FromString,
|
||||||
|
self._Get('fetch', all_keys=True)))
|
||||||
|
self._review_url = None
|
||||||
|
|
||||||
|
def _InsteadOf(self):
|
||||||
|
globCfg = GitConfig.ForUser()
|
||||||
|
urlList = globCfg.GetSubSections('url')
|
||||||
|
longest = ""
|
||||||
|
longestUrl = ""
|
||||||
|
|
||||||
|
for url in urlList:
|
||||||
|
key = "url." + url + ".insteadOf"
|
||||||
|
insteadOfList = globCfg.GetString(key, all_keys=True)
|
||||||
|
|
||||||
|
for insteadOf in insteadOfList:
|
||||||
|
if self.url.startswith(insteadOf) \
|
||||||
|
and len(insteadOf) > len(longest):
|
||||||
|
longest = insteadOf
|
||||||
|
longestUrl = url
|
||||||
|
|
||||||
|
if len(longest) == 0:
|
||||||
|
return self.url
|
||||||
|
|
||||||
|
return self.url.replace(longest, longestUrl, 1)
|
||||||
|
|
||||||
|
def PreConnectFetch(self):
|
||||||
|
connectionUrl = self._InsteadOf()
|
||||||
|
return _preconnect(connectionUrl)
|
||||||
|
|
||||||
|
def ReviewUrl(self, userEmail, validate_certs):
|
||||||
|
if self._review_url is None:
|
||||||
|
if self.review is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
u = self.review
|
||||||
|
if u.startswith('persistent-'):
|
||||||
|
u = u[len('persistent-'):]
|
||||||
|
if u.split(':')[0] not in ('http', 'https', 'sso', 'ssh'):
|
||||||
|
u = 'http://%s' % u
|
||||||
|
if u.endswith('/Gerrit'):
|
||||||
|
u = u[:len(u) - len('/Gerrit')]
|
||||||
|
if u.endswith('/ssh_info'):
|
||||||
|
u = u[:len(u) - len('/ssh_info')]
|
||||||
|
if not u.endswith('/'):
|
||||||
|
u += '/'
|
||||||
|
http_url = u
|
||||||
|
|
||||||
|
if u in REVIEW_CACHE:
|
||||||
|
self._review_url = REVIEW_CACHE[u]
|
||||||
|
elif 'REPO_HOST_PORT_INFO' in os.environ:
|
||||||
|
host, port = os.environ['REPO_HOST_PORT_INFO'].split()
|
||||||
|
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||||
|
REVIEW_CACHE[u] = self._review_url
|
||||||
|
elif u.startswith('sso:') or u.startswith('ssh:'):
|
||||||
|
self._review_url = u # Assume it's right
|
||||||
|
REVIEW_CACHE[u] = self._review_url
|
||||||
|
elif 'REPO_IGNORE_SSH_INFO' in os.environ:
|
||||||
|
self._review_url = http_url
|
||||||
|
REVIEW_CACHE[u] = self._review_url
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
info_url = u + 'ssh_info'
|
||||||
|
if not validate_certs:
|
||||||
|
context = ssl._create_unverified_context()
|
||||||
|
info = urllib.request.urlopen(info_url, context=context).read()
|
||||||
|
else:
|
||||||
|
info = urllib.request.urlopen(info_url).read()
|
||||||
|
if info == b'NOT_AVAILABLE' or b'<' in info:
|
||||||
|
# If `info` contains '<', we assume the server gave us some sort
|
||||||
|
# of HTML response back, like maybe a login page.
|
||||||
|
#
|
||||||
|
# Assume HTTP if SSH is not enabled or ssh_info doesn't look right.
|
||||||
|
self._review_url = http_url
|
||||||
|
else:
|
||||||
|
info = info.decode('utf-8')
|
||||||
|
host, port = info.split()
|
||||||
|
self._review_url = self._SshReviewUrl(userEmail, host, port)
|
||||||
|
except urllib.error.HTTPError as e:
|
||||||
|
raise UploadError('%s: %s' % (self.review, str(e)))
|
||||||
|
except urllib.error.URLError as e:
|
||||||
|
raise UploadError('%s: %s' % (self.review, str(e)))
|
||||||
|
except HTTPException as e:
|
||||||
|
raise UploadError('%s: %s' % (self.review, e.__class__.__name__))
|
||||||
|
|
||||||
|
REVIEW_CACHE[u] = self._review_url
|
||||||
|
return self._review_url + self.projectname
|
||||||
|
|
||||||
|
def _SshReviewUrl(self, userEmail, host, port):
|
||||||
|
username = self._config.GetString('review.%s.username' % self.review)
|
||||||
|
if username is None:
|
||||||
|
username = userEmail.split('@')[0]
|
||||||
|
return 'ssh://%s@%s:%s/' % (username, host, port)
|
||||||
|
|
||||||
def ToLocal(self, rev):
|
def ToLocal(self, rev):
|
||||||
"""Convert a remote revision string to something we have locally.
|
"""Convert a remote revision string to something we have locally.
|
||||||
"""
|
"""
|
||||||
if IsId(rev):
|
if self.name == '.' or IsId(rev):
|
||||||
return rev
|
|
||||||
if rev.startswith(R_TAGS):
|
|
||||||
return rev
|
return rev
|
||||||
|
|
||||||
if not rev.startswith('refs/'):
|
if not rev.startswith('refs/'):
|
||||||
@ -271,7 +689,12 @@ class Remote(object):
|
|||||||
for spec in self.fetch:
|
for spec in self.fetch:
|
||||||
if spec.SourceMatches(rev):
|
if spec.SourceMatches(rev):
|
||||||
return spec.MapSource(rev)
|
return spec.MapSource(rev)
|
||||||
raise GitError('remote %s does not have %s' % (self.name, rev))
|
|
||||||
|
if not rev.startswith(R_HEADS):
|
||||||
|
return rev
|
||||||
|
|
||||||
|
raise GitError('%s: remote %s does not have %s' %
|
||||||
|
(self.projectname, self.name, rev))
|
||||||
|
|
||||||
def WritesTo(self, ref):
|
def WritesTo(self, ref):
|
||||||
"""True if the remote stores to the tracking ref.
|
"""True if the remote stores to the tracking ref.
|
||||||
@ -281,27 +704,34 @@ class Remote(object):
|
|||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def ResetFetch(self):
|
def ResetFetch(self, mirror=False):
|
||||||
"""Set the fetch refspec to its default value.
|
"""Set the fetch refspec to its default value.
|
||||||
"""
|
"""
|
||||||
self.fetch = [RefSpec(True,
|
if mirror:
|
||||||
'refs/heads/*',
|
dst = 'refs/heads/*'
|
||||||
'refs/remotes/%s/*' % self.name)]
|
else:
|
||||||
|
dst = 'refs/remotes/%s/*' % self.name
|
||||||
|
self.fetch = [RefSpec(True, 'refs/heads/*', dst)]
|
||||||
|
|
||||||
def Save(self):
|
def Save(self):
|
||||||
"""Save this remote to the configuration.
|
"""Save this remote to the configuration.
|
||||||
"""
|
"""
|
||||||
self._Set('url', self.url)
|
self._Set('url', self.url)
|
||||||
|
if self.pushUrl is not None:
|
||||||
|
self._Set('pushurl', self.pushUrl + '/' + self.projectname)
|
||||||
|
else:
|
||||||
|
self._Set('pushurl', self.pushUrl)
|
||||||
self._Set('review', self.review)
|
self._Set('review', self.review)
|
||||||
self._Set('fetch', map(lambda x: str(x), self.fetch))
|
self._Set('projectname', self.projectname)
|
||||||
|
self._Set('fetch', list(map(str, self.fetch)))
|
||||||
|
|
||||||
def _Set(self, key, value):
|
def _Set(self, key, value):
|
||||||
key = 'remote.%s.%s' % (self.name, key)
|
key = 'remote.%s.%s' % (self.name, key)
|
||||||
return self._config.SetString(key, value)
|
return self._config.SetString(key, value)
|
||||||
|
|
||||||
def _Get(self, key, all=False):
|
def _Get(self, key, all_keys=False):
|
||||||
key = 'remote.%s.%s' % (self.name, key)
|
key = 'remote.%s.%s' % (self.name, key)
|
||||||
return self._config.GetString(key, all = all)
|
return self._config.GetString(key, all_keys = all_keys)
|
||||||
|
|
||||||
|
|
||||||
class Branch(object):
|
class Branch(object):
|
||||||
@ -329,16 +759,25 @@ class Branch(object):
|
|||||||
def Save(self):
|
def Save(self):
|
||||||
"""Save this branch back into the configuration.
|
"""Save this branch back into the configuration.
|
||||||
"""
|
"""
|
||||||
self._Set('merge', self.merge)
|
if self._config.HasSection('branch', self.name):
|
||||||
if self.remote:
|
if self.remote:
|
||||||
self._Set('remote', self.remote.name)
|
self._Set('remote', self.remote.name)
|
||||||
|
else:
|
||||||
|
self._Set('remote', None)
|
||||||
|
self._Set('merge', self.merge)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
self._Set('remote', None)
|
with open(self._config.file, 'a') as fd:
|
||||||
|
fd.write('[branch "%s"]\n' % self.name)
|
||||||
|
if self.remote:
|
||||||
|
fd.write('\tremote = %s\n' % self.remote.name)
|
||||||
|
if self.merge:
|
||||||
|
fd.write('\tmerge = %s\n' % self.merge)
|
||||||
|
|
||||||
def _Set(self, key, value):
|
def _Set(self, key, value):
|
||||||
key = 'branch.%s.%s' % (self.name, key)
|
key = 'branch.%s.%s' % (self.name, key)
|
||||||
return self._config.SetString(key, value)
|
return self._config.SetString(key, value)
|
||||||
|
|
||||||
def _Get(self, key, all=False):
|
def _Get(self, key, all_keys=False):
|
||||||
key = 'branch.%s.%s' % (self.name, key)
|
key = 'branch.%s.%s' % (self.name, key)
|
||||||
return self._config.GetString(key, all = all)
|
return self._config.GetString(key, all_keys = all_keys)
|
||||||
|
162
git_refs.py
Normal file
162
git_refs.py
Normal file
@ -0,0 +1,162 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import os
|
||||||
|
from repo_trace import Trace
|
||||||
|
import platform_utils
|
||||||
|
|
||||||
|
HEAD = 'HEAD'
|
||||||
|
R_CHANGES = 'refs/changes/'
|
||||||
|
R_HEADS = 'refs/heads/'
|
||||||
|
R_TAGS = 'refs/tags/'
|
||||||
|
R_PUB = 'refs/published/'
|
||||||
|
R_M = 'refs/remotes/m/'
|
||||||
|
|
||||||
|
|
||||||
|
class GitRefs(object):
|
||||||
|
def __init__(self, gitdir):
|
||||||
|
self._gitdir = gitdir
|
||||||
|
self._phyref = None
|
||||||
|
self._symref = None
|
||||||
|
self._mtime = {}
|
||||||
|
|
||||||
|
@property
|
||||||
|
def all(self):
|
||||||
|
self._EnsureLoaded()
|
||||||
|
return self._phyref
|
||||||
|
|
||||||
|
def get(self, name):
|
||||||
|
try:
|
||||||
|
return self.all[name]
|
||||||
|
except KeyError:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def deleted(self, name):
|
||||||
|
if self._phyref is not None:
|
||||||
|
if name in self._phyref:
|
||||||
|
del self._phyref[name]
|
||||||
|
|
||||||
|
if name in self._symref:
|
||||||
|
del self._symref[name]
|
||||||
|
|
||||||
|
if name in self._mtime:
|
||||||
|
del self._mtime[name]
|
||||||
|
|
||||||
|
def symref(self, name):
|
||||||
|
try:
|
||||||
|
self._EnsureLoaded()
|
||||||
|
return self._symref[name]
|
||||||
|
except KeyError:
|
||||||
|
return ''
|
||||||
|
|
||||||
|
def _EnsureLoaded(self):
|
||||||
|
if self._phyref is None or self._NeedUpdate():
|
||||||
|
self._LoadAll()
|
||||||
|
|
||||||
|
def _NeedUpdate(self):
|
||||||
|
Trace(': scan refs %s', self._gitdir)
|
||||||
|
|
||||||
|
for name, mtime in self._mtime.items():
|
||||||
|
try:
|
||||||
|
if mtime != os.path.getmtime(os.path.join(self._gitdir, name)):
|
||||||
|
return True
|
||||||
|
except OSError:
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _LoadAll(self):
|
||||||
|
Trace(': load refs %s', self._gitdir)
|
||||||
|
|
||||||
|
self._phyref = {}
|
||||||
|
self._symref = {}
|
||||||
|
self._mtime = {}
|
||||||
|
|
||||||
|
self._ReadPackedRefs()
|
||||||
|
self._ReadLoose('refs/')
|
||||||
|
self._ReadLoose1(os.path.join(self._gitdir, HEAD), HEAD)
|
||||||
|
|
||||||
|
scan = self._symref
|
||||||
|
attempts = 0
|
||||||
|
while scan and attempts < 5:
|
||||||
|
scan_next = {}
|
||||||
|
for name, dest in scan.items():
|
||||||
|
if dest in self._phyref:
|
||||||
|
self._phyref[name] = self._phyref[dest]
|
||||||
|
else:
|
||||||
|
scan_next[name] = dest
|
||||||
|
scan = scan_next
|
||||||
|
attempts += 1
|
||||||
|
|
||||||
|
def _ReadPackedRefs(self):
|
||||||
|
path = os.path.join(self._gitdir, 'packed-refs')
|
||||||
|
try:
|
||||||
|
fd = open(path, 'r')
|
||||||
|
mtime = os.path.getmtime(path)
|
||||||
|
except IOError:
|
||||||
|
return
|
||||||
|
except OSError:
|
||||||
|
return
|
||||||
|
try:
|
||||||
|
for line in fd:
|
||||||
|
line = str(line)
|
||||||
|
if line[0] == '#':
|
||||||
|
continue
|
||||||
|
if line[0] == '^':
|
||||||
|
continue
|
||||||
|
|
||||||
|
line = line[:-1]
|
||||||
|
p = line.split(' ')
|
||||||
|
ref_id = p[0]
|
||||||
|
name = p[1]
|
||||||
|
|
||||||
|
self._phyref[name] = ref_id
|
||||||
|
finally:
|
||||||
|
fd.close()
|
||||||
|
self._mtime['packed-refs'] = mtime
|
||||||
|
|
||||||
|
def _ReadLoose(self, prefix):
|
||||||
|
base = os.path.join(self._gitdir, prefix)
|
||||||
|
for name in platform_utils.listdir(base):
|
||||||
|
p = os.path.join(base, name)
|
||||||
|
if platform_utils.isdir(p):
|
||||||
|
self._mtime[prefix] = os.path.getmtime(base)
|
||||||
|
self._ReadLoose(prefix + name + '/')
|
||||||
|
elif name.endswith('.lock'):
|
||||||
|
pass
|
||||||
|
else:
|
||||||
|
self._ReadLoose1(p, prefix + name)
|
||||||
|
|
||||||
|
def _ReadLoose1(self, path, name):
|
||||||
|
try:
|
||||||
|
with open(path) as fd:
|
||||||
|
mtime = os.path.getmtime(path)
|
||||||
|
ref_id = fd.readline()
|
||||||
|
except (IOError, OSError):
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
ref_id = ref_id.decode()
|
||||||
|
except AttributeError:
|
||||||
|
pass
|
||||||
|
if not ref_id:
|
||||||
|
return
|
||||||
|
ref_id = ref_id[:-1]
|
||||||
|
|
||||||
|
if ref_id.startswith('ref: '):
|
||||||
|
self._symref[name] = ref_id[5:]
|
||||||
|
else:
|
||||||
|
self._phyref[name] = ref_id
|
||||||
|
self._mtime[name] = mtime
|
10
remote.py → git_ssh
Normal file → Executable file
10
remote.py → git_ssh
Normal file → Executable file
@ -1,5 +1,6 @@
|
|||||||
|
#!/bin/sh
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
@ -13,9 +14,4 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
class Remote(object):
|
exec ssh -o "ControlMaster no" -o "ControlPath $REPO_SSH_SOCK" "$@"
|
||||||
def __init__(self, name, fetch=None, review=None):
|
|
||||||
self.name = name
|
|
||||||
self.fetchUrl = fetch
|
|
||||||
self.reviewUrl = review
|
|
||||||
self.requiredCommits = []
|
|
155
gitc_utils.py
Normal file
155
gitc_utils.py
Normal file
@ -0,0 +1,155 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2015 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
import time
|
||||||
|
|
||||||
|
import git_command
|
||||||
|
import git_config
|
||||||
|
import wrapper
|
||||||
|
|
||||||
|
from error import ManifestParseError
|
||||||
|
|
||||||
|
NUM_BATCH_RETRIEVE_REVISIONID = 32
|
||||||
|
|
||||||
|
def get_gitc_manifest_dir():
|
||||||
|
return wrapper.Wrapper().get_gitc_manifest_dir()
|
||||||
|
|
||||||
|
def parse_clientdir(gitc_fs_path):
|
||||||
|
return wrapper.Wrapper().gitc_parse_clientdir(gitc_fs_path)
|
||||||
|
|
||||||
|
def _set_project_revisions(projects):
|
||||||
|
"""Sets the revisionExpr for a list of projects.
|
||||||
|
|
||||||
|
Because of the limit of open file descriptors allowed, length of projects
|
||||||
|
should not be overly large. Recommend calling this function multiple times
|
||||||
|
with each call not exceeding NUM_BATCH_RETRIEVE_REVISIONID projects.
|
||||||
|
|
||||||
|
@param projects: List of project objects to set the revionExpr for.
|
||||||
|
"""
|
||||||
|
# Retrieve the commit id for each project based off of it's current
|
||||||
|
# revisionExpr and it is not already a commit id.
|
||||||
|
project_gitcmds = [(
|
||||||
|
project, git_command.GitCommand(None,
|
||||||
|
['ls-remote',
|
||||||
|
project.remote.url,
|
||||||
|
project.revisionExpr],
|
||||||
|
capture_stdout=True, cwd='/tmp'))
|
||||||
|
for project in projects if not git_config.IsId(project.revisionExpr)]
|
||||||
|
for proj, gitcmd in project_gitcmds:
|
||||||
|
if gitcmd.Wait():
|
||||||
|
print('FATAL: Failed to retrieve revisionExpr for %s' % proj)
|
||||||
|
sys.exit(1)
|
||||||
|
revisionExpr = gitcmd.stdout.split('\t')[0]
|
||||||
|
if not revisionExpr:
|
||||||
|
raise ManifestParseError('Invalid SHA-1 revision project %s (%s)' %
|
||||||
|
(proj.remote.url, proj.revisionExpr))
|
||||||
|
proj.revisionExpr = revisionExpr
|
||||||
|
|
||||||
|
def _manifest_groups(manifest):
|
||||||
|
"""Returns the manifest group string that should be synced
|
||||||
|
|
||||||
|
This is the same logic used by Command.GetProjects(), which is used during
|
||||||
|
repo sync
|
||||||
|
|
||||||
|
@param manifest: The XmlManifest object
|
||||||
|
"""
|
||||||
|
mp = manifest.manifestProject
|
||||||
|
groups = mp.config.GetString('manifest.groups')
|
||||||
|
if not groups:
|
||||||
|
groups = 'default,platform-' + platform.system().lower()
|
||||||
|
return groups
|
||||||
|
|
||||||
|
def generate_gitc_manifest(gitc_manifest, manifest, paths=None):
|
||||||
|
"""Generate a manifest for shafsd to use for this GITC client.
|
||||||
|
|
||||||
|
@param gitc_manifest: Current gitc manifest, or None if there isn't one yet.
|
||||||
|
@param manifest: A GitcManifest object loaded with the current repo manifest.
|
||||||
|
@param paths: List of project paths we want to update.
|
||||||
|
"""
|
||||||
|
|
||||||
|
print('Generating GITC Manifest by fetching revision SHAs for each '
|
||||||
|
'project.')
|
||||||
|
if paths is None:
|
||||||
|
paths = list(manifest.paths.keys())
|
||||||
|
|
||||||
|
groups = [x for x in re.split(r'[,\s]+', _manifest_groups(manifest)) if x]
|
||||||
|
|
||||||
|
# Convert the paths to projects, and filter them to the matched groups.
|
||||||
|
projects = [manifest.paths[p] for p in paths]
|
||||||
|
projects = [p for p in projects if p.MatchesGroups(groups)]
|
||||||
|
|
||||||
|
if gitc_manifest is not None:
|
||||||
|
for path, proj in manifest.paths.items():
|
||||||
|
if not proj.MatchesGroups(groups):
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not proj.upstream and not git_config.IsId(proj.revisionExpr):
|
||||||
|
proj.upstream = proj.revisionExpr
|
||||||
|
|
||||||
|
if not path in gitc_manifest.paths:
|
||||||
|
# Any new projects need their first revision, even if we weren't asked
|
||||||
|
# for them.
|
||||||
|
projects.append(proj)
|
||||||
|
elif not path in paths:
|
||||||
|
# And copy revisions from the previous manifest if we're not updating
|
||||||
|
# them now.
|
||||||
|
gitc_proj = gitc_manifest.paths[path]
|
||||||
|
if gitc_proj.old_revision:
|
||||||
|
proj.revisionExpr = None
|
||||||
|
proj.old_revision = gitc_proj.old_revision
|
||||||
|
else:
|
||||||
|
proj.revisionExpr = gitc_proj.revisionExpr
|
||||||
|
|
||||||
|
index = 0
|
||||||
|
while index < len(projects):
|
||||||
|
_set_project_revisions(
|
||||||
|
projects[index:(index+NUM_BATCH_RETRIEVE_REVISIONID)])
|
||||||
|
index += NUM_BATCH_RETRIEVE_REVISIONID
|
||||||
|
|
||||||
|
if gitc_manifest is not None:
|
||||||
|
for path, proj in gitc_manifest.paths.items():
|
||||||
|
if proj.old_revision and path in paths:
|
||||||
|
# If we updated a project that has been started, keep the old-revision
|
||||||
|
# updated.
|
||||||
|
repo_proj = manifest.paths[path]
|
||||||
|
repo_proj.old_revision = repo_proj.revisionExpr
|
||||||
|
repo_proj.revisionExpr = None
|
||||||
|
|
||||||
|
# Convert URLs from relative to absolute.
|
||||||
|
for _name, remote in manifest.remotes.items():
|
||||||
|
remote.fetchUrl = remote.resolvedFetchUrl
|
||||||
|
|
||||||
|
# Save the manifest.
|
||||||
|
save_manifest(manifest)
|
||||||
|
|
||||||
|
def save_manifest(manifest, client_dir=None):
|
||||||
|
"""Save the manifest file in the client_dir.
|
||||||
|
|
||||||
|
@param client_dir: Client directory to save the manifest in.
|
||||||
|
@param manifest: Manifest object to save.
|
||||||
|
"""
|
||||||
|
if not client_dir:
|
||||||
|
client_dir = manifest.gitc_client_dir
|
||||||
|
with open(os.path.join(client_dir, '.manifest'), 'w') as f:
|
||||||
|
manifest.Save(f, groups=_manifest_groups(manifest))
|
||||||
|
# TODO(sbasi/jorg): Come up with a solution to remove the sleep below.
|
||||||
|
# Give the GITC filesystem time to register the manifest changes.
|
||||||
|
time.sleep(3)
|
191
hooks/commit-msg
Executable file
191
hooks/commit-msg
Executable file
@ -0,0 +1,191 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
# From Gerrit Code Review 2.14.6
|
||||||
|
#
|
||||||
|
# Part of Gerrit Code Review (https://www.gerritcodereview.com/)
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
#
|
||||||
|
|
||||||
|
unset GREP_OPTIONS
|
||||||
|
|
||||||
|
CHANGE_ID_AFTER="Bug|Depends-On|Issue|Test|Feature|Fixes|Fixed"
|
||||||
|
MSG="$1"
|
||||||
|
|
||||||
|
# Check for, and add if missing, a unique Change-Id
|
||||||
|
#
|
||||||
|
add_ChangeId() {
|
||||||
|
clean_message=`sed -e '
|
||||||
|
/^diff --git .*/{
|
||||||
|
s///
|
||||||
|
q
|
||||||
|
}
|
||||||
|
/^Signed-off-by:/d
|
||||||
|
/^#/d
|
||||||
|
' "$MSG" | git stripspace`
|
||||||
|
if test -z "$clean_message"
|
||||||
|
then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Do not add Change-Id to temp commits
|
||||||
|
if echo "$clean_message" | head -1 | grep -q '^\(fixup\|squash\)!'
|
||||||
|
then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
if test "false" = "`git config --bool --get gerrit.createChangeId`"
|
||||||
|
then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Does Change-Id: already exist? if so, exit (no change).
|
||||||
|
if grep -i '^Change-Id:' "$MSG" >/dev/null
|
||||||
|
then
|
||||||
|
return
|
||||||
|
fi
|
||||||
|
|
||||||
|
id=`_gen_ChangeId`
|
||||||
|
T="$MSG.tmp.$$"
|
||||||
|
AWK=awk
|
||||||
|
if [ -x /usr/xpg4/bin/awk ]; then
|
||||||
|
# Solaris AWK is just too broken
|
||||||
|
AWK=/usr/xpg4/bin/awk
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Get core.commentChar from git config or use default symbol
|
||||||
|
commentChar=`git config --get core.commentChar`
|
||||||
|
commentChar=${commentChar:-#}
|
||||||
|
|
||||||
|
# How this works:
|
||||||
|
# - parse the commit message as (textLine+ blankLine*)*
|
||||||
|
# - assume textLine+ to be a footer until proven otherwise
|
||||||
|
# - exception: the first block is not footer (as it is the title)
|
||||||
|
# - read textLine+ into a variable
|
||||||
|
# - then count blankLines
|
||||||
|
# - once the next textLine appears, print textLine+ blankLine* as these
|
||||||
|
# aren't footer
|
||||||
|
# - in END, the last textLine+ block is available for footer parsing
|
||||||
|
$AWK '
|
||||||
|
BEGIN {
|
||||||
|
# while we start with the assumption that textLine+
|
||||||
|
# is a footer, the first block is not.
|
||||||
|
isFooter = 0
|
||||||
|
footerComment = 0
|
||||||
|
blankLines = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
# Skip lines starting with commentChar without any spaces before it.
|
||||||
|
/^'"$commentChar"'/ { next }
|
||||||
|
|
||||||
|
# Skip the line starting with the diff command and everything after it,
|
||||||
|
# up to the end of the file, assuming it is only patch data.
|
||||||
|
# If more than one line before the diff was empty, strip all but one.
|
||||||
|
/^diff --git / {
|
||||||
|
blankLines = 0
|
||||||
|
while (getline) { }
|
||||||
|
next
|
||||||
|
}
|
||||||
|
|
||||||
|
# Count blank lines outside footer comments
|
||||||
|
/^$/ && (footerComment == 0) {
|
||||||
|
blankLines++
|
||||||
|
next
|
||||||
|
}
|
||||||
|
|
||||||
|
# Catch footer comment
|
||||||
|
/^\[[a-zA-Z0-9-]+:/ && (isFooter == 1) {
|
||||||
|
footerComment = 1
|
||||||
|
}
|
||||||
|
|
||||||
|
/]$/ && (footerComment == 1) {
|
||||||
|
footerComment = 2
|
||||||
|
}
|
||||||
|
|
||||||
|
# We have a non-blank line after blank lines. Handle this.
|
||||||
|
(blankLines > 0) {
|
||||||
|
print lines
|
||||||
|
for (i = 0; i < blankLines; i++) {
|
||||||
|
print ""
|
||||||
|
}
|
||||||
|
|
||||||
|
lines = ""
|
||||||
|
blankLines = 0
|
||||||
|
isFooter = 1
|
||||||
|
footerComment = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
# Detect that the current block is not the footer
|
||||||
|
(footerComment == 0) && (!/^\[?[a-zA-Z0-9-]+:/ || /^[a-zA-Z0-9-]+:\/\//) {
|
||||||
|
isFooter = 0
|
||||||
|
}
|
||||||
|
|
||||||
|
{
|
||||||
|
# We need this information about the current last comment line
|
||||||
|
if (footerComment == 2) {
|
||||||
|
footerComment = 0
|
||||||
|
}
|
||||||
|
if (lines != "") {
|
||||||
|
lines = lines "\n";
|
||||||
|
}
|
||||||
|
lines = lines $0
|
||||||
|
}
|
||||||
|
|
||||||
|
# Footer handling:
|
||||||
|
# If the last block is considered a footer, splice in the Change-Id at the
|
||||||
|
# right place.
|
||||||
|
# Look for the right place to inject Change-Id by considering
|
||||||
|
# CHANGE_ID_AFTER. Keys listed in it (case insensitive) come first,
|
||||||
|
# then Change-Id, then everything else (eg. Signed-off-by:).
|
||||||
|
#
|
||||||
|
# Otherwise just print the last block, a new line and the Change-Id as a
|
||||||
|
# block of its own.
|
||||||
|
END {
|
||||||
|
unprinted = 1
|
||||||
|
if (isFooter == 0) {
|
||||||
|
print lines "\n"
|
||||||
|
lines = ""
|
||||||
|
}
|
||||||
|
changeIdAfter = "^(" tolower("'"$CHANGE_ID_AFTER"'") "):"
|
||||||
|
numlines = split(lines, footer, "\n")
|
||||||
|
for (line = 1; line <= numlines; line++) {
|
||||||
|
if (unprinted && match(tolower(footer[line]), changeIdAfter) != 1) {
|
||||||
|
unprinted = 0
|
||||||
|
print "Change-Id: I'"$id"'"
|
||||||
|
}
|
||||||
|
print footer[line]
|
||||||
|
}
|
||||||
|
if (unprinted) {
|
||||||
|
print "Change-Id: I'"$id"'"
|
||||||
|
}
|
||||||
|
}' "$MSG" > "$T" && mv "$T" "$MSG" || rm -f "$T"
|
||||||
|
}
|
||||||
|
_gen_ChangeIdInput() {
|
||||||
|
echo "tree `git write-tree`"
|
||||||
|
if parent=`git rev-parse "HEAD^0" 2>/dev/null`
|
||||||
|
then
|
||||||
|
echo "parent $parent"
|
||||||
|
fi
|
||||||
|
echo "author `git var GIT_AUTHOR_IDENT`"
|
||||||
|
echo "committer `git var GIT_COMMITTER_IDENT`"
|
||||||
|
echo
|
||||||
|
printf '%s' "$clean_message"
|
||||||
|
}
|
||||||
|
_gen_ChangeId() {
|
||||||
|
_gen_ChangeIdInput |
|
||||||
|
git hash-object -t commit --stdin
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
add_ChangeId
|
59
hooks/pre-auto-gc
Executable file
59
hooks/pre-auto-gc
Executable file
@ -0,0 +1,59 @@
|
|||||||
|
#!/bin/sh
|
||||||
|
#
|
||||||
|
# An example hook script to verify if you are on battery, in case you
|
||||||
|
# are running Windows, Linux or OS X. Called by git-gc --auto with no
|
||||||
|
# arguments. The hook should exit with non-zero status after issuing an
|
||||||
|
# appropriate message if it wants to stop the auto repacking.
|
||||||
|
|
||||||
|
# This program is free software; you can redistribute it and/or modify
|
||||||
|
# it under the terms of the GNU General Public License as published by
|
||||||
|
# the Free Software Foundation; either version 2 of the License, or
|
||||||
|
# (at your option) any later version.
|
||||||
|
#
|
||||||
|
# This program is distributed in the hope that it will be useful,
|
||||||
|
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||||
|
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||||
|
# GNU General Public License for more details.
|
||||||
|
#
|
||||||
|
# You should have received a copy of the GNU General Public License
|
||||||
|
# along with this program; if not, write to the Free Software
|
||||||
|
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
|
||||||
|
|
||||||
|
if uname -s | grep -q "_NT-"
|
||||||
|
then
|
||||||
|
if test -x $SYSTEMROOT/System32/Wbem/wmic
|
||||||
|
then
|
||||||
|
STATUS=$(wmic path win32_battery get batterystatus /format:list | tr -d '\r\n')
|
||||||
|
[ "$STATUS" = "BatteryStatus=2" ] && exit 0 || exit 1
|
||||||
|
fi
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
if test -x /sbin/on_ac_power && (/sbin/on_ac_power;test $? -ne 1)
|
||||||
|
then
|
||||||
|
exit 0
|
||||||
|
elif test "$(cat /sys/class/power_supply/AC/online 2>/dev/null)" = 1
|
||||||
|
then
|
||||||
|
exit 0
|
||||||
|
elif grep -q 'on-line' /proc/acpi/ac_adapter/AC/state 2>/dev/null
|
||||||
|
then
|
||||||
|
exit 0
|
||||||
|
elif grep -q '0x01$' /proc/apm 2>/dev/null
|
||||||
|
then
|
||||||
|
exit 0
|
||||||
|
elif grep -q "AC Power \+: 1" /proc/pmu/info 2>/dev/null
|
||||||
|
then
|
||||||
|
exit 0
|
||||||
|
elif test -x /usr/bin/pmset && /usr/bin/pmset -g batt |
|
||||||
|
grep -q "drawing from 'AC Power'"
|
||||||
|
then
|
||||||
|
exit 0
|
||||||
|
elif test -d /sys/bus/acpi/drivers/battery && test 0 = \
|
||||||
|
"$(find /sys/bus/acpi/drivers/battery/ -type l | wc -l)";
|
||||||
|
then
|
||||||
|
# No battery exists.
|
||||||
|
exit 0
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "Auto packing deferred; not on AC"
|
||||||
|
exit 1
|
422
import_ext.py
422
import_ext.py
@ -1,422 +0,0 @@
|
|||||||
#
|
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import os
|
|
||||||
import random
|
|
||||||
import stat
|
|
||||||
import sys
|
|
||||||
import urllib2
|
|
||||||
import StringIO
|
|
||||||
|
|
||||||
from error import GitError, ImportError
|
|
||||||
from git_command import GitCommand
|
|
||||||
|
|
||||||
class ImportExternal(object):
|
|
||||||
"""Imports a single revision from a non-git data source.
|
|
||||||
Suitable for use to import a tar or zip based snapshot.
|
|
||||||
"""
|
|
||||||
def __init__(self):
|
|
||||||
self._marks = 0
|
|
||||||
self._files = {}
|
|
||||||
self._tempref = 'refs/repo-external/import'
|
|
||||||
|
|
||||||
self._urls = []
|
|
||||||
self._remap = []
|
|
||||||
self.parent = None
|
|
||||||
self._user_name = 'Upstream'
|
|
||||||
self._user_email = 'upstream-import@none'
|
|
||||||
self._user_when = 1000000
|
|
||||||
|
|
||||||
self.commit = None
|
|
||||||
|
|
||||||
def Clone(self):
|
|
||||||
r = self.__class__()
|
|
||||||
|
|
||||||
r.project = self.project
|
|
||||||
for u in self._urls:
|
|
||||||
r._urls.append(u)
|
|
||||||
for p in self._remap:
|
|
||||||
r._remap.append(_PathMap(r, p._old, p._new))
|
|
||||||
|
|
||||||
return r
|
|
||||||
|
|
||||||
def SetProject(self, project):
|
|
||||||
self.project = project
|
|
||||||
|
|
||||||
def SetVersion(self, version):
|
|
||||||
self.version = version
|
|
||||||
|
|
||||||
def AddUrl(self, url):
|
|
||||||
self._urls.append(url)
|
|
||||||
|
|
||||||
def SetParent(self, commit_hash):
|
|
||||||
self.parent = commit_hash
|
|
||||||
|
|
||||||
def SetCommit(self, commit_hash):
|
|
||||||
self.commit = commit_hash
|
|
||||||
|
|
||||||
def RemapPath(self, old, new, replace_version=True):
|
|
||||||
self._remap.append(_PathMap(self, old, new))
|
|
||||||
|
|
||||||
@property
|
|
||||||
def TagName(self):
|
|
||||||
v = ''
|
|
||||||
for c in self.version:
|
|
||||||
if c >= '0' and c <= '9':
|
|
||||||
v += c
|
|
||||||
elif c >= 'A' and c <= 'Z':
|
|
||||||
v += c
|
|
||||||
elif c >= 'a' and c <= 'z':
|
|
||||||
v += c
|
|
||||||
elif c in ('-', '_', '.', '/', '+', '@'):
|
|
||||||
v += c
|
|
||||||
return 'upstream/%s' % v
|
|
||||||
|
|
||||||
@property
|
|
||||||
def PackageName(self):
|
|
||||||
n = self.project.name
|
|
||||||
if n.startswith('platform/'):
|
|
||||||
# This was not my finest moment...
|
|
||||||
#
|
|
||||||
n = n[len('platform/'):]
|
|
||||||
return n
|
|
||||||
|
|
||||||
def Import(self):
|
|
||||||
self._need_graft = False
|
|
||||||
if self.parent:
|
|
||||||
try:
|
|
||||||
self.project.bare_git.cat_file('-e', self.parent)
|
|
||||||
except GitError:
|
|
||||||
self._need_graft = True
|
|
||||||
|
|
||||||
gfi = GitCommand(self.project,
|
|
||||||
['fast-import', '--force', '--quiet'],
|
|
||||||
bare = True,
|
|
||||||
provide_stdin = True)
|
|
||||||
try:
|
|
||||||
self._out = gfi.stdin
|
|
||||||
|
|
||||||
try:
|
|
||||||
self._UnpackFiles()
|
|
||||||
self._MakeCommit()
|
|
||||||
self._out.flush()
|
|
||||||
finally:
|
|
||||||
rc = gfi.Wait()
|
|
||||||
if rc != 0:
|
|
||||||
raise ImportError('fast-import failed')
|
|
||||||
|
|
||||||
if self._need_graft:
|
|
||||||
id = self._GraftCommit()
|
|
||||||
else:
|
|
||||||
id = self.project.bare_git.rev_parse('%s^0' % self._tempref)
|
|
||||||
|
|
||||||
if self.commit and self.commit != id:
|
|
||||||
raise ImportError('checksum mismatch: %s expected,'
|
|
||||||
' %s imported' % (self.commit, id))
|
|
||||||
|
|
||||||
self._MakeTag(id)
|
|
||||||
return id
|
|
||||||
finally:
|
|
||||||
try:
|
|
||||||
self.project.bare_git.DeleteRef(self._tempref)
|
|
||||||
except GitError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
def _PickUrl(self, failed):
|
|
||||||
u = map(lambda x: x.replace('%version%', self.version), self._urls)
|
|
||||||
for f in failed:
|
|
||||||
if f in u:
|
|
||||||
u.remove(f)
|
|
||||||
if len(u) == 0:
|
|
||||||
return None
|
|
||||||
return random.choice(u)
|
|
||||||
|
|
||||||
def _OpenUrl(self):
|
|
||||||
failed = {}
|
|
||||||
while True:
|
|
||||||
url = self._PickUrl(failed.keys())
|
|
||||||
if url is None:
|
|
||||||
why = 'Cannot download %s' % self.project.name
|
|
||||||
|
|
||||||
if failed:
|
|
||||||
why += ': one or more mirrors are down\n'
|
|
||||||
bad_urls = list(failed.keys())
|
|
||||||
bad_urls.sort()
|
|
||||||
for url in bad_urls:
|
|
||||||
why += ' %s: %s\n' % (url, failed[url])
|
|
||||||
else:
|
|
||||||
why += ': no mirror URLs'
|
|
||||||
raise ImportError(why)
|
|
||||||
|
|
||||||
print >>sys.stderr, "Getting %s ..." % url
|
|
||||||
try:
|
|
||||||
return urllib2.urlopen(url), url
|
|
||||||
except urllib2.HTTPError, e:
|
|
||||||
failed[url] = e.code
|
|
||||||
except urllib2.URLError, e:
|
|
||||||
failed[url] = e.reason[1]
|
|
||||||
except OSError, e:
|
|
||||||
failed[url] = e.strerror
|
|
||||||
|
|
||||||
def _UnpackFiles(self):
|
|
||||||
raise NotImplementedError
|
|
||||||
|
|
||||||
def _NextMark(self):
|
|
||||||
self._marks += 1
|
|
||||||
return self._marks
|
|
||||||
|
|
||||||
def _UnpackOneFile(self, mode, size, name, fd):
|
|
||||||
if stat.S_ISDIR(mode): # directory
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
mode = self._CleanMode(mode, name)
|
|
||||||
|
|
||||||
old_name = name
|
|
||||||
name = self._CleanName(name)
|
|
||||||
|
|
||||||
if stat.S_ISLNK(mode) and self._remap:
|
|
||||||
# The link is relative to the old_name, and may need to
|
|
||||||
# be rewritten according to our remap rules if it goes
|
|
||||||
# up high enough in the tree structure.
|
|
||||||
#
|
|
||||||
dest = self._RewriteLink(fd.read(size), old_name, name)
|
|
||||||
fd = StringIO.StringIO(dest)
|
|
||||||
size = len(dest)
|
|
||||||
|
|
||||||
fi = _File(mode, name, self._NextMark())
|
|
||||||
|
|
||||||
self._out.write('blob\n')
|
|
||||||
self._out.write('mark :%d\n' % fi.mark)
|
|
||||||
self._out.write('data %d\n' % size)
|
|
||||||
while size > 0:
|
|
||||||
n = min(2048, size)
|
|
||||||
self._out.write(fd.read(n))
|
|
||||||
size -= n
|
|
||||||
self._out.write('\n')
|
|
||||||
self._files[fi.name] = fi
|
|
||||||
|
|
||||||
def _SetFileMode(self, name, mode):
|
|
||||||
if not stat.S_ISDIR(mode):
|
|
||||||
mode = self._CleanMode(mode, name)
|
|
||||||
name = self._CleanName(name)
|
|
||||||
try:
|
|
||||||
fi = self._files[name]
|
|
||||||
except KeyError:
|
|
||||||
raise ImportError('file %s was not unpacked' % name)
|
|
||||||
fi.mode = mode
|
|
||||||
|
|
||||||
def _RewriteLink(self, dest, relto_old, relto_new):
|
|
||||||
# Drop the last components of the symlink itself
|
|
||||||
# as the dest is relative to the directory its in.
|
|
||||||
#
|
|
||||||
relto_old = _TrimPath(relto_old)
|
|
||||||
relto_new = _TrimPath(relto_new)
|
|
||||||
|
|
||||||
# Resolve the link to be absolute from the top of
|
|
||||||
# the archive, so we can remap its destination.
|
|
||||||
#
|
|
||||||
while dest.find('/./') >= 0 or dest.find('//') >= 0:
|
|
||||||
dest = dest.replace('/./', '/')
|
|
||||||
dest = dest.replace('//', '/')
|
|
||||||
|
|
||||||
if dest.startswith('../') or dest.find('/../') > 0:
|
|
||||||
dest = _FoldPath('%s/%s' % (relto_old, dest))
|
|
||||||
|
|
||||||
for pm in self._remap:
|
|
||||||
if pm.Matches(dest):
|
|
||||||
dest = pm.Apply(dest)
|
|
||||||
break
|
|
||||||
|
|
||||||
dest, relto_new = _StripCommonPrefix(dest, relto_new)
|
|
||||||
while relto_new:
|
|
||||||
i = relto_new.find('/')
|
|
||||||
if i > 0:
|
|
||||||
relto_new = relto_new[i + 1:]
|
|
||||||
else:
|
|
||||||
relto_new = ''
|
|
||||||
dest = '../' + dest
|
|
||||||
return dest
|
|
||||||
|
|
||||||
def _CleanMode(self, mode, name):
|
|
||||||
if stat.S_ISREG(mode): # regular file
|
|
||||||
if (mode & 0111) == 0:
|
|
||||||
return 0644
|
|
||||||
else:
|
|
||||||
return 0755
|
|
||||||
elif stat.S_ISLNK(mode): # symlink
|
|
||||||
return stat.S_IFLNK
|
|
||||||
else:
|
|
||||||
raise ImportError('invalid mode %o in %s' % (mode, name))
|
|
||||||
|
|
||||||
def _CleanName(self, name):
|
|
||||||
old_name = name
|
|
||||||
for pm in self._remap:
|
|
||||||
if pm.Matches(name):
|
|
||||||
name = pm.Apply(name)
|
|
||||||
break
|
|
||||||
while name.startswith('/'):
|
|
||||||
name = name[1:]
|
|
||||||
if not name:
|
|
||||||
raise ImportError('path %s is empty after remap' % old_name)
|
|
||||||
if name.find('/./') >= 0 or name.find('/../') >= 0:
|
|
||||||
raise ImportError('path %s contains relative parts' % name)
|
|
||||||
return name
|
|
||||||
|
|
||||||
def _MakeCommit(self):
|
|
||||||
msg = '%s %s\n' % (self.PackageName, self.version)
|
|
||||||
|
|
||||||
self._out.write('commit %s\n' % self._tempref)
|
|
||||||
self._out.write('committer %s <%s> %d +0000\n' % (
|
|
||||||
self._user_name,
|
|
||||||
self._user_email,
|
|
||||||
self._user_when))
|
|
||||||
self._out.write('data %d\n' % len(msg))
|
|
||||||
self._out.write(msg)
|
|
||||||
self._out.write('\n')
|
|
||||||
if self.parent and not self._need_graft:
|
|
||||||
self._out.write('from %s^0\n' % self.parent)
|
|
||||||
self._out.write('deleteall\n')
|
|
||||||
|
|
||||||
for f in self._files.values():
|
|
||||||
self._out.write('M %o :%d %s\n' % (f.mode, f.mark, f.name))
|
|
||||||
self._out.write('\n')
|
|
||||||
|
|
||||||
def _GraftCommit(self):
|
|
||||||
raw = self.project.bare_git.cat_file('commit', self._tempref)
|
|
||||||
raw = raw.split("\n")
|
|
||||||
while raw[1].startswith('parent '):
|
|
||||||
del raw[1]
|
|
||||||
raw.insert(1, 'parent %s' % self.parent)
|
|
||||||
id = self._WriteObject('commit', "\n".join(raw))
|
|
||||||
|
|
||||||
graft_file = os.path.join(self.project.gitdir, 'info/grafts')
|
|
||||||
if os.path.exists(graft_file):
|
|
||||||
graft_list = open(graft_file, 'rb').read().split("\n")
|
|
||||||
if graft_list and graft_list[-1] == '':
|
|
||||||
del graft_list[-1]
|
|
||||||
else:
|
|
||||||
graft_list = []
|
|
||||||
|
|
||||||
exists = False
|
|
||||||
for line in graft_list:
|
|
||||||
if line == id:
|
|
||||||
exists = True
|
|
||||||
break
|
|
||||||
|
|
||||||
if not exists:
|
|
||||||
graft_list.append(id)
|
|
||||||
graft_list.append('')
|
|
||||||
fd = open(graft_file, 'wb')
|
|
||||||
fd.write("\n".join(graft_list))
|
|
||||||
fd.close()
|
|
||||||
|
|
||||||
return id
|
|
||||||
|
|
||||||
def _MakeTag(self, id):
|
|
||||||
name = self.TagName
|
|
||||||
|
|
||||||
raw = []
|
|
||||||
raw.append('object %s' % id)
|
|
||||||
raw.append('type commit')
|
|
||||||
raw.append('tag %s' % name)
|
|
||||||
raw.append('tagger %s <%s> %d +0000' % (
|
|
||||||
self._user_name,
|
|
||||||
self._user_email,
|
|
||||||
self._user_when))
|
|
||||||
raw.append('')
|
|
||||||
raw.append('%s %s\n' % (self.PackageName, self.version))
|
|
||||||
|
|
||||||
tagid = self._WriteObject('tag', "\n".join(raw))
|
|
||||||
self.project.bare_git.UpdateRef('refs/tags/%s' % name, tagid)
|
|
||||||
|
|
||||||
def _WriteObject(self, type, data):
|
|
||||||
wo = GitCommand(self.project,
|
|
||||||
['hash-object', '-t', type, '-w', '--stdin'],
|
|
||||||
bare = True,
|
|
||||||
provide_stdin = True,
|
|
||||||
capture_stdout = True,
|
|
||||||
capture_stderr = True)
|
|
||||||
wo.stdin.write(data)
|
|
||||||
if wo.Wait() != 0:
|
|
||||||
raise GitError('cannot create %s from (%s)' % (type, data))
|
|
||||||
return wo.stdout[:-1]
|
|
||||||
|
|
||||||
|
|
||||||
def _TrimPath(path):
|
|
||||||
i = path.rfind('/')
|
|
||||||
if i > 0:
|
|
||||||
path = path[0:i]
|
|
||||||
return ''
|
|
||||||
|
|
||||||
def _StripCommonPrefix(a, b):
|
|
||||||
while True:
|
|
||||||
ai = a.find('/')
|
|
||||||
bi = b.find('/')
|
|
||||||
if ai > 0 and bi > 0 and a[0:ai] == b[0:bi]:
|
|
||||||
a = a[ai + 1:]
|
|
||||||
b = b[bi + 1:]
|
|
||||||
else:
|
|
||||||
break
|
|
||||||
return a, b
|
|
||||||
|
|
||||||
def _FoldPath(path):
|
|
||||||
while True:
|
|
||||||
if path.startswith('../'):
|
|
||||||
return path
|
|
||||||
|
|
||||||
i = path.find('/../')
|
|
||||||
if i <= 0:
|
|
||||||
if path.startswith('/'):
|
|
||||||
return path[1:]
|
|
||||||
return path
|
|
||||||
|
|
||||||
lhs = path[0:i]
|
|
||||||
rhs = path[i + 4:]
|
|
||||||
|
|
||||||
i = lhs.rfind('/')
|
|
||||||
if i > 0:
|
|
||||||
path = lhs[0:i + 1] + rhs
|
|
||||||
else:
|
|
||||||
path = rhs
|
|
||||||
|
|
||||||
class _File(object):
|
|
||||||
def __init__(self, mode, name, mark):
|
|
||||||
self.mode = mode
|
|
||||||
self.name = name
|
|
||||||
self.mark = mark
|
|
||||||
|
|
||||||
|
|
||||||
class _PathMap(object):
|
|
||||||
def __init__(self, imp, old, new):
|
|
||||||
self._imp = imp
|
|
||||||
self._old = old
|
|
||||||
self._new = new
|
|
||||||
|
|
||||||
def _r(self, p):
|
|
||||||
return p.replace('%version%', self._imp.version)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def old(self):
|
|
||||||
return self._r(self._old)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def new(self):
|
|
||||||
return self._r(self._new)
|
|
||||||
|
|
||||||
def Matches(self, name):
|
|
||||||
return name.startswith(self.old)
|
|
||||||
|
|
||||||
def Apply(self, name):
|
|
||||||
return self.new + name[len(self.old):]
|
|
206
import_tar.py
206
import_tar.py
@ -1,206 +0,0 @@
|
|||||||
#
|
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import bz2
|
|
||||||
import stat
|
|
||||||
import tarfile
|
|
||||||
import zlib
|
|
||||||
import StringIO
|
|
||||||
|
|
||||||
from import_ext import ImportExternal
|
|
||||||
from error import ImportError
|
|
||||||
|
|
||||||
class ImportTar(ImportExternal):
|
|
||||||
"""Streams a (optionally compressed) tar file from the network
|
|
||||||
directly into a Project's Git repository.
|
|
||||||
"""
|
|
||||||
@classmethod
|
|
||||||
def CanAccept(cls, url):
|
|
||||||
"""Can this importer read and unpack the data stored at url?
|
|
||||||
"""
|
|
||||||
if url.endswith('.tar.gz') or url.endswith('.tgz'):
|
|
||||||
return True
|
|
||||||
if url.endswith('.tar.bz2'):
|
|
||||||
return True
|
|
||||||
if url.endswith('.tar'):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def _UnpackFiles(self):
|
|
||||||
url_fd, url = self._OpenUrl()
|
|
||||||
try:
|
|
||||||
if url.endswith('.tar.gz') or url.endswith('.tgz'):
|
|
||||||
tar_fd = _Gzip(url_fd)
|
|
||||||
elif url.endswith('.tar.bz2'):
|
|
||||||
tar_fd = _Bzip2(url_fd)
|
|
||||||
elif url.endswith('.tar'):
|
|
||||||
tar_fd = _Raw(url_fd)
|
|
||||||
else:
|
|
||||||
raise ImportError('non-tar file extension: %s' % url)
|
|
||||||
|
|
||||||
try:
|
|
||||||
tar = tarfile.TarFile(name = url,
|
|
||||||
mode = 'r',
|
|
||||||
fileobj = tar_fd)
|
|
||||||
try:
|
|
||||||
for entry in tar:
|
|
||||||
mode = entry.mode
|
|
||||||
|
|
||||||
if (mode & 0170000) == 0:
|
|
||||||
if entry.isdir():
|
|
||||||
mode |= stat.S_IFDIR
|
|
||||||
elif entry.isfile() or entry.islnk(): # hard links as files
|
|
||||||
mode |= stat.S_IFREG
|
|
||||||
elif entry.issym():
|
|
||||||
mode |= stat.S_IFLNK
|
|
||||||
|
|
||||||
if stat.S_ISLNK(mode): # symlink
|
|
||||||
data_fd = StringIO.StringIO(entry.linkname)
|
|
||||||
data_sz = len(entry.linkname)
|
|
||||||
elif stat.S_ISDIR(mode): # directory
|
|
||||||
data_fd = StringIO.StringIO('')
|
|
||||||
data_sz = 0
|
|
||||||
else:
|
|
||||||
data_fd = tar.extractfile(entry)
|
|
||||||
data_sz = entry.size
|
|
||||||
|
|
||||||
self._UnpackOneFile(mode, data_sz, entry.name, data_fd)
|
|
||||||
finally:
|
|
||||||
tar.close()
|
|
||||||
finally:
|
|
||||||
tar_fd.close()
|
|
||||||
finally:
|
|
||||||
url_fd.close()
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
class _DecompressStream(object):
|
|
||||||
"""file like object to decompress a tar stream
|
|
||||||
"""
|
|
||||||
def __init__(self, fd):
|
|
||||||
self._fd = fd
|
|
||||||
self._pos = 0
|
|
||||||
self._buf = None
|
|
||||||
|
|
||||||
def tell(self):
|
|
||||||
return self._pos
|
|
||||||
|
|
||||||
def seek(self, offset):
|
|
||||||
d = offset - self._pos
|
|
||||||
if d > 0:
|
|
||||||
self.read(d)
|
|
||||||
elif d == 0:
|
|
||||||
pass
|
|
||||||
else:
|
|
||||||
raise NotImplementedError, 'seek backwards'
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
self._fd = None
|
|
||||||
|
|
||||||
def read(self, size = -1):
|
|
||||||
if not self._fd:
|
|
||||||
raise EOFError, 'Reached EOF'
|
|
||||||
|
|
||||||
r = []
|
|
||||||
try:
|
|
||||||
if size >= 0:
|
|
||||||
self._ReadChunk(r, size)
|
|
||||||
else:
|
|
||||||
while True:
|
|
||||||
self._ReadChunk(r, 2048)
|
|
||||||
except EOFError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if len(r) == 1:
|
|
||||||
r = r[0]
|
|
||||||
else:
|
|
||||||
r = ''.join(r)
|
|
||||||
self._pos += len(r)
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _ReadChunk(self, r, size):
|
|
||||||
b = self._buf
|
|
||||||
try:
|
|
||||||
while size > 0:
|
|
||||||
if b is None or len(b) == 0:
|
|
||||||
b = self._Decompress(self._fd.read(2048))
|
|
||||||
continue
|
|
||||||
|
|
||||||
use = min(size, len(b))
|
|
||||||
r.append(b[:use])
|
|
||||||
b = b[use:]
|
|
||||||
size -= use
|
|
||||||
finally:
|
|
||||||
self._buf = b
|
|
||||||
|
|
||||||
def _Decompress(self, b):
|
|
||||||
raise NotImplementedError, '_Decompress'
|
|
||||||
|
|
||||||
|
|
||||||
class _Raw(_DecompressStream):
|
|
||||||
"""file like object for an uncompressed stream
|
|
||||||
"""
|
|
||||||
def __init__(self, fd):
|
|
||||||
_DecompressStream.__init__(self, fd)
|
|
||||||
|
|
||||||
def _Decompress(self, b):
|
|
||||||
return b
|
|
||||||
|
|
||||||
|
|
||||||
class _Bzip2(_DecompressStream):
|
|
||||||
"""file like object to decompress a .bz2 stream
|
|
||||||
"""
|
|
||||||
def __init__(self, fd):
|
|
||||||
_DecompressStream.__init__(self, fd)
|
|
||||||
self._bz = bz2.BZ2Decompressor()
|
|
||||||
|
|
||||||
def _Decompress(self, b):
|
|
||||||
return self._bz.decompress(b)
|
|
||||||
|
|
||||||
|
|
||||||
_FHCRC, _FEXTRA, _FNAME, _FCOMMENT = 2, 4, 8, 16
|
|
||||||
class _Gzip(_DecompressStream):
|
|
||||||
"""file like object to decompress a .gz stream
|
|
||||||
"""
|
|
||||||
def __init__(self, fd):
|
|
||||||
_DecompressStream.__init__(self, fd)
|
|
||||||
self._z = zlib.decompressobj(-zlib.MAX_WBITS)
|
|
||||||
|
|
||||||
magic = fd.read(2)
|
|
||||||
if magic != '\037\213':
|
|
||||||
raise IOError, 'Not a gzipped file'
|
|
||||||
|
|
||||||
method = ord(fd.read(1))
|
|
||||||
if method != 8:
|
|
||||||
raise IOError, 'Unknown compression method'
|
|
||||||
|
|
||||||
flag = ord(fd.read(1))
|
|
||||||
fd.read(6)
|
|
||||||
|
|
||||||
if flag & _FEXTRA:
|
|
||||||
xlen = ord(fd.read(1))
|
|
||||||
xlen += 256 * ord(fd.read(1))
|
|
||||||
fd.read(xlen)
|
|
||||||
if flag & _FNAME:
|
|
||||||
while fd.read(1) != '\0':
|
|
||||||
pass
|
|
||||||
if flag & _FCOMMENT:
|
|
||||||
while fd.read(1) != '\0':
|
|
||||||
pass
|
|
||||||
if flag & _FHCRC:
|
|
||||||
fd.read(2)
|
|
||||||
|
|
||||||
def _Decompress(self, b):
|
|
||||||
return self._z.decompress(b)
|
|
345
import_zip.py
345
import_zip.py
@ -1,345 +0,0 @@
|
|||||||
#
|
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import stat
|
|
||||||
import struct
|
|
||||||
import zlib
|
|
||||||
import cStringIO
|
|
||||||
|
|
||||||
from import_ext import ImportExternal
|
|
||||||
from error import ImportError
|
|
||||||
|
|
||||||
class ImportZip(ImportExternal):
|
|
||||||
"""Streams a zip file from the network directly into a Project's
|
|
||||||
Git repository.
|
|
||||||
"""
|
|
||||||
@classmethod
|
|
||||||
def CanAccept(cls, url):
|
|
||||||
"""Can this importer read and unpack the data stored at url?
|
|
||||||
"""
|
|
||||||
if url.endswith('.zip') or url.endswith('.jar'):
|
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def _UnpackFiles(self):
|
|
||||||
url_fd, url = self._OpenUrl()
|
|
||||||
try:
|
|
||||||
if not self.__class__.CanAccept(url):
|
|
||||||
raise ImportError('non-zip file extension: %s' % url)
|
|
||||||
|
|
||||||
zip = _ZipFile(url_fd)
|
|
||||||
for entry in zip.FileRecords():
|
|
||||||
data = zip.Open(entry).read()
|
|
||||||
sz = len(data)
|
|
||||||
|
|
||||||
if data and _SafeCRLF(data):
|
|
||||||
data = data.replace('\r\n', '\n')
|
|
||||||
sz = len(data)
|
|
||||||
|
|
||||||
fd = cStringIO.StringIO(data)
|
|
||||||
self._UnpackOneFile(entry.mode, sz, entry.name, fd)
|
|
||||||
zip.Close(entry)
|
|
||||||
|
|
||||||
for entry in zip.CentralDirectory():
|
|
||||||
self._SetFileMode(entry.name, entry.mode)
|
|
||||||
|
|
||||||
zip.CheckTail()
|
|
||||||
finally:
|
|
||||||
url_fd.close()
|
|
||||||
|
|
||||||
|
|
||||||
def _SafeCRLF(data):
|
|
||||||
"""Is it reasonably safe to perform a CRLF->LF conversion?
|
|
||||||
|
|
||||||
If the stream contains a NUL byte it is likely binary,
|
|
||||||
and thus a CRLF->LF conversion may damage the stream.
|
|
||||||
|
|
||||||
If the only NUL is in the last position of the stream,
|
|
||||||
but it otherwise can do a CRLF<->LF conversion we do
|
|
||||||
the CRLF conversion anyway. At least one source ZIP
|
|
||||||
file has this structure in its source code.
|
|
||||||
|
|
||||||
If every occurrance of a CR and LF is paired up as a
|
|
||||||
CRLF pair then the conversion is safely bi-directional.
|
|
||||||
s/\r\n/\n/g == s/\n/\r\\n/g can convert between them.
|
|
||||||
"""
|
|
||||||
nul = data.find('\0')
|
|
||||||
if 0 <= nul and nul < (len(data) - 1):
|
|
||||||
return False
|
|
||||||
|
|
||||||
n_lf = 0
|
|
||||||
last = 0
|
|
||||||
while True:
|
|
||||||
lf = data.find('\n', last)
|
|
||||||
if lf < 0:
|
|
||||||
break
|
|
||||||
if lf == 0 or data[lf - 1] != '\r':
|
|
||||||
return False
|
|
||||||
last = lf + 1
|
|
||||||
n_lf += 1
|
|
||||||
return n_lf > 0
|
|
||||||
|
|
||||||
class _ZipFile(object):
|
|
||||||
"""Streaming iterator to parse a zip file on the fly.
|
|
||||||
"""
|
|
||||||
def __init__(self, fd):
|
|
||||||
self._fd = _UngetStream(fd)
|
|
||||||
|
|
||||||
def FileRecords(self):
|
|
||||||
return _FileIter(self._fd)
|
|
||||||
|
|
||||||
def CentralDirectory(self):
|
|
||||||
return _CentIter(self._fd)
|
|
||||||
|
|
||||||
def CheckTail(self):
|
|
||||||
type_buf = self._fd.read(4)
|
|
||||||
type = struct.unpack('<I', type_buf)[0]
|
|
||||||
if type != 0x06054b50: # end of central directory
|
|
||||||
raise ImportError('zip record %x unsupported' % type)
|
|
||||||
|
|
||||||
def Open(self, entry):
|
|
||||||
if entry.is_compressed:
|
|
||||||
return _InflateStream(self._fd)
|
|
||||||
else:
|
|
||||||
if entry.has_trailer:
|
|
||||||
raise ImportError('unable to extract streamed zip')
|
|
||||||
return _FixedLengthStream(self._fd, entry.uncompressed_size)
|
|
||||||
|
|
||||||
def Close(self, entry):
|
|
||||||
if entry.has_trailer:
|
|
||||||
type = struct.unpack('<I', self._fd.read(4))[0]
|
|
||||||
if type == 0x08074b50:
|
|
||||||
# Not a formal type marker, but commonly seen in zips
|
|
||||||
# as the data descriptor signature.
|
|
||||||
#
|
|
||||||
struct.unpack('<3I', self._fd.read(12))
|
|
||||||
else:
|
|
||||||
# No signature for the data descriptor, so read the
|
|
||||||
# remaining fields out of the stream
|
|
||||||
#
|
|
||||||
self._fd.read(8)
|
|
||||||
|
|
||||||
|
|
||||||
class _FileIter(object):
|
|
||||||
def __init__(self, fd):
|
|
||||||
self._fd = fd
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def next(self):
|
|
||||||
fd = self._fd
|
|
||||||
|
|
||||||
type_buf = fd.read(4)
|
|
||||||
type = struct.unpack('<I', type_buf)[0]
|
|
||||||
|
|
||||||
if type != 0x04034b50: # local file header
|
|
||||||
fd.unread(type_buf)
|
|
||||||
raise StopIteration()
|
|
||||||
|
|
||||||
rec = _FileHeader(fd.read(26))
|
|
||||||
rec.name = fd.read(rec.name_len)
|
|
||||||
fd.read(rec.extra_len)
|
|
||||||
|
|
||||||
if rec.name.endswith('/'):
|
|
||||||
rec.name = rec.name[:-1]
|
|
||||||
rec.mode = stat.S_IFDIR | 0777
|
|
||||||
return rec
|
|
||||||
|
|
||||||
|
|
||||||
class _FileHeader(object):
|
|
||||||
"""Information about a single file in the archive.
|
|
||||||
0 version needed to extract 2 bytes
|
|
||||||
1 general purpose bit flag 2 bytes
|
|
||||||
2 compression method 2 bytes
|
|
||||||
3 last mod file time 2 bytes
|
|
||||||
4 last mod file date 2 bytes
|
|
||||||
5 crc-32 4 bytes
|
|
||||||
6 compressed size 4 bytes
|
|
||||||
7 uncompressed size 4 bytes
|
|
||||||
8 file name length 2 bytes
|
|
||||||
9 extra field length 2 bytes
|
|
||||||
"""
|
|
||||||
def __init__(self, raw_bin):
|
|
||||||
rec = struct.unpack('<5H3I2H', raw_bin)
|
|
||||||
|
|
||||||
if rec[2] == 8:
|
|
||||||
self.is_compressed = True
|
|
||||||
elif rec[2] == 0:
|
|
||||||
self.is_compressed = False
|
|
||||||
else:
|
|
||||||
raise ImportError('unrecognized compression format')
|
|
||||||
|
|
||||||
if rec[1] & (1 << 3):
|
|
||||||
self.has_trailer = True
|
|
||||||
else:
|
|
||||||
self.has_trailer = False
|
|
||||||
|
|
||||||
self.compressed_size = rec[6]
|
|
||||||
self.uncompressed_size = rec[7]
|
|
||||||
self.name_len = rec[8]
|
|
||||||
self.extra_len = rec[9]
|
|
||||||
self.mode = stat.S_IFREG | 0644
|
|
||||||
|
|
||||||
|
|
||||||
class _CentIter(object):
|
|
||||||
def __init__(self, fd):
|
|
||||||
self._fd = fd
|
|
||||||
|
|
||||||
def __iter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def next(self):
|
|
||||||
fd = self._fd
|
|
||||||
|
|
||||||
type_buf = fd.read(4)
|
|
||||||
type = struct.unpack('<I', type_buf)[0]
|
|
||||||
|
|
||||||
if type != 0x02014b50: # central directory
|
|
||||||
fd.unread(type_buf)
|
|
||||||
raise StopIteration()
|
|
||||||
|
|
||||||
rec = _CentHeader(fd.read(42))
|
|
||||||
rec.name = fd.read(rec.name_len)
|
|
||||||
fd.read(rec.extra_len)
|
|
||||||
fd.read(rec.comment_len)
|
|
||||||
|
|
||||||
if rec.name.endswith('/'):
|
|
||||||
rec.name = rec.name[:-1]
|
|
||||||
rec.mode = stat.S_IFDIR | 0777
|
|
||||||
return rec
|
|
||||||
|
|
||||||
|
|
||||||
class _CentHeader(object):
|
|
||||||
"""Information about a single file in the archive.
|
|
||||||
0 version made by 2 bytes
|
|
||||||
1 version needed to extract 2 bytes
|
|
||||||
2 general purpose bit flag 2 bytes
|
|
||||||
3 compression method 2 bytes
|
|
||||||
4 last mod file time 2 bytes
|
|
||||||
5 last mod file date 2 bytes
|
|
||||||
6 crc-32 4 bytes
|
|
||||||
7 compressed size 4 bytes
|
|
||||||
8 uncompressed size 4 bytes
|
|
||||||
9 file name length 2 bytes
|
|
||||||
10 extra field length 2 bytes
|
|
||||||
11 file comment length 2 bytes
|
|
||||||
12 disk number start 2 bytes
|
|
||||||
13 internal file attributes 2 bytes
|
|
||||||
14 external file attributes 4 bytes
|
|
||||||
15 relative offset of local header 4 bytes
|
|
||||||
"""
|
|
||||||
def __init__(self, raw_bin):
|
|
||||||
rec = struct.unpack('<6H3I5H2I', raw_bin)
|
|
||||||
self.name_len = rec[9]
|
|
||||||
self.extra_len = rec[10]
|
|
||||||
self.comment_len = rec[11]
|
|
||||||
|
|
||||||
if (rec[0] & 0xff00) == 0x0300: # UNIX
|
|
||||||
self.mode = rec[14] >> 16
|
|
||||||
else:
|
|
||||||
self.mode = stat.S_IFREG | 0644
|
|
||||||
|
|
||||||
|
|
||||||
class _UngetStream(object):
|
|
||||||
"""File like object to read and rewind a stream.
|
|
||||||
"""
|
|
||||||
def __init__(self, fd):
|
|
||||||
self._fd = fd
|
|
||||||
self._buf = None
|
|
||||||
|
|
||||||
def read(self, size = -1):
|
|
||||||
r = []
|
|
||||||
try:
|
|
||||||
if size >= 0:
|
|
||||||
self._ReadChunk(r, size)
|
|
||||||
else:
|
|
||||||
while True:
|
|
||||||
self._ReadChunk(r, 2048)
|
|
||||||
except EOFError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
if len(r) == 1:
|
|
||||||
return r[0]
|
|
||||||
return ''.join(r)
|
|
||||||
|
|
||||||
def unread(self, buf):
|
|
||||||
b = self._buf
|
|
||||||
if b is None or len(b) == 0:
|
|
||||||
self._buf = buf
|
|
||||||
else:
|
|
||||||
self._buf = buf + b
|
|
||||||
|
|
||||||
def _ReadChunk(self, r, size):
|
|
||||||
b = self._buf
|
|
||||||
try:
|
|
||||||
while size > 0:
|
|
||||||
if b is None or len(b) == 0:
|
|
||||||
b = self._Inflate(self._fd.read(2048))
|
|
||||||
if not b:
|
|
||||||
raise EOFError()
|
|
||||||
continue
|
|
||||||
|
|
||||||
use = min(size, len(b))
|
|
||||||
r.append(b[:use])
|
|
||||||
b = b[use:]
|
|
||||||
size -= use
|
|
||||||
finally:
|
|
||||||
self._buf = b
|
|
||||||
|
|
||||||
def _Inflate(self, b):
|
|
||||||
return b
|
|
||||||
|
|
||||||
|
|
||||||
class _FixedLengthStream(_UngetStream):
|
|
||||||
"""File like object to read a fixed length stream.
|
|
||||||
"""
|
|
||||||
def __init__(self, fd, have):
|
|
||||||
_UngetStream.__init__(self, fd)
|
|
||||||
self._have = have
|
|
||||||
|
|
||||||
def _Inflate(self, b):
|
|
||||||
n = self._have
|
|
||||||
if n == 0:
|
|
||||||
self._fd.unread(b)
|
|
||||||
return None
|
|
||||||
|
|
||||||
if len(b) > n:
|
|
||||||
self._fd.unread(b[n:])
|
|
||||||
b = b[:n]
|
|
||||||
self._have -= len(b)
|
|
||||||
return b
|
|
||||||
|
|
||||||
|
|
||||||
class _InflateStream(_UngetStream):
|
|
||||||
"""Inflates the stream as it reads input.
|
|
||||||
"""
|
|
||||||
def __init__(self, fd):
|
|
||||||
_UngetStream.__init__(self, fd)
|
|
||||||
self._z = zlib.decompressobj(-zlib.MAX_WBITS)
|
|
||||||
|
|
||||||
def _Inflate(self, b):
|
|
||||||
z = self._z
|
|
||||||
if not z:
|
|
||||||
self._fd.unread(b)
|
|
||||||
return None
|
|
||||||
|
|
||||||
b = z.decompress(b)
|
|
||||||
if z.unconsumed_tail != '':
|
|
||||||
self._fd.unread(z.unconsumed_tail)
|
|
||||||
elif z.unused_data != '':
|
|
||||||
self._fd.unread(z.unused_data)
|
|
||||||
self._z = None
|
|
||||||
return b
|
|
505
main.py
505
main.py
@ -1,4 +1,5 @@
|
|||||||
#!/bin/sh
|
#!/usr/bin/env python
|
||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -14,47 +15,105 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
magic='--calling-python-from-/bin/sh--'
|
"""The repo tool.
|
||||||
"""exec" python -E "$0" "$@" """#$magic"
|
|
||||||
if __name__ == '__main__':
|
|
||||||
import sys
|
|
||||||
if sys.argv[-1] == '#%s' % magic:
|
|
||||||
del sys.argv[-1]
|
|
||||||
del magic
|
|
||||||
|
|
||||||
|
People shouldn't run this directly; instead, they should use the `repo` wrapper
|
||||||
|
which takes care of execing this entry point.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import getpass
|
||||||
|
import netrc
|
||||||
import optparse
|
import optparse
|
||||||
import os
|
import os
|
||||||
import re
|
|
||||||
import sys
|
import sys
|
||||||
|
import textwrap
|
||||||
|
import time
|
||||||
|
|
||||||
from command import InteractiveCommand, PagedCommand
|
from pyversion import is_python3
|
||||||
|
if is_python3():
|
||||||
|
import urllib.request
|
||||||
|
else:
|
||||||
|
import imp
|
||||||
|
import urllib2
|
||||||
|
urllib = imp.new_module('urllib')
|
||||||
|
urllib.request = urllib2
|
||||||
|
|
||||||
|
try:
|
||||||
|
import kerberos
|
||||||
|
except ImportError:
|
||||||
|
kerberos = None
|
||||||
|
|
||||||
|
from color import SetDefaultColoring
|
||||||
|
import event_log
|
||||||
|
from repo_trace import SetTrace
|
||||||
|
from git_command import git, GitCommand, user_agent
|
||||||
|
from git_config import init_ssh, close_ssh
|
||||||
|
from command import InteractiveCommand
|
||||||
|
from command import MirrorSafeCommand
|
||||||
|
from command import GitcAvailableCommand, GitcClientCommand
|
||||||
|
from subcmds.version import Version
|
||||||
|
from editor import Editor
|
||||||
|
from error import DownloadError
|
||||||
|
from error import InvalidProjectGroupsError
|
||||||
|
from error import ManifestInvalidRevisionError
|
||||||
|
from error import ManifestParseError
|
||||||
|
from error import NoManifestException
|
||||||
from error import NoSuchProjectError
|
from error import NoSuchProjectError
|
||||||
from error import RepoChangedException
|
from error import RepoChangedException
|
||||||
from manifest import Manifest
|
import gitc_utils
|
||||||
from pager import RunPager
|
from manifest_xml import GitcManifest, XmlManifest
|
||||||
|
from pager import RunPager, TerminatePager
|
||||||
|
from wrapper import WrapperPath, Wrapper
|
||||||
|
|
||||||
from subcmds import all as all_commands
|
from subcmds import all_commands
|
||||||
|
|
||||||
|
if not is_python3():
|
||||||
|
input = raw_input
|
||||||
|
|
||||||
global_options = optparse.OptionParser(
|
global_options = optparse.OptionParser(
|
||||||
usage="repo [-p|--paginate|--no-pager] COMMAND [ARGS]"
|
usage='repo [-p|--paginate|--no-pager] COMMAND [ARGS]',
|
||||||
)
|
add_help_option=False)
|
||||||
|
global_options.add_option('-h', '--help', action='store_true',
|
||||||
|
help='show this help message and exit')
|
||||||
global_options.add_option('-p', '--paginate',
|
global_options.add_option('-p', '--paginate',
|
||||||
dest='pager', action='store_true',
|
dest='pager', action='store_true',
|
||||||
help='display command output in the pager')
|
help='display command output in the pager')
|
||||||
global_options.add_option('--no-pager',
|
global_options.add_option('--no-pager',
|
||||||
dest='no_pager', action='store_true',
|
dest='no_pager', action='store_true',
|
||||||
help='disable the pager')
|
help='disable the pager')
|
||||||
|
global_options.add_option('--color',
|
||||||
|
choices=('auto', 'always', 'never'), default=None,
|
||||||
|
help='control color usage: auto, always, never')
|
||||||
|
global_options.add_option('--trace',
|
||||||
|
dest='trace', action='store_true',
|
||||||
|
help='trace git command execution (REPO_TRACE=1)')
|
||||||
|
global_options.add_option('--trace-python',
|
||||||
|
dest='trace_python', action='store_true',
|
||||||
|
help='trace python command execution')
|
||||||
|
global_options.add_option('--time',
|
||||||
|
dest='time', action='store_true',
|
||||||
|
help='time repo command execution')
|
||||||
|
global_options.add_option('--version',
|
||||||
|
dest='show_version', action='store_true',
|
||||||
|
help='display this version of repo')
|
||||||
|
global_options.add_option('--event-log',
|
||||||
|
dest='event_log', action='store',
|
||||||
|
help='filename of event log to append timeline to')
|
||||||
|
|
||||||
class _Repo(object):
|
class _Repo(object):
|
||||||
def __init__(self, repodir):
|
def __init__(self, repodir):
|
||||||
self.repodir = repodir
|
self.repodir = repodir
|
||||||
self.commands = all_commands
|
self.commands = all_commands
|
||||||
|
# add 'branch' as an alias for 'branches'
|
||||||
|
all_commands['branch'] = all_commands['branches']
|
||||||
|
|
||||||
def _Run(self, argv):
|
def _ParseArgs(self, argv):
|
||||||
|
"""Parse the main `repo` command line options."""
|
||||||
name = None
|
name = None
|
||||||
glob = []
|
glob = []
|
||||||
|
|
||||||
for i in xrange(0, len(argv)):
|
for i in range(len(argv)):
|
||||||
if not argv[i].startswith('-'):
|
if not argv[i].startswith('-'):
|
||||||
name = argv[i]
|
name = argv[i]
|
||||||
if i > 0:
|
if i > 0:
|
||||||
@ -65,18 +124,74 @@ class _Repo(object):
|
|||||||
glob = argv
|
glob = argv
|
||||||
name = 'help'
|
name = 'help'
|
||||||
argv = []
|
argv = []
|
||||||
gopts, gargs = global_options.parse_args(glob)
|
gopts, _gargs = global_options.parse_args(glob)
|
||||||
|
|
||||||
|
if gopts.help:
|
||||||
|
global_options.print_help()
|
||||||
|
commands = ' '.join(sorted(self.commands))
|
||||||
|
wrapped_commands = textwrap.wrap(commands, width=77)
|
||||||
|
print('\nAvailable commands:\n %s' % ('\n '.join(wrapped_commands),))
|
||||||
|
print('\nRun `repo help <command>` for command-specific details.')
|
||||||
|
global_options.exit()
|
||||||
|
|
||||||
|
return (name, gopts, argv)
|
||||||
|
|
||||||
|
def _Run(self, name, gopts, argv):
|
||||||
|
"""Execute the requested subcommand."""
|
||||||
|
result = 0
|
||||||
|
|
||||||
|
if gopts.trace:
|
||||||
|
SetTrace()
|
||||||
|
if gopts.show_version:
|
||||||
|
if name == 'help':
|
||||||
|
name = 'version'
|
||||||
|
else:
|
||||||
|
print('fatal: invalid usage of --version', file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
|
||||||
|
SetDefaultColoring(gopts.color)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
cmd = self.commands[name]
|
cmd = self.commands[name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
print >>sys.stderr,\
|
print("repo: '%s' is not a repo command. See 'repo help'." % name,
|
||||||
"repo: '%s' is not a repo command. See 'repo help'."\
|
file=sys.stderr)
|
||||||
% name
|
return 1
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
cmd.repodir = self.repodir
|
cmd.repodir = self.repodir
|
||||||
cmd.manifest = Manifest(cmd.repodir)
|
cmd.manifest = XmlManifest(cmd.repodir)
|
||||||
|
cmd.gitc_manifest = None
|
||||||
|
gitc_client_name = gitc_utils.parse_clientdir(os.getcwd())
|
||||||
|
if gitc_client_name:
|
||||||
|
cmd.gitc_manifest = GitcManifest(cmd.repodir, gitc_client_name)
|
||||||
|
cmd.manifest.isGitcClient = True
|
||||||
|
|
||||||
|
Editor.globalConfig = cmd.manifest.globalConfig
|
||||||
|
|
||||||
|
if not isinstance(cmd, MirrorSafeCommand) and cmd.manifest.IsMirror:
|
||||||
|
print("fatal: '%s' requires a working directory" % name,
|
||||||
|
file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
|
||||||
|
if isinstance(cmd, GitcAvailableCommand) and not gitc_utils.get_gitc_manifest_dir():
|
||||||
|
print("fatal: '%s' requires GITC to be available" % name,
|
||||||
|
file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
|
||||||
|
if isinstance(cmd, GitcClientCommand) and not gitc_client_name:
|
||||||
|
print("fatal: '%s' requires a GITC client" % name,
|
||||||
|
file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
|
||||||
|
try:
|
||||||
|
copts, cargs = cmd.OptionParser.parse_args(argv)
|
||||||
|
copts = cmd.ReadEnvironmentOptions(copts)
|
||||||
|
except NoManifestException as e:
|
||||||
|
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||||
|
file=sys.stderr)
|
||||||
|
print('error: manifest missing or unreadable -- please run init',
|
||||||
|
file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
|
||||||
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
|
if not gopts.no_pager and not isinstance(cmd, InteractiveCommand):
|
||||||
config = cmd.manifest.globalConfig
|
config = cmd.manifest.globalConfig
|
||||||
@ -85,70 +200,110 @@ class _Repo(object):
|
|||||||
else:
|
else:
|
||||||
use_pager = config.GetBoolean('pager.%s' % name)
|
use_pager = config.GetBoolean('pager.%s' % name)
|
||||||
if use_pager is None:
|
if use_pager is None:
|
||||||
use_pager = isinstance(cmd, PagedCommand)
|
use_pager = cmd.WantPager(copts)
|
||||||
if use_pager:
|
if use_pager:
|
||||||
RunPager(config)
|
RunPager(config)
|
||||||
|
|
||||||
copts, cargs = cmd.OptionParser.parse_args(argv)
|
start = time.time()
|
||||||
|
cmd_event = cmd.event_log.Add(name, event_log.TASK_COMMAND, start)
|
||||||
|
cmd.event_log.SetParent(cmd_event)
|
||||||
try:
|
try:
|
||||||
cmd.Execute(copts, cargs)
|
cmd.ValidateOptions(copts, cargs)
|
||||||
except NoSuchProjectError, e:
|
result = cmd.Execute(copts, cargs)
|
||||||
|
except (DownloadError, ManifestInvalidRevisionError,
|
||||||
|
NoManifestException) as e:
|
||||||
|
print('error: in `%s`: %s' % (' '.join([name] + argv), str(e)),
|
||||||
|
file=sys.stderr)
|
||||||
|
if isinstance(e, NoManifestException):
|
||||||
|
print('error: manifest missing or unreadable -- please run init',
|
||||||
|
file=sys.stderr)
|
||||||
|
result = 1
|
||||||
|
except NoSuchProjectError as e:
|
||||||
if e.name:
|
if e.name:
|
||||||
print >>sys.stderr, 'error: project %s not found' % e.name
|
print('error: project %s not found' % e.name, file=sys.stderr)
|
||||||
else:
|
else:
|
||||||
print >>sys.stderr, 'error: no project in current directory'
|
print('error: no project in current directory', file=sys.stderr)
|
||||||
sys.exit(1)
|
result = 1
|
||||||
|
except InvalidProjectGroupsError as e:
|
||||||
|
if e.name:
|
||||||
|
print('error: project group must be enabled for project %s' % e.name, file=sys.stderr)
|
||||||
|
else:
|
||||||
|
print('error: project group must be enabled for the project in the current directory', file=sys.stderr)
|
||||||
|
result = 1
|
||||||
|
except SystemExit as e:
|
||||||
|
if e.code:
|
||||||
|
result = e.code
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
finish = time.time()
|
||||||
|
elapsed = finish - start
|
||||||
|
hours, remainder = divmod(elapsed, 3600)
|
||||||
|
minutes, seconds = divmod(remainder, 60)
|
||||||
|
if gopts.time:
|
||||||
|
if hours == 0:
|
||||||
|
print('real\t%dm%.3fs' % (minutes, seconds), file=sys.stderr)
|
||||||
|
else:
|
||||||
|
print('real\t%dh%dm%.3fs' % (hours, minutes, seconds),
|
||||||
|
file=sys.stderr)
|
||||||
|
|
||||||
def _MyWrapperPath():
|
cmd.event_log.FinishEvent(cmd_event, finish,
|
||||||
return os.path.join(os.path.dirname(__file__), 'repo')
|
result is None or result == 0)
|
||||||
|
if gopts.event_log:
|
||||||
|
cmd.event_log.Write(os.path.abspath(
|
||||||
|
os.path.expanduser(gopts.event_log)))
|
||||||
|
|
||||||
def _CurrentWrapperVersion():
|
return result
|
||||||
VERSION = None
|
|
||||||
pat = re.compile(r'^VERSION *=')
|
|
||||||
fd = open(_MyWrapperPath())
|
def _CheckWrapperVersion(ver_str, repo_path):
|
||||||
for line in fd:
|
"""Verify the repo launcher is new enough for this checkout.
|
||||||
if pat.match(line):
|
|
||||||
fd.close()
|
Args:
|
||||||
exec line
|
ver_str: The version string passed from the repo launcher when it ran us.
|
||||||
return VERSION
|
repo_path: The path to the repo launcher that loaded us.
|
||||||
raise NameError, 'No VERSION in repo script'
|
"""
|
||||||
|
# Refuse to work with really old wrapper versions. We don't test these,
|
||||||
|
# so might as well require a somewhat recent sane version.
|
||||||
|
# v1.15 of the repo launcher was released in ~Mar 2012.
|
||||||
|
MIN_REPO_VERSION = (1, 15)
|
||||||
|
min_str = '.'.join(str(x) for x in MIN_REPO_VERSION)
|
||||||
|
|
||||||
def _CheckWrapperVersion(ver, repo_path):
|
|
||||||
if not repo_path:
|
if not repo_path:
|
||||||
repo_path = '~/bin/repo'
|
repo_path = '~/bin/repo'
|
||||||
|
|
||||||
if not ver:
|
if not ver_str:
|
||||||
print >>sys.stderr, 'no --wrapper-version argument'
|
print('no --wrapper-version argument', file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
exp = _CurrentWrapperVersion()
|
# Pull out the version of the repo launcher we know about to compare.
|
||||||
ver = tuple(map(lambda x: int(x), ver.split('.')))
|
exp = Wrapper().VERSION
|
||||||
if len(ver) == 1:
|
ver = tuple(map(int, ver_str.split('.')))
|
||||||
ver = (0, ver[0])
|
|
||||||
|
|
||||||
if exp[0] > ver[0] or ver < (0, 4):
|
exp_str = '.'.join(map(str, exp))
|
||||||
exp_str = '.'.join(map(lambda x: str(x), exp))
|
if ver < MIN_REPO_VERSION:
|
||||||
print >>sys.stderr, """
|
print("""
|
||||||
!!! A new repo command (%5s) is available. !!!
|
repo: error:
|
||||||
!!! You must upgrade before you can continue: !!!
|
!!! Your version of repo %s is too old.
|
||||||
|
!!! We need at least version %s.
|
||||||
|
!!! A new repo command (%s) is available.
|
||||||
|
!!! You must upgrade before you can continue:
|
||||||
|
|
||||||
cp %s %s
|
cp %s %s
|
||||||
""" % (exp_str, _MyWrapperPath(), repo_path)
|
""" % (ver_str, min_str, exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if exp > ver:
|
if exp > ver:
|
||||||
exp_str = '.'.join(map(lambda x: str(x), exp))
|
print("""
|
||||||
print >>sys.stderr, """
|
|
||||||
... A new repo command (%5s) is available.
|
... A new repo command (%5s) is available.
|
||||||
... You should upgrade soon:
|
... You should upgrade soon:
|
||||||
|
|
||||||
cp %s %s
|
cp %s %s
|
||||||
""" % (exp_str, _MyWrapperPath(), repo_path)
|
""" % (exp_str, WrapperPath(), repo_path), file=sys.stderr)
|
||||||
|
|
||||||
def _CheckRepoDir(dir):
|
def _CheckRepoDir(repo_dir):
|
||||||
if not dir:
|
if not repo_dir:
|
||||||
print >>sys.stderr, 'no --repo-dir argument'
|
print('no --repo-dir argument', file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
def _PruneOptions(argv, opt):
|
def _PruneOptions(argv, opt):
|
||||||
i = 0
|
i = 0
|
||||||
@ -165,7 +320,184 @@ def _PruneOptions(argv, opt):
|
|||||||
continue
|
continue
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
|
class _UserAgentHandler(urllib.request.BaseHandler):
|
||||||
|
def http_request(self, req):
|
||||||
|
req.add_header('User-Agent', user_agent.repo)
|
||||||
|
return req
|
||||||
|
|
||||||
|
def https_request(self, req):
|
||||||
|
req.add_header('User-Agent', user_agent.repo)
|
||||||
|
return req
|
||||||
|
|
||||||
|
def _AddPasswordFromUserInput(handler, msg, req):
|
||||||
|
# If repo could not find auth info from netrc, try to get it from user input
|
||||||
|
url = req.get_full_url()
|
||||||
|
user, password = handler.passwd.find_user_password(None, url)
|
||||||
|
if user is None:
|
||||||
|
print(msg)
|
||||||
|
try:
|
||||||
|
user = input('User: ')
|
||||||
|
password = getpass.getpass()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
return
|
||||||
|
handler.passwd.add_password(None, url, user, password)
|
||||||
|
|
||||||
|
class _BasicAuthHandler(urllib.request.HTTPBasicAuthHandler):
|
||||||
|
def http_error_401(self, req, fp, code, msg, headers):
|
||||||
|
_AddPasswordFromUserInput(self, msg, req)
|
||||||
|
return urllib.request.HTTPBasicAuthHandler.http_error_401(
|
||||||
|
self, req, fp, code, msg, headers)
|
||||||
|
|
||||||
|
def http_error_auth_reqed(self, authreq, host, req, headers):
|
||||||
|
try:
|
||||||
|
old_add_header = req.add_header
|
||||||
|
def _add_header(name, val):
|
||||||
|
val = val.replace('\n', '')
|
||||||
|
old_add_header(name, val)
|
||||||
|
req.add_header = _add_header
|
||||||
|
return urllib.request.AbstractBasicAuthHandler.http_error_auth_reqed(
|
||||||
|
self, authreq, host, req, headers)
|
||||||
|
except:
|
||||||
|
reset = getattr(self, 'reset_retry_count', None)
|
||||||
|
if reset is not None:
|
||||||
|
reset()
|
||||||
|
elif getattr(self, 'retried', None):
|
||||||
|
self.retried = 0
|
||||||
|
raise
|
||||||
|
|
||||||
|
class _DigestAuthHandler(urllib.request.HTTPDigestAuthHandler):
|
||||||
|
def http_error_401(self, req, fp, code, msg, headers):
|
||||||
|
_AddPasswordFromUserInput(self, msg, req)
|
||||||
|
return urllib.request.HTTPDigestAuthHandler.http_error_401(
|
||||||
|
self, req, fp, code, msg, headers)
|
||||||
|
|
||||||
|
def http_error_auth_reqed(self, auth_header, host, req, headers):
|
||||||
|
try:
|
||||||
|
old_add_header = req.add_header
|
||||||
|
def _add_header(name, val):
|
||||||
|
val = val.replace('\n', '')
|
||||||
|
old_add_header(name, val)
|
||||||
|
req.add_header = _add_header
|
||||||
|
return urllib.request.AbstractDigestAuthHandler.http_error_auth_reqed(
|
||||||
|
self, auth_header, host, req, headers)
|
||||||
|
except:
|
||||||
|
reset = getattr(self, 'reset_retry_count', None)
|
||||||
|
if reset is not None:
|
||||||
|
reset()
|
||||||
|
elif getattr(self, 'retried', None):
|
||||||
|
self.retried = 0
|
||||||
|
raise
|
||||||
|
|
||||||
|
class _KerberosAuthHandler(urllib.request.BaseHandler):
|
||||||
|
def __init__(self):
|
||||||
|
self.retried = 0
|
||||||
|
self.context = None
|
||||||
|
self.handler_order = urllib.request.BaseHandler.handler_order - 50
|
||||||
|
|
||||||
|
def http_error_401(self, req, fp, code, msg, headers):
|
||||||
|
host = req.get_host()
|
||||||
|
retry = self.http_error_auth_reqed('www-authenticate', host, req, headers)
|
||||||
|
return retry
|
||||||
|
|
||||||
|
def http_error_auth_reqed(self, auth_header, host, req, headers):
|
||||||
|
try:
|
||||||
|
spn = "HTTP@%s" % host
|
||||||
|
authdata = self._negotiate_get_authdata(auth_header, headers)
|
||||||
|
|
||||||
|
if self.retried > 3:
|
||||||
|
raise urllib.request.HTTPError(req.get_full_url(), 401,
|
||||||
|
"Negotiate auth failed", headers, None)
|
||||||
|
else:
|
||||||
|
self.retried += 1
|
||||||
|
|
||||||
|
neghdr = self._negotiate_get_svctk(spn, authdata)
|
||||||
|
if neghdr is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
req.add_unredirected_header('Authorization', neghdr)
|
||||||
|
response = self.parent.open(req)
|
||||||
|
|
||||||
|
srvauth = self._negotiate_get_authdata(auth_header, response.info())
|
||||||
|
if self._validate_response(srvauth):
|
||||||
|
return response
|
||||||
|
except kerberos.GSSError:
|
||||||
|
return None
|
||||||
|
except:
|
||||||
|
self.reset_retry_count()
|
||||||
|
raise
|
||||||
|
finally:
|
||||||
|
self._clean_context()
|
||||||
|
|
||||||
|
def reset_retry_count(self):
|
||||||
|
self.retried = 0
|
||||||
|
|
||||||
|
def _negotiate_get_authdata(self, auth_header, headers):
|
||||||
|
authhdr = headers.get(auth_header, None)
|
||||||
|
if authhdr is not None:
|
||||||
|
for mech_tuple in authhdr.split(","):
|
||||||
|
mech, __, authdata = mech_tuple.strip().partition(" ")
|
||||||
|
if mech.lower() == "negotiate":
|
||||||
|
return authdata.strip()
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _negotiate_get_svctk(self, spn, authdata):
|
||||||
|
if authdata is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
result, self.context = kerberos.authGSSClientInit(spn)
|
||||||
|
if result < kerberos.AUTH_GSS_COMPLETE:
|
||||||
|
return None
|
||||||
|
|
||||||
|
result = kerberos.authGSSClientStep(self.context, authdata)
|
||||||
|
if result < kerberos.AUTH_GSS_CONTINUE:
|
||||||
|
return None
|
||||||
|
|
||||||
|
response = kerberos.authGSSClientResponse(self.context)
|
||||||
|
return "Negotiate %s" % response
|
||||||
|
|
||||||
|
def _validate_response(self, authdata):
|
||||||
|
if authdata is None:
|
||||||
|
return None
|
||||||
|
result = kerberos.authGSSClientStep(self.context, authdata)
|
||||||
|
if result == kerberos.AUTH_GSS_COMPLETE:
|
||||||
|
return True
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _clean_context(self):
|
||||||
|
if self.context is not None:
|
||||||
|
kerberos.authGSSClientClean(self.context)
|
||||||
|
self.context = None
|
||||||
|
|
||||||
|
def init_http():
|
||||||
|
handlers = [_UserAgentHandler()]
|
||||||
|
|
||||||
|
mgr = urllib.request.HTTPPasswordMgrWithDefaultRealm()
|
||||||
|
try:
|
||||||
|
n = netrc.netrc()
|
||||||
|
for host in n.hosts:
|
||||||
|
p = n.hosts[host]
|
||||||
|
mgr.add_password(p[1], 'http://%s/' % host, p[0], p[2])
|
||||||
|
mgr.add_password(p[1], 'https://%s/' % host, p[0], p[2])
|
||||||
|
except netrc.NetrcParseError:
|
||||||
|
pass
|
||||||
|
except IOError:
|
||||||
|
pass
|
||||||
|
handlers.append(_BasicAuthHandler(mgr))
|
||||||
|
handlers.append(_DigestAuthHandler(mgr))
|
||||||
|
if kerberos:
|
||||||
|
handlers.append(_KerberosAuthHandler())
|
||||||
|
|
||||||
|
if 'http_proxy' in os.environ:
|
||||||
|
url = os.environ['http_proxy']
|
||||||
|
handlers.append(urllib.request.ProxyHandler({'http': url, 'https': url}))
|
||||||
|
if 'REPO_CURL_VERBOSE' in os.environ:
|
||||||
|
handlers.append(urllib.request.HTTPHandler(debuglevel=1))
|
||||||
|
handlers.append(urllib.request.HTTPSHandler(debuglevel=1))
|
||||||
|
urllib.request.install_opener(urllib.request.build_opener(*handlers))
|
||||||
|
|
||||||
def _Main(argv):
|
def _Main(argv):
|
||||||
|
result = 0
|
||||||
|
|
||||||
opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
|
opt = optparse.OptionParser(usage="repo wrapperinfo -- ...")
|
||||||
opt.add_option("--repo-dir", dest="repodir",
|
opt.add_option("--repo-dir", dest="repodir",
|
||||||
help="path to .repo/")
|
help="path to .repo/")
|
||||||
@ -179,20 +511,45 @@ def _Main(argv):
|
|||||||
_CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path)
|
_CheckWrapperVersion(opt.wrapper_version, opt.wrapper_path)
|
||||||
_CheckRepoDir(opt.repodir)
|
_CheckRepoDir(opt.repodir)
|
||||||
|
|
||||||
|
Version.wrapper_version = opt.wrapper_version
|
||||||
|
Version.wrapper_path = opt.wrapper_path
|
||||||
|
|
||||||
repo = _Repo(opt.repodir)
|
repo = _Repo(opt.repodir)
|
||||||
try:
|
try:
|
||||||
repo._Run(argv)
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
sys.exit(1)
|
|
||||||
except RepoChangedException:
|
|
||||||
# If the repo or manifest changed, re-exec ourselves.
|
|
||||||
#
|
|
||||||
try:
|
try:
|
||||||
os.execv(__file__, sys.argv)
|
init_ssh()
|
||||||
except OSError, e:
|
init_http()
|
||||||
print >>sys.stderr, 'fatal: cannot restart repo after upgrade'
|
name, gopts, argv = repo._ParseArgs(argv)
|
||||||
print >>sys.stderr, 'fatal: %s' % e
|
run = lambda: repo._Run(name, gopts, argv) or 0
|
||||||
sys.exit(128)
|
if gopts.trace_python:
|
||||||
|
import trace
|
||||||
|
tracer = trace.Trace(count=False, trace=True, timing=True,
|
||||||
|
ignoredirs=set(sys.path[1:]))
|
||||||
|
result = tracer.runfunc(run)
|
||||||
|
else:
|
||||||
|
result = run()
|
||||||
|
finally:
|
||||||
|
close_ssh()
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
print('aborted by user', file=sys.stderr)
|
||||||
|
result = 1
|
||||||
|
except ManifestParseError as mpe:
|
||||||
|
print('fatal: %s' % mpe, file=sys.stderr)
|
||||||
|
result = 1
|
||||||
|
except RepoChangedException as rce:
|
||||||
|
# If repo changed, re-exec ourselves.
|
||||||
|
#
|
||||||
|
argv = list(sys.argv)
|
||||||
|
argv.extend(rce.extra_args)
|
||||||
|
try:
|
||||||
|
os.execv(__file__, argv)
|
||||||
|
except OSError as e:
|
||||||
|
print('fatal: cannot restart repo after upgrade', file=sys.stderr)
|
||||||
|
print('fatal: %s' % e, file=sys.stderr)
|
||||||
|
result = 128
|
||||||
|
|
||||||
|
TerminatePager()
|
||||||
|
sys.exit(result)
|
||||||
|
|
||||||
if __name__ == '__main__':
|
if __name__ == '__main__':
|
||||||
_Main(sys.argv[1:])
|
_Main(sys.argv[1:])
|
||||||
|
338
manifest.py
338
manifest.py
@ -1,338 +0,0 @@
|
|||||||
#
|
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import xml.dom.minidom
|
|
||||||
|
|
||||||
from editor import Editor
|
|
||||||
from git_config import GitConfig, IsId
|
|
||||||
from import_tar import ImportTar
|
|
||||||
from import_zip import ImportZip
|
|
||||||
from project import Project, MetaProject, R_TAGS
|
|
||||||
from remote import Remote
|
|
||||||
from error import ManifestParseError
|
|
||||||
|
|
||||||
MANIFEST_FILE_NAME = 'manifest.xml'
|
|
||||||
|
|
||||||
class _Default(object):
|
|
||||||
"""Project defaults within the manifest."""
|
|
||||||
|
|
||||||
revision = None
|
|
||||||
remote = None
|
|
||||||
|
|
||||||
|
|
||||||
class Manifest(object):
|
|
||||||
"""manages the repo configuration file"""
|
|
||||||
|
|
||||||
def __init__(self, repodir):
|
|
||||||
self.repodir = os.path.abspath(repodir)
|
|
||||||
self.topdir = os.path.dirname(self.repodir)
|
|
||||||
self.manifestFile = os.path.join(self.repodir, MANIFEST_FILE_NAME)
|
|
||||||
|
|
||||||
self.globalConfig = GitConfig.ForUser()
|
|
||||||
Editor.globalConfig = self.globalConfig
|
|
||||||
|
|
||||||
self.repoProject = MetaProject(self, 'repo',
|
|
||||||
gitdir = os.path.join(repodir, 'repo/.git'),
|
|
||||||
worktree = os.path.join(repodir, 'repo'))
|
|
||||||
|
|
||||||
wt = os.path.join(repodir, 'manifests')
|
|
||||||
gd_new = os.path.join(repodir, 'manifests.git')
|
|
||||||
gd_old = os.path.join(wt, '.git')
|
|
||||||
if os.path.exists(gd_new) or not os.path.exists(gd_old):
|
|
||||||
gd = gd_new
|
|
||||||
else:
|
|
||||||
gd = gd_old
|
|
||||||
self.manifestProject = MetaProject(self, 'manifests',
|
|
||||||
gitdir = gd,
|
|
||||||
worktree = wt)
|
|
||||||
|
|
||||||
self._Unload()
|
|
||||||
|
|
||||||
def Link(self, name):
|
|
||||||
"""Update the repo metadata to use a different manifest.
|
|
||||||
"""
|
|
||||||
path = os.path.join(self.manifestProject.worktree, name)
|
|
||||||
if not os.path.isfile(path):
|
|
||||||
raise ManifestParseError('manifest %s not found' % name)
|
|
||||||
|
|
||||||
old = self.manifestFile
|
|
||||||
try:
|
|
||||||
self.manifestFile = path
|
|
||||||
self._Unload()
|
|
||||||
self._Load()
|
|
||||||
finally:
|
|
||||||
self.manifestFile = old
|
|
||||||
|
|
||||||
try:
|
|
||||||
if os.path.exists(self.manifestFile):
|
|
||||||
os.remove(self.manifestFile)
|
|
||||||
os.symlink('manifests/%s' % name, self.manifestFile)
|
|
||||||
except OSError, e:
|
|
||||||
raise ManifestParseError('cannot link manifest %s' % name)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def projects(self):
|
|
||||||
self._Load()
|
|
||||||
return self._projects
|
|
||||||
|
|
||||||
@property
|
|
||||||
def remotes(self):
|
|
||||||
self._Load()
|
|
||||||
return self._remotes
|
|
||||||
|
|
||||||
@property
|
|
||||||
def default(self):
|
|
||||||
self._Load()
|
|
||||||
return self._default
|
|
||||||
|
|
||||||
def _Unload(self):
|
|
||||||
self._loaded = False
|
|
||||||
self._projects = {}
|
|
||||||
self._remotes = {}
|
|
||||||
self._default = None
|
|
||||||
self.branch = None
|
|
||||||
|
|
||||||
def _Load(self):
|
|
||||||
if not self._loaded:
|
|
||||||
self._ParseManifest()
|
|
||||||
self._loaded = True
|
|
||||||
|
|
||||||
def _ParseManifest(self):
|
|
||||||
root = xml.dom.minidom.parse(self.manifestFile)
|
|
||||||
if not root or not root.childNodes:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
"no root node in %s" % \
|
|
||||||
self.manifestFile
|
|
||||||
|
|
||||||
config = root.childNodes[0]
|
|
||||||
if config.nodeName != 'manifest':
|
|
||||||
raise ManifestParseError, \
|
|
||||||
"no <manifest> in %s" % \
|
|
||||||
self.manifestFile
|
|
||||||
|
|
||||||
self.branch = config.getAttribute('branch')
|
|
||||||
if not self.branch:
|
|
||||||
self.branch = 'default'
|
|
||||||
|
|
||||||
for node in config.childNodes:
|
|
||||||
if node.nodeName == 'remote':
|
|
||||||
remote = self._ParseRemote(node)
|
|
||||||
if self._remotes.get(remote.name):
|
|
||||||
raise ManifestParseError, \
|
|
||||||
'duplicate remote %s in %s' % \
|
|
||||||
(remote.name, self.manifestFile)
|
|
||||||
self._remotes[remote.name] = remote
|
|
||||||
|
|
||||||
for node in config.childNodes:
|
|
||||||
if node.nodeName == 'default':
|
|
||||||
if self._default is not None:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
'duplicate default in %s' % \
|
|
||||||
(self.manifestFile)
|
|
||||||
self._default = self._ParseDefault(node)
|
|
||||||
if self._default is None:
|
|
||||||
self._default = _Default()
|
|
||||||
|
|
||||||
for node in config.childNodes:
|
|
||||||
if node.nodeName == 'project':
|
|
||||||
project = self._ParseProject(node)
|
|
||||||
if self._projects.get(project.name):
|
|
||||||
raise ManifestParseError, \
|
|
||||||
'duplicate project %s in %s' % \
|
|
||||||
(project.name, self.manifestFile)
|
|
||||||
self._projects[project.name] = project
|
|
||||||
|
|
||||||
def _ParseRemote(self, node):
|
|
||||||
"""
|
|
||||||
reads a <remote> element from the manifest file
|
|
||||||
"""
|
|
||||||
name = self._reqatt(node, 'name')
|
|
||||||
fetch = self._reqatt(node, 'fetch')
|
|
||||||
review = node.getAttribute('review')
|
|
||||||
|
|
||||||
r = Remote(name=name,
|
|
||||||
fetch=fetch,
|
|
||||||
review=review)
|
|
||||||
|
|
||||||
for n in node.childNodes:
|
|
||||||
if n.nodeName == 'require':
|
|
||||||
r.requiredCommits.append(self._reqatt(n, 'commit'))
|
|
||||||
|
|
||||||
return r
|
|
||||||
|
|
||||||
def _ParseDefault(self, node):
|
|
||||||
"""
|
|
||||||
reads a <default> element from the manifest file
|
|
||||||
"""
|
|
||||||
d = _Default()
|
|
||||||
d.remote = self._get_remote(node)
|
|
||||||
d.revision = node.getAttribute('revision')
|
|
||||||
return d
|
|
||||||
|
|
||||||
def _ParseProject(self, node):
|
|
||||||
"""
|
|
||||||
reads a <project> element from the manifest file
|
|
||||||
"""
|
|
||||||
name = self._reqatt(node, 'name')
|
|
||||||
|
|
||||||
remote = self._get_remote(node)
|
|
||||||
if remote is None:
|
|
||||||
remote = self._default.remote
|
|
||||||
if remote is None:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
"no remote for project %s within %s" % \
|
|
||||||
(name, self.manifestFile)
|
|
||||||
|
|
||||||
revision = node.getAttribute('revision')
|
|
||||||
if not revision:
|
|
||||||
revision = self._default.revision
|
|
||||||
if not revision:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
"no revision for project %s within %s" % \
|
|
||||||
(name, self.manifestFile)
|
|
||||||
|
|
||||||
path = node.getAttribute('path')
|
|
||||||
if not path:
|
|
||||||
path = name
|
|
||||||
if path.startswith('/'):
|
|
||||||
raise ManifestParseError, \
|
|
||||||
"project %s path cannot be absolute in %s" % \
|
|
||||||
(name, self.manifestFile)
|
|
||||||
|
|
||||||
worktree = os.path.join(self.topdir, path)
|
|
||||||
gitdir = os.path.join(self.repodir, 'projects/%s.git' % path)
|
|
||||||
|
|
||||||
project = Project(manifest = self,
|
|
||||||
name = name,
|
|
||||||
remote = remote,
|
|
||||||
gitdir = gitdir,
|
|
||||||
worktree = worktree,
|
|
||||||
relpath = path,
|
|
||||||
revision = revision)
|
|
||||||
|
|
||||||
for n in node.childNodes:
|
|
||||||
if n.nodeName == 'remote':
|
|
||||||
r = self._ParseRemote(n)
|
|
||||||
if project.extraRemotes.get(r.name) \
|
|
||||||
or project.remote.name == r.name:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
'duplicate remote %s in project %s in %s' % \
|
|
||||||
(r.name, project.name, self.manifestFile)
|
|
||||||
project.extraRemotes[r.name] = r
|
|
||||||
elif n.nodeName == 'copyfile':
|
|
||||||
self._ParseCopyFile(project, n)
|
|
||||||
|
|
||||||
to_resolve = []
|
|
||||||
by_version = {}
|
|
||||||
|
|
||||||
for n in node.childNodes:
|
|
||||||
if n.nodeName == 'import':
|
|
||||||
self._ParseImport(project, n, to_resolve, by_version)
|
|
||||||
|
|
||||||
for pair in to_resolve:
|
|
||||||
sn, pr = pair
|
|
||||||
try:
|
|
||||||
sn.SetParent(by_version[pr].commit)
|
|
||||||
except KeyError:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
'snapshot %s not in project %s in %s' % \
|
|
||||||
(pr, project.name, self.manifestFile)
|
|
||||||
|
|
||||||
return project
|
|
||||||
|
|
||||||
def _ParseImport(self, project, import_node, to_resolve, by_version):
|
|
||||||
first_url = None
|
|
||||||
for node in import_node.childNodes:
|
|
||||||
if node.nodeName == 'mirror':
|
|
||||||
first_url = self._reqatt(node, 'url')
|
|
||||||
break
|
|
||||||
if not first_url:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
'mirror url required for project %s in %s' % \
|
|
||||||
(project.name, self.manifestFile)
|
|
||||||
|
|
||||||
imp = None
|
|
||||||
for cls in [ImportTar, ImportZip]:
|
|
||||||
if cls.CanAccept(first_url):
|
|
||||||
imp = cls()
|
|
||||||
break
|
|
||||||
if not imp:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
'snapshot %s unsupported for project %s in %s' % \
|
|
||||||
(first_url, project.name, self.manifestFile)
|
|
||||||
|
|
||||||
imp.SetProject(project)
|
|
||||||
|
|
||||||
for node in import_node.childNodes:
|
|
||||||
if node.nodeName == 'remap':
|
|
||||||
old = node.getAttribute('strip')
|
|
||||||
new = node.getAttribute('insert')
|
|
||||||
imp.RemapPath(old, new)
|
|
||||||
|
|
||||||
elif node.nodeName == 'mirror':
|
|
||||||
imp.AddUrl(self._reqatt(node, 'url'))
|
|
||||||
|
|
||||||
for node in import_node.childNodes:
|
|
||||||
if node.nodeName == 'snapshot':
|
|
||||||
sn = imp.Clone()
|
|
||||||
sn.SetVersion(self._reqatt(node, 'version'))
|
|
||||||
sn.SetCommit(node.getAttribute('check'))
|
|
||||||
|
|
||||||
pr = node.getAttribute('prior')
|
|
||||||
if pr:
|
|
||||||
if IsId(pr):
|
|
||||||
sn.SetParent(pr)
|
|
||||||
else:
|
|
||||||
to_resolve.append((sn, pr))
|
|
||||||
|
|
||||||
rev = R_TAGS + sn.TagName
|
|
||||||
|
|
||||||
if rev in project.snapshots:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
'duplicate snapshot %s for project %s in %s' % \
|
|
||||||
(sn.version, project.name, self.manifestFile)
|
|
||||||
project.snapshots[rev] = sn
|
|
||||||
by_version[sn.version] = sn
|
|
||||||
|
|
||||||
def _ParseCopyFile(self, project, node):
|
|
||||||
src = self._reqatt(node, 'src')
|
|
||||||
dest = self._reqatt(node, 'dest')
|
|
||||||
# src is project relative, and dest is relative to the top of the tree
|
|
||||||
project.AddCopyFile(src, os.path.join(self.topdir, dest))
|
|
||||||
|
|
||||||
def _get_remote(self, node):
|
|
||||||
name = node.getAttribute('remote')
|
|
||||||
if not name:
|
|
||||||
return None
|
|
||||||
|
|
||||||
v = self._remotes.get(name)
|
|
||||||
if not v:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
"remote %s not defined in %s" % \
|
|
||||||
(name, self.manifestFile)
|
|
||||||
return v
|
|
||||||
|
|
||||||
def _reqatt(self, node, attname):
|
|
||||||
"""
|
|
||||||
reads a required attribute from the node.
|
|
||||||
"""
|
|
||||||
v = node.getAttribute(attname)
|
|
||||||
if not v:
|
|
||||||
raise ManifestParseError, \
|
|
||||||
"no %s in <%s> within %s" % \
|
|
||||||
(attname, node.nodeName, self.manifestFile)
|
|
||||||
return v
|
|
1137
manifest_xml.py
Normal file
1137
manifest_xml.py
Normal file
File diff suppressed because it is too large
Load Diff
50
pager.py
Executable file → Normal file
50
pager.py
Executable file → Normal file
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,21 +14,56 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
import os
|
import os
|
||||||
import select
|
import select
|
||||||
|
import subprocess
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
|
import platform_utils
|
||||||
|
|
||||||
active = False
|
active = False
|
||||||
|
pager_process = None
|
||||||
|
old_stdout = None
|
||||||
|
old_stderr = None
|
||||||
|
|
||||||
def RunPager(globalConfig):
|
def RunPager(globalConfig):
|
||||||
global active
|
if not os.isatty(0) or not os.isatty(1):
|
||||||
|
|
||||||
if not os.isatty(0):
|
|
||||||
return
|
return
|
||||||
pager = _SelectPager(globalConfig)
|
pager = _SelectPager(globalConfig)
|
||||||
if pager == '' or pager == 'cat':
|
if pager == '' or pager == 'cat':
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if platform_utils.isWindows():
|
||||||
|
_PipePager(pager);
|
||||||
|
else:
|
||||||
|
_ForkPager(pager)
|
||||||
|
|
||||||
|
def TerminatePager():
|
||||||
|
global pager_process, old_stdout, old_stderr
|
||||||
|
if pager_process:
|
||||||
|
sys.stdout.flush()
|
||||||
|
sys.stderr.flush()
|
||||||
|
pager_process.stdin.close()
|
||||||
|
pager_process.wait();
|
||||||
|
pager_process = None
|
||||||
|
# Restore initial stdout/err in case there is more output in this process
|
||||||
|
# after shutting down the pager process
|
||||||
|
sys.stdout = old_stdout
|
||||||
|
sys.stderr = old_stderr
|
||||||
|
|
||||||
|
def _PipePager(pager):
|
||||||
|
global pager_process, old_stdout, old_stderr
|
||||||
|
assert pager_process is None, "Only one active pager process at a time"
|
||||||
|
# Create pager process, piping stdout/err into its stdin
|
||||||
|
pager_process = subprocess.Popen([pager], stdin=subprocess.PIPE, stdout=sys.stdout, stderr=sys.stderr)
|
||||||
|
old_stdout = sys.stdout
|
||||||
|
old_stderr = sys.stderr
|
||||||
|
sys.stdout = pager_process.stdin
|
||||||
|
sys.stderr = pager_process.stdin
|
||||||
|
|
||||||
|
def _ForkPager(pager):
|
||||||
|
global active
|
||||||
# This process turns into the pager; a child it forks will
|
# This process turns into the pager; a child it forks will
|
||||||
# do the real processing and output back to the pager. This
|
# do the real processing and output back to the pager. This
|
||||||
# is necessary to keep the pager in control of the tty.
|
# is necessary to keep the pager in control of the tty.
|
||||||
@ -49,8 +85,8 @@ def RunPager(globalConfig):
|
|||||||
|
|
||||||
_BecomePager(pager)
|
_BecomePager(pager)
|
||||||
except Exception:
|
except Exception:
|
||||||
print >>sys.stderr, "fatal: cannot start pager '%s'" % pager
|
print("fatal: cannot start pager '%s'" % pager, file=sys.stderr)
|
||||||
os.exit(255)
|
sys.exit(255)
|
||||||
|
|
||||||
def _SelectPager(globalConfig):
|
def _SelectPager(globalConfig):
|
||||||
try:
|
try:
|
||||||
@ -74,11 +110,11 @@ def _BecomePager(pager):
|
|||||||
# ready works around a long-standing bug in popularly
|
# ready works around a long-standing bug in popularly
|
||||||
# available versions of 'less', a better 'more'.
|
# available versions of 'less', a better 'more'.
|
||||||
#
|
#
|
||||||
a, b, c = select.select([0], [], [0])
|
_a, _b, _c = select.select([0], [], [0])
|
||||||
|
|
||||||
os.environ['LESS'] = 'FRSX'
|
os.environ['LESS'] = 'FRSX'
|
||||||
|
|
||||||
try:
|
try:
|
||||||
os.execvp(pager, [pager])
|
os.execvp(pager, [pager])
|
||||||
except OSError, e:
|
except OSError:
|
||||||
os.execv('/bin/sh', ['sh', '-c', pager])
|
os.execv('/bin/sh', ['sh', '-c', pager])
|
||||||
|
416
platform_utils.py
Normal file
416
platform_utils.py
Normal file
@ -0,0 +1,416 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2016 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import errno
|
||||||
|
import os
|
||||||
|
import platform
|
||||||
|
import select
|
||||||
|
import shutil
|
||||||
|
import stat
|
||||||
|
|
||||||
|
from pyversion import is_python3
|
||||||
|
if is_python3():
|
||||||
|
from queue import Queue
|
||||||
|
else:
|
||||||
|
from Queue import Queue
|
||||||
|
|
||||||
|
from threading import Thread
|
||||||
|
|
||||||
|
|
||||||
|
def isWindows():
|
||||||
|
""" Returns True when running with the native port of Python for Windows,
|
||||||
|
False when running on any other platform (including the Cygwin port of
|
||||||
|
Python).
|
||||||
|
"""
|
||||||
|
# Note: The cygwin port of Python returns "CYGWIN_NT_xxx"
|
||||||
|
return platform.system() == "Windows"
|
||||||
|
|
||||||
|
|
||||||
|
class FileDescriptorStreams(object):
|
||||||
|
""" Platform agnostic abstraction enabling non-blocking I/O over a
|
||||||
|
collection of file descriptors. This abstraction is required because
|
||||||
|
fctnl(os.O_NONBLOCK) is not supported on Windows.
|
||||||
|
"""
|
||||||
|
@classmethod
|
||||||
|
def create(cls):
|
||||||
|
""" Factory method: instantiates the concrete class according to the
|
||||||
|
current platform.
|
||||||
|
"""
|
||||||
|
if isWindows():
|
||||||
|
return _FileDescriptorStreamsThreads()
|
||||||
|
else:
|
||||||
|
return _FileDescriptorStreamsNonBlocking()
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
self.streams = []
|
||||||
|
|
||||||
|
def add(self, fd, dest, std_name):
|
||||||
|
""" Wraps an existing file descriptor as a stream.
|
||||||
|
"""
|
||||||
|
self.streams.append(self._create_stream(fd, dest, std_name))
|
||||||
|
|
||||||
|
def remove(self, stream):
|
||||||
|
""" Removes a stream, when done with it.
|
||||||
|
"""
|
||||||
|
self.streams.remove(stream)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def is_done(self):
|
||||||
|
""" Returns True when all streams have been processed.
|
||||||
|
"""
|
||||||
|
return len(self.streams) == 0
|
||||||
|
|
||||||
|
def select(self):
|
||||||
|
""" Returns the set of streams that have data available to read.
|
||||||
|
The returned streams each expose a read() and a close() method.
|
||||||
|
When done with a stream, call the remove(stream) method.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
def _create_stream(self, fd, dest, std_name):
|
||||||
|
""" Creates a new stream wrapping an existing file descriptor.
|
||||||
|
"""
|
||||||
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class _FileDescriptorStreamsNonBlocking(FileDescriptorStreams):
|
||||||
|
""" Implementation of FileDescriptorStreams for platforms that support
|
||||||
|
non blocking I/O.
|
||||||
|
"""
|
||||||
|
class Stream(object):
|
||||||
|
""" Encapsulates a file descriptor """
|
||||||
|
def __init__(self, fd, dest, std_name):
|
||||||
|
self.fd = fd
|
||||||
|
self.dest = dest
|
||||||
|
self.std_name = std_name
|
||||||
|
self.set_non_blocking()
|
||||||
|
|
||||||
|
def set_non_blocking(self):
|
||||||
|
import fcntl
|
||||||
|
flags = fcntl.fcntl(self.fd, fcntl.F_GETFL)
|
||||||
|
fcntl.fcntl(self.fd, fcntl.F_SETFL, flags | os.O_NONBLOCK)
|
||||||
|
|
||||||
|
def fileno(self):
|
||||||
|
return self.fd.fileno()
|
||||||
|
|
||||||
|
def read(self):
|
||||||
|
return self.fd.read(4096)
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
self.fd.close()
|
||||||
|
|
||||||
|
def _create_stream(self, fd, dest, std_name):
|
||||||
|
return self.Stream(fd, dest, std_name)
|
||||||
|
|
||||||
|
def select(self):
|
||||||
|
ready_streams, _, _ = select.select(self.streams, [], [])
|
||||||
|
return ready_streams
|
||||||
|
|
||||||
|
|
||||||
|
class _FileDescriptorStreamsThreads(FileDescriptorStreams):
|
||||||
|
""" Implementation of FileDescriptorStreams for platforms that don't support
|
||||||
|
non blocking I/O. This implementation requires creating threads issuing
|
||||||
|
blocking read operations on file descriptors.
|
||||||
|
"""
|
||||||
|
def __init__(self):
|
||||||
|
super(_FileDescriptorStreamsThreads, self).__init__()
|
||||||
|
# The queue is shared accross all threads so we can simulate the
|
||||||
|
# behavior of the select() function
|
||||||
|
self.queue = Queue(10) # Limit incoming data from streams
|
||||||
|
|
||||||
|
def _create_stream(self, fd, dest, std_name):
|
||||||
|
return self.Stream(fd, dest, std_name, self.queue)
|
||||||
|
|
||||||
|
def select(self):
|
||||||
|
# Return only one stream at a time, as it is the most straighforward
|
||||||
|
# thing to do and it is compatible with the select() function.
|
||||||
|
item = self.queue.get()
|
||||||
|
stream = item.stream
|
||||||
|
stream.data = item.data
|
||||||
|
return [stream]
|
||||||
|
|
||||||
|
class QueueItem(object):
|
||||||
|
""" Item put in the shared queue """
|
||||||
|
def __init__(self, stream, data):
|
||||||
|
self.stream = stream
|
||||||
|
self.data = data
|
||||||
|
|
||||||
|
class Stream(object):
|
||||||
|
""" Encapsulates a file descriptor """
|
||||||
|
def __init__(self, fd, dest, std_name, queue):
|
||||||
|
self.fd = fd
|
||||||
|
self.dest = dest
|
||||||
|
self.std_name = std_name
|
||||||
|
self.queue = queue
|
||||||
|
self.data = None
|
||||||
|
self.thread = Thread(target=self.read_to_queue)
|
||||||
|
self.thread.daemon = True
|
||||||
|
self.thread.start()
|
||||||
|
|
||||||
|
def close(self):
|
||||||
|
self.fd.close()
|
||||||
|
|
||||||
|
def read(self):
|
||||||
|
data = self.data
|
||||||
|
self.data = None
|
||||||
|
return data
|
||||||
|
|
||||||
|
def read_to_queue(self):
|
||||||
|
""" The thread function: reads everything from the file descriptor into
|
||||||
|
the shared queue and terminates when reaching EOF.
|
||||||
|
"""
|
||||||
|
for line in iter(self.fd.readline, b''):
|
||||||
|
self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, line))
|
||||||
|
self.fd.close()
|
||||||
|
self.queue.put(_FileDescriptorStreamsThreads.QueueItem(self, None))
|
||||||
|
|
||||||
|
|
||||||
|
def symlink(source, link_name):
|
||||||
|
"""Creates a symbolic link pointing to source named link_name.
|
||||||
|
Note: On Windows, source must exist on disk, as the implementation needs
|
||||||
|
to know whether to create a "File" or a "Directory" symbolic link.
|
||||||
|
"""
|
||||||
|
if isWindows():
|
||||||
|
import platform_utils_win32
|
||||||
|
source = _validate_winpath(source)
|
||||||
|
link_name = _validate_winpath(link_name)
|
||||||
|
target = os.path.join(os.path.dirname(link_name), source)
|
||||||
|
if isdir(target):
|
||||||
|
platform_utils_win32.create_dirsymlink(_makelongpath(source), link_name)
|
||||||
|
else:
|
||||||
|
platform_utils_win32.create_filesymlink(_makelongpath(source), link_name)
|
||||||
|
else:
|
||||||
|
return os.symlink(source, link_name)
|
||||||
|
|
||||||
|
|
||||||
|
def _validate_winpath(path):
|
||||||
|
path = os.path.normpath(path)
|
||||||
|
if _winpath_is_valid(path):
|
||||||
|
return path
|
||||||
|
raise ValueError("Path \"%s\" must be a relative path or an absolute "
|
||||||
|
"path starting with a drive letter".format(path))
|
||||||
|
|
||||||
|
|
||||||
|
def _winpath_is_valid(path):
|
||||||
|
"""Windows only: returns True if path is relative (e.g. ".\\foo") or is
|
||||||
|
absolute including a drive letter (e.g. "c:\\foo"). Returns False if path
|
||||||
|
is ambiguous (e.g. "x:foo" or "\\foo").
|
||||||
|
"""
|
||||||
|
assert isWindows()
|
||||||
|
path = os.path.normpath(path)
|
||||||
|
drive, tail = os.path.splitdrive(path)
|
||||||
|
if tail:
|
||||||
|
if not drive:
|
||||||
|
return tail[0] != os.sep # "\\foo" is invalid
|
||||||
|
else:
|
||||||
|
return tail[0] == os.sep # "x:foo" is invalid
|
||||||
|
else:
|
||||||
|
return not drive # "x:" is invalid
|
||||||
|
|
||||||
|
|
||||||
|
def _makelongpath(path):
|
||||||
|
"""Return the input path normalized to support the Windows long path syntax
|
||||||
|
("\\\\?\\" prefix) if needed, i.e. if the input path is longer than the
|
||||||
|
MAX_PATH limit.
|
||||||
|
"""
|
||||||
|
if isWindows():
|
||||||
|
# Note: MAX_PATH is 260, but, for directories, the maximum value is actually 246.
|
||||||
|
if len(path) < 246:
|
||||||
|
return path
|
||||||
|
if path.startswith(u"\\\\?\\"):
|
||||||
|
return path
|
||||||
|
if not os.path.isabs(path):
|
||||||
|
return path
|
||||||
|
# Append prefix and ensure unicode so that the special longpath syntax
|
||||||
|
# is supported by underlying Win32 API calls
|
||||||
|
return u"\\\\?\\" + os.path.normpath(path)
|
||||||
|
else:
|
||||||
|
return path
|
||||||
|
|
||||||
|
|
||||||
|
def rmtree(path, ignore_errors=False):
|
||||||
|
"""shutil.rmtree(path) wrapper with support for long paths on Windows.
|
||||||
|
|
||||||
|
Availability: Unix, Windows."""
|
||||||
|
onerror = None
|
||||||
|
if isWindows():
|
||||||
|
path = _makelongpath(path)
|
||||||
|
onerror = handle_rmtree_error
|
||||||
|
shutil.rmtree(path, ignore_errors=ignore_errors, onerror=onerror)
|
||||||
|
|
||||||
|
|
||||||
|
def handle_rmtree_error(function, path, excinfo):
|
||||||
|
# Allow deleting read-only files
|
||||||
|
os.chmod(path, stat.S_IWRITE)
|
||||||
|
function(path)
|
||||||
|
|
||||||
|
|
||||||
|
def rename(src, dst):
|
||||||
|
"""os.rename(src, dst) wrapper with support for long paths on Windows.
|
||||||
|
|
||||||
|
Availability: Unix, Windows."""
|
||||||
|
if isWindows():
|
||||||
|
# On Windows, rename fails if destination exists, see
|
||||||
|
# https://docs.python.org/2/library/os.html#os.rename
|
||||||
|
try:
|
||||||
|
os.rename(_makelongpath(src), _makelongpath(dst))
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno == errno.EEXIST:
|
||||||
|
os.remove(_makelongpath(dst))
|
||||||
|
os.rename(_makelongpath(src), _makelongpath(dst))
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
os.rename(src, dst)
|
||||||
|
|
||||||
|
|
||||||
|
def remove(path):
|
||||||
|
"""Remove (delete) the file path. This is a replacement for os.remove that
|
||||||
|
allows deleting read-only files on Windows, with support for long paths and
|
||||||
|
for deleting directory symbolic links.
|
||||||
|
|
||||||
|
Availability: Unix, Windows."""
|
||||||
|
if isWindows():
|
||||||
|
longpath = _makelongpath(path)
|
||||||
|
try:
|
||||||
|
os.remove(longpath)
|
||||||
|
except OSError as e:
|
||||||
|
if e.errno == errno.EACCES:
|
||||||
|
os.chmod(longpath, stat.S_IWRITE)
|
||||||
|
# Directory symbolic links must be deleted with 'rmdir'.
|
||||||
|
if islink(longpath) and isdir(longpath):
|
||||||
|
os.rmdir(longpath)
|
||||||
|
else:
|
||||||
|
os.remove(longpath)
|
||||||
|
else:
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
os.remove(path)
|
||||||
|
|
||||||
|
|
||||||
|
def walk(top, topdown=True, onerror=None, followlinks=False):
|
||||||
|
"""os.walk(path) wrapper with support for long paths on Windows.
|
||||||
|
|
||||||
|
Availability: Windows, Unix.
|
||||||
|
"""
|
||||||
|
if isWindows():
|
||||||
|
return _walk_windows_impl(top, topdown, onerror, followlinks)
|
||||||
|
else:
|
||||||
|
return os.walk(top, topdown, onerror, followlinks)
|
||||||
|
|
||||||
|
|
||||||
|
def _walk_windows_impl(top, topdown, onerror, followlinks):
|
||||||
|
try:
|
||||||
|
names = listdir(top)
|
||||||
|
except Exception as err:
|
||||||
|
if onerror is not None:
|
||||||
|
onerror(err)
|
||||||
|
return
|
||||||
|
|
||||||
|
dirs, nondirs = [], []
|
||||||
|
for name in names:
|
||||||
|
if isdir(os.path.join(top, name)):
|
||||||
|
dirs.append(name)
|
||||||
|
else:
|
||||||
|
nondirs.append(name)
|
||||||
|
|
||||||
|
if topdown:
|
||||||
|
yield top, dirs, nondirs
|
||||||
|
for name in dirs:
|
||||||
|
new_path = os.path.join(top, name)
|
||||||
|
if followlinks or not islink(new_path):
|
||||||
|
for x in _walk_windows_impl(new_path, topdown, onerror, followlinks):
|
||||||
|
yield x
|
||||||
|
if not topdown:
|
||||||
|
yield top, dirs, nondirs
|
||||||
|
|
||||||
|
|
||||||
|
def listdir(path):
|
||||||
|
"""os.listdir(path) wrapper with support for long paths on Windows.
|
||||||
|
|
||||||
|
Availability: Windows, Unix.
|
||||||
|
"""
|
||||||
|
return os.listdir(_makelongpath(path))
|
||||||
|
|
||||||
|
|
||||||
|
def rmdir(path):
|
||||||
|
"""os.rmdir(path) wrapper with support for long paths on Windows.
|
||||||
|
|
||||||
|
Availability: Windows, Unix.
|
||||||
|
"""
|
||||||
|
os.rmdir(_makelongpath(path))
|
||||||
|
|
||||||
|
|
||||||
|
def isdir(path):
|
||||||
|
"""os.path.isdir(path) wrapper with support for long paths on Windows.
|
||||||
|
|
||||||
|
Availability: Windows, Unix.
|
||||||
|
"""
|
||||||
|
return os.path.isdir(_makelongpath(path))
|
||||||
|
|
||||||
|
|
||||||
|
def islink(path):
|
||||||
|
"""os.path.islink(path) wrapper with support for long paths on Windows.
|
||||||
|
|
||||||
|
Availability: Windows, Unix.
|
||||||
|
"""
|
||||||
|
if isWindows():
|
||||||
|
import platform_utils_win32
|
||||||
|
return platform_utils_win32.islink(_makelongpath(path))
|
||||||
|
else:
|
||||||
|
return os.path.islink(path)
|
||||||
|
|
||||||
|
|
||||||
|
def readlink(path):
|
||||||
|
"""Return a string representing the path to which the symbolic link
|
||||||
|
points. The result may be either an absolute or relative pathname;
|
||||||
|
if it is relative, it may be converted to an absolute pathname using
|
||||||
|
os.path.join(os.path.dirname(path), result).
|
||||||
|
|
||||||
|
Availability: Windows, Unix.
|
||||||
|
"""
|
||||||
|
if isWindows():
|
||||||
|
import platform_utils_win32
|
||||||
|
return platform_utils_win32.readlink(_makelongpath(path))
|
||||||
|
else:
|
||||||
|
return os.readlink(path)
|
||||||
|
|
||||||
|
|
||||||
|
def realpath(path):
|
||||||
|
"""Return the canonical path of the specified filename, eliminating
|
||||||
|
any symbolic links encountered in the path.
|
||||||
|
|
||||||
|
Availability: Windows, Unix.
|
||||||
|
"""
|
||||||
|
if isWindows():
|
||||||
|
current_path = os.path.abspath(path)
|
||||||
|
path_tail = []
|
||||||
|
for c in range(0, 100): # Avoid cycles
|
||||||
|
if islink(current_path):
|
||||||
|
target = readlink(current_path)
|
||||||
|
current_path = os.path.join(os.path.dirname(current_path), target)
|
||||||
|
else:
|
||||||
|
basename = os.path.basename(current_path)
|
||||||
|
if basename == '':
|
||||||
|
path_tail.append(current_path)
|
||||||
|
break
|
||||||
|
path_tail.append(basename)
|
||||||
|
current_path = os.path.dirname(current_path)
|
||||||
|
path_tail.reverse()
|
||||||
|
result = os.path.normpath(os.path.join(*path_tail))
|
||||||
|
return result
|
||||||
|
else:
|
||||||
|
return os.path.realpath(path)
|
229
platform_utils_win32.py
Normal file
229
platform_utils_win32.py
Normal file
@ -0,0 +1,229 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2016 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import errno
|
||||||
|
|
||||||
|
from pyversion import is_python3
|
||||||
|
from ctypes import WinDLL, get_last_error, FormatError, WinError, addressof
|
||||||
|
from ctypes import c_buffer
|
||||||
|
from ctypes.wintypes import BOOL, BOOLEAN, LPCWSTR, DWORD, HANDLE
|
||||||
|
from ctypes.wintypes import WCHAR, USHORT, LPVOID, ULONG
|
||||||
|
if is_python3():
|
||||||
|
from ctypes import c_ubyte, Structure, Union, byref
|
||||||
|
from ctypes.wintypes import LPDWORD
|
||||||
|
else:
|
||||||
|
# For legacy Python2 different imports are needed.
|
||||||
|
from ctypes.wintypes import POINTER, c_ubyte, Structure, Union, byref
|
||||||
|
LPDWORD = POINTER(DWORD)
|
||||||
|
|
||||||
|
kernel32 = WinDLL('kernel32', use_last_error=True)
|
||||||
|
|
||||||
|
UCHAR = c_ubyte
|
||||||
|
|
||||||
|
# Win32 error codes
|
||||||
|
ERROR_SUCCESS = 0
|
||||||
|
ERROR_NOT_SUPPORTED = 50
|
||||||
|
ERROR_PRIVILEGE_NOT_HELD = 1314
|
||||||
|
|
||||||
|
# Win32 API entry points
|
||||||
|
CreateSymbolicLinkW = kernel32.CreateSymbolicLinkW
|
||||||
|
CreateSymbolicLinkW.restype = BOOLEAN
|
||||||
|
CreateSymbolicLinkW.argtypes = (LPCWSTR, # lpSymlinkFileName In
|
||||||
|
LPCWSTR, # lpTargetFileName In
|
||||||
|
DWORD) # dwFlags In
|
||||||
|
|
||||||
|
# Symbolic link creation flags
|
||||||
|
SYMBOLIC_LINK_FLAG_FILE = 0x00
|
||||||
|
SYMBOLIC_LINK_FLAG_DIRECTORY = 0x01
|
||||||
|
# symlink support for CreateSymbolicLink() starting with Windows 10 (1703, v10.0.14972)
|
||||||
|
SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE = 0x02
|
||||||
|
|
||||||
|
GetFileAttributesW = kernel32.GetFileAttributesW
|
||||||
|
GetFileAttributesW.restype = DWORD
|
||||||
|
GetFileAttributesW.argtypes = (LPCWSTR,) # lpFileName In
|
||||||
|
|
||||||
|
INVALID_FILE_ATTRIBUTES = 0xFFFFFFFF
|
||||||
|
FILE_ATTRIBUTE_REPARSE_POINT = 0x00400
|
||||||
|
|
||||||
|
CreateFileW = kernel32.CreateFileW
|
||||||
|
CreateFileW.restype = HANDLE
|
||||||
|
CreateFileW.argtypes = (LPCWSTR, # lpFileName In
|
||||||
|
DWORD, # dwDesiredAccess In
|
||||||
|
DWORD, # dwShareMode In
|
||||||
|
LPVOID, # lpSecurityAttributes In_opt
|
||||||
|
DWORD, # dwCreationDisposition In
|
||||||
|
DWORD, # dwFlagsAndAttributes In
|
||||||
|
HANDLE) # hTemplateFile In_opt
|
||||||
|
|
||||||
|
CloseHandle = kernel32.CloseHandle
|
||||||
|
CloseHandle.restype = BOOL
|
||||||
|
CloseHandle.argtypes = (HANDLE,) # hObject In
|
||||||
|
|
||||||
|
INVALID_HANDLE_VALUE = HANDLE(-1).value
|
||||||
|
OPEN_EXISTING = 3
|
||||||
|
FILE_FLAG_BACKUP_SEMANTICS = 0x02000000
|
||||||
|
FILE_FLAG_OPEN_REPARSE_POINT = 0x00200000
|
||||||
|
|
||||||
|
DeviceIoControl = kernel32.DeviceIoControl
|
||||||
|
DeviceIoControl.restype = BOOL
|
||||||
|
DeviceIoControl.argtypes = (HANDLE, # hDevice In
|
||||||
|
DWORD, # dwIoControlCode In
|
||||||
|
LPVOID, # lpInBuffer In_opt
|
||||||
|
DWORD, # nInBufferSize In
|
||||||
|
LPVOID, # lpOutBuffer Out_opt
|
||||||
|
DWORD, # nOutBufferSize In
|
||||||
|
LPDWORD, # lpBytesReturned Out_opt
|
||||||
|
LPVOID) # lpOverlapped Inout_opt
|
||||||
|
|
||||||
|
# Device I/O control flags and options
|
||||||
|
FSCTL_GET_REPARSE_POINT = 0x000900A8
|
||||||
|
IO_REPARSE_TAG_MOUNT_POINT = 0xA0000003
|
||||||
|
IO_REPARSE_TAG_SYMLINK = 0xA000000C
|
||||||
|
MAXIMUM_REPARSE_DATA_BUFFER_SIZE = 0x4000
|
||||||
|
|
||||||
|
|
||||||
|
class GENERIC_REPARSE_BUFFER(Structure):
|
||||||
|
_fields_ = (('DataBuffer', UCHAR * 1),)
|
||||||
|
|
||||||
|
|
||||||
|
class SYMBOLIC_LINK_REPARSE_BUFFER(Structure):
|
||||||
|
_fields_ = (('SubstituteNameOffset', USHORT),
|
||||||
|
('SubstituteNameLength', USHORT),
|
||||||
|
('PrintNameOffset', USHORT),
|
||||||
|
('PrintNameLength', USHORT),
|
||||||
|
('Flags', ULONG),
|
||||||
|
('PathBuffer', WCHAR * 1))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def PrintName(self):
|
||||||
|
arrayt = WCHAR * (self.PrintNameLength // 2)
|
||||||
|
offset = type(self).PathBuffer.offset + self.PrintNameOffset
|
||||||
|
return arrayt.from_address(addressof(self) + offset).value
|
||||||
|
|
||||||
|
|
||||||
|
class MOUNT_POINT_REPARSE_BUFFER(Structure):
|
||||||
|
_fields_ = (('SubstituteNameOffset', USHORT),
|
||||||
|
('SubstituteNameLength', USHORT),
|
||||||
|
('PrintNameOffset', USHORT),
|
||||||
|
('PrintNameLength', USHORT),
|
||||||
|
('PathBuffer', WCHAR * 1))
|
||||||
|
|
||||||
|
@property
|
||||||
|
def PrintName(self):
|
||||||
|
arrayt = WCHAR * (self.PrintNameLength // 2)
|
||||||
|
offset = type(self).PathBuffer.offset + self.PrintNameOffset
|
||||||
|
return arrayt.from_address(addressof(self) + offset).value
|
||||||
|
|
||||||
|
|
||||||
|
class REPARSE_DATA_BUFFER(Structure):
|
||||||
|
class REPARSE_BUFFER(Union):
|
||||||
|
_fields_ = (('SymbolicLinkReparseBuffer', SYMBOLIC_LINK_REPARSE_BUFFER),
|
||||||
|
('MountPointReparseBuffer', MOUNT_POINT_REPARSE_BUFFER),
|
||||||
|
('GenericReparseBuffer', GENERIC_REPARSE_BUFFER))
|
||||||
|
_fields_ = (('ReparseTag', ULONG),
|
||||||
|
('ReparseDataLength', USHORT),
|
||||||
|
('Reserved', USHORT),
|
||||||
|
('ReparseBuffer', REPARSE_BUFFER))
|
||||||
|
_anonymous_ = ('ReparseBuffer',)
|
||||||
|
|
||||||
|
|
||||||
|
def create_filesymlink(source, link_name):
|
||||||
|
"""Creates a Windows file symbolic link source pointing to link_name."""
|
||||||
|
_create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_FILE)
|
||||||
|
|
||||||
|
|
||||||
|
def create_dirsymlink(source, link_name):
|
||||||
|
"""Creates a Windows directory symbolic link source pointing to link_name.
|
||||||
|
"""
|
||||||
|
_create_symlink(source, link_name, SYMBOLIC_LINK_FLAG_DIRECTORY)
|
||||||
|
|
||||||
|
|
||||||
|
def _create_symlink(source, link_name, dwFlags):
|
||||||
|
if not CreateSymbolicLinkW(link_name, source, dwFlags | SYMBOLIC_LINK_FLAG_ALLOW_UNPRIVILEGED_CREATE):
|
||||||
|
# See https://github.com/golang/go/pull/24307/files#diff-b87bc12e4da2497308f9ef746086e4f0
|
||||||
|
# "the unprivileged create flag is unsupported below Windows 10 (1703, v10.0.14972).
|
||||||
|
# retry without it."
|
||||||
|
if not CreateSymbolicLinkW(link_name, source, dwFlags):
|
||||||
|
code = get_last_error()
|
||||||
|
error_desc = FormatError(code).strip()
|
||||||
|
if code == ERROR_PRIVILEGE_NOT_HELD:
|
||||||
|
raise OSError(errno.EPERM, error_desc, link_name)
|
||||||
|
_raise_winerror(
|
||||||
|
code,
|
||||||
|
'Error creating symbolic link \"%s\"'.format(link_name))
|
||||||
|
|
||||||
|
|
||||||
|
def islink(path):
|
||||||
|
result = GetFileAttributesW(path)
|
||||||
|
if result == INVALID_FILE_ATTRIBUTES:
|
||||||
|
return False
|
||||||
|
return bool(result & FILE_ATTRIBUTE_REPARSE_POINT)
|
||||||
|
|
||||||
|
|
||||||
|
def readlink(path):
|
||||||
|
reparse_point_handle = CreateFileW(path,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
None,
|
||||||
|
OPEN_EXISTING,
|
||||||
|
FILE_FLAG_OPEN_REPARSE_POINT |
|
||||||
|
FILE_FLAG_BACKUP_SEMANTICS,
|
||||||
|
None)
|
||||||
|
if reparse_point_handle == INVALID_HANDLE_VALUE:
|
||||||
|
_raise_winerror(
|
||||||
|
get_last_error(),
|
||||||
|
'Error opening symbolic link \"%s\"'.format(path))
|
||||||
|
target_buffer = c_buffer(MAXIMUM_REPARSE_DATA_BUFFER_SIZE)
|
||||||
|
n_bytes_returned = DWORD()
|
||||||
|
io_result = DeviceIoControl(reparse_point_handle,
|
||||||
|
FSCTL_GET_REPARSE_POINT,
|
||||||
|
None,
|
||||||
|
0,
|
||||||
|
target_buffer,
|
||||||
|
len(target_buffer),
|
||||||
|
byref(n_bytes_returned),
|
||||||
|
None)
|
||||||
|
CloseHandle(reparse_point_handle)
|
||||||
|
if not io_result:
|
||||||
|
_raise_winerror(
|
||||||
|
get_last_error(),
|
||||||
|
'Error reading symbolic link \"%s\"'.format(path))
|
||||||
|
rdb = REPARSE_DATA_BUFFER.from_buffer(target_buffer)
|
||||||
|
if rdb.ReparseTag == IO_REPARSE_TAG_SYMLINK:
|
||||||
|
return _preserve_encoding(path, rdb.SymbolicLinkReparseBuffer.PrintName)
|
||||||
|
elif rdb.ReparseTag == IO_REPARSE_TAG_MOUNT_POINT:
|
||||||
|
return _preserve_encoding(path, rdb.MountPointReparseBuffer.PrintName)
|
||||||
|
# Unsupported reparse point type
|
||||||
|
_raise_winerror(
|
||||||
|
ERROR_NOT_SUPPORTED,
|
||||||
|
'Error reading symbolic link \"%s\"'.format(path))
|
||||||
|
|
||||||
|
|
||||||
|
def _preserve_encoding(source, target):
|
||||||
|
"""Ensures target is the same string type (i.e. unicode or str) as source."""
|
||||||
|
|
||||||
|
if is_python3():
|
||||||
|
return target
|
||||||
|
|
||||||
|
if isinstance(source, unicode):
|
||||||
|
return unicode(target)
|
||||||
|
return str(target)
|
||||||
|
|
||||||
|
|
||||||
|
def _raise_winerror(code, error_desc):
|
||||||
|
win_error_desc = FormatError(code).strip()
|
||||||
|
error_desc = "%s: %s".format(error_desc, win_error_desc)
|
||||||
|
raise WinError(code, error_desc)
|
93
progress.py
Normal file
93
progress.py
Normal file
@ -0,0 +1,93 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
from time import time
|
||||||
|
from repo_trace import IsTrace
|
||||||
|
|
||||||
|
_NOT_TTY = not os.isatty(2)
|
||||||
|
|
||||||
|
# This will erase all content in the current line (wherever the cursor is).
|
||||||
|
# It does not move the cursor, so this is usually followed by \r to move to
|
||||||
|
# column 0.
|
||||||
|
CSI_ERASE_LINE = '\x1b[2K'
|
||||||
|
|
||||||
|
class Progress(object):
  """Render a single-line progress meter on stderr.

  Output is suppressed entirely when stderr is not a TTY or when repo
  tracing is active, and nothing is drawn until at least half a second
  of work has elapsed.
  """

  def __init__(self, title, total=0, units='', print_newline=False,
               always_print_percentage=False):
    self._title = title
    self._total = total
    self._done = 0
    self._lastp = -1            # last percentage value printed
    self._start = time()
    self._show = False          # flips True after the 0.5s warm-up
    self._units = units
    self._print_newline = print_newline
    self._always_print_percentage = always_print_percentage

  def update(self, inc=1, msg=''):
    """Advance the meter by |inc| units and redraw when appropriate."""
    self._done += inc

    # Stay silent when output is redirected or tracing would interleave.
    if _NOT_TTY or IsTrace():
      return

    # Suppress the display during the first half second of work.
    if not self._show:
      if time() - self._start < 0.5:
        return
      self._show = True

    if self._total <= 0:
      # Unknown total: just show a running count.
      sys.stderr.write('%s\r%s: %d,' % (
        CSI_ERASE_LINE,
        self._title,
        self._done))
      sys.stderr.flush()
      return

    p = (100 * self._done) / self._total
    if self._lastp == p and not self._always_print_percentage:
      return
    self._lastp = p
    sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s)%s%s%s' % (
      CSI_ERASE_LINE,
      self._title,
      p,
      self._done, self._units,
      self._total, self._units,
      ' ' if msg else '', msg,
      "\n" if self._print_newline else ""))
    sys.stderr.flush()

  def end(self):
    """Print the final ', done.' line, if the meter was ever shown."""
    if _NOT_TTY or IsTrace() or not self._show:
      return

    if self._total <= 0:
      sys.stderr.write('%s\r%s: %d, done.\n' % (
        CSI_ERASE_LINE,
        self._title,
        self._done))
    else:
      p = (100 * self._done) / self._total
      sys.stderr.write('%s\r%s: %3d%% (%d%s/%d%s), done.\n' % (
        CSI_ERASE_LINE,
        self._title,
        p,
        self._done, self._units,
        self._total, self._units))
    sys.stderr.flush()
|
3293
project.py
3293
project.py
File diff suppressed because it is too large
Load Diff
@ -1,11 +1,12 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright 2008 Google Inc.
|
# Copyright (C) 2013 The Android Open Source Project
|
||||||
#
|
#
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
# you may not use this file except in compliance with the License.
|
# you may not use this file except in compliance with the License.
|
||||||
# You may obtain a copy of the License at
|
# You may obtain a copy of the License at
|
||||||
#
|
#
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
#
|
#
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
@ -13,17 +14,7 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
GERRIT_SRC=../gerrit
|
import sys
|
||||||
GERRIT_MODULES=codereview froofle
|
|
||||||
|
|
||||||
all:
|
def is_python3():
  """Return True when running under a Python 3.x interpreter."""
  return sys.version_info.major == 3
|
||||||
clean:
|
|
||||||
find . -name \*.pyc -type f | xargs rm -f
|
|
||||||
|
|
||||||
update-pyclient:
|
|
||||||
$(MAKE) -C $(GERRIT_SRC) release-pyclient
|
|
||||||
rm -rf $(GERRIT_MODULES)
|
|
||||||
(cd $(GERRIT_SRC)/release/pyclient && \
|
|
||||||
find . -type f \
|
|
||||||
| cpio -pd $(abspath .))
|
|
40
repo_trace.py
Normal file
40
repo_trace.py
Normal file
@ -0,0 +1,40 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Logic for tracing repo interactions.
|
||||||
|
|
||||||
|
Activated via `repo --trace ...` or `REPO_TRACE=1 repo ...`.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import sys
|
||||||
|
import os
|
||||||
|
|
||||||
|
# Env var to implicitly turn on tracing.
REPO_TRACE = 'REPO_TRACE'

# Tracing starts enabled when the environment explicitly requests it.
_TRACE = os.environ.get(REPO_TRACE) == '1'


def IsTrace():
  """Return whether tracing is currently enabled."""
  return _TRACE


def SetTrace():
  """Enable tracing for the remainder of this process."""
  global _TRACE
  _TRACE = True


def Trace(fmt, *args):
  """Write |fmt| %% |args| to stderr, but only while tracing is on."""
  if not IsTrace():
    return
  print(fmt % args, file=sys.stderr)
|
53
run_tests
Executable file
53
run_tests
Executable file
@ -0,0 +1,53 @@
|
|||||||
|
#!/usr/bin/env python
|
||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
# Copyright 2019 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Wrapper to run pytest with the right settings."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import errno
|
||||||
|
import os
|
||||||
|
import subprocess
|
||||||
|
import sys
|
||||||
|
|
||||||
|
|
||||||
|
def run_pytest(cmd, argv):
  """Run the unittests via |cmd|, returning its exit status.

  Returns 127 (the shell's "command not found" convention) when |cmd|
  does not exist, after printing installation advice to stderr.
  """
  try:
    return subprocess.call([cmd] + argv)
  except OSError as e:
    # Anything other than a missing binary is unexpected; reraise it.
    if e.errno != errno.ENOENT:
      raise
    print('%s: unable to run `%s`: %s' % (__file__, cmd, e), file=sys.stderr)
    print('%s: Try installing pytest: sudo apt-get install python-pytest' %
          (__file__,), file=sys.stderr)
    return 127
|
||||||
|
|
||||||
|
|
||||||
|
def main(argv):
  """The main entry: extend PYTHONPATH, then delegate to pytest."""
  # Add the repo tree to PYTHONPATH as the tests expect to be able to
  # import modules directly.
  repo_root = os.path.dirname(os.path.realpath(__file__))
  existing = os.environ.get('PYTHONPATH', '')
  os.environ['PYTHONPATH'] = '%s:%s' % (repo_root, existing)

  return run_pytest('pytest', argv)


if __name__ == '__main__':
  sys.exit(main(sys.argv[1:]))
|
63
setup.py
Executable file
63
setup.py
Executable file
@ -0,0 +1,63 @@
|
|||||||
|
#!/usr/bin/env python
# -*- coding:utf-8 -*-
# Copyright 2019 The Android Open Source Project
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

"""Python packaging for repo."""

from __future__ import print_function

import os
import setuptools


# Absolute path of the directory holding this setup.py.
TOPDIR = os.path.dirname(os.path.abspath(__file__))


# Rip out the first intro paragraph.
# README.md layout assumed: two header lines, then the intro paragraph
# terminated by the first blank line — TODO confirm against README.md.
with open(os.path.join(TOPDIR, 'README.md')) as fp:
  lines = fp.read().splitlines()[2:]
  end = lines.index('')
  long_description = ' '.join(lines[0:end])


# https://packaging.python.org/tutorials/packaging-projects/
setuptools.setup(
    name='repo',
    version='1.13.8',
    maintainer='Various',
    maintainer_email='repo-discuss@googlegroups.com',
    description='Repo helps manage many Git repositories',
    long_description=long_description,
    long_description_content_type='text/plain',
    url='https://gerrit.googlesource.com/git-repo/',
    project_urls={
        'Bug Tracker': 'https://bugs.chromium.org/p/gerrit/issues/list?q=component:repo',
    },
    # https://pypi.org/classifiers/
    classifiers=[
        'Development Status :: 6 - Mature',
        'Environment :: Console',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: Microsoft :: Windows :: Windows 10',
        'Operating System :: POSIX :: Linux',
        'Topic :: Software Development :: Version Control :: Git',
    ],
    # We support Python 2.7 and Python 3.6+.
    # The expression below excludes 3.0.* through 3.5.* explicitly.
    python_requires='>=2.7, ' + ', '.join('!=3.%i.*' % x for x in range(0, 6)),
    packages=['subcmds'],
)
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -15,7 +16,7 @@
|
|||||||
|
|
||||||
import os
|
import os
|
||||||
|
|
||||||
all = {}
|
all_commands = {}
|
||||||
|
|
||||||
my_dir = os.path.dirname(__file__)
|
my_dir = os.path.dirname(__file__)
|
||||||
for py in os.listdir(my_dir):
|
for py in os.listdir(my_dir):
|
||||||
@ -38,12 +39,12 @@ for py in os.listdir(my_dir):
|
|||||||
try:
|
try:
|
||||||
cmd = getattr(mod, clsn)()
|
cmd = getattr(mod, clsn)()
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
raise SyntaxError, '%s/%s does not define class %s' % (
|
raise SyntaxError('%s/%s does not define class %s' % (
|
||||||
__name__, py, clsn)
|
__name__, py, clsn))
|
||||||
|
|
||||||
name = name.replace('_', '-')
|
name = name.replace('_', '-')
|
||||||
cmd.NAME = name
|
cmd.NAME = name
|
||||||
all[name] = cmd
|
all_commands[name] = cmd
|
||||||
|
|
||||||
if 'help' in all:
|
if 'help' in all_commands:
|
||||||
all['help'].commands = all
|
all_commands['help'].commands = all_commands
|
||||||
|
99
subcmds/abandon.py
Normal file
99
subcmds/abandon.py
Normal file
@ -0,0 +1,99 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import sys
|
||||||
|
from command import Command
|
||||||
|
from collections import defaultdict
|
||||||
|
from git_command import git
|
||||||
|
from progress import Progress
|
||||||
|
|
||||||
|
class Abandon(Command):
  """Delete a development branch (git branch -D) across projects."""
  common = True
  helpSummary = "Permanently abandon a development branch"
  helpUsage = """
%prog [--all | <branchname>] [<project>...]

This subcommand permanently abandons a development branch by
deleting it (and all its history) from your local repository.

It is equivalent to "git branch -D <branchname>".
"""

  def _Options(self, p):
    p.add_option('--all',
                 dest='all', action='store_true',
                 help='delete all branches in all projects')

  def ValidateOptions(self, opt, args):
    """Require either --all or a syntactically valid branch name."""
    if not opt.all and not args:
      self.Usage()

    if not opt.all:
      nb = args[0]
      if not git.check_ref_format('heads/%s' % nb):
        self.OptionParser.error("'%s' is not a valid branch name" % nb)
    else:
      # Placeholder label so args[0] exists for the progress title below.
      args.insert(0, "'All local branches'")

  def Execute(self, opt, args):
    nb = args[0]
    # Map of branch name -> projects where abandoning failed/succeeded.
    err = defaultdict(list)
    success = defaultdict(list)
    all_projects = self.GetProjects(args[1:])

    pm = Progress('Abandon %s' % nb, len(all_projects))
    for project in all_projects:
      pm.update()

      if opt.all:
        branches = list(project.GetBranches().keys())
      else:
        branches = [nb]

      for name in branches:
        status = project.AbandonBranch(name)
        # None means the project doesn't have the branch; not an error.
        if status is not None:
          if status:
            success[name].append(project)
          else:
            err[name].append(project)
    pm.end()

    # Bug fix: the width was previously derived from the leaked loop
    # variable |branches| (only the *last* project's branch list), which
    # raised NameError when there were no projects and ignored branches
    # seen in earlier projects.  Size the column from every branch name
    # actually reported instead.
    width = 25
    for name in list(err) + list(success):
      if width < len(name):
        width = len(name)

    if err:
      for br in err.keys():
        err_msg = "error: cannot abandon %s" % br
        print(err_msg, file=sys.stderr)
        for proj in err[br]:
          print(' ' * len(err_msg) + " | %s" % proj.relpath, file=sys.stderr)
      sys.exit(1)
    elif not success:
      print('error: no project has local branch(es) : %s' % nb,
            file=sys.stderr)
      sys.exit(1)
    else:
      print('Abandoned branches:', file=sys.stderr)
      for br in success.keys():
        if len(all_projects) > 1 and len(all_projects) == len(success[br]):
          result = "all project"
        else:
          result = "%s" % (
            ('\n' + ' ' * width + '| ').join(p.relpath for p in success[br]))
        print("%s%s| %s\n" % (br, ' ' * (width - len(br)), result),
              file=sys.stderr)
|
180
subcmds/branches.py
Normal file
180
subcmds/branches.py
Normal file
@ -0,0 +1,180 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import sys
|
||||||
|
from color import Coloring
|
||||||
|
from command import Command
|
||||||
|
|
||||||
|
class BranchColoring(Coloring):
  """Coloring helper for `repo branches` output.

  Exposes one pre-built printer per display category; colors come from
  the user's 'branch' color configuration section.
  """

  def __init__(self, config):
    Coloring.__init__(self, config, 'branch')
    # Currently checked-out branches are highlighted in green.
    self.current = self.printer('current', fg='green')
    # Ordinary local branches use the default terminal color.
    self.local = self.printer('local')
    # Branches absent from a project are flagged in red.
    self.notinproject = self.printer('notinproject', fg='red')
|
||||||
|
|
||||||
|
class BranchInfo(object):
  """Aggregate view of one branch name across many projects."""

  def __init__(self, name):
    self.name = name
    self.current = 0          # projects with this branch checked out
    self.published = 0        # projects where the branch was uploaded
    self.published_equal = 0  # projects whose local rev == published rev
    self.projects = []        # per-project branch objects seen so far

  def add(self, b):
    """Fold one project's branch object |b| into the running totals."""
    self.current += 1 if b.current else 0
    self.published += 1 if b.published else 0
    self.published_equal += 1 if b.revision == b.published else 0
    self.projects.append(b)

  @property
  def IsCurrent(self):
    """True when at least one project has this branch checked out."""
    return bool(self.current)

  @property
  def IsSplitCurrent(self):
    """True when the branch is checked out in some, but not all, projects."""
    return 0 < self.current < len(self.projects)

  @property
  def IsPublished(self):
    """True when at least one project has uploaded this branch."""
    return bool(self.published)

  @property
  def IsPublishedEqual(self):
    """True when every project's local revision matches the published one."""
    return len(self.projects) == self.published_equal
|
||||||
|
|
||||||
|
|
||||||
|
class Branches(Command):
  """Summarize topic branches across the projects in the client."""
  common = True
  helpSummary = "View current topic branches"
  helpUsage = """
%prog [<project>...]

Summarizes the currently available topic branches.

# Branch Display

The branch display output by this command is organized into four
columns of information; for example:

 *P nocolor                   | in repo
    repo2                     |

The first column contains a * if the branch is the currently
checked out branch in any of the specified projects, or a blank
if no project has the branch checked out.

The second column contains either blank, p or P, depending upon
the upload status of the branch.

 (blank): branch not yet published by repo upload
       P: all commits were published by repo upload
       p: only some commits were published by repo upload

The third column contains the branch name.

The fourth column (after the | separator) lists the projects that
the branch appears in, or does not appear in.  If no project list
is shown, then the branch appears in all projects.

"""

  def Execute(self, opt, args):
    projects = self.GetProjects(args)
    out = BranchColoring(self.manifest.manifestProject.config)
    # branch name -> BranchInfo aggregated over all projects.
    all_branches = {}
    project_cnt = len(projects)

    # Collect every branch from every project into BranchInfo buckets.
    for project in projects:
      for name, b in project.GetBranches().items():
        b.project = project
        if name not in all_branches:
          all_branches[name] = BranchInfo(name)
        all_branches[name].add(b)

    names = list(sorted(all_branches))

    if not names:
      print('   (no branches)', file=sys.stderr)
      return

    # Third display column is padded to the longest branch name (min 25).
    width = 25
    for name in names:
      if width < len(name):
        width = len(name)

    for name in names:
      i = all_branches[name]
      in_cnt = len(i.projects)

      # First column: '*' if checked out anywhere; header color follows.
      if i.IsCurrent:
        current = '*'
        hdr = out.current
      else:
        current = ' '
        hdr = out.local

      # Second column: P = fully published, p = partially, blank = not.
      if i.IsPublishedEqual:
        published = 'P'
      elif i.IsPublished:
        published = 'p'
      else:
        published = ' '

      hdr('%c%c %-*s' % (current, published, width, name))
      out.write(' |')

      if in_cnt < project_cnt:
        # Branch exists in only some projects: list whichever side
        # (present / absent) is shorter, or split by checked-out state.
        fmt = out.write
        paths = []
        non_cur_paths = []
        if i.IsSplitCurrent or (in_cnt < project_cnt - in_cnt):
          in_type = 'in'
          for b in i.projects:
            if not i.IsSplitCurrent or b.current:
              paths.append(b.project.relpath)
            else:
              non_cur_paths.append(b.project.relpath)
        else:
          fmt = out.notinproject
          in_type = 'not in'
          have = set()
          for b in i.projects:
            have.add(b.project)
          for p in projects:
            if not p in have:
              paths.append(p.relpath)

        s = ' %s %s' % (in_type, ', '.join(paths))
        # Short lists fit on the same line; longer ones get one project
        # per line beneath the branch entry.
        if not i.IsSplitCurrent and (width + 7 + len(s) < 80):
          fmt = out.current if i.IsCurrent else fmt
          fmt(s)
        else:
          fmt(' %s:' % in_type)
          fmt = out.current if i.IsCurrent else out.write
          for p in paths:
            out.nl()
            fmt(width*' ' + ' %s' % p)
          fmt = out.write
          for p in non_cur_paths:
            out.nl()
            fmt(width*' ' + ' %s' % p)
      else:
        out.write(' in all projects')
      out.nl()
|
66
subcmds/checkout.py
Normal file
66
subcmds/checkout.py
Normal file
@ -0,0 +1,66 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import sys
|
||||||
|
from command import Command
|
||||||
|
from progress import Progress
|
||||||
|
|
||||||
|
class Checkout(Command):
  """Check out an existing topic branch across projects."""
  common = True
  helpSummary = "Checkout a branch for development"
  helpUsage = """
%prog <branchname> [<project>...]
"""
  helpDescription = """
The '%prog' command checks out an existing branch that was previously
created by 'repo start'.

The command is equivalent to:

  repo forall [<project>...] -c git checkout <branchname>
"""

  def ValidateOptions(self, opt, args):
    if not args:
      self.Usage()

  def Execute(self, opt, args):
    branch = args[0]
    failed = []
    succeeded = []
    projects = self.GetProjects(args[1:])

    pm = Progress('Checkout %s' % branch, len(projects))
    for project in projects:
      pm.update()

      status = project.CheckoutBranch(branch)
      if status is None:
        # Project has no such branch; neither a success nor a failure.
        continue
      (succeeded if status else failed).append(project)
    pm.end()

    if failed:
      for project in failed:
        print("error: %s/: cannot checkout %s" % (project.relpath, branch),
              file=sys.stderr)
      sys.exit(1)
    if not succeeded:
      print('error: no project has branch %s' % branch, file=sys.stderr)
      sys.exit(1)
|
117
subcmds/cherry_pick.py
Normal file
117
subcmds/cherry_pick.py
Normal file
@ -0,0 +1,117 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2010 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from command import Command
|
||||||
|
from git_command import GitCommand
|
||||||
|
|
||||||
|
# Matches a Gerrit Change-Id trailer line within a commit message.
CHANGE_ID_RE = re.compile(r'^\s*Change-Id: I([0-9a-f]{40})\s*$')


class CherryPick(Command):
  """Cherry-pick a commit, rewriting its Change-Id trailer."""
  common = True
  helpSummary = "Cherry-pick a change."
  helpUsage = """
%prog <sha1>
"""
  helpDescription = """
'%prog' cherry-picks a change from one branch to another.
The change id will be updated, and a reference to the old
change id will be added.
"""

  def _Options(self, p):
    pass

  def ValidateOptions(self, opt, args):
    if len(args) != 1:
      self.Usage()

  def Execute(self, opt, args):
    reference = args[0]

    # Resolve the user-supplied reference to a full commit sha1.
    p = GitCommand(None,
                   ['rev-parse', '--verify', reference],
                   capture_stdout=True,
                   capture_stderr=True)
    if p.Wait() != 0:
      print(p.stderr, file=sys.stderr)
      sys.exit(1)
    sha1 = p.stdout.strip()

    # Grab the original commit message before we rewrite anything.
    p = GitCommand(None, ['cat-file', 'commit', sha1], capture_stdout=True)
    if p.Wait() != 0:
      print("error: Failed to retrieve old commit message", file=sys.stderr)
      sys.exit(1)
    old_msg = self._StripHeader(p.stdout)

    p = GitCommand(None,
                   ['cherry-pick', sha1],
                   capture_stdout=True,
                   capture_stderr=True)
    status = p.Wait()

    print(p.stdout, file=sys.stdout)
    print(p.stderr, file=sys.stderr)

    if status != 0:
      # Cherry-pick did not apply cleanly; tell the user how to finish
      # the commit message edit by hand.
      print('NOTE: When committing (please see above) and editing the commit '
            'message, please remove the old Change-Id-line and add:')
      print(self._GetReference(sha1), file=sys.stderr)
      print(file=sys.stderr)
      return

    # The cherry-pick was applied correctly. We just need to edit the
    # commit message.
    new_msg = self._Reformat(old_msg, sha1)

    p = GitCommand(None, ['commit', '--amend', '-F', '-'],
                   provide_stdin=True,
                   capture_stdout=True,
                   capture_stderr=True)
    p.stdin.write(new_msg)
    p.stdin.close()
    if p.Wait() != 0:
      print("error: Failed to update commit message", file=sys.stderr)
      sys.exit(1)

  def _IsChangeId(self, line):
    """Return a match object when |line| is a Change-Id trailer."""
    return CHANGE_ID_RE.match(line)

  def _GetReference(self, sha1):
    """Return the 'cherry picked from' trailer for |sha1|."""
    return "(cherry picked from commit %s)" % sha1

  def _StripHeader(self, commit_msg):
    """Drop the commit object header, returning only the message body."""
    lines = commit_msg.splitlines()
    # The body starts after the first blank line of the raw commit object.
    return "\n".join(lines[lines.index("") + 1:])

  def _Reformat(self, old_msg, sha1):
    """Strip old Change-Id lines and append the cherry-pick reference."""
    kept = [line for line in old_msg.splitlines()
            if not self._IsChangeId(line)]

    # Add a blank line between the message and the change id/reference.
    if kept and kept[-1].strip() != "":
      kept.append("")

    kept.append(self._GetReference(sha1))
    return "\n".join(kept)
|
@ -1,169 +0,0 @@
|
|||||||
#
|
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
|
||||||
#
|
|
||||||
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
||||||
# you may not use this file except in compliance with the License.
|
|
||||||
# You may obtain a copy of the License at
|
|
||||||
#
|
|
||||||
# http://www.apache.org/licenses/LICENSE-2.0
|
|
||||||
#
|
|
||||||
# Unless required by applicable law or agreed to in writing, software
|
|
||||||
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
||||||
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
||||||
# See the License for the specific language governing permissions and
|
|
||||||
# limitations under the License.
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
from command import Command
|
|
||||||
from error import GitError, NoSuchProjectError
|
|
||||||
from git_config import IsId
|
|
||||||
from import_tar import ImportTar
|
|
||||||
from import_zip import ImportZip
|
|
||||||
from project import Project
|
|
||||||
from remote import Remote
|
|
||||||
|
|
||||||
def _ToCommit(project, rev):
|
|
||||||
return project.bare_git.rev_parse('--verify', '%s^0' % rev)
|
|
||||||
|
|
||||||
def _Missing(project, rev):
|
|
||||||
return project._revlist('--objects', rev, '--not', '--all')
|
|
||||||
|
|
||||||
|
|
||||||
class ComputeSnapshotCheck(Command):
|
|
||||||
common = False
|
|
||||||
helpSummary = "Compute the check value for a new snapshot"
|
|
||||||
helpUsage = """
|
|
||||||
%prog -p NAME -v VERSION -s FILE [options]
|
|
||||||
"""
|
|
||||||
helpDescription = """
|
|
||||||
%prog computes and then displays the proper check value for a
|
|
||||||
snapshot, so it can be pasted into the manifest file for a project.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _Options(self, p):
|
|
||||||
g = p.add_option_group('Snapshot description options')
|
|
||||||
g.add_option('-p', '--project',
|
|
||||||
dest='project', metavar='NAME',
|
|
||||||
help='destination project name')
|
|
||||||
g.add_option('-v', '--version',
|
|
||||||
dest='version', metavar='VERSION',
|
|
||||||
help='upstream version/revision identifier')
|
|
||||||
g.add_option('-s', '--snapshot',
|
|
||||||
dest='snapshot', metavar='PATH',
|
|
||||||
help='local tarball path')
|
|
||||||
g.add_option('--new-project',
|
|
||||||
dest='new_project', action='store_true',
|
|
||||||
help='destinition is a new project')
|
|
||||||
g.add_option('--keep',
|
|
||||||
dest='keep_git', action='store_true',
|
|
||||||
help='keep the temporary git repository')
|
|
||||||
|
|
||||||
g = p.add_option_group('Base revision grafting options')
|
|
||||||
g.add_option('--prior',
|
|
||||||
dest='prior', metavar='COMMIT',
|
|
||||||
help='prior revision checksum')
|
|
||||||
|
|
||||||
g = p.add_option_group('Path mangling options')
|
|
||||||
g.add_option('--strip-prefix',
|
|
||||||
dest='strip_prefix', metavar='PREFIX',
|
|
||||||
help='remove prefix from all paths on import')
|
|
||||||
g.add_option('--insert-prefix',
|
|
||||||
dest='insert_prefix', metavar='PREFIX',
|
|
||||||
help='insert prefix before all paths on import')
|
|
||||||
|
|
||||||
|
|
||||||
def _Compute(self, opt):
|
|
||||||
try:
|
|
||||||
real_project = self.GetProjects([opt.project])[0]
|
|
||||||
except NoSuchProjectError:
|
|
||||||
if opt.new_project:
|
|
||||||
print >>sys.stderr, \
|
|
||||||
"warning: project '%s' does not exist" % opt.project
|
|
||||||
else:
|
|
||||||
raise NoSuchProjectError(opt.project)
|
|
||||||
|
|
||||||
self._tmpdir = tempfile.mkdtemp()
|
|
||||||
project = Project(manifest = self.manifest,
|
|
||||||
name = opt.project,
|
|
||||||
remote = Remote('origin'),
|
|
||||||
gitdir = os.path.join(self._tmpdir, '.git'),
|
|
||||||
worktree = self._tmpdir,
|
|
||||||
relpath = opt.project,
|
|
||||||
revision = 'refs/heads/master')
|
|
||||||
project._InitGitDir()
|
|
||||||
|
|
||||||
url = 'file://%s' % os.path.abspath(opt.snapshot)
|
|
||||||
|
|
||||||
imp = None
|
|
||||||
for cls in [ImportTar, ImportZip]:
|
|
||||||
if cls.CanAccept(url):
|
|
||||||
imp = cls()
|
|
||||||
break
|
|
||||||
if not imp:
|
|
||||||
print >>sys.stderr, 'error: %s unsupported' % opt.snapshot
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
imp.SetProject(project)
|
|
||||||
imp.SetVersion(opt.version)
|
|
||||||
imp.AddUrl(url)
|
|
||||||
|
|
||||||
if opt.prior:
|
|
||||||
if opt.new_project:
|
|
||||||
if not IsId(opt.prior):
|
|
||||||
print >>sys.stderr, 'error: --prior=%s not valid' % opt.prior
|
|
||||||
sys.exit(1)
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
opt.prior = _ToCommit(real_project, opt.prior)
|
|
||||||
missing = _Missing(real_project, opt.prior)
|
|
||||||
except GitError, e:
|
|
||||||
print >>sys.stderr,\
|
|
||||||
'error: --prior=%s not valid\n%s' \
|
|
||||||
% (opt.prior, e)
|
|
||||||
sys.exit(1)
|
|
||||||
if missing:
|
|
||||||
print >>sys.stderr,\
|
|
||||||
'error: --prior=%s is valid, but is not reachable' \
|
|
||||||
% opt.prior
|
|
||||||
sys.exit(1)
|
|
||||||
imp.SetParent(opt.prior)
|
|
||||||
|
|
||||||
src = opt.strip_prefix
|
|
||||||
dst = opt.insert_prefix
|
|
||||||
if src or dst:
|
|
||||||
if src is None:
|
|
||||||
src = ''
|
|
||||||
if dst is None:
|
|
||||||
dst = ''
|
|
||||||
imp.RemapPath(src, dst)
|
|
||||||
commitId = imp.Import()
|
|
||||||
|
|
||||||
print >>sys.stderr,"%s\t%s" % (commitId, imp.version)
|
|
||||||
return project
|
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
|
||||||
if args \
|
|
||||||
or not opt.project \
|
|
||||||
or not opt.version \
|
|
||||||
or not opt.snapshot:
|
|
||||||
self.Usage()
|
|
||||||
|
|
||||||
success = False
|
|
||||||
project = None
|
|
||||||
try:
|
|
||||||
self._tmpdir = None
|
|
||||||
project = self._Compute(opt)
|
|
||||||
finally:
|
|
||||||
if project and opt.keep_git:
|
|
||||||
print 'GIT_DIR = %s' % (project.gitdir)
|
|
||||||
elif self._tmpdir:
|
|
||||||
for root, dirs, files in os.walk(self._tmpdir, topdown=False):
|
|
||||||
for name in files:
|
|
||||||
os.remove(os.path.join(root, name))
|
|
||||||
for name in dirs:
|
|
||||||
os.rmdir(os.path.join(root, name))
|
|
||||||
os.rmdir(self._tmpdir)
|
|
||||||
|
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -20,8 +21,24 @@ class Diff(PagedCommand):
|
|||||||
helpSummary = "Show changes between commit and working tree"
|
helpSummary = "Show changes between commit and working tree"
|
||||||
helpUsage = """
|
helpUsage = """
|
||||||
%prog [<project>...]
|
%prog [<project>...]
|
||||||
|
|
||||||
|
The -u option causes '%prog' to generate diff output with file paths
|
||||||
|
relative to the repository root, so the output can be applied
|
||||||
|
to the Unix 'patch' command.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
def cmd(option, opt_str, value, parser):
|
||||||
|
setattr(parser.values, option.dest, list(parser.rargs))
|
||||||
|
while parser.rargs:
|
||||||
|
del parser.rargs[0]
|
||||||
|
p.add_option('-u', '--absolute',
|
||||||
|
dest='absolute', action='store_true',
|
||||||
|
help='Paths are relative to the repository root')
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
|
ret = 0
|
||||||
for project in self.GetProjects(args):
|
for project in self.GetProjects(args):
|
||||||
project.PrintWorkTreeDiff()
|
if not project.PrintWorkTreeDiff(opt.absolute):
|
||||||
|
ret = 1
|
||||||
|
return ret
|
||||||
|
206
subcmds/diffmanifests.py
Normal file
206
subcmds/diffmanifests.py
Normal file
@ -0,0 +1,206 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2014 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from color import Coloring
|
||||||
|
from command import PagedCommand
|
||||||
|
from manifest_xml import XmlManifest
|
||||||
|
|
||||||
|
class _Coloring(Coloring):
|
||||||
|
def __init__(self, config):
|
||||||
|
Coloring.__init__(self, config, "status")
|
||||||
|
|
||||||
|
class Diffmanifests(PagedCommand):
|
||||||
|
""" A command to see logs in projects represented by manifests
|
||||||
|
|
||||||
|
This is used to see deeper differences between manifests. Where a simple
|
||||||
|
diff would only show a diff of sha1s for example, this command will display
|
||||||
|
the logs of the project between both sha1s, allowing user to see diff at a
|
||||||
|
deeper level.
|
||||||
|
"""
|
||||||
|
|
||||||
|
common = True
|
||||||
|
helpSummary = "Manifest diff utility"
|
||||||
|
helpUsage = """%prog manifest1.xml [manifest2.xml] [options]"""
|
||||||
|
|
||||||
|
helpDescription = """
|
||||||
|
The %prog command shows differences between project revisions of manifest1 and
|
||||||
|
manifest2. if manifest2 is not specified, current manifest.xml will be used
|
||||||
|
instead. Both absolute and relative paths may be used for manifests. Relative
|
||||||
|
paths start from project's ".repo/manifests" folder.
|
||||||
|
|
||||||
|
The --raw option Displays the diff in a way that facilitates parsing, the
|
||||||
|
project pattern will be <status> <path> <revision from> [<revision to>] and the
|
||||||
|
commit pattern will be <status> <onelined log> with status values respectively :
|
||||||
|
|
||||||
|
A = Added project
|
||||||
|
R = Removed project
|
||||||
|
C = Changed project
|
||||||
|
U = Project with unreachable revision(s) (revision(s) not found)
|
||||||
|
|
||||||
|
for project, and
|
||||||
|
|
||||||
|
A = Added commit
|
||||||
|
R = Removed commit
|
||||||
|
|
||||||
|
for a commit.
|
||||||
|
|
||||||
|
Only changed projects may contain commits, and commit status always starts with
|
||||||
|
a space, and are part of last printed project.
|
||||||
|
Unreachable revisions may occur if project is not up to date or if repo has not
|
||||||
|
been initialized with all the groups, in which case some projects won't be
|
||||||
|
synced and their revisions won't be found.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('--raw',
|
||||||
|
dest='raw', action='store_true',
|
||||||
|
help='Display raw diff.')
|
||||||
|
p.add_option('--no-color',
|
||||||
|
dest='color', action='store_false', default=True,
|
||||||
|
help='does not display the diff in color.')
|
||||||
|
p.add_option('--pretty-format',
|
||||||
|
dest='pretty_format', action='store',
|
||||||
|
metavar='<FORMAT>',
|
||||||
|
help='print the log using a custom git pretty format string')
|
||||||
|
|
||||||
|
def _printRawDiff(self, diff):
|
||||||
|
for project in diff['added']:
|
||||||
|
self.printText("A %s %s" % (project.relpath, project.revisionExpr))
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
for project in diff['removed']:
|
||||||
|
self.printText("R %s %s" % (project.relpath, project.revisionExpr))
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
for project, otherProject in diff['changed']:
|
||||||
|
self.printText("C %s %s %s" % (project.relpath, project.revisionExpr,
|
||||||
|
otherProject.revisionExpr))
|
||||||
|
self.out.nl()
|
||||||
|
self._printLogs(project, otherProject, raw=True, color=False)
|
||||||
|
|
||||||
|
for project, otherProject in diff['unreachable']:
|
||||||
|
self.printText("U %s %s %s" % (project.relpath, project.revisionExpr,
|
||||||
|
otherProject.revisionExpr))
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
def _printDiff(self, diff, color=True, pretty_format=None):
|
||||||
|
if diff['added']:
|
||||||
|
self.out.nl()
|
||||||
|
self.printText('added projects : \n')
|
||||||
|
self.out.nl()
|
||||||
|
for project in diff['added']:
|
||||||
|
self.printProject('\t%s' % (project.relpath))
|
||||||
|
self.printText(' at revision ')
|
||||||
|
self.printRevision(project.revisionExpr)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
if diff['removed']:
|
||||||
|
self.out.nl()
|
||||||
|
self.printText('removed projects : \n')
|
||||||
|
self.out.nl()
|
||||||
|
for project in diff['removed']:
|
||||||
|
self.printProject('\t%s' % (project.relpath))
|
||||||
|
self.printText(' at revision ')
|
||||||
|
self.printRevision(project.revisionExpr)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
if diff['changed']:
|
||||||
|
self.out.nl()
|
||||||
|
self.printText('changed projects : \n')
|
||||||
|
self.out.nl()
|
||||||
|
for project, otherProject in diff['changed']:
|
||||||
|
self.printProject('\t%s' % (project.relpath))
|
||||||
|
self.printText(' changed from ')
|
||||||
|
self.printRevision(project.revisionExpr)
|
||||||
|
self.printText(' to ')
|
||||||
|
self.printRevision(otherProject.revisionExpr)
|
||||||
|
self.out.nl()
|
||||||
|
self._printLogs(project, otherProject, raw=False, color=color,
|
||||||
|
pretty_format=pretty_format)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
if diff['unreachable']:
|
||||||
|
self.out.nl()
|
||||||
|
self.printText('projects with unreachable revisions : \n')
|
||||||
|
self.out.nl()
|
||||||
|
for project, otherProject in diff['unreachable']:
|
||||||
|
self.printProject('\t%s ' % (project.relpath))
|
||||||
|
self.printRevision(project.revisionExpr)
|
||||||
|
self.printText(' or ')
|
||||||
|
self.printRevision(otherProject.revisionExpr)
|
||||||
|
self.printText(' not found')
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
def _printLogs(self, project, otherProject, raw=False, color=True,
|
||||||
|
pretty_format=None):
|
||||||
|
|
||||||
|
logs = project.getAddedAndRemovedLogs(otherProject,
|
||||||
|
oneline=(pretty_format is None),
|
||||||
|
color=color,
|
||||||
|
pretty_format=pretty_format)
|
||||||
|
if logs['removed']:
|
||||||
|
removedLogs = logs['removed'].split('\n')
|
||||||
|
for log in removedLogs:
|
||||||
|
if log.strip():
|
||||||
|
if raw:
|
||||||
|
self.printText(' R ' + log)
|
||||||
|
self.out.nl()
|
||||||
|
else:
|
||||||
|
self.printRemoved('\t\t[-] ')
|
||||||
|
self.printText(log)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
if logs['added']:
|
||||||
|
addedLogs = logs['added'].split('\n')
|
||||||
|
for log in addedLogs:
|
||||||
|
if log.strip():
|
||||||
|
if raw:
|
||||||
|
self.printText(' A ' + log)
|
||||||
|
self.out.nl()
|
||||||
|
else:
|
||||||
|
self.printAdded('\t\t[+] ')
|
||||||
|
self.printText(log)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
def ValidateOptions(self, opt, args):
|
||||||
|
if not args or len(args) > 2:
|
||||||
|
self.OptionParser.error('missing manifests to diff')
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
self.out = _Coloring(self.manifest.globalConfig)
|
||||||
|
self.printText = self.out.nofmt_printer('text')
|
||||||
|
if opt.color:
|
||||||
|
self.printProject = self.out.nofmt_printer('project', attr = 'bold')
|
||||||
|
self.printAdded = self.out.nofmt_printer('green', fg = 'green', attr = 'bold')
|
||||||
|
self.printRemoved = self.out.nofmt_printer('red', fg = 'red', attr = 'bold')
|
||||||
|
self.printRevision = self.out.nofmt_printer('revision', fg = 'yellow')
|
||||||
|
else:
|
||||||
|
self.printProject = self.printAdded = self.printRemoved = self.printRevision = self.printText
|
||||||
|
|
||||||
|
manifest1 = XmlManifest(self.manifest.repodir)
|
||||||
|
manifest1.Override(args[0], load_local_manifests=False)
|
||||||
|
if len(args) == 1:
|
||||||
|
manifest2 = self.manifest
|
||||||
|
else:
|
||||||
|
manifest2 = XmlManifest(self.manifest.repodir)
|
||||||
|
manifest2.Override(args[1], load_local_manifests=False)
|
||||||
|
|
||||||
|
diff = manifest1.projectsDiff(manifest2)
|
||||||
|
if opt.raw:
|
||||||
|
self._printRawDiff(diff)
|
||||||
|
else:
|
||||||
|
self._printDiff(diff, color=opt.color, pretty_format=opt.pretty_format)
|
114
subcmds/download.py
Normal file
114
subcmds/download.py
Normal file
@ -0,0 +1,114 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from command import Command
|
||||||
|
from error import GitError
|
||||||
|
|
||||||
|
CHANGE_RE = re.compile(r'^([1-9][0-9]*)(?:[/\.-]([1-9][0-9]*))?$')
|
||||||
|
|
||||||
|
class Download(Command):
|
||||||
|
common = True
|
||||||
|
helpSummary = "Download and checkout a change"
|
||||||
|
helpUsage = """
|
||||||
|
%prog {[project] change[/patchset]}...
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
The '%prog' command downloads a change from the review system and
|
||||||
|
makes it available in your project's local working directory.
|
||||||
|
If no project is specified try to use current directory as a project.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('-c', '--cherry-pick',
|
||||||
|
dest='cherrypick', action='store_true',
|
||||||
|
help="cherry-pick instead of checkout")
|
||||||
|
p.add_option('-r', '--revert',
|
||||||
|
dest='revert', action='store_true',
|
||||||
|
help="revert instead of checkout")
|
||||||
|
p.add_option('-f', '--ff-only',
|
||||||
|
dest='ffonly', action='store_true',
|
||||||
|
help="force fast-forward merge")
|
||||||
|
|
||||||
|
def _ParseChangeIds(self, args):
|
||||||
|
if not args:
|
||||||
|
self.Usage()
|
||||||
|
|
||||||
|
to_get = []
|
||||||
|
project = None
|
||||||
|
|
||||||
|
for a in args:
|
||||||
|
m = CHANGE_RE.match(a)
|
||||||
|
if m:
|
||||||
|
if not project:
|
||||||
|
project = self.GetProjects(".")[0]
|
||||||
|
chg_id = int(m.group(1))
|
||||||
|
if m.group(2):
|
||||||
|
ps_id = int(m.group(2))
|
||||||
|
else:
|
||||||
|
ps_id = 1
|
||||||
|
refs = 'refs/changes/%2.2d/%d/' % (chg_id % 100, chg_id)
|
||||||
|
output = project._LsRemote(refs + '*')
|
||||||
|
if output:
|
||||||
|
regex = refs + r'(\d+)'
|
||||||
|
rcomp = re.compile(regex, re.I)
|
||||||
|
for line in output.splitlines():
|
||||||
|
match = rcomp.search(line)
|
||||||
|
if match:
|
||||||
|
ps_id = max(int(match.group(1)), ps_id)
|
||||||
|
to_get.append((project, chg_id, ps_id))
|
||||||
|
else:
|
||||||
|
project = self.GetProjects([a])[0]
|
||||||
|
return to_get
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
for project, change_id, ps_id in self._ParseChangeIds(args):
|
||||||
|
dl = project.DownloadPatchSet(change_id, ps_id)
|
||||||
|
if not dl:
|
||||||
|
print('[%s] change %d/%d not found'
|
||||||
|
% (project.name, change_id, ps_id),
|
||||||
|
file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if not opt.revert and not dl.commits:
|
||||||
|
print('[%s] change %d/%d has already been merged'
|
||||||
|
% (project.name, change_id, ps_id),
|
||||||
|
file=sys.stderr)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if len(dl.commits) > 1:
|
||||||
|
print('[%s] %d/%d depends on %d unmerged changes:' \
|
||||||
|
% (project.name, change_id, ps_id, len(dl.commits)),
|
||||||
|
file=sys.stderr)
|
||||||
|
for c in dl.commits:
|
||||||
|
print(' %s' % (c), file=sys.stderr)
|
||||||
|
if opt.cherrypick:
|
||||||
|
try:
|
||||||
|
project._CherryPick(dl.commit)
|
||||||
|
except GitError:
|
||||||
|
print('[%s] Could not complete the cherry-pick of %s' \
|
||||||
|
% (project.name, dl.commit), file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
elif opt.revert:
|
||||||
|
project._Revert(dl.commit)
|
||||||
|
elif opt.ffonly:
|
||||||
|
project._FastForward(dl.commit, ffonly=True)
|
||||||
|
else:
|
||||||
|
project._Checkout(dl.commit)
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,32 +14,107 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import errno
|
||||||
|
import multiprocessing
|
||||||
import re
|
import re
|
||||||
import os
|
import os
|
||||||
|
import signal
|
||||||
import sys
|
import sys
|
||||||
import subprocess
|
import subprocess
|
||||||
from command import Command
|
|
||||||
|
|
||||||
class Forall(Command):
|
from color import Coloring
|
||||||
|
from command import Command, MirrorSafeCommand
|
||||||
|
import platform_utils
|
||||||
|
|
||||||
|
_CAN_COLOR = [
|
||||||
|
'branch',
|
||||||
|
'diff',
|
||||||
|
'grep',
|
||||||
|
'log',
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class ForallColoring(Coloring):
|
||||||
|
def __init__(self, config):
|
||||||
|
Coloring.__init__(self, config, 'forall')
|
||||||
|
self.project = self.printer('project', attr='bold')
|
||||||
|
|
||||||
|
|
||||||
|
class Forall(Command, MirrorSafeCommand):
|
||||||
common = False
|
common = False
|
||||||
helpSummary = "Run a shell command in each project"
|
helpSummary = "Run a shell command in each project"
|
||||||
helpUsage = """
|
helpUsage = """
|
||||||
%prog [<project>...] -c <command> [<arg>...]
|
%prog [<project>...] -c <command> [<arg>...]
|
||||||
|
%prog -r str1 [str2] ... -c <command> [<arg>...]"
|
||||||
"""
|
"""
|
||||||
helpDescription = """
|
helpDescription = """
|
||||||
Executes the same shell command in each project.
|
Executes the same shell command in each project.
|
||||||
|
|
||||||
Environment
|
The -r option allows running the command only on projects matching
|
||||||
-----------
|
regex or wildcard expression.
|
||||||
pwd is the project's working directory.
|
|
||||||
|
# Output Formatting
|
||||||
|
|
||||||
|
The -p option causes '%prog' to bind pipes to the command's stdin,
|
||||||
|
stdout and stderr streams, and pipe all output into a continuous
|
||||||
|
stream that is displayed in a single pager session. Project headings
|
||||||
|
are inserted before the output of each command is displayed. If the
|
||||||
|
command produces no output in a project, no heading is displayed.
|
||||||
|
|
||||||
|
The formatting convention used by -p is very suitable for some
|
||||||
|
types of searching, e.g. `repo forall -p -c git log -SFoo` will
|
||||||
|
print all commits that add or remove references to Foo.
|
||||||
|
|
||||||
|
The -v option causes '%prog' to display stderr messages if a
|
||||||
|
command produces output only on stderr. Normally the -p option
|
||||||
|
causes command output to be suppressed until the command produces
|
||||||
|
at least one byte of output on stdout.
|
||||||
|
|
||||||
|
# Environment
|
||||||
|
|
||||||
|
pwd is the project's working directory. If the current client is
|
||||||
|
a mirror client, then pwd is the Git repository.
|
||||||
|
|
||||||
REPO_PROJECT is set to the unique name of the project.
|
REPO_PROJECT is set to the unique name of the project.
|
||||||
|
|
||||||
|
REPO_PATH is the path relative the the root of the client.
|
||||||
|
|
||||||
|
REPO_REMOTE is the name of the remote system from the manifest.
|
||||||
|
|
||||||
|
REPO_LREV is the name of the revision from the manifest, translated
|
||||||
|
to a local tracking branch. If you need to pass the manifest
|
||||||
|
revision to a locally executed git command, use REPO_LREV.
|
||||||
|
|
||||||
|
REPO_RREV is the name of the revision from the manifest, exactly
|
||||||
|
as written in the manifest.
|
||||||
|
|
||||||
|
REPO_COUNT is the total number of projects being iterated.
|
||||||
|
|
||||||
|
REPO_I is the current (1-based) iteration count. Can be used in
|
||||||
|
conjunction with REPO_COUNT to add a simple progress indicator to your
|
||||||
|
command.
|
||||||
|
|
||||||
|
REPO__* are any extra environment variables, specified by the
|
||||||
|
"annotation" element under any project element. This can be useful
|
||||||
|
for differentiating trees based on user-specific criteria, or simply
|
||||||
|
annotating tree details.
|
||||||
|
|
||||||
shell positional arguments ($1, $2, .., $#) are set to any arguments
|
shell positional arguments ($1, $2, .., $#) are set to any arguments
|
||||||
following <command>.
|
following <command>.
|
||||||
|
|
||||||
stdin, stdout, stderr are inherited from the terminal and are
|
Example: to list projects:
|
||||||
not redirected.
|
|
||||||
|
%prog -c 'echo $REPO_PROJECT'
|
||||||
|
|
||||||
|
Notice that $REPO_PROJECT is quoted to ensure it is expanded in
|
||||||
|
the context of running <command> instead of in the calling shell.
|
||||||
|
|
||||||
|
Unless -p is used, stdin, stdout, stderr are inherited from the
|
||||||
|
terminal and are not redirected.
|
||||||
|
|
||||||
|
If -e is used, when a command exits unsuccessfully, '%prog' will abort
|
||||||
|
without iterating through the remaining projects.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def _Options(self, p):
|
def _Options(self, p):
|
||||||
@ -46,16 +122,69 @@ not redirected.
|
|||||||
setattr(parser.values, option.dest, list(parser.rargs))
|
setattr(parser.values, option.dest, list(parser.rargs))
|
||||||
while parser.rargs:
|
while parser.rargs:
|
||||||
del parser.rargs[0]
|
del parser.rargs[0]
|
||||||
|
p.add_option('-r', '--regex',
|
||||||
|
dest='regex', action='store_true',
|
||||||
|
help="Execute the command only on projects matching regex or wildcard expression")
|
||||||
|
p.add_option('-i', '--inverse-regex',
|
||||||
|
dest='inverse_regex', action='store_true',
|
||||||
|
help="Execute the command only on projects not matching regex or wildcard expression")
|
||||||
|
p.add_option('-g', '--groups',
|
||||||
|
dest='groups',
|
||||||
|
help="Execute the command only on projects matching the specified groups")
|
||||||
p.add_option('-c', '--command',
|
p.add_option('-c', '--command',
|
||||||
help='Command (and arguments) to execute',
|
help='Command (and arguments) to execute',
|
||||||
dest='command',
|
dest='command',
|
||||||
action='callback',
|
action='callback',
|
||||||
callback=cmd)
|
callback=cmd)
|
||||||
|
p.add_option('-e', '--abort-on-errors',
|
||||||
|
dest='abort_on_errors', action='store_true',
|
||||||
|
help='Abort if a command exits unsuccessfully')
|
||||||
|
p.add_option('--ignore-missing', action='store_true',
|
||||||
|
help='Silently skip & do not exit non-zero due missing '
|
||||||
|
'checkouts')
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
g = p.add_option_group('Output')
|
||||||
|
g.add_option('-p',
|
||||||
|
dest='project_header', action='store_true',
|
||||||
|
help='Show project headers before output')
|
||||||
|
g.add_option('-v', '--verbose',
|
||||||
|
dest='verbose', action='store_true',
|
||||||
|
help='Show command error messages')
|
||||||
|
g.add_option('-j', '--jobs',
|
||||||
|
dest='jobs', action='store', type='int', default=1,
|
||||||
|
help='number of commands to execute simultaneously')
|
||||||
|
|
||||||
|
def WantPager(self, opt):
|
||||||
|
return opt.project_header and opt.jobs == 1
|
||||||
|
|
||||||
|
def _SerializeProject(self, project):
|
||||||
|
""" Serialize a project._GitGetByExec instance.
|
||||||
|
|
||||||
|
project._GitGetByExec is not pickle-able. Instead of trying to pass it
|
||||||
|
around between processes, make a dict ourselves containing only the
|
||||||
|
attributes that we need.
|
||||||
|
|
||||||
|
"""
|
||||||
|
if not self.manifest.IsMirror:
|
||||||
|
lrev = project.GetRevisionId()
|
||||||
|
else:
|
||||||
|
lrev = None
|
||||||
|
return {
|
||||||
|
'name': project.name,
|
||||||
|
'relpath': project.relpath,
|
||||||
|
'remote_name': project.remote.name,
|
||||||
|
'lrev': lrev,
|
||||||
|
'rrev': project.revisionExpr,
|
||||||
|
'annotations': dict((a.name, a.value) for a in project.annotations),
|
||||||
|
'gitdir': project.gitdir,
|
||||||
|
'worktree': project.worktree,
|
||||||
|
}
|
||||||
|
|
||||||
|
def ValidateOptions(self, opt, args):
|
||||||
if not opt.command:
|
if not opt.command:
|
||||||
self.Usage()
|
self.Usage()
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
cmd = [opt.command[0]]
|
cmd = [opt.command[0]]
|
||||||
|
|
||||||
shell = True
|
shell = True
|
||||||
@ -66,17 +195,207 @@ not redirected.
|
|||||||
cmd.append(cmd[0])
|
cmd.append(cmd[0])
|
||||||
cmd.extend(opt.command[1:])
|
cmd.extend(opt.command[1:])
|
||||||
|
|
||||||
rc = 0
|
if opt.project_header \
|
||||||
for project in self.GetProjects(args):
|
and not shell \
|
||||||
env = dict(os.environ.iteritems())
|
and cmd[0] == 'git':
|
||||||
env['REPO_PROJECT'] = project.name
|
# If this is a direct git command that can enable colorized
|
||||||
|
# output and the user prefers coloring, add --color into the
|
||||||
|
# command line because we are going to wrap the command into
|
||||||
|
# a pipe and git won't know coloring should activate.
|
||||||
|
#
|
||||||
|
for cn in cmd[1:]:
|
||||||
|
if not cn.startswith('-'):
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
cn = None
|
||||||
|
if cn and cn in _CAN_COLOR:
|
||||||
|
class ColorCmd(Coloring):
|
||||||
|
def __init__(self, config, cmd):
|
||||||
|
Coloring.__init__(self, config, cmd)
|
||||||
|
if ColorCmd(self.manifest.manifestProject.config, cn).is_on:
|
||||||
|
cmd.insert(cmd.index(cn) + 1, '--color')
|
||||||
|
|
||||||
p = subprocess.Popen(cmd,
|
mirror = self.manifest.IsMirror
|
||||||
cwd = project.worktree,
|
rc = 0
|
||||||
shell = shell,
|
|
||||||
env = env)
|
smart_sync_manifest_name = "smart_sync_override.xml"
|
||||||
r = p.wait()
|
smart_sync_manifest_path = os.path.join(
|
||||||
if r != 0 and r != rc:
|
self.manifest.manifestProject.worktree, smart_sync_manifest_name)
|
||||||
rc = r
|
|
||||||
|
if os.path.isfile(smart_sync_manifest_path):
|
||||||
|
self.manifest.Override(smart_sync_manifest_path)
|
||||||
|
|
||||||
|
if opt.regex:
|
||||||
|
projects = self.FindProjects(args)
|
||||||
|
elif opt.inverse_regex:
|
||||||
|
projects = self.FindProjects(args, inverse=True)
|
||||||
|
else:
|
||||||
|
projects = self.GetProjects(args, groups=opt.groups)
|
||||||
|
|
||||||
|
os.environ['REPO_COUNT'] = str(len(projects))
|
||||||
|
|
||||||
|
pool = multiprocessing.Pool(opt.jobs, InitWorker)
|
||||||
|
try:
|
||||||
|
config = self.manifest.manifestProject.config
|
||||||
|
results_it = pool.imap(
|
||||||
|
DoWorkWrapper,
|
||||||
|
self.ProjectArgs(projects, mirror, opt, cmd, shell, config))
|
||||||
|
pool.close()
|
||||||
|
for r in results_it:
|
||||||
|
rc = rc or r
|
||||||
|
if r != 0 and opt.abort_on_errors:
|
||||||
|
raise Exception('Aborting due to previous error')
|
||||||
|
except (KeyboardInterrupt, WorkerKeyboardInterrupt):
|
||||||
|
# Catch KeyboardInterrupt raised inside and outside of workers
|
||||||
|
print('Interrupted - terminating the pool')
|
||||||
|
pool.terminate()
|
||||||
|
rc = rc or errno.EINTR
|
||||||
|
except Exception as e:
|
||||||
|
# Catch any other exceptions raised
|
||||||
|
print('Got an error, terminating the pool: %s: %s' %
|
||||||
|
(type(e).__name__, e),
|
||||||
|
file=sys.stderr)
|
||||||
|
pool.terminate()
|
||||||
|
rc = rc or getattr(e, 'errno', 1)
|
||||||
|
finally:
|
||||||
|
pool.join()
|
||||||
if rc != 0:
|
if rc != 0:
|
||||||
sys.exit(rc)
|
sys.exit(rc)
|
||||||
|
|
||||||
|
def ProjectArgs(self, projects, mirror, opt, cmd, shell, config):
|
||||||
|
for cnt, p in enumerate(projects):
|
||||||
|
try:
|
||||||
|
project = self._SerializeProject(p)
|
||||||
|
except Exception as e:
|
||||||
|
print('Project list error on project %s: %s: %s' %
|
||||||
|
(p.name, type(e).__name__, e),
|
||||||
|
file=sys.stderr)
|
||||||
|
return
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
print('Project list interrupted',
|
||||||
|
file=sys.stderr)
|
||||||
|
return
|
||||||
|
yield [mirror, opt, cmd, shell, cnt, config, project]
|
||||||
|
|
||||||
|
class WorkerKeyboardInterrupt(Exception):
|
||||||
|
""" Keyboard interrupt exception for worker processes. """
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
def InitWorker():
|
||||||
|
signal.signal(signal.SIGINT, signal.SIG_IGN)
|
||||||
|
|
||||||
|
def DoWorkWrapper(args):
|
||||||
|
""" A wrapper around the DoWork() method.
|
||||||
|
|
||||||
|
Catch the KeyboardInterrupt exceptions here and re-raise them as a different,
|
||||||
|
``Exception``-based exception to stop it flooding the console with stacktraces
|
||||||
|
and making the parent hang indefinitely.
|
||||||
|
|
||||||
|
"""
|
||||||
|
project = args.pop()
|
||||||
|
try:
|
||||||
|
return DoWork(project, *args)
|
||||||
|
except KeyboardInterrupt:
|
||||||
|
print('%s: Worker interrupted' % project['name'])
|
||||||
|
raise WorkerKeyboardInterrupt()
|
||||||
|
|
||||||
|
|
||||||
|
def DoWork(project, mirror, opt, cmd, shell, cnt, config):
|
||||||
|
env = os.environ.copy()
|
||||||
|
def setenv(name, val):
|
||||||
|
if val is None:
|
||||||
|
val = ''
|
||||||
|
if hasattr(val, 'encode'):
|
||||||
|
val = val.encode()
|
||||||
|
env[name] = val
|
||||||
|
|
||||||
|
setenv('REPO_PROJECT', project['name'])
|
||||||
|
setenv('REPO_PATH', project['relpath'])
|
||||||
|
setenv('REPO_REMOTE', project['remote_name'])
|
||||||
|
setenv('REPO_LREV', project['lrev'])
|
||||||
|
setenv('REPO_RREV', project['rrev'])
|
||||||
|
setenv('REPO_I', str(cnt + 1))
|
||||||
|
for name in project['annotations']:
|
||||||
|
setenv("REPO__%s" % (name), project['annotations'][name])
|
||||||
|
|
||||||
|
if mirror:
|
||||||
|
setenv('GIT_DIR', project['gitdir'])
|
||||||
|
cwd = project['gitdir']
|
||||||
|
else:
|
||||||
|
cwd = project['worktree']
|
||||||
|
|
||||||
|
if not os.path.exists(cwd):
|
||||||
|
# Allow the user to silently ignore missing checkouts so they can run on
|
||||||
|
# partial checkouts (good for infra recovery tools).
|
||||||
|
if opt.ignore_missing:
|
||||||
|
return 0
|
||||||
|
if ((opt.project_header and opt.verbose)
|
||||||
|
or not opt.project_header):
|
||||||
|
print('skipping %s/' % project['relpath'], file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
|
||||||
|
if opt.project_header:
|
||||||
|
stdin = subprocess.PIPE
|
||||||
|
stdout = subprocess.PIPE
|
||||||
|
stderr = subprocess.PIPE
|
||||||
|
else:
|
||||||
|
stdin = None
|
||||||
|
stdout = None
|
||||||
|
stderr = None
|
||||||
|
|
||||||
|
p = subprocess.Popen(cmd,
|
||||||
|
cwd=cwd,
|
||||||
|
shell=shell,
|
||||||
|
env=env,
|
||||||
|
stdin=stdin,
|
||||||
|
stdout=stdout,
|
||||||
|
stderr=stderr)
|
||||||
|
|
||||||
|
if opt.project_header:
|
||||||
|
out = ForallColoring(config)
|
||||||
|
out.redirect(sys.stdout)
|
||||||
|
empty = True
|
||||||
|
errbuf = ''
|
||||||
|
|
||||||
|
p.stdin.close()
|
||||||
|
s_in = platform_utils.FileDescriptorStreams.create()
|
||||||
|
s_in.add(p.stdout, sys.stdout, 'stdout')
|
||||||
|
s_in.add(p.stderr, sys.stderr, 'stderr')
|
||||||
|
|
||||||
|
while not s_in.is_done:
|
||||||
|
in_ready = s_in.select()
|
||||||
|
for s in in_ready:
|
||||||
|
buf = s.read().decode()
|
||||||
|
if not buf:
|
||||||
|
s.close()
|
||||||
|
s_in.remove(s)
|
||||||
|
continue
|
||||||
|
|
||||||
|
if not opt.verbose:
|
||||||
|
if s.std_name == 'stderr':
|
||||||
|
errbuf += buf
|
||||||
|
continue
|
||||||
|
|
||||||
|
if empty and out:
|
||||||
|
if not cnt == 0:
|
||||||
|
out.nl()
|
||||||
|
|
||||||
|
if mirror:
|
||||||
|
project_header_path = project['name']
|
||||||
|
else:
|
||||||
|
project_header_path = project['relpath']
|
||||||
|
out.project('project %s/', project_header_path)
|
||||||
|
out.nl()
|
||||||
|
out.flush()
|
||||||
|
if errbuf:
|
||||||
|
sys.stderr.write(errbuf)
|
||||||
|
sys.stderr.flush()
|
||||||
|
errbuf = ''
|
||||||
|
empty = False
|
||||||
|
|
||||||
|
s.dest.write(buf)
|
||||||
|
s.dest.flush()
|
||||||
|
|
||||||
|
r = p.wait()
|
||||||
|
return r
|
||||||
|
52
subcmds/gitc_delete.py
Normal file
52
subcmds/gitc_delete.py
Normal file
@ -0,0 +1,52 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2015 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from command import Command, GitcClientCommand
|
||||||
|
import platform_utils
|
||||||
|
|
||||||
|
from pyversion import is_python3
|
||||||
|
if not is_python3():
|
||||||
|
input = raw_input
|
||||||
|
|
||||||
|
class GitcDelete(Command, GitcClientCommand):
|
||||||
|
common = True
|
||||||
|
visible_everywhere = False
|
||||||
|
helpSummary = "Delete a GITC Client."
|
||||||
|
helpUsage = """
|
||||||
|
%prog
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
This subcommand deletes the current GITC client, deleting the GITC manifest
|
||||||
|
and all locally downloaded sources.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('-f', '--force',
|
||||||
|
dest='force', action='store_true',
|
||||||
|
help='Force the deletion (no prompt).')
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
if not opt.force:
|
||||||
|
prompt = ('This will delete GITC client: %s\nAre you sure? (yes/no) ' %
|
||||||
|
self.gitc_manifest.gitc_client_name)
|
||||||
|
response = input(prompt).lower()
|
||||||
|
if not response == 'yes':
|
||||||
|
print('Response was not "yes"\n Exiting...')
|
||||||
|
sys.exit(1)
|
||||||
|
platform_utils.rmtree(self.gitc_manifest.gitc_client_dir)
|
83
subcmds/gitc_init.py
Normal file
83
subcmds/gitc_init.py
Normal file
@ -0,0 +1,83 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2015 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
import gitc_utils
|
||||||
|
from command import GitcAvailableCommand
|
||||||
|
from manifest_xml import GitcManifest
|
||||||
|
from subcmds import init
|
||||||
|
import wrapper
|
||||||
|
|
||||||
|
|
||||||
|
class GitcInit(init.Init, GitcAvailableCommand):
|
||||||
|
common = True
|
||||||
|
helpSummary = "Initialize a GITC Client."
|
||||||
|
helpUsage = """
|
||||||
|
%prog [options] [client name]
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
The '%prog' command is ran to initialize a new GITC client for use
|
||||||
|
with the GITC file system.
|
||||||
|
|
||||||
|
This command will setup the client directory, initialize repo, just
|
||||||
|
like repo init does, and then downloads the manifest collection
|
||||||
|
and installs it in the .repo/directory of the GITC client.
|
||||||
|
|
||||||
|
Once this is done, a GITC manifest is generated by pulling the HEAD
|
||||||
|
SHA for each project and generates the properly formatted XML file
|
||||||
|
and installs it as .manifest in the GITC client directory.
|
||||||
|
|
||||||
|
The -c argument is required to specify the GITC client name.
|
||||||
|
|
||||||
|
The optional -f argument can be used to specify the manifest file to
|
||||||
|
use for this GITC client.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
super(GitcInit, self)._Options(p, gitc_init=True)
|
||||||
|
g = p.add_option_group('GITC options')
|
||||||
|
g.add_option('-f', '--manifest-file',
|
||||||
|
dest='manifest_file',
|
||||||
|
help='Optional manifest file to use for this GITC client.')
|
||||||
|
g.add_option('-c', '--gitc-client',
|
||||||
|
dest='gitc_client',
|
||||||
|
help='The name of the gitc_client instance to create or modify.')
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
gitc_client = gitc_utils.parse_clientdir(os.getcwd())
|
||||||
|
if not gitc_client or (opt.gitc_client and gitc_client != opt.gitc_client):
|
||||||
|
print('fatal: Please update your repo command. See go/gitc for instructions.', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
self.client_dir = os.path.join(gitc_utils.get_gitc_manifest_dir(),
|
||||||
|
gitc_client)
|
||||||
|
super(GitcInit, self).Execute(opt, args)
|
||||||
|
|
||||||
|
manifest_file = self.manifest.manifestFile
|
||||||
|
if opt.manifest_file:
|
||||||
|
if not os.path.exists(opt.manifest_file):
|
||||||
|
print('fatal: Specified manifest file %s does not exist.' %
|
||||||
|
opt.manifest_file)
|
||||||
|
sys.exit(1)
|
||||||
|
manifest_file = opt.manifest_file
|
||||||
|
|
||||||
|
manifest = GitcManifest(self.repodir, gitc_client)
|
||||||
|
manifest.Override(manifest_file)
|
||||||
|
gitc_utils.generate_gitc_manifest(None, manifest)
|
||||||
|
print('Please run `cd %s` to view your GITC client.' %
|
||||||
|
os.path.join(wrapper.Wrapper().GITC_FS_ROOT_DIR, gitc_client))
|
257
subcmds/grep.py
Normal file
257
subcmds/grep.py
Normal file
@ -0,0 +1,257 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from color import Coloring
|
||||||
|
from command import PagedCommand
|
||||||
|
from error import GitError
|
||||||
|
from git_command import git_require, GitCommand
|
||||||
|
|
||||||
|
class GrepColoring(Coloring):
|
||||||
|
def __init__(self, config):
|
||||||
|
Coloring.__init__(self, config, 'grep')
|
||||||
|
self.project = self.printer('project', attr='bold')
|
||||||
|
self.fail = self.printer('fail', fg='red')
|
||||||
|
|
||||||
|
class Grep(PagedCommand):
|
||||||
|
common = True
|
||||||
|
helpSummary = "Print lines matching a pattern"
|
||||||
|
helpUsage = """
|
||||||
|
%prog {pattern | -e pattern} [<project>...]
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
Search for the specified patterns in all project files.
|
||||||
|
|
||||||
|
# Boolean Options
|
||||||
|
|
||||||
|
The following options can appear as often as necessary to express
|
||||||
|
the pattern to locate:
|
||||||
|
|
||||||
|
-e PATTERN
|
||||||
|
--and, --or, --not, -(, -)
|
||||||
|
|
||||||
|
Further, the -r/--revision option may be specified multiple times
|
||||||
|
in order to scan multiple trees. If the same file matches in more
|
||||||
|
than one tree, only the first result is reported, prefixed by the
|
||||||
|
revision name it was found under.
|
||||||
|
|
||||||
|
# Examples
|
||||||
|
|
||||||
|
Look for a line that has '#define' and either 'MAX_PATH or 'PATH_MAX':
|
||||||
|
|
||||||
|
repo grep -e '#define' --and -\\( -e MAX_PATH -e PATH_MAX \\)
|
||||||
|
|
||||||
|
Look for a line that has 'NODE' or 'Unexpected' in files that
|
||||||
|
contain a line that matches both expressions:
|
||||||
|
|
||||||
|
repo grep --all-match -e NODE -e Unexpected
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
def carry(option,
|
||||||
|
opt_str,
|
||||||
|
value,
|
||||||
|
parser):
|
||||||
|
pt = getattr(parser.values, 'cmd_argv', None)
|
||||||
|
if pt is None:
|
||||||
|
pt = []
|
||||||
|
setattr(parser.values, 'cmd_argv', pt)
|
||||||
|
|
||||||
|
if opt_str == '-(':
|
||||||
|
pt.append('(')
|
||||||
|
elif opt_str == '-)':
|
||||||
|
pt.append(')')
|
||||||
|
else:
|
||||||
|
pt.append(opt_str)
|
||||||
|
|
||||||
|
if value is not None:
|
||||||
|
pt.append(value)
|
||||||
|
|
||||||
|
g = p.add_option_group('Sources')
|
||||||
|
g.add_option('--cached',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Search the index, instead of the work tree')
|
||||||
|
g.add_option('-r', '--revision',
|
||||||
|
dest='revision', action='append', metavar='TREEish',
|
||||||
|
help='Search TREEish, instead of the work tree')
|
||||||
|
|
||||||
|
g = p.add_option_group('Pattern')
|
||||||
|
g.add_option('-e',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
metavar='PATTERN', type='str',
|
||||||
|
help='Pattern to search for')
|
||||||
|
g.add_option('-i', '--ignore-case',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Ignore case differences')
|
||||||
|
g.add_option('-a', '--text',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help="Process binary files as if they were text")
|
||||||
|
g.add_option('-I',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help="Don't match the pattern in binary files")
|
||||||
|
g.add_option('-w', '--word-regexp',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Match the pattern only at word boundaries')
|
||||||
|
g.add_option('-v', '--invert-match',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Select non-matching lines')
|
||||||
|
g.add_option('-G', '--basic-regexp',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Use POSIX basic regexp for patterns (default)')
|
||||||
|
g.add_option('-E', '--extended-regexp',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Use POSIX extended regexp for patterns')
|
||||||
|
g.add_option('-F', '--fixed-strings',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Use fixed strings (not regexp) for pattern')
|
||||||
|
|
||||||
|
g = p.add_option_group('Pattern Grouping')
|
||||||
|
g.add_option('--all-match',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Limit match to lines that have all patterns')
|
||||||
|
g.add_option('--and', '--or', '--not',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Boolean operators to combine patterns')
|
||||||
|
g.add_option('-(', '-)',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Boolean operator grouping')
|
||||||
|
|
||||||
|
g = p.add_option_group('Output')
|
||||||
|
g.add_option('-n',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Prefix the line number to matching lines')
|
||||||
|
g.add_option('-C',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
metavar='CONTEXT', type='str',
|
||||||
|
help='Show CONTEXT lines around match')
|
||||||
|
g.add_option('-B',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
metavar='CONTEXT', type='str',
|
||||||
|
help='Show CONTEXT lines before match')
|
||||||
|
g.add_option('-A',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
metavar='CONTEXT', type='str',
|
||||||
|
help='Show CONTEXT lines after match')
|
||||||
|
g.add_option('-l', '--name-only', '--files-with-matches',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Show only file names containing matching lines')
|
||||||
|
g.add_option('-L', '--files-without-match',
|
||||||
|
action='callback', callback=carry,
|
||||||
|
help='Show only file names not containing matching lines')
|
||||||
|
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
out = GrepColoring(self.manifest.manifestProject.config)
|
||||||
|
|
||||||
|
cmd_argv = ['grep']
|
||||||
|
if out.is_on and git_require((1, 6, 3)):
|
||||||
|
cmd_argv.append('--color')
|
||||||
|
cmd_argv.extend(getattr(opt, 'cmd_argv', []))
|
||||||
|
|
||||||
|
if '-e' not in cmd_argv:
|
||||||
|
if not args:
|
||||||
|
self.Usage()
|
||||||
|
cmd_argv.append('-e')
|
||||||
|
cmd_argv.append(args[0])
|
||||||
|
args = args[1:]
|
||||||
|
|
||||||
|
projects = self.GetProjects(args)
|
||||||
|
|
||||||
|
full_name = False
|
||||||
|
if len(projects) > 1:
|
||||||
|
cmd_argv.append('--full-name')
|
||||||
|
full_name = True
|
||||||
|
|
||||||
|
have_rev = False
|
||||||
|
if opt.revision:
|
||||||
|
if '--cached' in cmd_argv:
|
||||||
|
print('fatal: cannot combine --cached and --revision', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
have_rev = True
|
||||||
|
cmd_argv.extend(opt.revision)
|
||||||
|
cmd_argv.append('--')
|
||||||
|
|
||||||
|
git_failed = False
|
||||||
|
bad_rev = False
|
||||||
|
have_match = False
|
||||||
|
|
||||||
|
for project in projects:
|
||||||
|
try:
|
||||||
|
p = GitCommand(project,
|
||||||
|
cmd_argv,
|
||||||
|
bare=False,
|
||||||
|
capture_stdout=True,
|
||||||
|
capture_stderr=True)
|
||||||
|
except GitError as e:
|
||||||
|
git_failed = True
|
||||||
|
out.project('--- project %s ---' % project.relpath)
|
||||||
|
out.nl()
|
||||||
|
out.fail('%s', str(e))
|
||||||
|
out.nl()
|
||||||
|
continue
|
||||||
|
|
||||||
|
if p.Wait() != 0:
|
||||||
|
# no results
|
||||||
|
#
|
||||||
|
if p.stderr:
|
||||||
|
if have_rev and 'fatal: ambiguous argument' in p.stderr:
|
||||||
|
bad_rev = True
|
||||||
|
else:
|
||||||
|
out.project('--- project %s ---' % project.relpath)
|
||||||
|
out.nl()
|
||||||
|
out.fail('%s', p.stderr.strip())
|
||||||
|
out.nl()
|
||||||
|
continue
|
||||||
|
have_match = True
|
||||||
|
|
||||||
|
# We cut the last element, to avoid a blank line.
|
||||||
|
#
|
||||||
|
r = p.stdout.split('\n')
|
||||||
|
r = r[0:-1]
|
||||||
|
|
||||||
|
if have_rev and full_name:
|
||||||
|
for line in r:
|
||||||
|
rev, line = line.split(':', 1)
|
||||||
|
out.write("%s", rev)
|
||||||
|
out.write(':')
|
||||||
|
out.project(project.relpath)
|
||||||
|
out.write('/')
|
||||||
|
out.write("%s", line)
|
||||||
|
out.nl()
|
||||||
|
elif full_name:
|
||||||
|
for line in r:
|
||||||
|
out.project(project.relpath)
|
||||||
|
out.write('/')
|
||||||
|
out.write("%s", line)
|
||||||
|
out.nl()
|
||||||
|
else:
|
||||||
|
for line in r:
|
||||||
|
print(line)
|
||||||
|
|
||||||
|
if git_failed:
|
||||||
|
sys.exit(1)
|
||||||
|
elif have_match:
|
||||||
|
sys.exit(0)
|
||||||
|
elif have_rev and bad_rev:
|
||||||
|
for r in opt.revision:
|
||||||
|
print("error: can't search revision %s" % r, file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
else:
|
||||||
|
sys.exit(1)
|
114
subcmds/help.py
114
subcmds/help.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,13 +14,16 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import re
|
||||||
import sys
|
import sys
|
||||||
from formatter import AbstractFormatter, DumbWriter
|
from formatter import AbstractFormatter, DumbWriter
|
||||||
|
|
||||||
from color import Coloring
|
from color import Coloring
|
||||||
from command import PagedCommand
|
from command import PagedCommand, MirrorSafeCommand, GitcAvailableCommand, GitcClientCommand
|
||||||
|
import gitc_utils
|
||||||
|
|
||||||
class Help(PagedCommand):
|
class Help(PagedCommand, MirrorSafeCommand):
|
||||||
common = False
|
common = False
|
||||||
helpSummary = "Display detailed help on a command"
|
helpSummary = "Display detailed help on a command"
|
||||||
helpUsage = """
|
helpUsage = """
|
||||||
@ -29,14 +33,8 @@ class Help(PagedCommand):
|
|||||||
Displays detailed usage information about a command.
|
Displays detailed usage information about a command.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def _PrintAllCommands(self):
|
def _PrintCommands(self, commandNames):
|
||||||
print 'usage: repo COMMAND [ARGS]'
|
"""Helper to display |commandNames| summaries."""
|
||||||
print """
|
|
||||||
The complete list of recognized repo commands are:
|
|
||||||
"""
|
|
||||||
commandNames = self.commands.keys()
|
|
||||||
commandNames.sort()
|
|
||||||
|
|
||||||
maxlen = 0
|
maxlen = 0
|
||||||
for name in commandNames:
|
for name in commandNames:
|
||||||
maxlen = max(maxlen, len(name))
|
maxlen = max(maxlen, len(name))
|
||||||
@ -48,38 +46,41 @@ The complete list of recognized repo commands are:
|
|||||||
summary = command.helpSummary.strip()
|
summary = command.helpSummary.strip()
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
summary = ''
|
summary = ''
|
||||||
print fmt % (name, summary)
|
print(fmt % (name, summary))
|
||||||
print """
|
|
||||||
See 'repo help <command>' for more information on a specific command.
|
def _PrintAllCommands(self):
|
||||||
"""
|
print('usage: repo COMMAND [ARGS]')
|
||||||
|
print('The complete list of recognized repo commands are:')
|
||||||
|
commandNames = list(sorted(self.commands))
|
||||||
|
self._PrintCommands(commandNames)
|
||||||
|
print("See 'repo help <command>' for more information on a "
|
||||||
|
'specific command.')
|
||||||
|
|
||||||
def _PrintCommonCommands(self):
|
def _PrintCommonCommands(self):
|
||||||
print 'usage: repo COMMAND [ARGS]'
|
print('usage: repo COMMAND [ARGS]')
|
||||||
print """
|
print('The most commonly used repo commands are:')
|
||||||
The most commonly used repo commands are:
|
|
||||||
"""
|
|
||||||
commandNames = [name
|
|
||||||
for name in self.commands.keys()
|
|
||||||
if self.commands[name].common]
|
|
||||||
commandNames.sort()
|
|
||||||
|
|
||||||
maxlen = 0
|
def gitc_supported(cmd):
|
||||||
for name in commandNames:
|
if not isinstance(cmd, GitcAvailableCommand) and not isinstance(cmd, GitcClientCommand):
|
||||||
maxlen = max(maxlen, len(name))
|
return True
|
||||||
fmt = ' %%-%ds %%s' % maxlen
|
if self.manifest.isGitcClient:
|
||||||
|
return True
|
||||||
|
if isinstance(cmd, GitcClientCommand):
|
||||||
|
return False
|
||||||
|
if gitc_utils.get_gitc_manifest_dir():
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
for name in commandNames:
|
commandNames = list(sorted([name
|
||||||
command = self.commands[name]
|
for name, command in self.commands.items()
|
||||||
try:
|
if command.common and gitc_supported(command)]))
|
||||||
summary = command.helpSummary.strip()
|
self._PrintCommands(commandNames)
|
||||||
except AttributeError:
|
|
||||||
summary = ''
|
|
||||||
print fmt % (name, summary)
|
|
||||||
print """
|
|
||||||
See 'repo help <command>' for more information on a specific command.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def _PrintCommandHelp(self, cmd):
|
print(
|
||||||
|
"See 'repo help <command>' for more information on a specific command.\n"
|
||||||
|
"See 'repo help --all' for a complete list of recognized commands.")
|
||||||
|
|
||||||
|
def _PrintCommandHelp(self, cmd, header_prefix=''):
|
||||||
class _Out(Coloring):
|
class _Out(Coloring):
|
||||||
def __init__(self, gc):
|
def __init__(self, gc):
|
||||||
Coloring.__init__(self, gc, 'help')
|
Coloring.__init__(self, gc, 'help')
|
||||||
@ -92,42 +93,62 @@ See 'repo help <command>' for more information on a specific command.
|
|||||||
body = getattr(cmd, bodyAttr)
|
body = getattr(cmd, bodyAttr)
|
||||||
except AttributeError:
|
except AttributeError:
|
||||||
return
|
return
|
||||||
|
if body == '' or body is None:
|
||||||
|
return
|
||||||
|
|
||||||
self.nl()
|
self.nl()
|
||||||
|
|
||||||
self.heading('%s', heading)
|
self.heading('%s%s', header_prefix, heading)
|
||||||
self.nl()
|
self.nl()
|
||||||
|
|
||||||
self.heading('%s', ''.ljust(len(heading), '-'))
|
|
||||||
self.nl()
|
self.nl()
|
||||||
|
|
||||||
me = 'repo %s' % cmd.NAME
|
me = 'repo %s' % cmd.NAME
|
||||||
body = body.strip()
|
body = body.strip()
|
||||||
body = body.replace('%prog', me)
|
body = body.replace('%prog', me)
|
||||||
|
|
||||||
|
asciidoc_hdr = re.compile(r'^\n?#+ (.+)$')
|
||||||
for para in body.split("\n\n"):
|
for para in body.split("\n\n"):
|
||||||
if para.startswith(' '):
|
if para.startswith(' '):
|
||||||
self.write('%s', para)
|
self.write('%s', para)
|
||||||
self.nl()
|
self.nl()
|
||||||
self.nl()
|
self.nl()
|
||||||
else:
|
continue
|
||||||
self.wrap.add_flowing_data(para)
|
|
||||||
self.wrap.end_paragraph(1)
|
m = asciidoc_hdr.match(para)
|
||||||
|
if m:
|
||||||
|
self.heading('%s%s', header_prefix, m.group(1))
|
||||||
|
self.nl()
|
||||||
|
self.nl()
|
||||||
|
continue
|
||||||
|
|
||||||
|
self.wrap.add_flowing_data(para)
|
||||||
|
self.wrap.end_paragraph(1)
|
||||||
self.wrap.end_paragraph(0)
|
self.wrap.end_paragraph(0)
|
||||||
|
|
||||||
out = _Out(self.manifest.globalConfig)
|
out = _Out(self.manifest.globalConfig)
|
||||||
cmd.OptionParser.print_help()
|
|
||||||
out._PrintSection('Summary', 'helpSummary')
|
out._PrintSection('Summary', 'helpSummary')
|
||||||
|
cmd.OptionParser.print_help()
|
||||||
out._PrintSection('Description', 'helpDescription')
|
out._PrintSection('Description', 'helpDescription')
|
||||||
|
|
||||||
|
def _PrintAllCommandHelp(self):
|
||||||
|
for name in sorted(self.commands):
|
||||||
|
cmd = self.commands[name]
|
||||||
|
cmd.manifest = self.manifest
|
||||||
|
self._PrintCommandHelp(cmd, header_prefix='[%s] ' % (name,))
|
||||||
|
|
||||||
def _Options(self, p):
|
def _Options(self, p):
|
||||||
p.add_option('-a', '--all',
|
p.add_option('-a', '--all',
|
||||||
dest='show_all', action='store_true',
|
dest='show_all', action='store_true',
|
||||||
help='show the complete list of commands')
|
help='show the complete list of commands')
|
||||||
|
p.add_option('--help-all',
|
||||||
|
dest='show_all_help', action='store_true',
|
||||||
|
help='show the --help of all commands')
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
if len(args) == 0:
|
if len(args) == 0:
|
||||||
if opt.show_all:
|
if opt.show_all_help:
|
||||||
|
self._PrintAllCommandHelp()
|
||||||
|
elif opt.show_all:
|
||||||
self._PrintAllCommands()
|
self._PrintAllCommands()
|
||||||
else:
|
else:
|
||||||
self._PrintCommonCommands()
|
self._PrintCommonCommands()
|
||||||
@ -138,9 +159,10 @@ See 'repo help <command>' for more information on a specific command.
|
|||||||
try:
|
try:
|
||||||
cmd = self.commands[name]
|
cmd = self.commands[name]
|
||||||
except KeyError:
|
except KeyError:
|
||||||
print >>sys.stderr, "repo: '%s' is not a repo command." % name
|
print("repo: '%s' is not a repo command." % name, file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
cmd.manifest = self.manifest
|
||||||
self._PrintCommandHelp(cmd)
|
self._PrintCommandHelp(cmd)
|
||||||
|
|
||||||
else:
|
else:
|
||||||
|
210
subcmds/info.py
Normal file
210
subcmds/info.py
Normal file
@ -0,0 +1,210 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2012 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from command import PagedCommand
|
||||||
|
from color import Coloring
|
||||||
|
from git_refs import R_M
|
||||||
|
|
||||||
|
class _Coloring(Coloring):
|
||||||
|
def __init__(self, config):
|
||||||
|
Coloring.__init__(self, config, "status")
|
||||||
|
|
||||||
|
class Info(PagedCommand):
|
||||||
|
common = True
|
||||||
|
helpSummary = "Get info on the manifest branch, current branch or unmerged branches"
|
||||||
|
helpUsage = "%prog [-dl] [-o [-b]] [<project>...]"
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('-d', '--diff',
|
||||||
|
dest='all', action='store_true',
|
||||||
|
help="show full info and commit diff including remote branches")
|
||||||
|
p.add_option('-o', '--overview',
|
||||||
|
dest='overview', action='store_true',
|
||||||
|
help='show overview of all local commits')
|
||||||
|
p.add_option('-b', '--current-branch',
|
||||||
|
dest="current_branch", action="store_true",
|
||||||
|
help="consider only checked out branches")
|
||||||
|
p.add_option('-l', '--local-only',
|
||||||
|
dest="local", action="store_true",
|
||||||
|
help="Disable all remote operations")
|
||||||
|
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
self.out = _Coloring(self.manifest.globalConfig)
|
||||||
|
self.heading = self.out.printer('heading', attr = 'bold')
|
||||||
|
self.headtext = self.out.nofmt_printer('headtext', fg = 'yellow')
|
||||||
|
self.redtext = self.out.printer('redtext', fg = 'red')
|
||||||
|
self.sha = self.out.printer("sha", fg = 'yellow')
|
||||||
|
self.text = self.out.nofmt_printer('text')
|
||||||
|
self.dimtext = self.out.printer('dimtext', attr = 'dim')
|
||||||
|
|
||||||
|
self.opt = opt
|
||||||
|
|
||||||
|
manifestConfig = self.manifest.manifestProject.config
|
||||||
|
mergeBranch = manifestConfig.GetBranch("default").merge
|
||||||
|
manifestGroups = (manifestConfig.GetString('manifest.groups')
|
||||||
|
or 'all,-notdefault')
|
||||||
|
|
||||||
|
self.heading("Manifest branch: ")
|
||||||
|
if self.manifest.default.revisionExpr:
|
||||||
|
self.headtext(self.manifest.default.revisionExpr)
|
||||||
|
self.out.nl()
|
||||||
|
self.heading("Manifest merge branch: ")
|
||||||
|
self.headtext(mergeBranch)
|
||||||
|
self.out.nl()
|
||||||
|
self.heading("Manifest groups: ")
|
||||||
|
self.headtext(manifestGroups)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
self.printSeparator()
|
||||||
|
|
||||||
|
if not opt.overview:
|
||||||
|
self.printDiffInfo(args)
|
||||||
|
else:
|
||||||
|
self.printCommitOverview(args)
|
||||||
|
|
||||||
|
def printSeparator(self):
|
||||||
|
self.text("----------------------------")
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
def printDiffInfo(self, args):
|
||||||
|
# We let exceptions bubble up to main as they'll be well structured.
|
||||||
|
projs = self.GetProjects(args)
|
||||||
|
|
||||||
|
for p in projs:
|
||||||
|
self.heading("Project: ")
|
||||||
|
self.headtext(p.name)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
self.heading("Mount path: ")
|
||||||
|
self.headtext(p.worktree)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
self.heading("Current revision: ")
|
||||||
|
self.headtext(p.GetRevisionId())
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
currentBranch = p.CurrentBranch
|
||||||
|
if currentBranch:
|
||||||
|
self.heading('Current branch: ')
|
||||||
|
self.headtext(currentBranch)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
self.heading("Manifest revision: ")
|
||||||
|
self.headtext(p.revisionExpr)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
localBranches = list(p.GetBranches().keys())
|
||||||
|
self.heading("Local Branches: ")
|
||||||
|
self.redtext(str(len(localBranches)))
|
||||||
|
if localBranches:
|
||||||
|
self.text(" [")
|
||||||
|
self.text(", ".join(localBranches))
|
||||||
|
self.text("]")
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
if self.opt.all:
|
||||||
|
self.findRemoteLocalDiff(p)
|
||||||
|
|
||||||
|
self.printSeparator()
|
||||||
|
|
||||||
|
def findRemoteLocalDiff(self, project):
|
||||||
|
#Fetch all the latest commits
|
||||||
|
if not self.opt.local:
|
||||||
|
project.Sync_NetworkHalf(quiet=True, current_branch_only=True)
|
||||||
|
|
||||||
|
logTarget = R_M + self.manifest.manifestProject.config.GetBranch("default").merge
|
||||||
|
|
||||||
|
bareTmp = project.bare_git._bare
|
||||||
|
project.bare_git._bare = False
|
||||||
|
localCommits = project.bare_git.rev_list(
|
||||||
|
'--abbrev=8',
|
||||||
|
'--abbrev-commit',
|
||||||
|
'--pretty=oneline',
|
||||||
|
logTarget + "..",
|
||||||
|
'--')
|
||||||
|
|
||||||
|
originCommits = project.bare_git.rev_list(
|
||||||
|
'--abbrev=8',
|
||||||
|
'--abbrev-commit',
|
||||||
|
'--pretty=oneline',
|
||||||
|
".." + logTarget,
|
||||||
|
'--')
|
||||||
|
project.bare_git._bare = bareTmp
|
||||||
|
|
||||||
|
self.heading("Local Commits: ")
|
||||||
|
self.redtext(str(len(localCommits)))
|
||||||
|
self.dimtext(" (on current branch)")
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
for c in localCommits:
|
||||||
|
split = c.split()
|
||||||
|
self.sha(split[0] + " ")
|
||||||
|
self.text(" ".join(split[1:]))
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
self.printSeparator()
|
||||||
|
|
||||||
|
self.heading("Remote Commits: ")
|
||||||
|
self.redtext(str(len(originCommits)))
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
for c in originCommits:
|
||||||
|
split = c.split()
|
||||||
|
self.sha(split[0] + " ")
|
||||||
|
self.text(" ".join(split[1:]))
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
def printCommitOverview(self, args):
|
||||||
|
all_branches = []
|
||||||
|
for project in self.GetProjects(args):
|
||||||
|
br = [project.GetUploadableBranch(x)
|
||||||
|
for x in project.GetBranches()]
|
||||||
|
br = [x for x in br if x]
|
||||||
|
if self.opt.current_branch:
|
||||||
|
br = [x for x in br if x.name == project.CurrentBranch]
|
||||||
|
all_branches.extend(br)
|
||||||
|
|
||||||
|
if not all_branches:
|
||||||
|
return
|
||||||
|
|
||||||
|
self.out.nl()
|
||||||
|
self.heading('Projects Overview')
|
||||||
|
project = None
|
||||||
|
|
||||||
|
for branch in all_branches:
|
||||||
|
if project != branch.project:
|
||||||
|
project = branch.project
|
||||||
|
self.out.nl()
|
||||||
|
self.headtext(project.relpath)
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
commits = branch.commits
|
||||||
|
date = branch.date
|
||||||
|
self.text('%s %-33s (%2d commit%s, %s)' % (
|
||||||
|
branch.name == project.CurrentBranch and '*' or ' ',
|
||||||
|
branch.name,
|
||||||
|
len(commits),
|
||||||
|
len(commits) != 1 and 's' or '',
|
||||||
|
date))
|
||||||
|
self.out.nl()
|
||||||
|
|
||||||
|
for commit in commits:
|
||||||
|
split = commit.split()
|
||||||
|
self.text('{0:38}{1} '.format('','-'))
|
||||||
|
self.sha(split[0] + " ")
|
||||||
|
self.text(" ".join(split[1:]))
|
||||||
|
self.out.nl()
|
381
subcmds/init.py
381
subcmds/init.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,16 +14,30 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
import os
|
import os
|
||||||
|
import platform
|
||||||
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from color import Coloring
|
from pyversion import is_python3
|
||||||
from command import InteractiveCommand
|
if is_python3():
|
||||||
from error import ManifestParseError
|
import urllib.parse
|
||||||
from remote import Remote
|
else:
|
||||||
from git_command import git, MIN_GIT_VERSION
|
import imp
|
||||||
|
import urlparse
|
||||||
|
urllib = imp.new_module('urllib')
|
||||||
|
urllib.parse = urlparse
|
||||||
|
|
||||||
class Init(InteractiveCommand):
|
from color import Coloring
|
||||||
|
from command import InteractiveCommand, MirrorSafeCommand
|
||||||
|
from error import ManifestParseError
|
||||||
|
from project import SyncBuffer
|
||||||
|
from git_config import GitConfig
|
||||||
|
from git_command import git_require, MIN_GIT_VERSION
|
||||||
|
import platform_utils
|
||||||
|
|
||||||
|
class Init(InteractiveCommand, MirrorSafeCommand):
|
||||||
common = True
|
common = True
|
||||||
helpSummary = "Initialize repo in the current directory"
|
helpSummary = "Initialize repo in the current directory"
|
||||||
helpUsage = """
|
helpUsage = """
|
||||||
@ -34,12 +49,39 @@ The latest repo source code and manifest collection is downloaded
|
|||||||
from the server and is installed in the .repo/ directory in the
|
from the server and is installed in the .repo/ directory in the
|
||||||
current working directory.
|
current working directory.
|
||||||
|
|
||||||
The optional <manifest> argument can be used to specify an alternate
|
The optional -b argument can be used to select the manifest branch
|
||||||
manifest to be used. If no manifest is specified, the manifest
|
to checkout and use. If no branch is specified, master is assumed.
|
||||||
default.xml will be used.
|
|
||||||
|
The optional -m argument can be used to specify an alternate manifest
|
||||||
|
to be used. If no manifest is specified, the manifest default.xml
|
||||||
|
will be used.
|
||||||
|
|
||||||
|
The --reference option can be used to point to a directory that
|
||||||
|
has the content of a --mirror sync. This will make the working
|
||||||
|
directory use as much data as possible from the local reference
|
||||||
|
directory when fetching from the server. This will make the sync
|
||||||
|
go a lot faster by reducing data traffic on the network.
|
||||||
|
|
||||||
|
The --dissociate option can be used to borrow the objects from
|
||||||
|
the directory specified with the --reference option only to reduce
|
||||||
|
network transfer, and stop borrowing from them after a first clone
|
||||||
|
is made by making necessary local copies of borrowed objects.
|
||||||
|
|
||||||
|
The --no-clone-bundle option disables any attempt to use
|
||||||
|
$URL/clone.bundle to bootstrap a new Git repository from a
|
||||||
|
resumeable bundle file on a content delivery network. This
|
||||||
|
may be necessary if there are problems with the local Python
|
||||||
|
HTTP client or proxy configuration, but the Git binary works.
|
||||||
|
|
||||||
|
# Switching Manifest Branches
|
||||||
|
|
||||||
|
To switch to another manifest branch, `repo init -b otherbranch`
|
||||||
|
may be used in an existing client. However, as this only updates the
|
||||||
|
manifest, a subsequent `repo sync` (or `repo sync -d`) is necessary
|
||||||
|
to update the working directory files.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def _Options(self, p):
|
def _Options(self, p, gitc_init=False):
|
||||||
# Logging
|
# Logging
|
||||||
g = p.add_option_group('Logging options')
|
g = p.add_option_group('Logging options')
|
||||||
g.add_option('-q', '--quiet',
|
g.add_option('-q', '--quiet',
|
||||||
@ -54,12 +96,63 @@ default.xml will be used.
|
|||||||
g.add_option('-b', '--manifest-branch',
|
g.add_option('-b', '--manifest-branch',
|
||||||
dest='manifest_branch',
|
dest='manifest_branch',
|
||||||
help='manifest branch or revision', metavar='REVISION')
|
help='manifest branch or revision', metavar='REVISION')
|
||||||
|
cbr_opts = ['--current-branch']
|
||||||
|
# The gitc-init subcommand allocates -c itself, but a lot of init users
|
||||||
|
# want -c, so try to satisfy both as best we can.
|
||||||
|
if not gitc_init:
|
||||||
|
cbr_opts += ['-c']
|
||||||
|
g.add_option(*cbr_opts,
|
||||||
|
dest='current_branch_only', action='store_true',
|
||||||
|
help='fetch only current manifest branch from server')
|
||||||
g.add_option('-m', '--manifest-name',
|
g.add_option('-m', '--manifest-name',
|
||||||
dest='manifest_name', default='default.xml',
|
dest='manifest_name', default='default.xml',
|
||||||
help='initial manifest file', metavar='NAME.xml')
|
help='initial manifest file', metavar='NAME.xml')
|
||||||
|
g.add_option('--mirror',
|
||||||
|
dest='mirror', action='store_true',
|
||||||
|
help='create a replica of the remote repositories '
|
||||||
|
'rather than a client working directory')
|
||||||
|
g.add_option('--reference',
|
||||||
|
dest='reference',
|
||||||
|
help='location of mirror directory', metavar='DIR')
|
||||||
|
g.add_option('--dissociate',
|
||||||
|
dest='dissociate', action='store_true',
|
||||||
|
help='dissociate from reference mirrors after clone')
|
||||||
|
g.add_option('--depth', type='int', default=None,
|
||||||
|
dest='depth',
|
||||||
|
help='create a shallow clone with given depth; see git clone')
|
||||||
|
g.add_option('--partial-clone', action='store_true',
|
||||||
|
dest='partial_clone',
|
||||||
|
help='perform partial clone (https://git-scm.com/'
|
||||||
|
'docs/gitrepository-layout#_code_partialclone_code)')
|
||||||
|
g.add_option('--clone-filter', action='store', default='blob:none',
|
||||||
|
dest='clone_filter',
|
||||||
|
help='filter for use with --partial-clone [default: %default]')
|
||||||
|
g.add_option('--archive',
|
||||||
|
dest='archive', action='store_true',
|
||||||
|
help='checkout an archive instead of a git repository for '
|
||||||
|
'each project. See git archive.')
|
||||||
|
g.add_option('--submodules',
|
||||||
|
dest='submodules', action='store_true',
|
||||||
|
help='sync any submodules associated with the manifest repo')
|
||||||
|
g.add_option('-g', '--groups',
|
||||||
|
dest='groups', default='default',
|
||||||
|
help='restrict manifest projects to ones with specified '
|
||||||
|
'group(s) [default|all|G1,G2,G3|G4,-G5,-G6]',
|
||||||
|
metavar='GROUP')
|
||||||
|
g.add_option('-p', '--platform',
|
||||||
|
dest='platform', default='auto',
|
||||||
|
help='restrict manifest projects to ones with a specified '
|
||||||
|
'platform group [auto|all|none|linux|darwin|...]',
|
||||||
|
metavar='PLATFORM')
|
||||||
|
g.add_option('--no-clone-bundle',
|
||||||
|
dest='no_clone_bundle', action='store_true',
|
||||||
|
help='disable use of /clone.bundle on HTTP/HTTPS')
|
||||||
|
g.add_option('--no-tags',
|
||||||
|
dest='no_tags', action='store_true',
|
||||||
|
help="don't fetch tags in the manifest")
|
||||||
|
|
||||||
# Tool
|
# Tool
|
||||||
g = p.add_option_group('Version options')
|
g = p.add_option_group('repo Version options')
|
||||||
g.add_option('--repo-url',
|
g.add_option('--repo-url',
|
||||||
dest='repo_url',
|
dest='repo_url',
|
||||||
help='repo repository location', metavar='URL')
|
help='repo repository location', metavar='URL')
|
||||||
@ -70,78 +163,208 @@ default.xml will be used.
|
|||||||
dest='no_repo_verify', action='store_true',
|
dest='no_repo_verify', action='store_true',
|
||||||
help='do not verify repo source code')
|
help='do not verify repo source code')
|
||||||
|
|
||||||
def _CheckGitVersion(self):
|
# Other
|
||||||
ver_str = git.version()
|
g = p.add_option_group('Other options')
|
||||||
if not ver_str.startswith('git version '):
|
g.add_option('--config-name',
|
||||||
print >>sys.stderr, 'error: "%s" unsupported' % ver_str
|
dest='config_name', action="store_true", default=False,
|
||||||
sys.exit(1)
|
help='Always prompt for name/e-mail')
|
||||||
|
|
||||||
ver_str = ver_str[len('git version '):].strip()
|
def _RegisteredEnvironmentOptions(self):
|
||||||
ver_act = tuple(map(lambda x: int(x), ver_str.split('.')[0:3]))
|
return {'REPO_MANIFEST_URL': 'manifest_url',
|
||||||
if ver_act < MIN_GIT_VERSION:
|
'REPO_MIRROR_LOCATION': 'reference'}
|
||||||
need = '.'.join(map(lambda x: str(x), MIN_GIT_VERSION))
|
|
||||||
print >>sys.stderr, 'fatal: git %s or later required' % need
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
def _SyncManifest(self, opt):
|
def _SyncManifest(self, opt):
|
||||||
m = self.manifest.manifestProject
|
m = self.manifest.manifestProject
|
||||||
|
is_new = not m.Exists
|
||||||
|
|
||||||
if not m.Exists:
|
if is_new:
|
||||||
if not opt.manifest_url:
|
if not opt.manifest_url:
|
||||||
print >>sys.stderr, 'fatal: manifest url (-u) is required.'
|
print('fatal: manifest url (-u) is required.', file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if not opt.quiet:
|
if not opt.quiet:
|
||||||
print >>sys.stderr, 'Getting manifest ...'
|
print('Get %s' % GitConfig.ForUser().UrlInsteadOf(opt.manifest_url),
|
||||||
print >>sys.stderr, ' from %s' % opt.manifest_url
|
file=sys.stderr)
|
||||||
m._InitGitDir()
|
|
||||||
|
# The manifest project object doesn't keep track of the path on the
|
||||||
|
# server where this git is located, so let's save that here.
|
||||||
|
mirrored_manifest_git = None
|
||||||
|
if opt.reference:
|
||||||
|
manifest_git_path = urllib.parse.urlparse(opt.manifest_url).path[1:]
|
||||||
|
mirrored_manifest_git = os.path.join(opt.reference, manifest_git_path)
|
||||||
|
if not mirrored_manifest_git.endswith(".git"):
|
||||||
|
mirrored_manifest_git += ".git"
|
||||||
|
if not os.path.exists(mirrored_manifest_git):
|
||||||
|
mirrored_manifest_git = os.path.join(opt.reference,
|
||||||
|
'.repo/manifests.git')
|
||||||
|
|
||||||
|
m._InitGitDir(mirror_git=mirrored_manifest_git)
|
||||||
|
|
||||||
if opt.manifest_branch:
|
if opt.manifest_branch:
|
||||||
m.revision = opt.manifest_branch
|
m.revisionExpr = opt.manifest_branch
|
||||||
else:
|
else:
|
||||||
m.revision = 'refs/heads/master'
|
m.revisionExpr = 'refs/heads/master'
|
||||||
else:
|
else:
|
||||||
if opt.manifest_branch:
|
if opt.manifest_branch:
|
||||||
m.revision = opt.manifest_branch
|
m.revisionExpr = opt.manifest_branch
|
||||||
else:
|
else:
|
||||||
m.PreSync()
|
m.PreSync()
|
||||||
|
|
||||||
|
self._ConfigureDepth(opt)
|
||||||
|
|
||||||
if opt.manifest_url:
|
if opt.manifest_url:
|
||||||
r = m.GetRemote(m.remote.name)
|
r = m.GetRemote(m.remote.name)
|
||||||
r.url = opt.manifest_url
|
r.url = opt.manifest_url
|
||||||
r.ResetFetch()
|
r.ResetFetch()
|
||||||
r.Save()
|
r.Save()
|
||||||
|
|
||||||
m.Sync_NetworkHalf()
|
groups = re.split(r'[,\s]+', opt.groups)
|
||||||
m.Sync_LocalHalf()
|
all_platforms = ['linux', 'darwin', 'windows']
|
||||||
m.StartBranch('default')
|
platformize = lambda x: 'platform-' + x
|
||||||
|
if opt.platform == 'auto':
|
||||||
|
if (not opt.mirror and
|
||||||
|
not m.config.GetString('repo.mirror') == 'true'):
|
||||||
|
groups.append(platformize(platform.system().lower()))
|
||||||
|
elif opt.platform == 'all':
|
||||||
|
groups.extend(map(platformize, all_platforms))
|
||||||
|
elif opt.platform in all_platforms:
|
||||||
|
groups.append(platformize(opt.platform))
|
||||||
|
elif opt.platform != 'none':
|
||||||
|
print('fatal: invalid platform flag', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
groups = [x for x in groups if x]
|
||||||
|
groupstr = ','.join(groups)
|
||||||
|
if opt.platform == 'auto' and groupstr == 'default,platform-' + platform.system().lower():
|
||||||
|
groupstr = None
|
||||||
|
m.config.SetString('manifest.groups', groupstr)
|
||||||
|
|
||||||
|
if opt.reference:
|
||||||
|
m.config.SetString('repo.reference', opt.reference)
|
||||||
|
|
||||||
|
if opt.dissociate:
|
||||||
|
m.config.SetString('repo.dissociate', 'true')
|
||||||
|
|
||||||
|
if opt.archive:
|
||||||
|
if is_new:
|
||||||
|
m.config.SetString('repo.archive', 'true')
|
||||||
|
else:
|
||||||
|
print('fatal: --archive is only supported when initializing a new '
|
||||||
|
'workspace.', file=sys.stderr)
|
||||||
|
print('Either delete the .repo folder in this workspace, or initialize '
|
||||||
|
'in another location.', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if opt.mirror:
|
||||||
|
if is_new:
|
||||||
|
m.config.SetString('repo.mirror', 'true')
|
||||||
|
else:
|
||||||
|
print('fatal: --mirror is only supported when initializing a new '
|
||||||
|
'workspace.', file=sys.stderr)
|
||||||
|
print('Either delete the .repo folder in this workspace, or initialize '
|
||||||
|
'in another location.', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if opt.partial_clone:
|
||||||
|
if opt.mirror:
|
||||||
|
print('fatal: --mirror and --partial-clone are mutually exclusive',
|
||||||
|
file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
m.config.SetString('repo.partialclone', 'true')
|
||||||
|
if opt.clone_filter:
|
||||||
|
m.config.SetString('repo.clonefilter', opt.clone_filter)
|
||||||
|
else:
|
||||||
|
opt.clone_filter = None
|
||||||
|
|
||||||
|
if opt.submodules:
|
||||||
|
m.config.SetString('repo.submodules', 'true')
|
||||||
|
|
||||||
|
if not m.Sync_NetworkHalf(is_new=is_new, quiet=opt.quiet,
|
||||||
|
clone_bundle=not opt.no_clone_bundle,
|
||||||
|
current_branch_only=opt.current_branch_only,
|
||||||
|
no_tags=opt.no_tags, submodules=opt.submodules,
|
||||||
|
clone_filter=opt.clone_filter):
|
||||||
|
r = m.GetRemote(m.remote.name)
|
||||||
|
print('fatal: cannot obtain manifest %s' % r.url, file=sys.stderr)
|
||||||
|
|
||||||
|
# Better delete the manifest git dir if we created it; otherwise next
|
||||||
|
# time (when user fixes problems) we won't go through the "is_new" logic.
|
||||||
|
if is_new:
|
||||||
|
platform_utils.rmtree(m.gitdir)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
if opt.manifest_branch:
|
||||||
|
m.MetaBranchSwitch(submodules=opt.submodules)
|
||||||
|
|
||||||
|
syncbuf = SyncBuffer(m.config)
|
||||||
|
m.Sync_LocalHalf(syncbuf, submodules=opt.submodules)
|
||||||
|
syncbuf.Finish()
|
||||||
|
|
||||||
|
if is_new or m.CurrentBranch is None:
|
||||||
|
if not m.StartBranch('default'):
|
||||||
|
print('fatal: cannot create default in manifest', file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
def _LinkManifest(self, name):
|
def _LinkManifest(self, name):
|
||||||
if not name:
|
if not name:
|
||||||
print >>sys.stderr, 'fatal: manifest name (-m) is required.'
|
print('fatal: manifest name (-m) is required.', file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
self.manifest.Link(name)
|
self.manifest.Link(name)
|
||||||
except ManifestParseError, e:
|
except ManifestParseError as e:
|
||||||
print >>sys.stderr, "fatal: manifest '%s' not available" % name
|
print("fatal: manifest '%s' not available" % name, file=sys.stderr)
|
||||||
print >>sys.stderr, 'fatal: %s' % str(e)
|
print('fatal: %s' % str(e), file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
def _PromptKey(self, prompt, key, value):
|
def _Prompt(self, prompt, value):
|
||||||
|
print('%-10s [%s]: ' % (prompt, value), end='')
|
||||||
|
# TODO: When we require Python 3, use flush=True w/print above.
|
||||||
|
sys.stdout.flush()
|
||||||
|
a = sys.stdin.readline().strip()
|
||||||
|
if a == '':
|
||||||
|
return value
|
||||||
|
return a
|
||||||
|
|
||||||
|
def _ShouldConfigureUser(self):
|
||||||
|
gc = self.manifest.globalConfig
|
||||||
mp = self.manifest.manifestProject
|
mp = self.manifest.manifestProject
|
||||||
|
|
||||||
sys.stdout.write('%-10s [%s]: ' % (prompt, value))
|
# If we don't have local settings, get from global.
|
||||||
a = sys.stdin.readline().strip()
|
if not mp.config.Has('user.name') or not mp.config.Has('user.email'):
|
||||||
if a != '' and a != value:
|
if not gc.Has('user.name') or not gc.Has('user.email'):
|
||||||
mp.config.SetString(key, a)
|
return True
|
||||||
|
|
||||||
|
mp.config.SetString('user.name', gc.GetString('user.name'))
|
||||||
|
mp.config.SetString('user.email', gc.GetString('user.email'))
|
||||||
|
|
||||||
|
print()
|
||||||
|
print('Your identity is: %s <%s>' % (mp.config.GetString('user.name'),
|
||||||
|
mp.config.GetString('user.email')))
|
||||||
|
print('If you want to change this, please re-run \'repo init\' with --config-name')
|
||||||
|
return False
|
||||||
|
|
||||||
def _ConfigureUser(self):
|
def _ConfigureUser(self):
|
||||||
mp = self.manifest.manifestProject
|
mp = self.manifest.manifestProject
|
||||||
|
|
||||||
print ''
|
while True:
|
||||||
self._PromptKey('Your Name', 'user.name', mp.UserName)
|
print()
|
||||||
self._PromptKey('Your Email', 'user.email', mp.UserEmail)
|
name = self._Prompt('Your Name', mp.UserName)
|
||||||
|
email = self._Prompt('Your Email', mp.UserEmail)
|
||||||
|
|
||||||
|
print()
|
||||||
|
print('Your identity is: %s <%s>' % (name, email))
|
||||||
|
print('is this correct [y/N]? ', end='')
|
||||||
|
# TODO: When we require Python 3, use flush=True w/print above.
|
||||||
|
sys.stdout.flush()
|
||||||
|
a = sys.stdin.readline().strip().lower()
|
||||||
|
if a in ('yes', 'y', 't', 'true'):
|
||||||
|
break
|
||||||
|
|
||||||
|
if name != mp.UserName:
|
||||||
|
mp.config.SetString('user.name', name)
|
||||||
|
if email != mp.UserEmail:
|
||||||
|
mp.config.SetString('user.email', email)
|
||||||
|
|
||||||
def _HasColorSet(self, gc):
|
def _HasColorSet(self, gc):
|
||||||
for n in ['ui', 'diff', 'status']:
|
for n in ['ui', 'diff', 'status']:
|
||||||
@ -160,34 +383,82 @@ default.xml will be used.
|
|||||||
self._on = True
|
self._on = True
|
||||||
out = _Test()
|
out = _Test()
|
||||||
|
|
||||||
print ''
|
print()
|
||||||
print "Testing colorized output (for 'repo diff', 'repo status'):"
|
print("Testing colorized output (for 'repo diff', 'repo status'):")
|
||||||
|
|
||||||
for c in ['black','red','green','yellow','blue','magenta','cyan']:
|
for c in ['black', 'red', 'green', 'yellow', 'blue', 'magenta', 'cyan']:
|
||||||
out.write(' ')
|
out.write(' ')
|
||||||
out.printer(fg=c)(' %-6s ', c)
|
out.printer(fg=c)(' %-6s ', c)
|
||||||
out.write(' ')
|
out.write(' ')
|
||||||
out.printer(fg='white', bg='black')(' %s ' % 'white')
|
out.printer(fg='white', bg='black')(' %s ' % 'white')
|
||||||
out.nl()
|
out.nl()
|
||||||
|
|
||||||
for c in ['bold','dim','ul','reverse']:
|
for c in ['bold', 'dim', 'ul', 'reverse']:
|
||||||
out.write(' ')
|
out.write(' ')
|
||||||
out.printer(fg='black', attr=c)(' %-6s ', c)
|
out.printer(fg='black', attr=c)(' %-6s ', c)
|
||||||
out.nl()
|
out.nl()
|
||||||
|
|
||||||
sys.stdout.write('Enable color display in this user account (y/n)? ')
|
print('Enable color display in this user account (y/N)? ', end='')
|
||||||
|
# TODO: When we require Python 3, use flush=True w/print above.
|
||||||
|
sys.stdout.flush()
|
||||||
a = sys.stdin.readline().strip().lower()
|
a = sys.stdin.readline().strip().lower()
|
||||||
if a in ('y', 'yes', 't', 'true', 'on'):
|
if a in ('y', 'yes', 't', 'true', 'on'):
|
||||||
gc.SetString('color.ui', 'auto')
|
gc.SetString('color.ui', 'auto')
|
||||||
|
|
||||||
|
def _ConfigureDepth(self, opt):
|
||||||
|
"""Configure the depth we'll sync down.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
opt: Options from optparse. We care about opt.depth.
|
||||||
|
"""
|
||||||
|
# Opt.depth will be non-None if user actually passed --depth to repo init.
|
||||||
|
if opt.depth is not None:
|
||||||
|
if opt.depth > 0:
|
||||||
|
# Positive values will set the depth.
|
||||||
|
depth = str(opt.depth)
|
||||||
|
else:
|
||||||
|
# Negative numbers will clear the depth; passing None to SetString
|
||||||
|
# will do that.
|
||||||
|
depth = None
|
||||||
|
|
||||||
|
# We store the depth in the main manifest project.
|
||||||
|
self.manifest.manifestProject.config.SetString('repo.depth', depth)
|
||||||
|
|
||||||
|
def _DisplayResult(self):
|
||||||
|
if self.manifest.IsMirror:
|
||||||
|
init_type = 'mirror '
|
||||||
|
else:
|
||||||
|
init_type = ''
|
||||||
|
|
||||||
|
print()
|
||||||
|
print('repo %shas been initialized in %s'
|
||||||
|
% (init_type, self.manifest.topdir))
|
||||||
|
|
||||||
|
current_dir = os.getcwd()
|
||||||
|
if current_dir != self.manifest.topdir:
|
||||||
|
print('If this is not the directory in which you want to initialize '
|
||||||
|
'repo, please run:')
|
||||||
|
print(' rm -r %s/.repo' % self.manifest.topdir)
|
||||||
|
print('and try again.')
|
||||||
|
|
||||||
|
def ValidateOptions(self, opt, args):
|
||||||
|
if opt.reference:
|
||||||
|
opt.reference = os.path.expanduser(opt.reference)
|
||||||
|
|
||||||
|
# Check this here, else manifest will be tagged "not new" and init won't be
|
||||||
|
# possible anymore without removing the .repo/manifests directory.
|
||||||
|
if opt.archive and opt.mirror:
|
||||||
|
self.OptionParser.error('--mirror and --archive cannot be used together.')
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
self._CheckGitVersion()
|
git_require(MIN_GIT_VERSION, fail=True)
|
||||||
|
|
||||||
self._SyncManifest(opt)
|
self._SyncManifest(opt)
|
||||||
self._LinkManifest(opt.manifest_name)
|
self._LinkManifest(opt.manifest_name)
|
||||||
|
|
||||||
if os.isatty(0) and os.isatty(1):
|
if os.isatty(0) and os.isatty(1) and not self.manifest.IsMirror:
|
||||||
self._ConfigureUser()
|
if opt.config_name or self._ShouldConfigureUser():
|
||||||
|
self._ConfigureUser()
|
||||||
self._ConfigureColor()
|
self._ConfigureColor()
|
||||||
|
|
||||||
print ''
|
self._DisplayResult()
|
||||||
print 'repo initialized in %s' % self.manifest.topdir
|
|
||||||
|
87
subcmds/list.py
Normal file
87
subcmds/list.py
Normal file
@ -0,0 +1,87 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2011 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from command import Command, MirrorSafeCommand
|
||||||
|
|
||||||
|
class List(Command, MirrorSafeCommand):
|
||||||
|
common = True
|
||||||
|
helpSummary = "List projects and their associated directories"
|
||||||
|
helpUsage = """
|
||||||
|
%prog [-f] [<project>...]
|
||||||
|
%prog [-f] -r str1 [str2]..."
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
List all projects; pass '.' to list the project for the cwd.
|
||||||
|
|
||||||
|
This is similar to running: repo forall -c 'echo "$REPO_PATH : $REPO_PROJECT"'.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('-r', '--regex',
|
||||||
|
dest='regex', action='store_true',
|
||||||
|
help="Filter the project list based on regex or wildcard matching of strings")
|
||||||
|
p.add_option('-g', '--groups',
|
||||||
|
dest='groups',
|
||||||
|
help="Filter the project list based on the groups the project is in")
|
||||||
|
p.add_option('-f', '--fullpath',
|
||||||
|
dest='fullpath', action='store_true',
|
||||||
|
help="Display the full work tree path instead of the relative path")
|
||||||
|
p.add_option('-n', '--name-only',
|
||||||
|
dest='name_only', action='store_true',
|
||||||
|
help="Display only the name of the repository")
|
||||||
|
p.add_option('-p', '--path-only',
|
||||||
|
dest='path_only', action='store_true',
|
||||||
|
help="Display only the path of the repository")
|
||||||
|
|
||||||
|
def ValidateOptions(self, opt, args):
|
||||||
|
if opt.fullpath and opt.name_only:
|
||||||
|
self.OptionParser.error('cannot combine -f and -n')
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
"""List all projects and the associated directories.
|
||||||
|
|
||||||
|
This may be possible to do with 'repo forall', but repo newbies have
|
||||||
|
trouble figuring that out. The idea here is that it should be more
|
||||||
|
discoverable.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
opt: The options.
|
||||||
|
args: Positional args. Can be a list of projects to list, or empty.
|
||||||
|
"""
|
||||||
|
if not opt.regex:
|
||||||
|
projects = self.GetProjects(args, groups=opt.groups)
|
||||||
|
else:
|
||||||
|
projects = self.FindProjects(args)
|
||||||
|
|
||||||
|
def _getpath(x):
|
||||||
|
if opt.fullpath:
|
||||||
|
return x.worktree
|
||||||
|
return x.relpath
|
||||||
|
|
||||||
|
lines = []
|
||||||
|
for project in projects:
|
||||||
|
if opt.name_only and not opt.path_only:
|
||||||
|
lines.append("%s" % ( project.name))
|
||||||
|
elif opt.path_only and not opt.name_only:
|
||||||
|
lines.append("%s" % (_getpath(project)))
|
||||||
|
else:
|
||||||
|
lines.append("%s : %s" % (_getpath(project), project.name))
|
||||||
|
|
||||||
|
lines.sort()
|
||||||
|
print('\n'.join(lines))
|
80
subcmds/manifest.py
Normal file
80
subcmds/manifest.py
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import os
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from command import PagedCommand
|
||||||
|
|
||||||
|
class Manifest(PagedCommand):
|
||||||
|
common = False
|
||||||
|
helpSummary = "Manifest inspection utility"
|
||||||
|
helpUsage = """
|
||||||
|
%prog [-o {-|NAME.xml} [-r]]
|
||||||
|
"""
|
||||||
|
_helpDescription = """
|
||||||
|
|
||||||
|
With the -o option, exports the current manifest for inspection.
|
||||||
|
The manifest and (if present) local_manifest.xml are combined
|
||||||
|
together to produce a single manifest file. This file can be stored
|
||||||
|
in a Git repository for use during future 'repo init' invocations.
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
@property
|
||||||
|
def helpDescription(self):
|
||||||
|
helptext = self._helpDescription + '\n'
|
||||||
|
r = os.path.dirname(__file__)
|
||||||
|
r = os.path.dirname(r)
|
||||||
|
with open(os.path.join(r, 'docs', 'manifest-format.md')) as fd:
|
||||||
|
for line in fd:
|
||||||
|
helptext += line
|
||||||
|
return helptext
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('-r', '--revision-as-HEAD',
|
||||||
|
dest='peg_rev', action='store_true',
|
||||||
|
help='Save revisions as current HEAD')
|
||||||
|
p.add_option('--suppress-upstream-revision', dest='peg_rev_upstream',
|
||||||
|
default=True, action='store_false',
|
||||||
|
help='If in -r mode, do not write the upstream field. '
|
||||||
|
'Only of use if the branch names for a sha1 manifest are '
|
||||||
|
'sensitive.')
|
||||||
|
p.add_option('-o', '--output-file',
|
||||||
|
dest='output_file',
|
||||||
|
default='-',
|
||||||
|
help='File to save the manifest to',
|
||||||
|
metavar='-|NAME.xml')
|
||||||
|
|
||||||
|
def _Output(self, opt):
|
||||||
|
if opt.output_file == '-':
|
||||||
|
fd = sys.stdout
|
||||||
|
else:
|
||||||
|
fd = open(opt.output_file, 'w')
|
||||||
|
self.manifest.Save(fd,
|
||||||
|
peg_rev = opt.peg_rev,
|
||||||
|
peg_rev_upstream = opt.peg_rev_upstream)
|
||||||
|
fd.close()
|
||||||
|
if opt.output_file != '-':
|
||||||
|
print('Saved manifest to %s' % opt.output_file, file=sys.stderr)
|
||||||
|
|
||||||
|
def ValidateOptions(self, opt, args):
|
||||||
|
if args:
|
||||||
|
self.Usage()
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
self._Output(opt)
|
85
subcmds/overview.py
Normal file
85
subcmds/overview.py
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2012 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
from color import Coloring
|
||||||
|
from command import PagedCommand
|
||||||
|
|
||||||
|
|
||||||
|
class Overview(PagedCommand):
|
||||||
|
common = True
|
||||||
|
helpSummary = "Display overview of unmerged project branches"
|
||||||
|
helpUsage = """
|
||||||
|
%prog [--current-branch] [<project>...]
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
The '%prog' command is used to display an overview of the projects branches,
|
||||||
|
and list any local commits that have not yet been merged into the project.
|
||||||
|
|
||||||
|
The -b/--current-branch option can be used to restrict the output to only
|
||||||
|
branches currently checked out in each project. By default, all branches
|
||||||
|
are displayed.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('-b', '--current-branch',
|
||||||
|
dest="current_branch", action="store_true",
|
||||||
|
help="Consider only checked out branches")
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
all_branches = []
|
||||||
|
for project in self.GetProjects(args):
|
||||||
|
br = [project.GetUploadableBranch(x)
|
||||||
|
for x in project.GetBranches()]
|
||||||
|
br = [x for x in br if x]
|
||||||
|
if opt.current_branch:
|
||||||
|
br = [x for x in br if x.name == project.CurrentBranch]
|
||||||
|
all_branches.extend(br)
|
||||||
|
|
||||||
|
if not all_branches:
|
||||||
|
return
|
||||||
|
|
||||||
|
class Report(Coloring):
|
||||||
|
def __init__(self, config):
|
||||||
|
Coloring.__init__(self, config, 'status')
|
||||||
|
self.project = self.printer('header', attr='bold')
|
||||||
|
self.text = self.printer('text')
|
||||||
|
|
||||||
|
out = Report(all_branches[0].project.config)
|
||||||
|
out.text("Deprecated. See repo info -o.")
|
||||||
|
out.nl()
|
||||||
|
out.project('Projects Overview')
|
||||||
|
out.nl()
|
||||||
|
|
||||||
|
project = None
|
||||||
|
|
||||||
|
for branch in all_branches:
|
||||||
|
if project != branch.project:
|
||||||
|
project = branch.project
|
||||||
|
out.nl()
|
||||||
|
out.project('project %s/' % project.relpath)
|
||||||
|
out.nl()
|
||||||
|
|
||||||
|
commits = branch.commits
|
||||||
|
date = branch.date
|
||||||
|
print('%s %-33s (%2d commit%s, %s)' % (
|
||||||
|
branch.name == project.CurrentBranch and '*' or ' ',
|
||||||
|
branch.name,
|
||||||
|
len(commits),
|
||||||
|
len(commits) != 1 and 's' or ' ',
|
||||||
|
date))
|
||||||
|
for commit in commits:
|
||||||
|
print('%-35s - %s' % ('', commit))
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,6 +14,7 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
from color import Coloring
|
from color import Coloring
|
||||||
from command import PagedCommand
|
from command import PagedCommand
|
||||||
|
|
||||||
@ -24,11 +26,11 @@ class Prune(PagedCommand):
|
|||||||
"""
|
"""
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
all = []
|
all_branches = []
|
||||||
for project in self.GetProjects(args):
|
for project in self.GetProjects(args):
|
||||||
all.extend(project.PruneHeads())
|
all_branches.extend(project.PruneHeads())
|
||||||
|
|
||||||
if not all:
|
if not all_branches:
|
||||||
return
|
return
|
||||||
|
|
||||||
class Report(Coloring):
|
class Report(Coloring):
|
||||||
@ -36,24 +38,29 @@ class Prune(PagedCommand):
|
|||||||
Coloring.__init__(self, config, 'status')
|
Coloring.__init__(self, config, 'status')
|
||||||
self.project = self.printer('header', attr='bold')
|
self.project = self.printer('header', attr='bold')
|
||||||
|
|
||||||
out = Report(all[0].project.config)
|
out = Report(all_branches[0].project.config)
|
||||||
out.project('Pending Branches')
|
out.project('Pending Branches')
|
||||||
out.nl()
|
out.nl()
|
||||||
|
|
||||||
project = None
|
project = None
|
||||||
|
|
||||||
for branch in all:
|
for branch in all_branches:
|
||||||
if project != branch.project:
|
if project != branch.project:
|
||||||
project = branch.project
|
project = branch.project
|
||||||
out.nl()
|
out.nl()
|
||||||
out.project('project %s/' % project.relpath)
|
out.project('project %s/' % project.relpath)
|
||||||
out.nl()
|
out.nl()
|
||||||
|
|
||||||
commits = branch.commits
|
print('%s %-33s ' % (
|
||||||
date = branch.date
|
|
||||||
print '%s %-33s (%2d commit%s, %s)' % (
|
|
||||||
branch.name == project.CurrentBranch and '*' or ' ',
|
branch.name == project.CurrentBranch and '*' or ' ',
|
||||||
branch.name,
|
branch.name), end='')
|
||||||
|
|
||||||
|
if not branch.base_exists:
|
||||||
|
print('(ignoring: tracking branch is gone: %s)' % (branch.base,))
|
||||||
|
else:
|
||||||
|
commits = branch.commits
|
||||||
|
date = branch.date
|
||||||
|
print('(%2d commit%s, %s)' % (
|
||||||
len(commits),
|
len(commits),
|
||||||
len(commits) != 1 and 's' or ' ',
|
len(commits) != 1 and 's' or ' ',
|
||||||
date)
|
date))
|
||||||
|
169
subcmds/rebase.py
Normal file
169
subcmds/rebase.py
Normal file
@ -0,0 +1,169 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2010 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from color import Coloring
|
||||||
|
from command import Command
|
||||||
|
from git_command import GitCommand
|
||||||
|
|
||||||
|
|
||||||
|
class RebaseColoring(Coloring):
|
||||||
|
def __init__(self, config):
|
||||||
|
Coloring.__init__(self, config, 'rebase')
|
||||||
|
self.project = self.printer('project', attr='bold')
|
||||||
|
self.fail = self.printer('fail', fg='red')
|
||||||
|
|
||||||
|
|
||||||
|
class Rebase(Command):
|
||||||
|
common = True
|
||||||
|
helpSummary = "Rebase local branches on upstream branch"
|
||||||
|
helpUsage = """
|
||||||
|
%prog {[<project>...] | -i <project>...}
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
'%prog' uses git rebase to move local changes in the current topic branch to
|
||||||
|
the HEAD of the upstream history, useful when you have made commits in a topic
|
||||||
|
branch but need to incorporate new upstream changes "underneath" them.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('-i', '--interactive',
|
||||||
|
dest="interactive", action="store_true",
|
||||||
|
help="interactive rebase (single project only)")
|
||||||
|
|
||||||
|
p.add_option('--fail-fast',
|
||||||
|
dest='fail_fast', action='store_true',
|
||||||
|
help='Stop rebasing after first error is hit')
|
||||||
|
p.add_option('-f', '--force-rebase',
|
||||||
|
dest='force_rebase', action='store_true',
|
||||||
|
help='Pass --force-rebase to git rebase')
|
||||||
|
p.add_option('--no-ff',
|
||||||
|
dest='no_ff', action='store_true',
|
||||||
|
help='Pass --no-ff to git rebase')
|
||||||
|
p.add_option('-q', '--quiet',
|
||||||
|
dest='quiet', action='store_true',
|
||||||
|
help='Pass --quiet to git rebase')
|
||||||
|
p.add_option('--autosquash',
|
||||||
|
dest='autosquash', action='store_true',
|
||||||
|
help='Pass --autosquash to git rebase')
|
||||||
|
p.add_option('--whitespace',
|
||||||
|
dest='whitespace', action='store', metavar='WS',
|
||||||
|
help='Pass --whitespace to git rebase')
|
||||||
|
p.add_option('--auto-stash',
|
||||||
|
dest='auto_stash', action='store_true',
|
||||||
|
help='Stash local modifications before starting')
|
||||||
|
p.add_option('-m', '--onto-manifest',
|
||||||
|
dest='onto_manifest', action='store_true',
|
||||||
|
help='Rebase onto the manifest version instead of upstream '
|
||||||
|
'HEAD. This helps to make sure the local tree stays '
|
||||||
|
'consistent if you previously synced to a manifest.')
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
all_projects = self.GetProjects(args)
|
||||||
|
one_project = len(all_projects) == 1
|
||||||
|
|
||||||
|
if opt.interactive and not one_project:
|
||||||
|
print('error: interactive rebase not supported with multiple projects',
|
||||||
|
file=sys.stderr)
|
||||||
|
if len(args) == 1:
|
||||||
|
print('note: project %s is mapped to more than one path' % (args[0],),
|
||||||
|
file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
|
||||||
|
# Setup the common git rebase args that we use for all projects.
|
||||||
|
common_args = ['rebase']
|
||||||
|
if opt.whitespace:
|
||||||
|
common_args.append('--whitespace=%s' % opt.whitespace)
|
||||||
|
if opt.quiet:
|
||||||
|
common_args.append('--quiet')
|
||||||
|
if opt.force_rebase:
|
||||||
|
common_args.append('--force-rebase')
|
||||||
|
if opt.no_ff:
|
||||||
|
common_args.append('--no-ff')
|
||||||
|
if opt.autosquash:
|
||||||
|
common_args.append('--autosquash')
|
||||||
|
if opt.interactive:
|
||||||
|
common_args.append('-i')
|
||||||
|
|
||||||
|
config = self.manifest.manifestProject.config
|
||||||
|
out = RebaseColoring(config)
|
||||||
|
out.redirect(sys.stdout)
|
||||||
|
|
||||||
|
ret = 0
|
||||||
|
for project in all_projects:
|
||||||
|
if ret and opt.fail_fast:
|
||||||
|
break
|
||||||
|
|
||||||
|
cb = project.CurrentBranch
|
||||||
|
if not cb:
|
||||||
|
if one_project:
|
||||||
|
print("error: project %s has a detached HEAD" % project.relpath,
|
||||||
|
file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
# ignore branches with detatched HEADs
|
||||||
|
continue
|
||||||
|
|
||||||
|
upbranch = project.GetBranch(cb)
|
||||||
|
if not upbranch.LocalMerge:
|
||||||
|
if one_project:
|
||||||
|
print("error: project %s does not track any remote branches"
|
||||||
|
% project.relpath, file=sys.stderr)
|
||||||
|
return 1
|
||||||
|
# ignore branches without remotes
|
||||||
|
continue
|
||||||
|
|
||||||
|
args = common_args[:]
|
||||||
|
if opt.onto_manifest:
|
||||||
|
args.append('--onto')
|
||||||
|
args.append(project.revisionExpr)
|
||||||
|
|
||||||
|
args.append(upbranch.LocalMerge)
|
||||||
|
|
||||||
|
out.project('project %s: rebasing %s -> %s',
|
||||||
|
project.relpath, cb, upbranch.LocalMerge)
|
||||||
|
out.nl()
|
||||||
|
out.flush()
|
||||||
|
|
||||||
|
needs_stash = False
|
||||||
|
if opt.auto_stash:
|
||||||
|
stash_args = ["update-index", "--refresh", "-q"]
|
||||||
|
|
||||||
|
if GitCommand(project, stash_args).Wait() != 0:
|
||||||
|
needs_stash = True
|
||||||
|
# Dirty index, requires stash...
|
||||||
|
stash_args = ["stash"]
|
||||||
|
|
||||||
|
if GitCommand(project, stash_args).Wait() != 0:
|
||||||
|
ret += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
if GitCommand(project, args).Wait() != 0:
|
||||||
|
ret += 1
|
||||||
|
continue
|
||||||
|
|
||||||
|
if needs_stash:
|
||||||
|
stash_args.append('pop')
|
||||||
|
stash_args.append('--quiet')
|
||||||
|
if GitCommand(project, stash_args).Wait() != 0:
|
||||||
|
ret += 1
|
||||||
|
|
||||||
|
if ret:
|
||||||
|
out.fail('%i projects had errors', ret)
|
||||||
|
out.nl()
|
||||||
|
|
||||||
|
return ret
|
63
subcmds/selfupdate.py
Normal file
63
subcmds/selfupdate.py
Normal file
@ -0,0 +1,63 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
from optparse import SUPPRESS_HELP
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from command import Command, MirrorSafeCommand
|
||||||
|
from subcmds.sync import _PostRepoUpgrade
|
||||||
|
from subcmds.sync import _PostRepoFetch
|
||||||
|
|
||||||
|
class Selfupdate(Command, MirrorSafeCommand):
|
||||||
|
common = False
|
||||||
|
helpSummary = "Update repo to the latest version"
|
||||||
|
helpUsage = """
|
||||||
|
%prog
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
The '%prog' command upgrades repo to the latest version, if a
|
||||||
|
newer version is available.
|
||||||
|
|
||||||
|
Normally this is done automatically by 'repo sync' and does not
|
||||||
|
need to be performed by an end-user.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
g = p.add_option_group('repo Version options')
|
||||||
|
g.add_option('--no-repo-verify',
|
||||||
|
dest='no_repo_verify', action='store_true',
|
||||||
|
help='do not verify repo source code')
|
||||||
|
g.add_option('--repo-upgraded',
|
||||||
|
dest='repo_upgraded', action='store_true',
|
||||||
|
help=SUPPRESS_HELP)
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
rp = self.manifest.repoProject
|
||||||
|
rp.PreSync()
|
||||||
|
|
||||||
|
if opt.repo_upgraded:
|
||||||
|
_PostRepoUpgrade(self.manifest)
|
||||||
|
|
||||||
|
else:
|
||||||
|
if not rp.Sync_NetworkHalf():
|
||||||
|
print("error: can't update repo", file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
rp.bare_git.gc('--auto')
|
||||||
|
_PostRepoFetch(rp,
|
||||||
|
no_repo_verify = opt.no_repo_verify,
|
||||||
|
verbose = True)
|
34
subcmds/smartsync.py
Normal file
34
subcmds/smartsync.py
Normal file
@ -0,0 +1,34 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2010 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from subcmds.sync import Sync
|
||||||
|
|
||||||
|
class Smartsync(Sync):
|
||||||
|
common = True
|
||||||
|
helpSummary = "Update working tree to the latest known good revision"
|
||||||
|
helpUsage = """
|
||||||
|
%prog [<project>...]
|
||||||
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
The '%prog' command is a shortcut for sync -s.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
Sync._Options(self, p, show_smart=False)
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
opt.smart_sync = True
|
||||||
|
Sync.Execute(self, opt, args)
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,6 +14,7 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from color import Coloring
|
from color import Coloring
|
||||||
@ -48,19 +50,19 @@ The '%prog' command stages files to prepare the next commit.
|
|||||||
self.Usage()
|
self.Usage()
|
||||||
|
|
||||||
def _Interactive(self, opt, args):
|
def _Interactive(self, opt, args):
|
||||||
all = filter(lambda x: x.IsDirty(), self.GetProjects(args))
|
all_projects = [p for p in self.GetProjects(args) if p.IsDirty()]
|
||||||
if not all:
|
if not all_projects:
|
||||||
print >>sys.stderr,'no projects have uncommitted modifications'
|
print('no projects have uncommitted modifications', file=sys.stderr)
|
||||||
return
|
return
|
||||||
|
|
||||||
out = _ProjectList(self.manifest.manifestProject.config)
|
out = _ProjectList(self.manifest.manifestProject.config)
|
||||||
while True:
|
while True:
|
||||||
out.header(' %-20s %s', 'project', 'path')
|
out.header(' %s', 'project')
|
||||||
out.nl()
|
out.nl()
|
||||||
|
|
||||||
for i in xrange(0, len(all)):
|
for i in range(len(all_projects)):
|
||||||
p = all[i]
|
project = all_projects[i]
|
||||||
out.write('%3d: %-20s %s', i + 1, p.name, p.relpath + '/')
|
out.write('%3d: %s', i + 1, project.relpath + '/')
|
||||||
out.nl()
|
out.nl()
|
||||||
out.nl()
|
out.nl()
|
||||||
|
|
||||||
@ -93,15 +95,15 @@ The '%prog' command stages files to prepare the next commit.
|
|||||||
if a_index is not None:
|
if a_index is not None:
|
||||||
if a_index == 0:
|
if a_index == 0:
|
||||||
break
|
break
|
||||||
if 0 < a_index and a_index <= len(all):
|
if 0 < a_index and a_index <= len(all_projects):
|
||||||
_AddI(all[a_index - 1])
|
_AddI(all_projects[a_index - 1])
|
||||||
continue
|
continue
|
||||||
|
|
||||||
p = filter(lambda x: x.name == a or x.relpath == a, all)
|
projects = [p for p in all_projects if a in [p.name, p.relpath]]
|
||||||
if len(p) == 1:
|
if len(projects) == 1:
|
||||||
_AddI(p[0])
|
_AddI(projects[0])
|
||||||
continue
|
continue
|
||||||
print 'Bye.'
|
print('Bye.')
|
||||||
|
|
||||||
def _AddI(project):
|
def _AddI(project):
|
||||||
p = GitCommand(project, ['add', '--interactive'], bare=False)
|
p = GitCommand(project, ['add', '--interactive'], bare=False)
|
||||||
|
113
subcmds/start.py
113
subcmds/start.py
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,39 +14,111 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import os
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from command import Command
|
from command import Command
|
||||||
|
from git_config import IsImmutable
|
||||||
from git_command import git
|
from git_command import git
|
||||||
|
import gitc_utils
|
||||||
|
from progress import Progress
|
||||||
|
from project import SyncBuffer
|
||||||
|
|
||||||
class Start(Command):
|
class Start(Command):
|
||||||
common = True
|
common = True
|
||||||
helpSummary = "Start a new branch for development"
|
helpSummary = "Start a new branch for development"
|
||||||
helpUsage = """
|
helpUsage = """
|
||||||
%prog <newbranchname> [<project>...]
|
%prog <newbranchname> [--all | <project>...]
|
||||||
|
"""
|
||||||
This subcommand starts a new branch of development that is automatically
|
helpDescription = """
|
||||||
pulled from a remote branch.
|
'%prog' begins a new branch of development, starting from the
|
||||||
|
revision specified in the manifest.
|
||||||
It is equivalent to the following git commands:
|
|
||||||
|
|
||||||
"git branch --track <newbranchname> m/<codeline>",
|
|
||||||
or
|
|
||||||
"git checkout --track -b <newbranchname> m/<codeline>".
|
|
||||||
|
|
||||||
All three forms set up the config entries that repo bases some of its
|
|
||||||
processing on. Use %prog or git branch or checkout with --track to ensure
|
|
||||||
the configuration data is set up properly.
|
|
||||||
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def _Options(self, p):
|
||||||
|
p.add_option('--all',
|
||||||
|
dest='all', action='store_true',
|
||||||
|
help='begin branch in all projects')
|
||||||
|
p.add_option('-r', '--rev', '--revision', dest='revision',
|
||||||
|
help='point branch at this revision instead of upstream')
|
||||||
|
p.add_option('--head', dest='revision', action='store_const', const='HEAD',
|
||||||
|
help='abbreviation for --rev HEAD')
|
||||||
|
|
||||||
|
def ValidateOptions(self, opt, args):
|
||||||
if not args:
|
if not args:
|
||||||
self.Usage()
|
self.Usage()
|
||||||
|
|
||||||
nb = args[0]
|
nb = args[0]
|
||||||
if not git.check_ref_format('heads/%s' % nb):
|
if not git.check_ref_format('heads/%s' % nb):
|
||||||
print >>sys.stderr, "error: '%s' is not a valid name" % nb
|
self.OptionParser.error("'%s' is not a valid name" % nb)
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
for project in self.GetProjects(args[1:]):
|
def Execute(self, opt, args):
|
||||||
project.StartBranch(nb)
|
nb = args[0]
|
||||||
|
err = []
|
||||||
|
projects = []
|
||||||
|
if not opt.all:
|
||||||
|
projects = args[1:]
|
||||||
|
if len(projects) < 1:
|
||||||
|
projects = ['.',] # start it in the local project by default
|
||||||
|
|
||||||
|
all_projects = self.GetProjects(projects,
|
||||||
|
missing_ok=bool(self.gitc_manifest))
|
||||||
|
|
||||||
|
# This must happen after we find all_projects, since GetProjects may need
|
||||||
|
# the local directory, which will disappear once we save the GITC manifest.
|
||||||
|
if self.gitc_manifest:
|
||||||
|
gitc_projects = self.GetProjects(projects, manifest=self.gitc_manifest,
|
||||||
|
missing_ok=True)
|
||||||
|
for project in gitc_projects:
|
||||||
|
if project.old_revision:
|
||||||
|
project.already_synced = True
|
||||||
|
else:
|
||||||
|
project.already_synced = False
|
||||||
|
project.old_revision = project.revisionExpr
|
||||||
|
project.revisionExpr = None
|
||||||
|
# Save the GITC manifest.
|
||||||
|
gitc_utils.save_manifest(self.gitc_manifest)
|
||||||
|
|
||||||
|
# Make sure we have a valid CWD
|
||||||
|
if not os.path.exists(os.getcwd()):
|
||||||
|
os.chdir(self.manifest.topdir)
|
||||||
|
|
||||||
|
pm = Progress('Starting %s' % nb, len(all_projects))
|
||||||
|
for project in all_projects:
|
||||||
|
pm.update()
|
||||||
|
|
||||||
|
if self.gitc_manifest:
|
||||||
|
gitc_project = self.gitc_manifest.paths[project.relpath]
|
||||||
|
# Sync projects that have not been opened.
|
||||||
|
if not gitc_project.already_synced:
|
||||||
|
proj_localdir = os.path.join(self.gitc_manifest.gitc_client_dir,
|
||||||
|
project.relpath)
|
||||||
|
project.worktree = proj_localdir
|
||||||
|
if not os.path.exists(proj_localdir):
|
||||||
|
os.makedirs(proj_localdir)
|
||||||
|
project.Sync_NetworkHalf()
|
||||||
|
sync_buf = SyncBuffer(self.manifest.manifestProject.config)
|
||||||
|
project.Sync_LocalHalf(sync_buf)
|
||||||
|
project.revisionId = gitc_project.old_revision
|
||||||
|
|
||||||
|
# If the current revision is immutable, such as a SHA1, a tag or
|
||||||
|
# a change, then we can't push back to it. Substitute with
|
||||||
|
# dest_branch, if defined; or with manifest default revision instead.
|
||||||
|
branch_merge = ''
|
||||||
|
if IsImmutable(project.revisionExpr):
|
||||||
|
if project.dest_branch:
|
||||||
|
branch_merge = project.dest_branch
|
||||||
|
else:
|
||||||
|
branch_merge = self.manifest.default.revisionExpr
|
||||||
|
|
||||||
|
if not project.StartBranch(
|
||||||
|
nb, branch_merge=branch_merge, revision=opt.revision):
|
||||||
|
err.append(project)
|
||||||
|
pm.end()
|
||||||
|
|
||||||
|
if err:
|
||||||
|
for p in err:
|
||||||
|
print("error: %s/: cannot start %s" % (p.relpath, nb),
|
||||||
|
file=sys.stderr)
|
||||||
|
sys.exit(1)
|
||||||
|
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,15 +14,184 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
from command import PagedCommand
|
from command import PagedCommand
|
||||||
|
|
||||||
|
try:
|
||||||
|
import threading as _threading
|
||||||
|
except ImportError:
|
||||||
|
import dummy_threading as _threading
|
||||||
|
|
||||||
|
import glob
|
||||||
|
|
||||||
|
import itertools
|
||||||
|
import os
|
||||||
|
|
||||||
|
from color import Coloring
|
||||||
|
import platform_utils
|
||||||
|
|
||||||
class Status(PagedCommand):
|
class Status(PagedCommand):
|
||||||
common = True
|
common = True
|
||||||
helpSummary = "Show the working tree status"
|
helpSummary = "Show the working tree status"
|
||||||
helpUsage = """
|
helpUsage = """
|
||||||
%prog [<project>...]
|
%prog [<project>...]
|
||||||
"""
|
"""
|
||||||
|
helpDescription = """
|
||||||
|
'%prog' compares the working tree to the staging area (aka index),
|
||||||
|
and the most recent commit on this branch (HEAD), in each project
|
||||||
|
specified. A summary is displayed, one line per file where there
|
||||||
|
is a difference between these three states.
|
||||||
|
|
||||||
|
The -j/--jobs option can be used to run multiple status queries
|
||||||
|
in parallel.
|
||||||
|
|
||||||
|
The -o/--orphans option can be used to show objects that are in
|
||||||
|
the working directory, but not associated with a repo project.
|
||||||
|
This includes unmanaged top-level files and directories, but also
|
||||||
|
includes deeper items. For example, if dir/subdir/proj1 and
|
||||||
|
dir/subdir/proj2 are repo projects, dir/subdir/proj3 will be shown
|
||||||
|
if it is not known to repo.
|
||||||
|
|
||||||
|
# Status Display
|
||||||
|
|
||||||
|
The status display is organized into three columns of information,
|
||||||
|
for example if the file 'subcmds/status.py' is modified in the
|
||||||
|
project 'repo' on branch 'devwork':
|
||||||
|
|
||||||
|
project repo/ branch devwork
|
||||||
|
-m subcmds/status.py
|
||||||
|
|
||||||
|
The first column explains how the staging area (index) differs from
|
||||||
|
the last commit (HEAD). Its values are always displayed in upper
|
||||||
|
case and have the following meanings:
|
||||||
|
|
||||||
|
-: no difference
|
||||||
|
A: added (not in HEAD, in index )
|
||||||
|
M: modified ( in HEAD, in index, different content )
|
||||||
|
D: deleted ( in HEAD, not in index )
|
||||||
|
R: renamed (not in HEAD, in index, path changed )
|
||||||
|
C: copied (not in HEAD, in index, copied from another)
|
||||||
|
T: mode changed ( in HEAD, in index, same content )
|
||||||
|
U: unmerged; conflict resolution required
|
||||||
|
|
||||||
|
The second column explains how the working directory differs from
|
||||||
|
the index. Its values are always displayed in lower case and have
|
||||||
|
the following meanings:
|
||||||
|
|
||||||
|
-: new / unknown (not in index, in work tree )
|
||||||
|
m: modified ( in index, in work tree, modified )
|
||||||
|
d: deleted ( in index, not in work tree )
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
def _Options(self, p):
|
||||||
|
p.add_option('-j', '--jobs',
|
||||||
|
dest='jobs', action='store', type='int', default=2,
|
||||||
|
help="number of projects to check simultaneously")
|
||||||
|
p.add_option('-o', '--orphans',
|
||||||
|
dest='orphans', action='store_true',
|
||||||
|
help="include objects in working directory outside of repo projects")
|
||||||
|
p.add_option('-q', '--quiet', action='store_true',
|
||||||
|
help="only print the name of modified projects")
|
||||||
|
|
||||||
|
def _StatusHelper(self, project, clean_counter, sem, quiet):
|
||||||
|
"""Obtains the status for a specific project.
|
||||||
|
|
||||||
|
Obtains the status for a project, redirecting the output to
|
||||||
|
the specified object. It will release the semaphore
|
||||||
|
when done.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
project: Project to get status of.
|
||||||
|
clean_counter: Counter for clean projects.
|
||||||
|
sem: Semaphore, will call release() when complete.
|
||||||
|
output: Where to output the status.
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
state = project.PrintWorkTreeStatus(quiet=quiet)
|
||||||
|
if state == 'CLEAN':
|
||||||
|
next(clean_counter)
|
||||||
|
finally:
|
||||||
|
sem.release()
|
||||||
|
|
||||||
|
def _FindOrphans(self, dirs, proj_dirs, proj_dirs_parents, outstring):
|
||||||
|
"""find 'dirs' that are present in 'proj_dirs_parents' but not in 'proj_dirs'"""
|
||||||
|
status_header = ' --\t'
|
||||||
|
for item in dirs:
|
||||||
|
if not platform_utils.isdir(item):
|
||||||
|
outstring.append(''.join([status_header, item]))
|
||||||
|
continue
|
||||||
|
if item in proj_dirs:
|
||||||
|
continue
|
||||||
|
if item in proj_dirs_parents:
|
||||||
|
self._FindOrphans(glob.glob('%s/.*' % item) +
|
||||||
|
glob.glob('%s/*' % item),
|
||||||
|
proj_dirs, proj_dirs_parents, outstring)
|
||||||
|
continue
|
||||||
|
outstring.append(''.join([status_header, item, '/']))
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
for project in self.GetProjects(args):
|
all_projects = self.GetProjects(args)
|
||||||
project.PrintWorkTreeStatus()
|
counter = itertools.count()
|
||||||
|
|
||||||
|
if opt.jobs == 1:
|
||||||
|
for project in all_projects:
|
||||||
|
state = project.PrintWorkTreeStatus(quiet=opt.quiet)
|
||||||
|
if state == 'CLEAN':
|
||||||
|
next(counter)
|
||||||
|
else:
|
||||||
|
sem = _threading.Semaphore(opt.jobs)
|
||||||
|
threads = []
|
||||||
|
for project in all_projects:
|
||||||
|
sem.acquire()
|
||||||
|
|
||||||
|
t = _threading.Thread(target=self._StatusHelper,
|
||||||
|
args=(project, counter, sem, opt.quiet))
|
||||||
|
threads.append(t)
|
||||||
|
t.daemon = True
|
||||||
|
t.start()
|
||||||
|
for t in threads:
|
||||||
|
t.join()
|
||||||
|
if not opt.quiet and len(all_projects) == next(counter):
|
||||||
|
print('nothing to commit (working directory clean)')
|
||||||
|
|
||||||
|
if opt.orphans:
|
||||||
|
proj_dirs = set()
|
||||||
|
proj_dirs_parents = set()
|
||||||
|
for project in self.GetProjects(None, missing_ok=True):
|
||||||
|
proj_dirs.add(project.relpath)
|
||||||
|
(head, _tail) = os.path.split(project.relpath)
|
||||||
|
while head != "":
|
||||||
|
proj_dirs_parents.add(head)
|
||||||
|
(head, _tail) = os.path.split(head)
|
||||||
|
proj_dirs.add('.repo')
|
||||||
|
|
||||||
|
class StatusColoring(Coloring):
|
||||||
|
def __init__(self, config):
|
||||||
|
Coloring.__init__(self, config, 'status')
|
||||||
|
self.project = self.printer('header', attr = 'bold')
|
||||||
|
self.untracked = self.printer('untracked', fg = 'red')
|
||||||
|
|
||||||
|
orig_path = os.getcwd()
|
||||||
|
try:
|
||||||
|
os.chdir(self.manifest.topdir)
|
||||||
|
|
||||||
|
outstring = []
|
||||||
|
self._FindOrphans(glob.glob('.*') +
|
||||||
|
glob.glob('*'),
|
||||||
|
proj_dirs, proj_dirs_parents, outstring)
|
||||||
|
|
||||||
|
if outstring:
|
||||||
|
output = StatusColoring(self.manifest.globalConfig)
|
||||||
|
output.project('Objects not within a project (orphans)')
|
||||||
|
output.nl()
|
||||||
|
for entry in outstring:
|
||||||
|
output.untracked(entry)
|
||||||
|
output.nl()
|
||||||
|
else:
|
||||||
|
print('No orphan files or directories')
|
||||||
|
|
||||||
|
finally:
|
||||||
|
# Restore CWD.
|
||||||
|
os.chdir(orig_path)
|
||||||
|
1217
subcmds/sync.py
1217
subcmds/sync.py
File diff suppressed because it is too large
Load Diff
@ -1,3 +1,4 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
#
|
#
|
||||||
# Copyright (C) 2008 The Android Open Source Project
|
# Copyright (C) 2008 The Android Open Source Project
|
||||||
#
|
#
|
||||||
@ -13,62 +14,229 @@
|
|||||||
# See the License for the specific language governing permissions and
|
# See the License for the specific language governing permissions and
|
||||||
# limitations under the License.
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import copy
|
||||||
import re
|
import re
|
||||||
import sys
|
import sys
|
||||||
|
|
||||||
from command import InteractiveCommand
|
from command import InteractiveCommand
|
||||||
from editor import Editor
|
from editor import Editor
|
||||||
from error import UploadError
|
from error import HookError, UploadError
|
||||||
|
from git_command import GitCommand
|
||||||
|
from project import RepoHook
|
||||||
|
|
||||||
|
from pyversion import is_python3
|
||||||
|
if not is_python3():
|
||||||
|
input = raw_input
|
||||||
|
else:
|
||||||
|
unicode = str
|
||||||
|
|
||||||
|
UNUSUAL_COMMIT_THRESHOLD = 5
|
||||||
|
|
||||||
|
def _ConfirmManyUploads(multiple_branches=False):
|
||||||
|
if multiple_branches:
|
||||||
|
print('ATTENTION: One or more branches has an unusually high number '
|
||||||
|
'of commits.')
|
||||||
|
else:
|
||||||
|
print('ATTENTION: You are uploading an unusually high number of commits.')
|
||||||
|
print('YOU PROBABLY DO NOT MEAN TO DO THIS. (Did you rebase across '
|
||||||
|
'branches?)')
|
||||||
|
answer = input("If you are sure you intend to do this, type 'yes': ").strip()
|
||||||
|
return answer == "yes"
|
||||||
|
|
||||||
def _die(fmt, *args):
|
def _die(fmt, *args):
|
||||||
msg = fmt % args
|
msg = fmt % args
|
||||||
print >>sys.stderr, 'error: %s' % msg
|
print('error: %s' % msg, file=sys.stderr)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
def _SplitEmails(values):
|
||||||
|
result = []
|
||||||
|
for value in values:
|
||||||
|
result.extend([s.strip() for s in value.split(',')])
|
||||||
|
return result
|
||||||
|
|
||||||
class Upload(InteractiveCommand):
|
class Upload(InteractiveCommand):
|
||||||
common = True
|
common = True
|
||||||
helpSummary = "Upload changes for code review"
|
helpSummary = "Upload changes for code review"
|
||||||
helpUsage="""
|
helpUsage = """
|
||||||
%prog [<project>]...
|
%prog [--re --cc] [<project>]...
|
||||||
"""
|
"""
|
||||||
helpDescription = """
|
helpDescription = """
|
||||||
The '%prog' command is used to send changes to the Gerrit code
|
The '%prog' command is used to send changes to the Gerrit Code
|
||||||
review system. It searches for changes in local projects that do
|
Review system. It searches for topic branches in local projects
|
||||||
not yet exist in the corresponding remote repository. If multiple
|
that have not yet been published for review. If multiple topic
|
||||||
changes are found, '%prog' opens an editor to allow the
|
branches are found, '%prog' opens an editor to allow the user to
|
||||||
user to choose which change to upload. After a successful upload,
|
select which branches to upload.
|
||||||
repo prints the URL for the change in the Gerrit code review system.
|
|
||||||
|
'%prog' searches for uploadable changes in all projects listed at
|
||||||
|
the command line. Projects can be specified either by name, or by
|
||||||
|
a relative or absolute path to the project's local directory. If no
|
||||||
|
projects are specified, '%prog' will search for uploadable changes
|
||||||
|
in all projects listed in the manifest.
|
||||||
|
|
||||||
|
If the --reviewers or --cc options are passed, those emails are
|
||||||
|
added to the respective list of users, and emails are sent to any
|
||||||
|
new users. Users passed as --reviewers must already be registered
|
||||||
|
with the code review system, or the upload will fail.
|
||||||
|
|
||||||
|
# Configuration
|
||||||
|
|
||||||
|
review.URL.autoupload:
|
||||||
|
|
||||||
|
To disable the "Upload ... (y/N)?" prompt, you can set a per-project
|
||||||
|
or global Git configuration option. If review.URL.autoupload is set
|
||||||
|
to "true" then repo will assume you always answer "y" at the prompt,
|
||||||
|
and will not prompt you further. If it is set to "false" then repo
|
||||||
|
will assume you always answer "n", and will abort.
|
||||||
|
|
||||||
|
review.URL.autoreviewer:
|
||||||
|
|
||||||
|
To automatically append a user or mailing list to reviews, you can set
|
||||||
|
a per-project or global Git option to do so.
|
||||||
|
|
||||||
|
review.URL.autocopy:
|
||||||
|
|
||||||
|
To automatically copy a user or mailing list to all uploaded reviews,
|
||||||
|
you can set a per-project or global Git option to do so. Specifically,
|
||||||
|
review.URL.autocopy can be set to a comma separated list of reviewers
|
||||||
|
who you always want copied on all uploads with a non-empty --re
|
||||||
|
argument.
|
||||||
|
|
||||||
|
review.URL.username:
|
||||||
|
|
||||||
|
Override the username used to connect to Gerrit Code Review.
|
||||||
|
By default the local part of the email address is used.
|
||||||
|
|
||||||
|
The URL must match the review URL listed in the manifest XML file,
|
||||||
|
or in the .git/config within the project. For example:
|
||||||
|
|
||||||
|
[remote "origin"]
|
||||||
|
url = git://git.example.com/project.git
|
||||||
|
review = http://review.example.com/
|
||||||
|
|
||||||
|
[review "http://review.example.com/"]
|
||||||
|
autoupload = true
|
||||||
|
autocopy = johndoe@company.com,my-team-alias@company.com
|
||||||
|
|
||||||
|
review.URL.uploadtopic:
|
||||||
|
|
||||||
|
To add a topic branch whenever uploading a commit, you can set a
|
||||||
|
per-project or global Git option to do so. If review.URL.uploadtopic
|
||||||
|
is set to "true" then repo will assume you always want the equivalent
|
||||||
|
of the -t option to the repo command. If unset or set to "false" then
|
||||||
|
repo will make use of only the command line option.
|
||||||
|
|
||||||
|
# References
|
||||||
|
|
||||||
|
Gerrit Code Review: https://www.gerritcodereview.com/
|
||||||
|
|
||||||
'%prog' searches for uploadable changes in all projects listed
|
|
||||||
at the command line. Projects can be specified either by name, or
|
|
||||||
by a relative or absolute path to the project's local directory. If
|
|
||||||
no projects are specified, '%prog' will search for uploadable
|
|
||||||
changes in all projects listed in the manifest.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def _SingleBranch(self, branch):
|
def _Options(self, p):
|
||||||
|
p.add_option('-t',
|
||||||
|
dest='auto_topic', action='store_true',
|
||||||
|
help='Send local branch name to Gerrit Code Review')
|
||||||
|
p.add_option('--re', '--reviewers',
|
||||||
|
type='string', action='append', dest='reviewers',
|
||||||
|
help='Request reviews from these people.')
|
||||||
|
p.add_option('--cc',
|
||||||
|
type='string', action='append', dest='cc',
|
||||||
|
help='Also send email to these email addresses.')
|
||||||
|
p.add_option('--br',
|
||||||
|
type='string', action='store', dest='branch',
|
||||||
|
help='Branch to upload.')
|
||||||
|
p.add_option('--cbr', '--current-branch',
|
||||||
|
dest='current_branch', action='store_true',
|
||||||
|
help='Upload current git branch.')
|
||||||
|
p.add_option('-d', '--draft',
|
||||||
|
action='store_true', dest='draft', default=False,
|
||||||
|
help='If specified, upload as a draft.')
|
||||||
|
p.add_option('--ne', '--no-emails',
|
||||||
|
action='store_false', dest='notify', default=True,
|
||||||
|
help='If specified, do not send emails on upload.')
|
||||||
|
p.add_option('-p', '--private',
|
||||||
|
action='store_true', dest='private', default=False,
|
||||||
|
help='If specified, upload as a private change.')
|
||||||
|
p.add_option('-w', '--wip',
|
||||||
|
action='store_true', dest='wip', default=False,
|
||||||
|
help='If specified, upload as a work-in-progress change.')
|
||||||
|
p.add_option('-o', '--push-option',
|
||||||
|
type='string', action='append', dest='push_options',
|
||||||
|
default=[],
|
||||||
|
help='Additional push options to transmit')
|
||||||
|
p.add_option('-D', '--destination', '--dest',
|
||||||
|
type='string', action='store', dest='dest_branch',
|
||||||
|
metavar='BRANCH',
|
||||||
|
help='Submit for review on this target branch.')
|
||||||
|
|
||||||
|
# Options relating to upload hook. Note that verify and no-verify are NOT
|
||||||
|
# opposites of each other, which is why they store to different locations.
|
||||||
|
# We are using them to match 'git commit' syntax.
|
||||||
|
#
|
||||||
|
# Combinations:
|
||||||
|
# - no-verify=False, verify=False (DEFAULT):
|
||||||
|
# If stdout is a tty, can prompt about running upload hooks if needed.
|
||||||
|
# If user denies running hooks, the upload is cancelled. If stdout is
|
||||||
|
# not a tty and we would need to prompt about upload hooks, upload is
|
||||||
|
# cancelled.
|
||||||
|
# - no-verify=False, verify=True:
|
||||||
|
# Always run upload hooks with no prompt.
|
||||||
|
# - no-verify=True, verify=False:
|
||||||
|
# Never run upload hooks, but upload anyway (AKA bypass hooks).
|
||||||
|
# - no-verify=True, verify=True:
|
||||||
|
# Invalid
|
||||||
|
p.add_option('--no-cert-checks',
|
||||||
|
dest='validate_certs', action='store_false', default=True,
|
||||||
|
help='Disable verifying ssl certs (unsafe).')
|
||||||
|
p.add_option('--no-verify',
|
||||||
|
dest='bypass_hooks', action='store_true',
|
||||||
|
help='Do not run the upload hook.')
|
||||||
|
p.add_option('--verify',
|
||||||
|
dest='allow_all_hooks', action='store_true',
|
||||||
|
help='Run the upload hook without prompting.')
|
||||||
|
|
||||||
|
def _SingleBranch(self, opt, branch, people):
|
||||||
project = branch.project
|
project = branch.project
|
||||||
name = branch.name
|
name = branch.name
|
||||||
date = branch.date
|
remote = project.GetBranch(name).remote
|
||||||
list = branch.commits
|
|
||||||
|
|
||||||
print 'Upload project %s/:' % project.relpath
|
key = 'review.%s.autoupload' % remote.review
|
||||||
print ' branch %s (%2d commit%s, %s):' % (
|
answer = project.config.GetBoolean(key)
|
||||||
name,
|
|
||||||
len(list),
|
|
||||||
len(list) != 1 and 's' or '',
|
|
||||||
date)
|
|
||||||
for commit in list:
|
|
||||||
print ' %s' % commit
|
|
||||||
|
|
||||||
sys.stdout.write('(y/n)? ')
|
if answer is False:
|
||||||
answer = sys.stdin.readline().strip()
|
_die("upload blocked by %s = false" % key)
|
||||||
if answer in ('y', 'Y', 'yes', '1', 'true', 't'):
|
|
||||||
self._UploadAndReport([branch])
|
if answer is None:
|
||||||
|
date = branch.date
|
||||||
|
commit_list = branch.commits
|
||||||
|
|
||||||
|
destination = opt.dest_branch or project.dest_branch or project.revisionExpr
|
||||||
|
print('Upload project %s/ to remote branch %s%s:' %
|
||||||
|
(project.relpath, destination, ' (draft)' if opt.draft else ''))
|
||||||
|
print(' branch %s (%2d commit%s, %s):' % (
|
||||||
|
name,
|
||||||
|
len(commit_list),
|
||||||
|
len(commit_list) != 1 and 's' or '',
|
||||||
|
date))
|
||||||
|
for commit in commit_list:
|
||||||
|
print(' %s' % commit)
|
||||||
|
|
||||||
|
print('to %s (y/N)? ' % remote.review, end='')
|
||||||
|
# TODO: When we require Python 3, use flush=True w/print above.
|
||||||
|
sys.stdout.flush()
|
||||||
|
answer = sys.stdin.readline().strip().lower()
|
||||||
|
answer = answer in ('y', 'yes', '1', 'true', 't')
|
||||||
|
|
||||||
|
if answer:
|
||||||
|
if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
|
||||||
|
answer = _ConfirmManyUploads()
|
||||||
|
|
||||||
|
if answer:
|
||||||
|
self._UploadAndReport(opt, [branch], people)
|
||||||
else:
|
else:
|
||||||
_die("upload aborted by user")
|
_die("upload aborted by user")
|
||||||
|
|
||||||
def _MultipleBranches(self, pending):
|
def _MultipleBranches(self, opt, pending, people):
|
||||||
projects = {}
|
projects = {}
|
||||||
branches = {}
|
branches = {}
|
||||||
|
|
||||||
@ -80,18 +248,22 @@ changes in all projects listed in the manifest.
|
|||||||
|
|
||||||
b = {}
|
b = {}
|
||||||
for branch in avail:
|
for branch in avail:
|
||||||
|
if branch is None:
|
||||||
|
continue
|
||||||
name = branch.name
|
name = branch.name
|
||||||
date = branch.date
|
date = branch.date
|
||||||
list = branch.commits
|
commit_list = branch.commits
|
||||||
|
|
||||||
if b:
|
if b:
|
||||||
script.append('#')
|
script.append('#')
|
||||||
script.append('# branch %s (%2d commit%s, %s):' % (
|
destination = opt.dest_branch or project.dest_branch or project.revisionExpr
|
||||||
|
script.append('# branch %s (%2d commit%s, %s) to remote branch %s:' % (
|
||||||
name,
|
name,
|
||||||
len(list),
|
len(commit_list),
|
||||||
len(list) != 1 and 's' or '',
|
len(commit_list) != 1 and 's' or '',
|
||||||
date))
|
date,
|
||||||
for commit in list:
|
destination))
|
||||||
|
for commit in commit_list:
|
||||||
script.append('# %s' % commit)
|
script.append('# %s' % commit)
|
||||||
b[name] = branch
|
b[name] = branch
|
||||||
|
|
||||||
@ -127,54 +299,209 @@ changes in all projects listed in the manifest.
|
|||||||
todo.append(branch)
|
todo.append(branch)
|
||||||
if not todo:
|
if not todo:
|
||||||
_die("nothing uncommented for upload")
|
_die("nothing uncommented for upload")
|
||||||
self._UploadAndReport(todo)
|
|
||||||
|
|
||||||
def _UploadAndReport(self, todo):
|
many_commits = False
|
||||||
|
for branch in todo:
|
||||||
|
if len(branch.commits) > UNUSUAL_COMMIT_THRESHOLD:
|
||||||
|
many_commits = True
|
||||||
|
break
|
||||||
|
if many_commits:
|
||||||
|
if not _ConfirmManyUploads(multiple_branches=True):
|
||||||
|
_die("upload aborted by user")
|
||||||
|
|
||||||
|
self._UploadAndReport(opt, todo, people)
|
||||||
|
|
||||||
|
def _AppendAutoList(self, branch, people):
|
||||||
|
"""
|
||||||
|
Appends the list of reviewers in the git project's config.
|
||||||
|
Appends the list of users in the CC list in the git project's config if a
|
||||||
|
non-empty reviewer list was found.
|
||||||
|
"""
|
||||||
|
name = branch.name
|
||||||
|
project = branch.project
|
||||||
|
|
||||||
|
key = 'review.%s.autoreviewer' % project.GetBranch(name).remote.review
|
||||||
|
raw_list = project.config.GetString(key)
|
||||||
|
if not raw_list is None:
|
||||||
|
people[0].extend([entry.strip() for entry in raw_list.split(',')])
|
||||||
|
|
||||||
|
key = 'review.%s.autocopy' % project.GetBranch(name).remote.review
|
||||||
|
raw_list = project.config.GetString(key)
|
||||||
|
if not raw_list is None and len(people[0]) > 0:
|
||||||
|
people[1].extend([entry.strip() for entry in raw_list.split(',')])
|
||||||
|
|
||||||
|
def _FindGerritChange(self, branch):
|
||||||
|
last_pub = branch.project.WasPublished(branch.name)
|
||||||
|
if last_pub is None:
|
||||||
|
return ""
|
||||||
|
|
||||||
|
refs = branch.GetPublishedRefs()
|
||||||
|
try:
|
||||||
|
# refs/changes/XYZ/N --> XYZ
|
||||||
|
return refs.get(last_pub).split('/')[-2]
|
||||||
|
except (AttributeError, IndexError):
|
||||||
|
return ""
|
||||||
|
|
||||||
|
def _UploadAndReport(self, opt, todo, original_people):
|
||||||
have_errors = False
|
have_errors = False
|
||||||
for branch in todo:
|
for branch in todo:
|
||||||
try:
|
try:
|
||||||
branch.UploadForReview()
|
people = copy.deepcopy(original_people)
|
||||||
|
self._AppendAutoList(branch, people)
|
||||||
|
|
||||||
|
# Check if there are local changes that may have been forgotten
|
||||||
|
changes = branch.project.UncommitedFiles()
|
||||||
|
if changes:
|
||||||
|
key = 'review.%s.autoupload' % branch.project.remote.review
|
||||||
|
answer = branch.project.config.GetBoolean(key)
|
||||||
|
|
||||||
|
# if they want to auto upload, let's not ask because it could be automated
|
||||||
|
if answer is None:
|
||||||
|
print()
|
||||||
|
print('Uncommitted changes in %s (did you forget to amend?):'
|
||||||
|
% branch.project.name)
|
||||||
|
print('\n'.join(changes))
|
||||||
|
print('Continue uploading? (y/N) ', end='')
|
||||||
|
# TODO: When we require Python 3, use flush=True w/print above.
|
||||||
|
sys.stdout.flush()
|
||||||
|
a = sys.stdin.readline().strip().lower()
|
||||||
|
if a not in ('y', 'yes', 't', 'true', 'on'):
|
||||||
|
print("skipping upload", file=sys.stderr)
|
||||||
|
branch.uploaded = False
|
||||||
|
branch.error = 'User aborted'
|
||||||
|
continue
|
||||||
|
|
||||||
|
# Check if topic branches should be sent to the server during upload
|
||||||
|
if opt.auto_topic is not True:
|
||||||
|
key = 'review.%s.uploadtopic' % branch.project.remote.review
|
||||||
|
opt.auto_topic = branch.project.config.GetBoolean(key)
|
||||||
|
|
||||||
|
destination = opt.dest_branch or branch.project.dest_branch
|
||||||
|
|
||||||
|
# Make sure our local branch is not setup to track a different remote branch
|
||||||
|
merge_branch = self._GetMergeBranch(branch.project)
|
||||||
|
if destination:
|
||||||
|
full_dest = 'refs/heads/%s' % destination
|
||||||
|
if not opt.dest_branch and merge_branch and merge_branch != full_dest:
|
||||||
|
print('merge branch %s does not match destination branch %s'
|
||||||
|
% (merge_branch, full_dest))
|
||||||
|
print('skipping upload.')
|
||||||
|
print('Please use `--destination %s` if this is intentional'
|
||||||
|
% destination)
|
||||||
|
branch.uploaded = False
|
||||||
|
continue
|
||||||
|
|
||||||
|
branch.UploadForReview(people,
|
||||||
|
auto_topic=opt.auto_topic,
|
||||||
|
draft=opt.draft,
|
||||||
|
private=opt.private,
|
||||||
|
notify=None if opt.notify else 'NONE',
|
||||||
|
wip=opt.wip,
|
||||||
|
dest_branch=destination,
|
||||||
|
validate_certs=opt.validate_certs,
|
||||||
|
push_options=opt.push_options)
|
||||||
|
|
||||||
branch.uploaded = True
|
branch.uploaded = True
|
||||||
except UploadError, e:
|
except UploadError as e:
|
||||||
branch.error = e
|
branch.error = e
|
||||||
branch.uploaded = False
|
branch.uploaded = False
|
||||||
have_errors = True
|
have_errors = True
|
||||||
|
|
||||||
print >>sys.stderr, ''
|
print(file=sys.stderr)
|
||||||
print >>sys.stderr, '--------------------------------------------'
|
print('----------------------------------------------------------------------', file=sys.stderr)
|
||||||
|
|
||||||
if have_errors:
|
if have_errors:
|
||||||
for branch in todo:
|
for branch in todo:
|
||||||
if not branch.uploaded:
|
if not branch.uploaded:
|
||||||
print >>sys.stderr, '[FAILED] %-15s %-15s (%s)' % (
|
if len(str(branch.error)) <= 30:
|
||||||
|
fmt = ' (%s)'
|
||||||
|
else:
|
||||||
|
fmt = '\n (%s)'
|
||||||
|
print(('[FAILED] %-15s %-15s' + fmt) % (
|
||||||
branch.project.relpath + '/', \
|
branch.project.relpath + '/', \
|
||||||
branch.name, \
|
branch.name, \
|
||||||
branch.error)
|
str(branch.error)),
|
||||||
print >>sys.stderr, ''
|
file=sys.stderr)
|
||||||
|
print()
|
||||||
|
|
||||||
for branch in todo:
|
for branch in todo:
|
||||||
if branch.uploaded:
|
if branch.uploaded:
|
||||||
print >>sys.stderr, '[OK ] %-15s %s' % (
|
print('[OK ] %-15s %s' % (
|
||||||
branch.project.relpath + '/',
|
branch.project.relpath + '/',
|
||||||
branch.name)
|
branch.name),
|
||||||
print >>sys.stderr, '%s' % branch.tip_url
|
file=sys.stderr)
|
||||||
print >>sys.stderr, ''
|
|
||||||
|
|
||||||
if have_errors:
|
if have_errors:
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
|
def _GetMergeBranch(self, project):
|
||||||
|
p = GitCommand(project,
|
||||||
|
['rev-parse', '--abbrev-ref', 'HEAD'],
|
||||||
|
capture_stdout = True,
|
||||||
|
capture_stderr = True)
|
||||||
|
p.Wait()
|
||||||
|
local_branch = p.stdout.strip()
|
||||||
|
p = GitCommand(project,
|
||||||
|
['config', '--get', 'branch.%s.merge' % local_branch],
|
||||||
|
capture_stdout = True,
|
||||||
|
capture_stderr = True)
|
||||||
|
p.Wait()
|
||||||
|
merge_branch = p.stdout.strip()
|
||||||
|
return merge_branch
|
||||||
|
|
||||||
def Execute(self, opt, args):
|
def Execute(self, opt, args):
|
||||||
project_list = self.GetProjects(args)
|
project_list = self.GetProjects(args)
|
||||||
pending = []
|
pending = []
|
||||||
|
reviewers = []
|
||||||
|
cc = []
|
||||||
|
branch = None
|
||||||
|
|
||||||
|
if opt.branch:
|
||||||
|
branch = opt.branch
|
||||||
|
|
||||||
for project in project_list:
|
for project in project_list:
|
||||||
avail = project.GetUploadableBranches()
|
if opt.current_branch:
|
||||||
|
cbr = project.CurrentBranch
|
||||||
|
up_branch = project.GetUploadableBranch(cbr)
|
||||||
|
if up_branch:
|
||||||
|
avail = [up_branch]
|
||||||
|
else:
|
||||||
|
avail = None
|
||||||
|
print('ERROR: Current branch (%s) not uploadable. '
|
||||||
|
'You may be able to type '
|
||||||
|
'"git branch --set-upstream-to m/master" to fix '
|
||||||
|
'your branch.' % str(cbr),
|
||||||
|
file=sys.stderr)
|
||||||
|
else:
|
||||||
|
avail = project.GetUploadableBranches(branch)
|
||||||
if avail:
|
if avail:
|
||||||
pending.append((project, avail))
|
pending.append((project, avail))
|
||||||
|
|
||||||
if not pending:
|
if not pending:
|
||||||
print >>sys.stdout, "no branches ready for upload"
|
print("no branches ready for upload", file=sys.stderr)
|
||||||
elif len(pending) == 1 and len(pending[0][1]) == 1:
|
return
|
||||||
self._SingleBranch(pending[0][1][0])
|
|
||||||
|
if not opt.bypass_hooks:
|
||||||
|
hook = RepoHook('pre-upload', self.manifest.repo_hooks_project,
|
||||||
|
self.manifest.topdir,
|
||||||
|
self.manifest.manifestProject.GetRemote('origin').url,
|
||||||
|
abort_if_user_denies=True)
|
||||||
|
pending_proj_names = [project.name for (project, available) in pending]
|
||||||
|
pending_worktrees = [project.worktree for (project, available) in pending]
|
||||||
|
try:
|
||||||
|
hook.Run(opt.allow_all_hooks, project_list=pending_proj_names,
|
||||||
|
worktree_list=pending_worktrees)
|
||||||
|
except HookError as e:
|
||||||
|
print("ERROR: %s" % str(e), file=sys.stderr)
|
||||||
|
return
|
||||||
|
|
||||||
|
if opt.reviewers:
|
||||||
|
reviewers = _SplitEmails(opt.reviewers)
|
||||||
|
if opt.cc:
|
||||||
|
cc = _SplitEmails(opt.cc)
|
||||||
|
people = (reviewers, cc)
|
||||||
|
|
||||||
|
if len(pending) == 1 and len(pending[0][1]) == 1:
|
||||||
|
self._SingleBranch(opt, pending[0][1][0], people)
|
||||||
else:
|
else:
|
||||||
self._MultipleBranches(pending)
|
self._MultipleBranches(opt, pending, people)
|
||||||
|
53
subcmds/version.py
Normal file
53
subcmds/version.py
Normal file
@ -0,0 +1,53 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
import sys
|
||||||
|
from command import Command, MirrorSafeCommand
|
||||||
|
from git_command import git, RepoSourceVersion, user_agent
|
||||||
|
from git_refs import HEAD
|
||||||
|
|
||||||
|
class Version(Command, MirrorSafeCommand):
|
||||||
|
wrapper_version = None
|
||||||
|
wrapper_path = None
|
||||||
|
|
||||||
|
common = False
|
||||||
|
helpSummary = "Display the version of repo"
|
||||||
|
helpUsage = """
|
||||||
|
%prog
|
||||||
|
"""
|
||||||
|
|
||||||
|
def Execute(self, opt, args):
|
||||||
|
rp = self.manifest.repoProject
|
||||||
|
rem = rp.GetRemote(rp.remote.name)
|
||||||
|
|
||||||
|
# These might not be the same. Report them both.
|
||||||
|
src_ver = RepoSourceVersion()
|
||||||
|
rp_ver = rp.bare_git.describe(HEAD)
|
||||||
|
print('repo version %s' % rp_ver)
|
||||||
|
print(' (from %s)' % rem.url)
|
||||||
|
|
||||||
|
if Version.wrapper_path is not None:
|
||||||
|
print('repo launcher version %s' % Version.wrapper_version)
|
||||||
|
print(' (from %s)' % Version.wrapper_path)
|
||||||
|
|
||||||
|
if src_ver != rp_ver:
|
||||||
|
print(' (currently at %s)' % src_ver)
|
||||||
|
|
||||||
|
print('repo User-Agent %s' % user_agent.repo)
|
||||||
|
print('git %s' % git.version_tuple().full)
|
||||||
|
print('git User-Agent %s' % user_agent.git)
|
||||||
|
print('Python %s' % sys.version)
|
2
tests/fixtures/.gitignore
vendored
Normal file
2
tests/fixtures/.gitignore
vendored
Normal file
@ -0,0 +1,2 @@
|
|||||||
|
/.repo_not.present.gitconfig.json
|
||||||
|
/.repo_test.gitconfig.json
|
1
tests/fixtures/gitc_config
vendored
Normal file
1
tests/fixtures/gitc_config
vendored
Normal file
@ -0,0 +1 @@
|
|||||||
|
gitc_dir=/test/usr/local/google/gitc
|
3
tests/fixtures/test.gitconfig
vendored
Normal file
3
tests/fixtures/test.gitconfig
vendored
Normal file
@ -0,0 +1,3 @@
|
|||||||
|
[section]
|
||||||
|
empty
|
||||||
|
nonempty = true
|
60
tests/test_editor.py
Normal file
60
tests/test_editor.py
Normal file
@ -0,0 +1,60 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2019 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Unittests for the editor.py module."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
from editor import Editor
|
||||||
|
|
||||||
|
|
||||||
|
class EditorTestCase(unittest.TestCase):
|
||||||
|
"""Take care of resetting Editor state across tests."""
|
||||||
|
|
||||||
|
def setUp(self):
|
||||||
|
self.setEditor(None)
|
||||||
|
|
||||||
|
def tearDown(self):
|
||||||
|
self.setEditor(None)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def setEditor(editor):
|
||||||
|
Editor._editor = editor
|
||||||
|
|
||||||
|
|
||||||
|
class GetEditor(EditorTestCase):
|
||||||
|
"""Check GetEditor behavior."""
|
||||||
|
|
||||||
|
def test_basic(self):
|
||||||
|
"""Basic checking of _GetEditor."""
|
||||||
|
self.setEditor(':')
|
||||||
|
self.assertEqual(':', Editor._GetEditor())
|
||||||
|
|
||||||
|
|
||||||
|
class EditString(EditorTestCase):
|
||||||
|
"""Check EditString behavior."""
|
||||||
|
|
||||||
|
def test_no_editor(self):
|
||||||
|
"""Check behavior when no editor is available."""
|
||||||
|
self.setEditor(':')
|
||||||
|
self.assertEqual('foo', Editor.EditString('foo'))
|
||||||
|
|
||||||
|
def test_cat_editor(self):
|
||||||
|
"""Check behavior when editor is `cat`."""
|
||||||
|
self.setEditor('cat')
|
||||||
|
self.assertEqual('foo', Editor.EditString('foo'))
|
78
tests/test_git_command.py
Normal file
78
tests/test_git_command.py
Normal file
@ -0,0 +1,78 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright 2019 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Unittests for the git_command.py module."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import re
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import git_command
|
||||||
|
|
||||||
|
|
||||||
|
class GitCallUnitTest(unittest.TestCase):
|
||||||
|
"""Tests the _GitCall class (via git_command.git)."""
|
||||||
|
|
||||||
|
def test_version_tuple(self):
|
||||||
|
"""Check git.version_tuple() handling."""
|
||||||
|
ver = git_command.git.version_tuple()
|
||||||
|
self.assertIsNotNone(ver)
|
||||||
|
|
||||||
|
# We don't dive too deep into the values here to avoid having to update
|
||||||
|
# whenever git versions change. We do check relative to this min version
|
||||||
|
# as this is what `repo` itself requires via MIN_GIT_VERSION.
|
||||||
|
MIN_GIT_VERSION = (2, 10, 2)
|
||||||
|
self.assertTrue(isinstance(ver.major, int))
|
||||||
|
self.assertTrue(isinstance(ver.minor, int))
|
||||||
|
self.assertTrue(isinstance(ver.micro, int))
|
||||||
|
|
||||||
|
self.assertGreater(ver.major, MIN_GIT_VERSION[0] - 1)
|
||||||
|
self.assertGreaterEqual(ver.micro, 0)
|
||||||
|
self.assertGreaterEqual(ver.major, 0)
|
||||||
|
|
||||||
|
self.assertGreaterEqual(ver, MIN_GIT_VERSION)
|
||||||
|
self.assertLess(ver, (9999, 9999, 9999))
|
||||||
|
|
||||||
|
self.assertNotEqual('', ver.full)
|
||||||
|
|
||||||
|
|
||||||
|
class UserAgentUnitTest(unittest.TestCase):
  """Tests the UserAgent function."""

  def test_smoke_os(self):
    """Make sure UA OS setting returns something useful."""
    os_name = git_command.user_agent.os
    # OS/tool differences prevent checking exact values; just verify the
    # general form (a single token with no spaces).
    self.assertIsNotNone(re.match(r'^[^ ]+$', os_name))

  def test_smoke_repo(self):
    """Make sure repo UA returns something useful."""
    ua = git_command.user_agent.repo
    # Only validate the general form; exact values vary by environment.
    self.assertIsNotNone(
        re.match(r'^git-repo/[^ ]+ ([^ ]+) git/[^ ]+ Python/[0-9.]+', ua))

  def test_smoke_git(self):
    """Make sure git UA returns something useful."""
    ua = git_command.user_agent.git
    # Only validate the general form; exact values vary by environment.
    self.assertIsNotNone(re.match(r'^git/[^ ]+ ([^ ]+) git-repo/[^ ]+', ua))
|
72
tests/test_git_config.py
Normal file
72
tests/test_git_config.py
Normal file
@ -0,0 +1,72 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2009 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Unittests for the git_config.py module."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import os
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import git_config
|
||||||
|
|
||||||
|
def fixture(*paths):
  """Return a path relative to test/fixtures."""
  here = os.path.dirname(__file__)
  return os.path.join(here, 'fixtures', *paths)
||||||
|
|
||||||
|
class GitConfigUnitTest(unittest.TestCase):
  """Tests the GitConfig class."""

  def setUp(self):
    """Create a GitConfig object using the test.gitconfig fixture."""
    self.config = git_config.GitConfig(fixture('test.gitconfig'))

  def test_GetString_with_empty_config_values(self):
    """Test config entries with no value.

    [section]
      empty
    """
    self.assertIsNone(self.config.GetString('section.empty'))

  def test_GetString_with_true_value(self):
    """Test config entries with a string value.

    [section]
      nonempty = true
    """
    self.assertEqual('true', self.config.GetString('section.nonempty'))

  def test_GetString_from_missing_file(self):
    """Test missing config file."""
    config = git_config.GitConfig(fixture('not.present.gitconfig'))
    self.assertIsNone(config.GetString('empty'))
|
||||||
|
|
||||||
|
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
85
tests/test_manifest_xml.py
Normal file
85
tests/test_manifest_xml.py
Normal file
@ -0,0 +1,85 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2019 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Unittests for the manifest_xml.py module."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import error
|
||||||
|
import manifest_xml
|
||||||
|
|
||||||
|
|
||||||
|
class ManifestValidateFilePaths(unittest.TestCase):
  """Check _ValidateFilePaths helper.

  This doesn't access a real filesystem.
  """

  def check_both(self, *args):
    # Both element types share the same path validation rules.
    for element in ('copyfile', 'linkfile'):
      manifest_xml.XmlManifest._ValidateFilePaths(element, *args)

  def test_normal_path(self):
    """Make sure good paths are accepted."""
    for src, dest in (('foo', 'bar'),
                      ('foo/bar', 'bar'),
                      ('foo', 'bar/bar'),
                      ('foo/bar', 'bar/bar')):
      self.check_both(src, dest)

  def test_symlink_targets(self):
    """Some extra checks for symlinks."""
    def check(*args):
      manifest_xml.XmlManifest._ValidateFilePaths('linkfile', *args)

    # We allow symlinks to end in a slash since we allow them to point to dirs
    # in general.  Technically the slash isn't necessary.
    check('foo/', 'bar')
    # We allow a single '.' to get a reference to the project itself.
    check('.', 'bar')

  def test_bad_paths(self):
    """Make sure bad paths (src & dest) are rejected."""
    PATHS = (
        '..',
        '../',
        './',
        'foo/',
        './foo',
        '../foo',
        'foo/./bar',
        'foo/../../bar',
        '/foo',
        './../foo',
        '.git/foo',
        # Check case folding.
        '.GIT/foo',
        'blah/.git/foo',
        '.repo/foo',
        '.repoconfig',
        # Block ~ due to 8.3 filenames on Windows filesystems.
        '~',
        'foo~',
        'blah/foo~',
        # Block Unicode characters that get normalized out by filesystems.
        u'foo\u200Cbar',
    )
    for path in PATHS:
      self.assertRaises(
          error.ManifestInvalidPathError, self.check_both, path, 'a')
      self.assertRaises(
          error.ManifestInvalidPathError, self.check_both, 'a', path)
|
363
tests/test_project.py
Normal file
363
tests/test_project.py
Normal file
@ -0,0 +1,363 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2019 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Unittests for the project.py module."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import contextlib
|
||||||
|
import os
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
import tempfile
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import error
|
||||||
|
import git_config
|
||||||
|
import project
|
||||||
|
|
||||||
|
|
||||||
|
@contextlib.contextmanager
def TempGitTree():
  """Create a new empty git checkout for testing.

  Yields:
    Path to the top of the temporary checkout; removed on exit.
  """
  # TODO(vapier): Convert this to tempfile.TemporaryDirectory once we drop
  # Python 2 support entirely.
  # Create the dir *before* entering the try block: if mkdtemp itself fails,
  # the finally clause must not reference an unbound `tempdir` (that would
  # raise a NameError masking the real error).
  tempdir = tempfile.mkdtemp(prefix='repo-tests')
  try:
    subprocess.check_call(['git', 'init'], cwd=tempdir)
    yield tempdir
  finally:
    shutil.rmtree(tempdir)
|
||||||
|
|
||||||
|
|
||||||
|
class RepoHookShebang(unittest.TestCase):
  """Check shebang parsing in RepoHook."""

  def test_no_shebang(self):
    """Lines w/out shebangs should be rejected."""
    for data in ('',
                 '# -*- coding:utf-8 -*-\n',
                 '#\n# foo\n',
                 '# Bad shebang in script\n#!/foo\n'):
      self.assertIsNone(project.RepoHook._ExtractInterpFromShebang(data))

  def test_direct_interp(self):
    """Lines whose shebang points directly to the interpreter."""
    for shebang, interp in (('#!/foo', '/foo'),
                            ('#! /foo', '/foo'),
                            ('#!/bin/foo ', '/bin/foo'),
                            ('#! /usr/foo ', '/usr/foo'),
                            ('#! /usr/foo -args', '/usr/foo')):
      self.assertEqual(
          interp, project.RepoHook._ExtractInterpFromShebang(shebang))

  def test_env_interp(self):
    """Lines whose shebang launches through `env`."""
    for shebang, interp in (('#!/usr/bin/env foo', 'foo'),
                            ('#!/bin/env foo', 'foo'),
                            ('#! /bin/env /bin/foo ', '/bin/foo')):
      self.assertEqual(
          interp, project.RepoHook._ExtractInterpFromShebang(shebang))
|
||||||
|
|
||||||
|
|
||||||
|
class FakeProject(object):
  """A fake for Project for basic functionality.

  Provides just enough attributes (worktree/gitdir/name plus the work/bare
  git wrappers and a config) for the git helpers to operate against a real
  on-disk checkout without constructing a full Project.
  """

  def __init__(self, worktree):
    self.worktree = worktree
    self.gitdir = os.path.join(worktree, '.git')
    self.name = 'fakeproject'
    # _GitGetByExec takes `self` as its project argument; this fake supplies
    # the attributes it reads.
    self.work_git = project.Project._GitGetByExec(
        self, bare=False, gitdir=self.gitdir)
    self.bare_git = project.Project._GitGetByExec(
        self, bare=True, gitdir=self.gitdir)
    self.config = git_config.GitConfig.ForRepository(gitdir=self.gitdir)
|
||||||
|
|
||||||
|
|
||||||
|
class ReviewableBranchTests(unittest.TestCase):
  """Check ReviewableBranch behavior."""

  def test_smoke(self):
    """A quick run through everything."""
    with TempGitTree() as tempdir:
      fakeproj = FakeProject(tempdir)

      # Generate some commits.
      with open(os.path.join(tempdir, 'readme'), 'w') as fp:
        fp.write('txt')
      fakeproj.work_git.add('readme')
      fakeproj.work_git.commit('-mAdd file')
      fakeproj.work_git.checkout('-b', 'work')
      fakeproj.work_git.rm('-f', 'readme')
      fakeproj.work_git.commit('-mDel file')

      # Start off with the normal details.
      rb = project.ReviewableBranch(
          fakeproj, fakeproj.config.GetBranch('work'), 'master')
      self.assertEqual('work', rb.name)
      self.assertEqual(1, len(rb.commits))
      self.assertIn('Del file', rb.commits[0])
      d = rb.unabbrev_commits
      self.assertEqual(1, len(d))
      # NOTE: `long` shadows the py2 builtin; harmless in this local scope.
      short, long = next(iter(d.items()))
      self.assertTrue(long.startswith(short))
      self.assertTrue(rb.base_exists)
      # Hard to assert anything useful about this.
      self.assertTrue(rb.date)

      # Now delete the tracking branch!
      fakeproj.work_git.branch('-D', 'master')
      rb = project.ReviewableBranch(
          fakeproj, fakeproj.config.GetBranch('work'), 'master')
      self.assertEqual(0, len(rb.commits))
      self.assertFalse(rb.base_exists)
      # Hard to assert anything useful about this.
      self.assertTrue(rb.date)
|
||||||
|
|
||||||
|
|
||||||
|
class CopyLinkTestCase(unittest.TestCase):
  """TestCase for stub repo client checkouts.

  It'll have a layout like:
    tempdir/          # self.tempdir
      checkout/       # self.topdir
        git-project/  # self.worktree

  Attributes:
    tempdir: A dedicated temporary directory.
    worktree: The top of the repo client checkout.
    topdir: The top of a project checkout.
  """

  def setUp(self):
    self.tempdir = tempfile.mkdtemp(prefix='repo_tests')
    self.topdir = os.path.join(self.tempdir, 'checkout')
    self.worktree = os.path.join(self.topdir, 'git-project')
    # makedirs creates intermediate dirs, so this covers topdir too.
    os.makedirs(self.worktree)

  def tearDown(self):
    shutil.rmtree(self.tempdir, ignore_errors=True)

  @staticmethod
  def touch(path):
    # Create an empty file at |path|.
    open(path, 'w').close()

  def assertExists(self, path, msg=None):
    """Make sure |path| exists."""
    if os.path.exists(path):
      return

    if msg is None:
      # Walk up the tree showing what exists at each level; this makes
      # failures far easier to diagnose.
      lines = ['path is missing: %s' % path]
      while path != '/':
        path = os.path.dirname(path)
        if not path:
          # If we're given something like "foo", abort once we get to "".
          break
        result = os.path.exists(path)
        lines.append('\tos.path.exists(%s): %s' % (path, result))
        if result:
          lines.append('\tcontents: %r' % os.listdir(path))
          break
      msg = '\n'.join(lines)

    raise self.failureException(msg)
|
||||||
|
|
||||||
|
|
||||||
|
class CopyFile(CopyLinkTestCase):
  """Check _CopyFile handling."""

  def CopyFile(self, src, dest):
    return project._CopyFile(self.worktree, src, self.topdir, dest)

  def test_basic(self):
    """Basic test of copying a file from a project to the toplevel."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    cf = self.CopyFile('foo.txt', 'foo')
    cf._Copy()
    self.assertExists(os.path.join(self.topdir, 'foo'))

  def test_src_subdir(self):
    """Copy a file from a subdir of a project."""
    src = os.path.join(self.worktree, 'bar', 'foo.txt')
    os.makedirs(os.path.dirname(src))
    self.touch(src)
    cf = self.CopyFile('bar/foo.txt', 'new.txt')
    cf._Copy()
    self.assertExists(os.path.join(self.topdir, 'new.txt'))

  def test_dest_subdir(self):
    """Copy a file to a subdir of a checkout."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    cf = self.CopyFile('foo.txt', 'sub/dir/new.txt')
    self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
    cf._Copy()
    self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'new.txt'))

  def test_update(self):
    """Make sure changed files get copied again."""
    src = os.path.join(self.worktree, 'foo.txt')
    dest = os.path.join(self.topdir, 'bar')
    with open(src, 'w') as f:
      f.write('1st')
    cf = self.CopyFile('foo.txt', 'bar')
    cf._Copy()
    self.assertExists(dest)
    with open(dest) as f:
      self.assertEqual(f.read(), '1st')

    with open(src, 'w') as f:
      f.write('2nd!')
    cf._Copy()
    with open(dest) as f:
      self.assertEqual(f.read(), '2nd!')

  def test_src_block_symlink(self):
    """Do not allow reading from a symlinked path."""
    src = os.path.join(self.worktree, 'foo.txt')
    sym = os.path.join(self.worktree, 'sym')
    self.touch(src)
    os.symlink('foo.txt', sym)
    self.assertExists(sym)
    cf = self.CopyFile('sym', 'foo')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_src_block_symlink_traversal(self):
    """Do not allow reading through a symlink dir."""
    src = os.path.join(self.worktree, 'bar', 'passwd')
    os.symlink('/etc', os.path.join(self.worktree, 'bar'))
    self.assertExists(src)
    cf = self.CopyFile('bar/foo.txt', 'foo')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_src_block_dir(self):
    """Do not allow copying from a directory."""
    src = os.path.join(self.worktree, 'dir')
    os.makedirs(src)
    cf = self.CopyFile('dir', 'foo')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_dest_block_symlink(self):
    """Do not allow writing to a symlink."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    os.symlink('dest', os.path.join(self.topdir, 'sym'))
    cf = self.CopyFile('foo.txt', 'sym')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_dest_block_symlink_traversal(self):
    """Do not allow writing through a symlink dir."""
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    os.symlink('/tmp', os.path.join(self.topdir, 'sym'))
    cf = self.CopyFile('foo.txt', 'sym/foo.txt')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)

  def test_dest_block_dir(self):
    """Do not allow copying to a directory."""
    # Renamed from a duplicate `test_src_block_dir`, which silently
    # shadowed the src-dir test above so only one of them ever ran.
    src = os.path.join(self.worktree, 'foo.txt')
    self.touch(src)
    os.makedirs(os.path.join(self.topdir, 'dir'))
    cf = self.CopyFile('foo.txt', 'dir')
    self.assertRaises(error.ManifestInvalidPathError, cf._Copy)
|
||||||
|
|
||||||
|
|
||||||
|
class LinkFile(CopyLinkTestCase):
  """Check _LinkFile handling."""

  def LinkFile(self, src, dest):
    return project._LinkFile(self.worktree, src, self.topdir, dest)

  def test_basic(self):
    """Basic test of linking a file from a project into the toplevel."""
    source = os.path.join(self.worktree, 'foo.txt')
    self.touch(source)
    link = self.LinkFile('foo.txt', 'foo')
    link._Link()
    target = os.path.join(self.topdir, 'foo')
    self.assertExists(target)
    self.assertTrue(os.path.islink(target))
    self.assertEqual('git-project/foo.txt', os.readlink(target))

  def test_src_subdir(self):
    """Link to a file in a subdir of a project."""
    source = os.path.join(self.worktree, 'bar', 'foo.txt')
    os.makedirs(os.path.dirname(source))
    self.touch(source)
    link = self.LinkFile('bar/foo.txt', 'foo')
    link._Link()
    self.assertExists(os.path.join(self.topdir, 'foo'))

  def test_src_self(self):
    """Link to the project itself."""
    target = os.path.join(self.topdir, 'foo', 'bar')
    link = self.LinkFile('.', 'foo/bar')
    link._Link()
    self.assertExists(target)
    self.assertEqual('../git-project', os.readlink(target))

  def test_dest_subdir(self):
    """Link a file to a subdir of a checkout."""
    source = os.path.join(self.worktree, 'foo.txt')
    self.touch(source)
    link = self.LinkFile('foo.txt', 'sub/dir/foo/bar')
    self.assertFalse(os.path.exists(os.path.join(self.topdir, 'sub')))
    link._Link()
    self.assertExists(os.path.join(self.topdir, 'sub', 'dir', 'foo', 'bar'))

  def test_src_block_relative(self):
    """Do not allow relative symlinks."""
    for bad_source in ('./',
                       '..',
                       '../',
                       'foo/.',
                       'foo/./bar',
                       'foo/..',
                       'foo/../foo'):
      link = self.LinkFile(bad_source, 'foo')
      self.assertRaises(error.ManifestInvalidPathError, link._Link)

  def test_update(self):
    """Make sure changed targets get updated."""
    target = os.path.join(self.topdir, 'sym')

    source = os.path.join(self.worktree, 'foo.txt')
    self.touch(source)
    link = self.LinkFile('foo.txt', 'sym')
    link._Link()
    self.assertEqual('git-project/foo.txt', os.readlink(target))

    # Point the symlink somewhere else.
    os.unlink(target)
    os.symlink('/', target)
    link._Link()
    self.assertEqual('git-project/foo.txt', os.readlink(target))
|
80
tests/test_wrapper.py
Normal file
80
tests/test_wrapper.py
Normal file
@ -0,0 +1,80 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2015 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
"""Unittests for the wrapper.py module."""
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import os
|
||||||
|
import unittest
|
||||||
|
|
||||||
|
import wrapper
|
||||||
|
|
||||||
|
def fixture(*paths):
  """Return a path relative to tests/fixtures."""
  parts = (os.path.dirname(__file__), 'fixtures') + paths
  return os.path.join(*parts)
|
||||||
|
|
||||||
|
class RepoWrapperUnitTest(unittest.TestCase):
  """Tests helper functions in the repo wrapper."""

  def setUp(self):
    """Load the wrapper module every time."""
    # Drop the cached module so each test gets a fresh load.
    wrapper._wrapper_module = None
    self.wrapper = wrapper.Wrapper()

  def test_get_gitc_manifest_dir_no_gitc(self):
    """Test reading a missing gitc config file."""
    self.wrapper.GITC_CONFIG_FILE = fixture('missing_gitc_config')
    self.assertEqual('', self.wrapper.get_gitc_manifest_dir())

  def test_get_gitc_manifest_dir(self):
    """Test reading the gitc config file and parsing the directory."""
    self.wrapper.GITC_CONFIG_FILE = fixture('gitc_config')
    self.assertEqual('/test/usr/local/google/gitc',
                     self.wrapper.get_gitc_manifest_dir())

  def test_gitc_parse_clientdir_no_gitc(self):
    """Test parsing the gitc clientdir without gitc running."""
    self.wrapper.GITC_CONFIG_FILE = fixture('missing_gitc_config')
    parse = self.wrapper.gitc_parse_clientdir
    self.assertIsNone(parse('/something'))
    self.assertEqual('test', parse('/gitc/manifest-rw/test'))

  def test_gitc_parse_clientdir(self):
    """Test parsing the gitc clientdir."""
    self.wrapper.GITC_CONFIG_FILE = fixture('gitc_config')
    parse = self.wrapper.gitc_parse_clientdir
    self.assertIsNone(parse('/something'))
    self.assertEqual('test', parse('/gitc/manifest-rw/test'))
    self.assertEqual('test', parse('/gitc/manifest-rw/test/'))
    self.assertEqual('test', parse('/gitc/manifest-rw/test/extra'))
    self.assertEqual('test', parse('/test/usr/local/google/gitc/test'))
    self.assertEqual('test', parse('/test/usr/local/google/gitc/test/'))
    self.assertEqual('test', parse('/test/usr/local/google/gitc/test/extra'))
    self.assertIsNone(parse('/gitc/manifest-rw/'))
    self.assertIsNone(parse('/test/usr/local/google/gitc/'))
|
||||||
|
|
||||||
|
# Allow running this test module directly.
if __name__ == '__main__':
  unittest.main()
|
22
tox.ini
Normal file
22
tox.ini
Normal file
@ -0,0 +1,22 @@
|
|||||||
|
# Copyright 2019 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
# https://tox.readthedocs.io/
|
||||||
|
|
||||||
|
[tox]
|
||||||
|
envlist = py27, py36, py37, py38
|
||||||
|
|
||||||
|
[testenv]
|
||||||
|
deps = pytest
|
||||||
|
commands = {toxinidir}/run_tests
|
35
wrapper.py
Normal file
35
wrapper.py
Normal file
@ -0,0 +1,35 @@
|
|||||||
|
# -*- coding:utf-8 -*-
|
||||||
|
#
|
||||||
|
# Copyright (C) 2014 The Android Open Source Project
|
||||||
|
#
|
||||||
|
# Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
# you may not use this file except in compliance with the License.
|
||||||
|
# You may obtain a copy of the License at
|
||||||
|
#
|
||||||
|
# http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
#
|
||||||
|
# Unless required by applicable law or agreed to in writing, software
|
||||||
|
# distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
# See the License for the specific language governing permissions and
|
||||||
|
# limitations under the License.
|
||||||
|
|
||||||
|
from __future__ import print_function
|
||||||
|
# Pick a "load a module from an arbitrary file path" implementation.
# Python 3 provides importlib.machinery; fall back to the legacy `imp`
# module for Python 2 compatibility.  Both branches bind `_loader` with the
# signature _loader(name, path) -> module.
try:
  from importlib.machinery import SourceFileLoader
  _loader = lambda *args: SourceFileLoader(*args).load_module()
except ImportError:
  import imp
  _loader = lambda *args: imp.load_source(*args)
|
||||||
|
import os
|
||||||
|
|
||||||
|
|
||||||
|
def WrapperPath():
  """Return the path to the `repo` wrapper script next to this module."""
  here = os.path.dirname(__file__)
  return os.path.join(here, 'repo')
|
||||||
|
|
||||||
|
# Cache for the loaded wrapper module; reset to None to force a reload.
_wrapper_module = None


def Wrapper():
  """Return the repo wrapper module, loading and caching it on first use."""
  global _wrapper_module
  # Modules are always truthy, so an `is None` check is equivalent to the
  # truthiness test and reads more explicitly.
  if _wrapper_module is None:
    _wrapper_module = _loader('wrapper', WrapperPath())
  return _wrapper_module
|
Reference in New Issue
Block a user