Mirror of https://git.launchpad.net/ubuntu-dev-tools (synced 2025-03-13 08:01:09 +00:00)

Compare commits: 990 commits
Author | SHA1 | Date
---|---|---
(990 commit rows; only the SHA1 column was captured in this listing, and the author, date and commit-message columns are empty.)
@@ -1,2 +0,0 @@ (deleted file)
[BUILDDEB]
native = True
@@ -1,9 +0,0 @@ (deleted file)
/.shelf/
/build/
/python-build-stamp-*
/debian/files
/debian/ubuntu-dev-tools/
/debian/ubuntu-dev-tools.debhelper.log
/debian/ubuntu-dev-tools.*.debhelper
/debian/ubuntu-dev-tools.substvars
ubuntu_dev_tools.egg-info
.gitignore (new file, 2 lines, vendored)
@@ -0,0 +1,2 @@
__pycache__
*.egg-info
.pylintrc (new file, 65 lines)
@@ -0,0 +1,65 @@
[MASTER]

# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code.
extension-pkg-allow-list=apt_pkg

# Pickle collected data for later comparisons.
persistent=no

# Use all cpus, to speed up testing
jobs=0


[MESSAGES CONTROL]

# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then reenable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=fixme,locally-disabled,missing-docstring,useless-option-value,
        # TODO: Fix all following disabled checks!
        invalid-name,
        consider-using-with,
        too-many-arguments,
        too-many-branches,
        too-many-statements,
        too-many-locals,
        duplicate-code,
        too-many-instance-attributes,
        too-many-nested-blocks,
        too-many-lines,


[REPORTS]

# Tells whether to display a full report or only the messages
reports=no


[FORMAT]

# Maximum number of characters on a single line.
max-line-length=99

# String used as indentation unit. This is usually "    " (4 spaces) or "\t" (1
# tab).
indent-string='    '


[BASIC]

# Allow variables called e, f, lp
good-names=i,j,k,ex,Run,_,e,f,lp,me,to


[IMPORTS]

# Force import order to recognize a module as part of a third party library.
known-third-party=debian
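The comments in the [MASTER] and [MESSAGES CONTROL] sections above describe how this configuration is meant to be used; a minimal invocation sketch follows (the target path is illustrative, not taken from this listing):

pylint --rcfile=.pylintrc ubuntutools/    # jobs=0 uses all CPUs, persistent=no keeps runs stateless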
404main (deleted file, 177 lines)
@@ -1,177 +0,0 @@
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright 2006-2007 (C) Pete Savage <petesavage@ubuntu.com>
|
||||
# Copyright 2007 (C) Siegfried-A. Gevatter <rainct@ubuntu.com>
|
||||
# Copyright 2009 (C) Canonical Ltd. (by Colin Watson <cjwatson@ubuntu.com>)
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 2
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# See file /usr/share/common-licenses/GPL for more details.
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
# This script is used to check if a package and all its build
|
||||
# dependencies are in main or not.
|
||||
|
||||
import sys
|
||||
|
||||
import apt_pkg
|
||||
import apt
|
||||
|
||||
from ubuntutools import subprocess
|
||||
|
||||
def process_deps(cache, deps):
|
||||
"""Takes a list of (build) dependencies and processes it."""
|
||||
|
||||
for basedep in [d.or_dependencies[0] for d in deps]:
|
||||
if not packages.has_key(basedep.name) and basedep.name != '':
|
||||
# Check the (build) dependencies recursively
|
||||
find_main(cache, basedep.name)
|
||||
|
||||
|
||||
def get_package_version(cache, distro, pack):
|
||||
if pack not in cache:
|
||||
return None
|
||||
for version in (cache[pack].candidate, cache[pack].installed):
|
||||
if not version:
|
||||
continue
|
||||
for origin in version.origins:
|
||||
if origin.archive == distro:
|
||||
return version
|
||||
return None
|
||||
|
||||
|
||||
# Cache::CompTypeDeb isn't exposed via python-apt
|
||||
def comp_type_deb(op):
|
||||
ops = ("", "<=", ">=", "<<", ">>", "=", "!=")
|
||||
if (op & 15) < 7:
|
||||
return ops[op & 15]
|
||||
return ""
|
||||
|
||||
|
||||
def find_main(cache, pack):
|
||||
"""Searches the dependencies and build dependencies of a package recursively
|
||||
to determine if they are all in the 'main' component or not."""
|
||||
|
||||
global packages
|
||||
|
||||
if pack in packages:
|
||||
return
|
||||
|
||||
# Retrieve information about the package
|
||||
version = get_package_version(cache, distro, pack)
|
||||
|
||||
if not version:
|
||||
packages[pack] = False
|
||||
return
|
||||
elif [origin for origin in version.origins if origin.component == 'main']:
|
||||
packages[pack] = True
|
||||
return
|
||||
else:
|
||||
if not packages.has_key(pack):
|
||||
packages[pack] = False
|
||||
|
||||
# Retrieve package dependencies
|
||||
process_deps(cache, version.dependencies)
|
||||
|
||||
# Retrieve package build dependencies. There's no handy
|
||||
# attribute on version for this, so unfortunately we have to
|
||||
# do a lot of messing about with apt.
|
||||
deps = []
|
||||
src_records = apt_pkg.SourceRecords()
|
||||
got_src = False
|
||||
while src_records.lookup(version.source_name):
|
||||
if pack in src_records.binaries:
|
||||
got_src = True
|
||||
break
|
||||
if got_src:
|
||||
# pylint: disable=E1101
|
||||
for _, all_deps in src_records.build_depends.iteritems():
|
||||
# pylint: enable=E1101
|
||||
for or_deps in all_deps:
|
||||
base_deps = []
|
||||
for (name, ver, op) in or_deps:
|
||||
base_deps.append(apt.package.BaseDependency(name, op,
|
||||
ver, False))
|
||||
deps.append(apt.package.Dependency(base_deps))
|
||||
|
||||
process_deps(cache, deps)
|
||||
|
||||
def usage(exit_code):
|
||||
print 'Usage: %s <package name> [<distribution>]' % sys.argv[0]
|
||||
sys.exit(exit_code)
|
||||
|
||||
def main():
|
||||
|
||||
global packages, distro
|
||||
|
||||
# Check if the amount of arguments is correct
|
||||
if len(sys.argv) > 1 and sys.argv[1] in ('help', '-h', '--help'):
|
||||
usage(0)
|
||||
|
||||
if len(sys.argv) < 2 or len(sys.argv) > 3:
|
||||
usage(1)
|
||||
|
||||
cache = apt.cache.Cache()
|
||||
|
||||
if len(sys.argv) == 3 and sys.argv[2]:
|
||||
distro = sys.argv[2]
|
||||
if not get_package_version(cache, distro, 'bash'):
|
||||
print u'«%s» is not a valid distribution.' % distro
|
||||
print ('Remember that for 404main to work with a certain '
|
||||
'distribution it must be in your /etc/apt/sources.list '
|
||||
'file.')
|
||||
sys.exit(1)
|
||||
else:
|
||||
cmd = ['lsb_release', '-cs']
|
||||
process = subprocess.Popen(cmd, stdout=subprocess.PIPE)
|
||||
distro = process.stdout.read().strip('\n')
|
||||
|
||||
if not get_package_version(cache, distro, sys.argv[1]):
|
||||
print (u"Can't find package «%s» in distribution «%s»."
|
||||
% (sys.argv[1], distro))
|
||||
sys.exit(1)
|
||||
|
||||
print (u'Checking package «%s» in distribution «%s»...'
|
||||
% (sys.argv[1], distro))
|
||||
|
||||
find_main(cache, sys.argv[1])
|
||||
|
||||
# True if everything checked until the point is in main
|
||||
all_in_main = True
|
||||
|
||||
for package in packages:
|
||||
if not packages[package]:
|
||||
if all_in_main:
|
||||
print 'The following packages aren\'t in main:'
|
||||
all_in_main = False
|
||||
print ' ', package
|
||||
|
||||
if all_in_main:
|
||||
print (u'Package «%s» and all its dependencies and build dependencies '
|
||||
u'are in main.') % sys.argv[1]
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
# Global variable to hold the status of all packages
|
||||
packages = {}
|
||||
|
||||
# Global variable to hold the target distribution
|
||||
distro = ''
|
||||
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
print 'Aborted.'
|
||||
sys.exit(1)
|
@@ -1,12 +1,12 @@
 Updating the ubuntu-dev-tools package
 -------------------------------------
 
-Here are the steps that are recommended to take when updating the
+Here are the 10 steps that are recommended to take when updating the
 ubuntu-dev-tools package in Ubuntu.
 
-1) Make sure that there are no new revisions to the package's trunk in Bazaar:
+1) Make sure there are no new commits to the package's master branch in git:
 
-bzr pull lp:ubuntu-dev-tools
+git pull
 
 2) Check to make sure that all approved merges have been merged:
 
@@ -15,6 +15,7 @@ ubuntu-dev-tools package in Ubuntu.
 3) Make sure that there is no low lying fruit that can be fixed at:
 
 https://bugs.launchpad.net/ubuntu/+source/ubuntu-dev-tools
+https://bugs.debian.org/src:ubuntu-dev-tools
 
 4) Check that the test suite passes
 
@@ -27,31 +28,27 @@ ubuntu-dev-tools package in Ubuntu.
 If there is no UNRELEASED entry, make sure that the version for the current
 one has not been uploaded by someone else already:
 
-http://packages.qa.debian.org/u/ubuntu-dev-tools.html
+https://tracker.debian.org/pkg/ubuntu-dev-tools
 https://launchpad.net/ubuntu/+source/ubuntu-dev-tools/+publishinghistory
 
-6) Once the target release has been changed, commit it to Bazaar (where X.YY is
+6) Once the target release has been changed, commit it to git (where X.YY is
 the new package version):
 
-bzr commit -m "Uploaded X.YY to RELEASE."
+git commit -a -m "Uploaded X.YY to RELEASE."
 
-7) Tag the new release in Bazaar:
+7) Create the new source package and tag the new release in git:
 
-bzr tag
+gbp buildpackage -S --git-tag
 
-For a full list of tags, please see: 'bzr tags'. This is so we can track
-which Bazaar revision is in which release and makes bug triaging easier.
+For a full list of tags, please see: 'git tag -l'. This is so we can track
+which git commit is in which release and makes bug triaging easier.
 
-8) Create the new source package:
-
-bzr bd -S
-
-9) Upload the package to Debian with dput as normal:
+8) Upload the package to Debian with dput as normal:
 
 dput ftp-master ubuntu-dev-tools_X.YY_$arch.changes
 
-10) Create a new blank entry with dch -i and mark it as UNRELEASED.
+9) Create a new blank entry with dch -i and mark it as UNRELEASED.
 
-11) After it's been dinstalled in Debian, sync to Ubuntu:
+10) After it's been dinstalled in Debian, sync to Ubuntu:
 
 syncpackage ubuntu-dev-tools
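Taken together, the updated steps above amount to a short command sequence. A minimal sketch, assuming an already-configured ftp-master dput target and a git-buildpackage checkout (X.YY and RELEASE are the placeholders used in the text):

git pull                                              # 1) refresh the packaging tree
git commit -a -m "Uploaded X.YY to RELEASE."          # 6) record the target release change
gbp buildpackage -S --git-tag                         # 7) build the source package and tag it
dput ftp-master ubuntu-dev-tools_X.YY_$arch.changes   # 8) upload to Debian
dch -i                                                # 9) open a new UNRELEASED changelog entry
syncpackage ubuntu-dev-tools                          # 10) after dinstall, sync to Ubuntu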
backportpackage (modified, 609 lines changed)
@@ -1,4 +1,4 @@
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
# ##################################################################
|
||||
#
|
||||
@ -18,228 +18,286 @@
|
||||
#
|
||||
# ##################################################################
|
||||
|
||||
import optparse
|
||||
import argparse
|
||||
import glob
|
||||
import os
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
import tempfile
|
||||
from urllib.parse import quote
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
import lsb_release
|
||||
try:
|
||||
import lsb_release
|
||||
except ImportError:
|
||||
lsb_release = None
|
||||
from distro_info import DebianDistroInfo, UbuntuDistroInfo
|
||||
from httplib2 import Http, HttpLib2Error
|
||||
|
||||
from debian.debian_support import Version
|
||||
|
||||
from devscripts.logger import Logger
|
||||
|
||||
from ubuntutools.archive import (SourcePackage, DebianSourcePackage,
|
||||
UbuntuSourcePackage, DownloadError, rmadison)
|
||||
from ubuntutools.config import UDTConfig, ubu_email
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.archive import DebianSourcePackage, DownloadError, UbuntuSourcePackage
|
||||
from ubuntutools.builder import get_builder
|
||||
from ubuntutools.misc import (system_distribution, vendor_to_distroinfo,
|
||||
codename_to_distribution)
|
||||
from ubuntutools.config import UDTConfig, ubu_email
|
||||
from ubuntutools.lp.lpapicache import (
|
||||
Distribution,
|
||||
Launchpad,
|
||||
PackageNotFoundException,
|
||||
SeriesNotFoundException,
|
||||
)
|
||||
from ubuntutools.misc import codename_to_distribution, system_distribution, vendor_to_distroinfo
|
||||
from ubuntutools.question import YesNoQuestion
|
||||
from ubuntutools import subprocess
|
||||
|
||||
def error(msg):
|
||||
Logger.error(msg)
|
||||
Logger = getLogger()
|
||||
|
||||
|
||||
def error(msg, *args):
|
||||
Logger.error(msg, *args)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def check_call(cmd, *args, **kwargs):
|
||||
Logger.command(cmd)
|
||||
Logger.debug(" ".join(cmd))
|
||||
ret = subprocess.call(cmd, *args, **kwargs)
|
||||
if ret != 0:
|
||||
error('%s returned %d.' % (cmd[0], ret))
|
||||
error("%s returned %d.", cmd[0], ret)
|
||||
|
||||
def parse(args):
|
||||
usage = 'Usage: %prog [options] <source package name or .dsc URL/file>'
|
||||
parser = optparse.OptionParser(usage)
|
||||
parser.add_option('-d', '--destination',
|
||||
dest='dest_releases',
|
||||
default=[],
|
||||
action='append',
|
||||
help='Backport to DEST release '
|
||||
'(default: current release)',
|
||||
metavar='DEST')
|
||||
parser.add_option('-s', '--source',
|
||||
dest='source_release',
|
||||
default=None,
|
||||
help='Backport from SOURCE release '
|
||||
'(default: devel release)',
|
||||
metavar='SOURCE')
|
||||
parser.add_option('-S', '--suffix',
|
||||
dest='suffix',
|
||||
default=None,
|
||||
help='Suffix to append to version number '
|
||||
'(default: ~ppa1)',
|
||||
metavar='SUFFIX')
|
||||
parser.add_option('-b', '--build',
|
||||
dest='build',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help='Build the package before uploading '
|
||||
'(default: %default)')
|
||||
parser.add_option('-B', '--builder',
|
||||
dest='builder',
|
||||
default=None,
|
||||
help='Specify the package builder (default: pbuilder)',
|
||||
metavar='BUILDER')
|
||||
parser.add_option('-U', '--update',
|
||||
dest='update',
|
||||
default=False,
|
||||
action='store_true',
|
||||
help='Update the build environment before '
|
||||
'attempting to build')
|
||||
parser.add_option('-u', '--upload',
|
||||
dest='upload',
|
||||
help='Specify an upload destination',
|
||||
metavar='UPLOAD')
|
||||
parser.add_option('-y', '--yes',
|
||||
dest='prompt',
|
||||
default=True,
|
||||
action='store_false',
|
||||
help='Do not prompt before uploading to a PPA')
|
||||
parser.add_option('-v', '--version',
|
||||
dest='version',
|
||||
default=None,
|
||||
help='Package version to backport (or verify)',
|
||||
metavar='VERSION')
|
||||
parser.add_option('-w', '--workdir',
|
||||
dest='workdir',
|
||||
default=None,
|
||||
help='Specify a working directory '
|
||||
'(default: temporary dir)',
|
||||
metavar='WORKDIR')
|
||||
parser.add_option('-m', '--mirror',
|
||||
dest='mirror',
|
||||
default=None,
|
||||
help='Preferred mirror (default: Launchpad)',
|
||||
metavar='INSTANCE')
|
||||
parser.add_option('-l', '--lpinstance',
|
||||
dest='lpinstance',
|
||||
default=None,
|
||||
help='Launchpad instance to connect to '
|
||||
'(default: production)',
|
||||
metavar='INSTANCE')
|
||||
parser.add_option('--no-conf',
|
||||
dest='no_conf',
|
||||
default=False,
|
||||
help="Don't read config files or environment variables",
|
||||
action='store_true')
|
||||
|
||||
opts, args = parser.parse_args(args)
|
||||
if len(args) != 1:
|
||||
parser.error('You must specify a single source package or a .dsc '
|
||||
'URL/path.')
|
||||
config = UDTConfig(opts.no_conf)
|
||||
if opts.builder is None:
|
||||
opts.builder = config.get_value('BUILDER')
|
||||
if not opts.update:
|
||||
opts.update = config.get_value('UPDATE_BUILDER', boolean=True)
|
||||
if opts.workdir is None:
|
||||
opts.workdir = config.get_value('WORKDIR')
|
||||
if opts.lpinstance is None:
|
||||
opts.lpinstance = config.get_value('LPINSTANCE')
|
||||
if not opts.upload and not opts.workdir:
|
||||
parser.error('Please specify either a working dir or an upload target!')
|
||||
def parse(argv):
|
||||
usage = "%(prog)s [options] <source package name or .dsc URL/file>"
|
||||
parser = argparse.ArgumentParser(usage=usage)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--destination",
|
||||
metavar="DEST",
|
||||
dest="dest_releases",
|
||||
default=[],
|
||||
action="append",
|
||||
help="Backport to DEST release (default: current release)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-s",
|
||||
"--source",
|
||||
metavar="SOURCE",
|
||||
dest="source_release",
|
||||
help="Backport from SOURCE release (default: devel release)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-S",
|
||||
"--suffix",
|
||||
metavar="SUFFIX",
|
||||
help="Suffix to append to version number (default: ~ppa1 when uploading to a PPA)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-e",
|
||||
"--message",
|
||||
metavar="MESSAGE",
|
||||
default="No-change",
|
||||
help='Changelog message to use instead of "No-change" '
|
||||
"(default: No-change backport to DEST.)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-b",
|
||||
"--build",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Build the package before uploading (default: %(default)s)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-B",
|
||||
"--builder",
|
||||
metavar="BUILDER",
|
||||
help="Specify the package builder (default: pbuilder)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-U",
|
||||
"--update",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Update the build environment before attempting to build",
|
||||
)
|
||||
parser.add_argument("-u", "--upload", metavar="UPLOAD", help="Specify an upload destination")
|
||||
parser.add_argument(
|
||||
"-k", "--key", dest="keyid", help="Specify the key ID to be used for signing."
|
||||
)
|
||||
parser.add_argument(
|
||||
"--dont-sign", dest="keyid", action="store_false", help="Do not sign the upload."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-y",
|
||||
"--yes",
|
||||
dest="prompt",
|
||||
default=True,
|
||||
action="store_false",
|
||||
help="Do not prompt before uploading to a PPA",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v", "--version", metavar="VERSION", help="Package version to backport (or verify)"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-w",
|
||||
"--workdir",
|
||||
metavar="WORKDIR",
|
||||
help="Specify a working directory (default: temporary dir)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-r",
|
||||
"--release-pocket",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Target the release pocket in the .changes file. "
|
||||
"Necessary (and default) for uploads to PPAs",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-c", "--close", metavar="BUG", help="Bug to close in the changelog entry."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-m", "--mirror", metavar="URL", help="Preferred mirror (default: Launchpad)"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l",
|
||||
"--lpinstance",
|
||||
metavar="INSTANCE",
|
||||
help="Launchpad instance to connect to (default: production)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-conf",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Don't read config files or environment variables",
|
||||
)
|
||||
parser.add_argument("package_or_dsc", help=argparse.SUPPRESS)
|
||||
|
||||
return opts, args, config
|
||||
args = parser.parse_args(argv)
|
||||
config = UDTConfig(args.no_conf)
|
||||
if args.builder is None:
|
||||
args.builder = config.get_value("BUILDER")
|
||||
if not args.update:
|
||||
args.update = config.get_value("UPDATE_BUILDER", boolean=True)
|
||||
if args.workdir is None:
|
||||
args.workdir = config.get_value("WORKDIR")
|
||||
if args.lpinstance is None:
|
||||
args.lpinstance = config.get_value("LPINSTANCE")
|
||||
if args.upload is None:
|
||||
args.upload = config.get_value("UPLOAD")
|
||||
if args.keyid is None:
|
||||
args.keyid = config.get_value("KEYID")
|
||||
if not args.upload and not args.workdir:
|
||||
parser.error("Please specify either a working dir or an upload target!")
|
||||
if args.upload and args.upload.startswith("ppa:"):
|
||||
args.release_pocket = True
|
||||
|
||||
def get_current_version(package, distribution, source_release):
|
||||
info = vendor_to_distroinfo(distribution)
|
||||
source_release = info().codename(source_release, default=source_release)
|
||||
return args, config
|
||||
|
||||
latest_version = None
|
||||
|
||||
releases = [source_release]
|
||||
if distribution.lower() == "ubuntu":
|
||||
releases += [source_release + "-updates", source_release + "-security"]
|
||||
|
||||
for release in releases:
|
||||
for record in rmadison(distribution.lower(), package, suite=release):
|
||||
if 'source' not in record:
|
||||
continue
|
||||
|
||||
if (not latest_version or
|
||||
Version(latest_version) < Version(record['version'])):
|
||||
latest_version = record['version']
|
||||
|
||||
return latest_version
|
||||
|
||||
def find_release_package(launchpad, mirror, workdir, package, version,
|
||||
source_release, config):
|
||||
def find_release_package(mirror, workdir, package, version, source_release, config):
|
||||
srcpkg = None
|
||||
|
||||
if source_release:
|
||||
distribution = codename_to_distribution(source_release)
|
||||
|
||||
if not distribution:
|
||||
error('Unknown release codename %s' % source_release)
|
||||
error("Unknown release codename %s", source_release)
|
||||
info = vendor_to_distroinfo(distribution)()
|
||||
source_release = info.codename(source_release, default=source_release)
|
||||
else:
|
||||
distribution = system_distribution()
|
||||
mirrors = [mirror] if mirror else []
|
||||
|
||||
mirrors.append(config.get_value('%s_MIRROR' % distribution.upper()))
|
||||
mirrors.append(config.get_value(f"{distribution.upper()}_MIRROR"))
|
||||
|
||||
if not version:
|
||||
version = get_current_version(package, distribution, source_release)
|
||||
archive = Distribution(distribution.lower()).getArchive()
|
||||
try:
|
||||
spph = archive.getSourcePackage(package, source_release)
|
||||
except (SeriesNotFoundException, PackageNotFoundException) as e:
|
||||
error("%s", str(e))
|
||||
version = spph.getVersion()
|
||||
|
||||
if not version:
|
||||
error('Unable to find package %s in release %s.' %
|
||||
(package, source_release))
|
||||
|
||||
if distribution == 'Debian':
|
||||
srcpkg = DebianSourcePackage(package,
|
||||
version,
|
||||
workdir=workdir,
|
||||
lp=launchpad,
|
||||
mirrors=mirrors)
|
||||
elif distribution == 'Ubuntu':
|
||||
srcpkg = UbuntuSourcePackage(package,
|
||||
version,
|
||||
workdir=workdir,
|
||||
lp=launchpad,
|
||||
mirrors=mirrors)
|
||||
if distribution == "Debian":
|
||||
srcpkg = DebianSourcePackage(package, version, workdir=workdir, mirrors=mirrors)
|
||||
elif distribution == "Ubuntu":
|
||||
srcpkg = UbuntuSourcePackage(package, version, workdir=workdir, mirrors=mirrors)
|
||||
|
||||
return srcpkg
|
||||
|
||||
def find_package(launchpad, mirror, workdir, package, version, source_release,
|
||||
config):
|
||||
|
||||
def find_package(mirror, workdir, package, version, source_release, config):
|
||||
"Returns the SourcePackage"
|
||||
if package.endswith('.dsc'):
|
||||
return SourcePackage(version=version, dscfile=package,
|
||||
workdir=workdir, lp=launchpad,
|
||||
mirrors=(mirror,))
|
||||
if package.endswith(".dsc"):
|
||||
# Here we are using UbuntuSourcePackage just because we don't have any
|
||||
# "general" class that is safely instantiable (as SourcePackage is an
|
||||
# abstract class). None of the distribution-specific details within
|
||||
# UbuntuSourcePackage is relevant for this use case.
|
||||
return UbuntuSourcePackage(
|
||||
version=version, dscfile=package, workdir=workdir, mirrors=(mirror,)
|
||||
)
|
||||
|
||||
if not source_release and not version:
|
||||
info = vendor_to_distroinfo(system_distribution())
|
||||
source_release = info().devel()
|
||||
|
||||
srcpkg = find_release_package(launchpad, mirror, workdir, package, version,
|
||||
source_release, config)
|
||||
srcpkg = find_release_package(mirror, workdir, package, version, source_release, config)
|
||||
if version and srcpkg.version != version:
|
||||
error('Requested backport of version %s but version of %s in %s is %s'
|
||||
% (version, package, source_release, srcpkg.version))
|
||||
error(
|
||||
"Requested backport of version %s but version of %s in %s is %s",
|
||||
version,
|
||||
package,
|
||||
source_release,
|
||||
srcpkg.version,
|
||||
)
|
||||
|
||||
return srcpkg
|
||||
|
||||
|
||||
def get_backport_version(version, suffix, upload, release):
|
||||
backport_version = version + ('~%s1' % release)
|
||||
distribution = codename_to_distribution(release)
|
||||
if not distribution:
|
||||
error("Unknown release codename %s", release)
|
||||
if distribution == "Debian":
|
||||
debian_distro_info = DebianDistroInfo()
|
||||
debian_codenames = debian_distro_info.supported()
|
||||
if release in debian_codenames:
|
||||
release_version = debian_distro_info.version(release)
|
||||
if not release_version:
|
||||
error("Can't find the release version for %s", release)
|
||||
backport_version = f"{version}~bpo{release_version}+1"
|
||||
else:
|
||||
error("%s is not a supported release (%s)", release, debian_codenames)
|
||||
elif distribution == "Ubuntu":
|
||||
series = Distribution(distribution.lower()).getSeries(name_or_version=release)
|
||||
|
||||
backport_version = f"{version}~bpo{series.version}.1"
|
||||
else:
|
||||
error("Unknown distribution «%s» for release «%s»", distribution, release)
|
||||
if suffix is not None:
|
||||
backport_version += suffix
|
||||
elif upload and upload.startswith('ppa:'):
|
||||
backport_version += '~ppa1'
|
||||
elif upload and upload.startswith("ppa:"):
|
||||
backport_version += "~ppa1"
|
||||
return backport_version
|
||||
|
||||
def get_backport_dist(upload, release):
|
||||
if not upload or upload == 'ubuntu':
|
||||
return '%s-backports' % release
|
||||
else:
|
||||
|
||||
def get_old_version(source, release):
|
||||
try:
|
||||
distribution = codename_to_distribution(release)
|
||||
archive = Distribution(distribution.lower()).getArchive()
|
||||
pkg = archive.getSourcePackage(
|
||||
source, release, ("Release", "Security", "Updates", "Proposed", "Backports")
|
||||
)
|
||||
return pkg.getVersion()
|
||||
except (SeriesNotFoundException, PackageNotFoundException):
|
||||
pass
|
||||
return None
|
||||
|
||||
|
||||
def get_backport_dist(release, release_pocket):
|
||||
if release_pocket:
|
||||
return release
|
||||
return f"{release}-backports"
|
||||
|
||||
|
||||
def do_build(workdir, dsc, release, builder, update):
|
||||
builder = get_builder(builder)
|
||||
if not builder:
|
||||
return
|
||||
return None
|
||||
|
||||
if update:
|
||||
if 0 != builder.update(release):
|
||||
@ -247,100 +305,191 @@ def do_build(workdir, dsc, release, builder, update):
|
||||
|
||||
# builder.build is going to chdir to buildresult:
|
||||
workdir = os.path.realpath(workdir)
|
||||
return builder.build(os.path.join(workdir, dsc),
|
||||
release,
|
||||
os.path.join(workdir, "buildresult"))
|
||||
return builder.build(os.path.join(workdir, dsc), release, os.path.join(workdir, "buildresult"))
|
||||
|
||||
|
||||
def do_upload(workdir, package, bp_version, changes, upload, prompt):
|
||||
print 'Please check %s %s in file://%s carefully!' % \
|
||||
(package, bp_version, workdir)
|
||||
if prompt or upload == 'ubuntu':
|
||||
question = 'Do you want to upload the package to %s' % upload
|
||||
print(f"Please check {package} {bp_version} in file://{workdir} carefully!")
|
||||
if prompt or upload == "ubuntu":
|
||||
question = f"Do you want to upload the package to {upload}"
|
||||
answer = YesNoQuestion().ask(question, "yes")
|
||||
if answer == "no":
|
||||
return
|
||||
|
||||
check_call(['dput', upload, changes], cwd=workdir)
|
||||
check_call(["dput", upload, changes], cwd=workdir)
|
||||
|
||||
def do_backport(workdir, pkg, suffix, release, build, builder, update, upload,
|
||||
prompt):
|
||||
dirname = '%s-%s' % (pkg.source, release)
|
||||
pkg.unpack(dirname)
|
||||
|
||||
def orig_needed(upload, workdir, pkg):
|
||||
"""Avoid a -sa if possible"""
|
||||
if not upload or not upload.startswith("ppa:"):
|
||||
return True
|
||||
ppa = upload.split(":", 1)[1]
|
||||
user, ppa = ppa.split("/", 1)
|
||||
|
||||
version = pkg.version.upstream_version
|
||||
|
||||
http = Http()
|
||||
for filename in glob.glob(os.path.join(workdir, f"{pkg.source}_{version}.orig*")):
|
||||
url = (
|
||||
f"https://launchpad.net/~{quote(user)}/+archive/{quote(ppa)}/+sourcefiles"
|
||||
f"/{quote(pkg.source)}/{quote(pkg.version.full_version)}"
|
||||
f"/{quote(os.path.basename(filename))}"
|
||||
)
|
||||
try:
|
||||
headers = http.request(url, "HEAD")[0]
|
||||
if headers.status != 200 or not headers["content-location"].startswith(
|
||||
"https://launchpadlibrarian.net"
|
||||
):
|
||||
return True
|
||||
except HttpLib2Error as e:
|
||||
Logger.debug(e)
|
||||
return True
|
||||
return False
|
||||
|
||||
|
||||
def do_backport(
|
||||
workdir,
|
||||
pkg,
|
||||
suffix,
|
||||
message,
|
||||
close,
|
||||
release,
|
||||
release_pocket,
|
||||
build,
|
||||
builder,
|
||||
update,
|
||||
upload,
|
||||
keyid,
|
||||
prompt,
|
||||
):
|
||||
dirname = f"{pkg.source}-{release}"
|
||||
srcdir = os.path.join(workdir, dirname)
|
||||
|
||||
bp_version = get_backport_version(pkg.version.full_version, suffix,
|
||||
upload, release)
|
||||
bp_dist = get_backport_dist(upload, release)
|
||||
if os.path.exists(srcdir):
|
||||
question = f"Working directory {srcdir} already exists. Delete it?"
|
||||
if YesNoQuestion().ask(question, "no") == "no":
|
||||
sys.exit(1)
|
||||
shutil.rmtree(srcdir)
|
||||
|
||||
check_call(['dch',
|
||||
'--force-bad-version',
|
||||
'--force-distribution',
|
||||
'--preserve',
|
||||
'--newversion', bp_version,
|
||||
'--distribution', bp_dist,
|
||||
'No-change backport to %s' % release],
|
||||
cwd=srcdir)
|
||||
check_call(['debuild', '--no-lintian', '-S', '-sa'], cwd=srcdir)
|
||||
pkg.unpack(dirname)
|
||||
|
||||
fn_base = pkg.source + '_' + bp_version.split(':', 1)[-1]
|
||||
bp_version = get_backport_version(pkg.version.full_version, suffix, upload, release)
|
||||
old_version = get_old_version(pkg.source, release)
|
||||
bp_dist = get_backport_dist(release, release_pocket)
|
||||
|
||||
changelog = f"{message} backport to {release}."
|
||||
if close:
|
||||
changelog += f" (LP: #{close})"
|
||||
check_call(
|
||||
[
|
||||
"dch",
|
||||
"--force-bad-version",
|
||||
"--force-distribution",
|
||||
"--preserve",
|
||||
"--newversion",
|
||||
bp_version,
|
||||
"--distribution",
|
||||
bp_dist,
|
||||
changelog,
|
||||
],
|
||||
cwd=srcdir,
|
||||
)
|
||||
|
||||
cmd = ["debuild", "--no-lintian", "-S", "-nc", "-uc", "-us"]
|
||||
if orig_needed(upload, workdir, pkg):
|
||||
cmd.append("-sa")
|
||||
else:
|
||||
cmd.append("-sd")
|
||||
if old_version:
|
||||
cmd.append(f"-v{old_version}")
|
||||
env = os.environ.copy()
|
||||
# An ubuntu.com e-mail address would make dpkg-buildpackage fail if there
|
||||
# wasn't an Ubuntu maintainer for an ubuntu-versioned package. LP: #1007042
|
||||
env.pop("DEBEMAIL", None)
|
||||
check_call(cmd, cwd=srcdir, env=env)
|
||||
|
||||
fn_base = pkg.source + "_" + bp_version.split(":", 1)[-1]
|
||||
changes = fn_base + "_source.changes"
|
||||
|
||||
if build:
|
||||
if 0 != do_build(workdir, fn_base + '.dsc', release, builder, update):
|
||||
if 0 != do_build(workdir, fn_base + ".dsc", release, builder, update):
|
||||
sys.exit(1)
|
||||
|
||||
# None: sign with the default signature. False: don't sign
|
||||
if keyid is not False:
|
||||
cmd = ["debsign"]
|
||||
if keyid:
|
||||
cmd.append("-k" + keyid)
|
||||
cmd.append(changes)
|
||||
check_call(cmd, cwd=workdir)
|
||||
if upload:
|
||||
do_upload(workdir, pkg.source, bp_version, fn_base + '_source.changes',
|
||||
upload, prompt)
|
||||
do_upload(workdir, pkg.source, bp_version, changes, upload, prompt)
|
||||
|
||||
shutil.rmtree(srcdir)
|
||||
|
||||
def main(args):
|
||||
|
||||
def main(argv):
|
||||
ubu_email()
|
||||
|
||||
opts, (package_or_dsc,), config = parse(args[1:])
|
||||
args, config = parse(argv[1:])
|
||||
|
||||
script_name = os.path.basename(sys.argv[0])
|
||||
launchpad = Launchpad.login_anonymously(script_name, opts.lpinstance)
|
||||
Launchpad.login_anonymously(service=args.lpinstance)
|
||||
|
||||
if not opts.dest_releases:
|
||||
distinfo = lsb_release.get_distro_information()
|
||||
try:
|
||||
opts.dest_releases = [distinfo['CODENAME']]
|
||||
except KeyError:
|
||||
error('No destination release specified and unable to guess yours.')
|
||||
if not args.dest_releases:
|
||||
if lsb_release:
|
||||
distinfo = lsb_release.get_distro_information()
|
||||
try:
|
||||
current_distro = distinfo["ID"]
|
||||
except KeyError:
|
||||
error("No destination release specified and unable to guess yours.")
|
||||
else:
|
||||
err, current_distro = subprocess.getstatusoutput("lsb_release --id --short")
|
||||
if err:
|
||||
error("Could not run lsb_release to retrieve distribution")
|
||||
|
||||
if opts.workdir:
|
||||
workdir = os.path.expanduser(opts.workdir)
|
||||
if current_distro == "Ubuntu":
|
||||
args.dest_releases = [UbuntuDistroInfo().lts()]
|
||||
elif current_distro == "Debian":
|
||||
args.dest_releases = [DebianDistroInfo().stable()]
|
||||
else:
|
||||
error("Unknown distribution %s, can't guess target release", current_distro)
|
||||
|
||||
if args.workdir:
|
||||
workdir = os.path.expanduser(args.workdir)
|
||||
else:
|
||||
workdir = tempfile.mkdtemp(prefix='backportpackage-')
|
||||
workdir = tempfile.mkdtemp(prefix="backportpackage-")
|
||||
|
||||
if not os.path.exists(workdir):
|
||||
os.makedirs(workdir)
|
||||
|
||||
try:
|
||||
pkg = find_package(launchpad,
|
||||
opts.mirror,
|
||||
workdir,
|
||||
package_or_dsc,
|
||||
opts.version,
|
||||
opts.source_release,
|
||||
config)
|
||||
pkg = find_package(
|
||||
args.mirror, workdir, args.package_or_dsc, args.version, args.source_release, config
|
||||
)
|
||||
pkg.pull()
|
||||
|
||||
for release in opts.dest_releases:
|
||||
do_backport(workdir,
|
||||
pkg,
|
||||
opts.suffix,
|
||||
release,
|
||||
opts.build,
|
||||
opts.builder,
|
||||
opts.update,
|
||||
opts.upload,
|
||||
opts.prompt)
|
||||
except DownloadError, e:
|
||||
error(str(e))
|
||||
for release in args.dest_releases:
|
||||
do_backport(
|
||||
workdir,
|
||||
pkg,
|
||||
args.suffix,
|
||||
args.message,
|
||||
args.close,
|
||||
release,
|
||||
args.release_pocket,
|
||||
args.build,
|
||||
args.builder,
|
||||
args.update,
|
||||
args.upload,
|
||||
args.keyid,
|
||||
args.prompt,
|
||||
)
|
||||
except DownloadError as e:
|
||||
error("%s", str(e))
|
||||
finally:
|
||||
if not opts.workdir:
|
||||
if not args.workdir:
|
||||
shutil.rmtree(workdir)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main(sys.argv))
|
||||
|
@@ -21,7 +21,7 @@ _pbuilder-dist()
 
     case $prev in
         build)
-            COMPREPLY=( $( compgen -o filenames -G "$cur*.dsc" ) )
+            _filedir "dsc"
            ;;
        *)
            COMPREPLY=( $( compgen -W "$options" | grep "^$cur" ) )
@@ -36,7 +36,7 @@ _pbuilder-dist()
    for distro in $(ubuntu-distro-info --all; debian-distro-info --all) stable testing unstable; do
        for builder in pbuilder cowbuilder; do
            echo "$builder-$distro"
-            for arch in i386 amd64 armel; do
+            for arch in i386 amd64 armhf; do
                echo "$builder-$distro-$arch"
            done
        done
89
bitesize
89
bitesize
@ -1,89 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
"""Add 'bitesize' tag to bugs and add a comment."""
|
||||
|
||||
# Copyright (c) 2011 Canonical Ltd.
|
||||
#
|
||||
# bitesize is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 3, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# bitesize is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with bitesize; see the file COPYING. If not, write to the Free
|
||||
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
||||
# 02110-1301, USA.
|
||||
#
|
||||
# Authors:
|
||||
# Daniel Holbach <daniel.holbach@canonical.com>
|
||||
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
from launchpadlib.errors import HTTPError
|
||||
|
||||
from devscripts.logger import Logger
|
||||
|
||||
from ubuntutools.config import UDTConfig
|
||||
|
||||
def error_out(msg):
|
||||
Logger.error(msg)
|
||||
sys.exit(1)
|
||||
|
||||
def save_entry(entry):
|
||||
try:
|
||||
entry.lp_save()
|
||||
except HTTPError, error:
|
||||
error_out(error.content)
|
||||
|
||||
def tag_bug(bug):
|
||||
bug.tags = bug.tags + ['bitesize'] # LP: #254901 workaround
|
||||
save_entry(bug)
|
||||
|
||||
def main():
|
||||
usage = "Usage: %prog <bug number>"
|
||||
opt_parser = OptionParser(usage)
|
||||
opt_parser.add_option("-l", "--lpinstance", metavar="INSTANCE",
|
||||
help="Launchpad instance to connect to "
|
||||
"(default: production)",
|
||||
dest="lpinstance", default=None)
|
||||
opt_parser.add_option("--no-conf",
|
||||
help="Don't read config files or "
|
||||
"environment variables.",
|
||||
dest="no_conf", default=False, action="store_true")
|
||||
(options, args) = opt_parser.parse_args()
|
||||
config = UDTConfig(options.no_conf)
|
||||
if options.lpinstance is None:
|
||||
options.lpinstance = config.get_value("LPINSTANCE")
|
||||
if len(args) < 1:
|
||||
opt_parser.error("Need at least one bug number.")
|
||||
|
||||
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
|
||||
if launchpad is None:
|
||||
error_out("Couldn't authenticate to Launchpad.")
|
||||
|
||||
# check that the new main bug isn't a duplicate
|
||||
try:
|
||||
bug = launchpad.bugs[args[0]]
|
||||
except HTTPError, error:
|
||||
if error.response.status == 401:
|
||||
error_out("Don't have enough permissions to access bug %s. %s" % \
|
||||
(args[0], error.content))
|
||||
else:
|
||||
raise
|
||||
if 'bitesize' in bug.tags:
|
||||
error_out("Bug is already marked as 'bitesize'.")
|
||||
bug.newMessage(content="I'm marking this bug as 'bitesize' as it looks "
|
||||
"like an issue that is easy to fix and suitable "
|
||||
"for newcomers in Ubuntu development. If you need "
|
||||
"any help with fixing it, talk to me about it.")
|
||||
bug.subscribe(person=launchpad.me)
|
||||
tag_bug(launchpad.bugs[bug.id]) # fresh bug object, LP: #336866 workaround
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
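The removed script above is Python 2 only (note the "except HTTPError, error" syntax); the tool itself lives on as lp-bitesize, as the debian/copyright diff further down shows. For orientation, a minimal Python 3 sketch of the same flow, assuming python3-launchpadlib and the production Launchpad instance; this is illustrative, not the shipped lp-bitesize:

    import sys

    from launchpadlib.launchpad import Launchpad

    def mark_bitesize(bug_number):
        # Log in and fetch the bug (launchpadlib caches credentials).
        launchpad = Launchpad.login_with("ubuntu-dev-tools", "production")
        bug = launchpad.bugs[bug_number]
        if "bitesize" in bug.tags:
            sys.exit("Bug is already marked as 'bitesize'.")
        # Comment, subscribe, then tag; the tag list must be reassigned so
        # launchpadlib notices the change (the LP: #254901 workaround above).
        bug.newMessage(content="Marking this bug as 'bitesize'.")
        bug.subscribe(person=launchpad.me)
        bug.tags = bug.tags + ["bitesize"]
        bug.lp_save()

    if __name__ == "__main__":
        mark_bitesize(int(sys.argv[1]))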
144  check-mir
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# Check components of build dependencies and warn about universe/multiverse
|
||||
# ones, for a package destined for main/restricted
|
||||
@ -21,65 +21,116 @@
|
||||
# this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
|
||||
|
||||
import apt
|
||||
import sys
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
"""Check if any of a package's build or binary dependencies are in universe or multiverse.
|
||||
|
||||
Run this inside an unpacked source package
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import os.path
|
||||
import sys
|
||||
|
||||
import apt
|
||||
|
||||
|
||||
def check_support(apt_cache, pkgname, alt=False):
|
||||
'''Check if pkgname is in main or restricted.
|
||||
"""Check if pkgname is in main or restricted.
|
||||
|
||||
This prints messages if a package is not in main/restricted, or only
|
||||
partially (i. e. source in main, but binary in universe).
|
||||
'''
|
||||
"""
|
||||
if alt:
|
||||
prefix = ' ... alternative ' + pkgname
|
||||
prefix = " ... alternative " + pkgname
|
||||
else:
|
||||
prefix = ' * ' + pkgname
|
||||
prefix = " * " + pkgname
|
||||
|
||||
try:
|
||||
prov_packages = apt_cache.get_providing_packages(pkgname)
|
||||
if pkgname in apt_cache:
|
||||
pkg = apt_cache[pkgname]
|
||||
except KeyError:
|
||||
print >> sys.stderr, prefix, 'does not exist (pure virtual?)'
|
||||
|
||||
# If this is a virtual package, iterate through the binary packages that
|
||||
# provide this, and ensure they are all in Main. Source packages in and of
|
||||
# themselves cannot provide virtual packages, only binary packages can.
|
||||
elif len(prov_packages) > 0:
|
||||
supported, unsupported = [], []
|
||||
for pkg in prov_packages:
|
||||
candidate = pkg.candidate
|
||||
if candidate:
|
||||
section = candidate.section
|
||||
if section.startswith("universe") or section.startswith("multiverse"):
|
||||
unsupported.append(pkg.name)
|
||||
else:
|
||||
supported.append(pkg.name)
|
||||
|
||||
if len(supported) > 0:
|
||||
msg = "is a virtual package, which is provided by the following "
|
||||
msg += "candidates in Main: " + " ".join(supported)
|
||||
print(prefix, msg)
|
||||
elif len(unsupported) > 0:
|
||||
msg = "is a virtual package, but is only provided by the "
|
||||
msg += "following non-Main candidates: " + " ".join(unsupported)
|
||||
print(prefix, msg, file=sys.stderr)
|
||||
return False
|
||||
else:
|
||||
msg = "is a virtual package that exists but is not provided by "
|
||||
msg += "package currently in the archive. Proceed with caution."
|
||||
print(prefix, msg, file=sys.stderr)
|
||||
return False
|
||||
|
||||
else:
|
||||
print(prefix, "does not exist", file=sys.stderr)
|
||||
return False
|
||||
|
||||
section = pkg.candidate.section
|
||||
if section.startswith('universe') or section.startswith('multiverse'):
|
||||
if section.startswith("universe") or section.startswith("multiverse"):
|
||||
# check if the source package is in main and thus will only need binary
|
||||
# promotion
|
||||
source_records = apt.apt_pkg.SourceRecords()
|
||||
if not source_records.lookup(pkg.candidate.source_name):
|
||||
print >> sys.stderr, 'ERROR: Cannot lookup source package for', \
|
||||
pkg.name
|
||||
print prefix, 'package is in', section.split('/')[0]
|
||||
print("ERROR: Cannot lookup source package for", pkg.name, file=sys.stderr)
|
||||
print(prefix, "package is in", section.split("/")[0])
|
||||
return False
|
||||
src = apt.apt_pkg.TagSection(source_records.record)
|
||||
if (src['Section'].startswith('universe') or
|
||||
src['Section'].startswith('multiverse')):
|
||||
print prefix, 'binary and source package is in', \
|
||||
section.split('/')[0]
|
||||
if src["Section"].startswith("universe") or src["Section"].startswith("multiverse"):
|
||||
print(prefix, "binary and source package is in", section.split("/")[0])
|
||||
return False
|
||||
else:
|
||||
print prefix, 'is in', section.split('/')[0] + ', but its source', \
|
||||
pkg.candidate.source_name, \
|
||||
('is already in main; file an ubuntu-archive bug for '
|
||||
'promoting the current preferred alternative')
|
||||
return True
|
||||
|
||||
print(
|
||||
prefix,
|
||||
"is in",
|
||||
section.split("/")[0] + ", but its source",
|
||||
pkg.candidate.source_name,
|
||||
"is already in main; file an ubuntu-archive bug for "
|
||||
"promoting the current preferred alternative",
|
||||
)
|
||||
return True
|
||||
|
||||
if alt:
|
||||
print prefix, 'is already in main; consider preferring it'
|
||||
print(prefix, "is already in main; consider preferring it")
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def check_build_dependencies(apt_cache, control):
|
||||
print 'Checking support status of build dependencies...'
|
||||
print("Checking support status of build dependencies...")
|
||||
|
||||
any_unsupported = False
|
||||
|
||||
for field in ('Build-Depends', 'Build-Depends-Indep'):
|
||||
for field in ("Build-Depends", "Build-Depends-Indep"):
|
||||
if field not in control.section:
|
||||
continue
|
||||
for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
|
||||
pkgname = or_group[0][0]
|
||||
|
||||
# debhelper-compat is expected to be a build dependency of every
|
||||
# package, so it is a red herring to display it in this report.
|
||||
# (src:debhelper is in Ubuntu Main anyway)
|
||||
if pkgname == "debhelper-compat":
|
||||
continue
|
||||
|
||||
if not check_support(apt_cache, pkgname):
|
||||
# check non-preferred alternatives
|
||||
for altpkg in or_group[1:]:
|
||||
@ -90,22 +141,23 @@ def check_build_dependencies(apt_cache, control):
|
||||
|
||||
return any_unsupported
|
||||
|
||||
|
||||
def check_binary_dependencies(apt_cache, control):
|
||||
any_unsupported = False
|
||||
|
||||
print '\nChecking support status of binary dependencies...'
|
||||
print("\nChecking support status of binary dependencies...")
|
||||
while True:
|
||||
try:
|
||||
control.next()
|
||||
next(control)
|
||||
except StopIteration:
|
||||
break
|
||||
|
||||
for field in ('Depends', 'Pre-Depends', 'Recommends'):
|
||||
for field in ("Depends", "Pre-Depends", "Recommends"):
|
||||
if field not in control.section:
|
||||
continue
|
||||
for or_group in apt.apt_pkg.parse_depends(control.section[field]):
|
||||
for or_group in apt.apt_pkg.parse_src_depends(control.section[field]):
|
||||
pkgname = or_group[0][0]
|
||||
if pkgname.startswith('$'):
|
||||
if pkgname.startswith("$"):
|
||||
continue
|
||||
if not check_support(apt_cache, pkgname):
|
||||
# check non-preferred alternatives
|
||||
@ -117,27 +169,35 @@ def check_binary_dependencies(apt_cache, control):
|
||||
|
||||
return any_unsupported
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description=__doc__)
|
||||
parser.parse_args()
|
||||
apt_cache = apt.Cache()
|
||||
|
||||
if not os.path.exists('debian/control'):
|
||||
print >> sys.stderr, ('debian/control not found. You need to run '
|
||||
'this tool in a source package directory')
|
||||
if not os.path.exists("debian/control"):
|
||||
print(
|
||||
"debian/control not found. You need to run this tool in a source package directory",
|
||||
file=sys.stderr,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
# get build dependencies from debian/control
|
||||
control = apt.apt_pkg.TagFile(open('debian/control'))
|
||||
control.next()
|
||||
control = apt.apt_pkg.TagFile(open("debian/control", encoding="utf-8"))
|
||||
next(control)
|
||||
|
||||
unsupported_build_deps = check_build_dependencies(apt_cache, control)
|
||||
unsupported_binary_deps = check_binary_dependencies(apt_cache, control)
|
||||
|
||||
if unsupported_build_deps or unsupported_binary_deps:
|
||||
print ('\nPlease check https://wiki.ubuntu.com/MainInclusionProcess if '
|
||||
'this source package needs to get into in main/restricted, or '
|
||||
'reconsider if the package really needs above dependencies.')
|
||||
print(
|
||||
"\nPlease check https://wiki.ubuntu.com/MainInclusionProcess if "
|
||||
"this source package needs to get into in main/restricted, or "
|
||||
"reconsider if the package really needs above dependencies."
|
||||
)
|
||||
else:
|
||||
print 'All dependencies are supported in main or restricted.'
|
||||
print("All dependencies are supported in main or restricted.")
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
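The core of the check-mir rewrite above is the per-dependency component test: a dependency is flagged when its candidate lives in universe or multiverse. A minimal sketch of just that test, assuming python3-apt is installed (the full check_support in the diff also handles virtual packages and source records):

    import apt

    def in_main_or_restricted(cache, pkgname):
        # Sections look like "universe/python"; anything not under universe
        # or multiverse counts as supported (main or restricted).
        if pkgname not in cache:
            return False
        candidate = cache[pkgname].candidate
        if candidate is None:
            return False
        return not candidate.section.startswith(("universe", "multiverse"))

    # Example use (package name chosen arbitrarily):
    #   cache = apt.Cache()
    #   print(in_main_or_restricted(cache, "python3-apt"))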
@ -29,21 +29,55 @@
|
||||
# * nm (from binutils)
|
||||
|
||||
DISTRO=$(lsb_release -c -s)
|
||||
VERSION=$(apt-cache madison "$1" | grep -- "$DISTRO"'/.*Sources$' | awk '{print $3}')
|
||||
PACKAGES=$(apt-cache showsrc "$1" | grep-dctrl -s Binary -F Version "$VERSION" | sed 's/Binary\:\ //g;s/\,//g' | sort -u)
|
||||
DEBLINE=""
|
||||
DEBUG=False
|
||||
|
||||
if [[ -z $1 ]]; then
|
||||
echo "Missing argument: source package name."
|
||||
exit 1
|
||||
usage() {
|
||||
prog=$(basename $0)
|
||||
cat <<EOF
|
||||
Usage: $prog [options] source-package [DEBDIR]
|
||||
|
||||
Get a diff of the exported symbols of all .so files in every binary package of
|
||||
package the source package. The source package will be found in DEBDIR, defaulting to /var/cache/pbuilder/result.
|
||||
|
||||
Options:
|
||||
-h, --help show this help message and exit
|
||||
EOF
|
||||
exit $1
|
||||
}
|
||||
|
||||
PACKAGE=""
|
||||
DEBDIR="/var/cache/pbuilder/result"
|
||||
POSITION=0
|
||||
while [ $# -gt 0 ]; do
|
||||
case "$1" in
|
||||
-h|--help)
|
||||
usage 0
|
||||
;;
|
||||
-*)
|
||||
usage 1
|
||||
;;
|
||||
*)
|
||||
if [ $POSITION -eq 0 ]; then
|
||||
PACKAGE="$1"
|
||||
elif [ $POSITION -eq 1 ]; then
|
||||
DEBDIR="$1"
|
||||
else
|
||||
echo "Too many arguments." >&2
|
||||
usage 1
|
||||
fi
|
||||
POSITION=$(($POSITION+1))
|
||||
esac
|
||||
shift
|
||||
done
|
||||
|
||||
if [ $POSITION -eq 0 ]; then
|
||||
echo "Missing argument: source package name." >&2
|
||||
usage 1
|
||||
fi
|
||||
|
||||
if [[ -z $2 ]]; then
|
||||
DEBDIR="/var/cache/pbuilder/result"
|
||||
else
|
||||
DEBDIR="$2"
|
||||
fi
|
||||
VERSION=$(apt-cache madison "$PACKAGE" | grep -- "$DISTRO"'/.*Sources$' | awk '{print $3}')
|
||||
PACKAGES=$(apt-cache showsrc "$PACKAGE" | grep-dctrl -s Binary -F Version "$VERSION" | sed 's/Binary\:\ //g;s/\,//g' | sort -u)
|
||||
|
||||
if [ `id -u` != "0" ]
|
||||
then
|
||||
@ -67,7 +101,7 @@ do
|
||||
done
|
||||
|
||||
if [[ -z $DEBLINE ]]; then
|
||||
echo "Package doesn't exist: $1."
|
||||
echo "Package doesn't exist: $PACKAGE."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
|
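check-symbols, the shell script above, diffs the symbols exported by every .so shipped in a source package's binaries. A minimal Python sketch of the central step, listing a shared object's exported dynamic symbols with nm from binutils; the path and exact flags are illustrative assumptions, not taken from the script:

    import subprocess

    def exported_symbols(so_path):
        # "nm -D" lists dynamic symbols; "--defined-only" drops undefined references.
        result = subprocess.run(
            ["nm", "-D", "--defined-only", so_path],
            capture_output=True, text=True, check=True,
        )
        return {line.split()[-1] for line in result.stdout.splitlines() if line.strip()}

    # Comparing two builds of a library then reduces to set differences:
    #   old = exported_symbols("old/libfoo.so.1")
    #   new = exported_symbols("new/libfoo.so.1")
    #   removed, added = old - new, new - old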
1  debian/.gitignore (vendored, new file)
@@ -0,0 +1 @@
files
26  debian/README.source (vendored, new file)
@@ -0,0 +1,26 @@
Changelog generation and releasing
----------------------------------

The changelog is generated by the uploader using `gbp dch' from
`git-buildpackage'. To invoke, just run

$ gbp dch

and then edit the changelog as appropriate - wrap lines, remove Signed-Off-By,
and so on. Then finalise the changelog, e.g.

$ dch -D unstable --release ""

commit it

$ git commit debian/changelog -m "Releasing 0.foo"

and tag/sign this commit

$ gbp buildpackage --git-tag-only

then build using (for example)

$ gbp buildpackage -S

and test/upload as normal.
1359  debian/changelog (vendored)
File diff suppressed because it is too large
3  debian/clean (vendored)
@@ -1,2 +1 @@
*.egg-info/*
test-data/example_*
*.egg-info/
1  debian/compat (vendored)
@@ -1 +0,0 @@
7
174  debian/control (vendored)
@ -1,85 +1,101 @@
|
||||
Source: ubuntu-dev-tools
|
||||
Section: devel
|
||||
Priority: optional
|
||||
Maintainer: Ubuntu Developers <ubuntu-dev-team@lists.alioth.debian.org>
|
||||
Uploaders: Benjamin Drung <bdrung@debian.org>,
|
||||
Stefano Rivera <stefanor@debian.org>
|
||||
Vcs-Bzr: lp:ubuntu-dev-tools
|
||||
Vcs-Browser: https://code.launchpad.net/~ubuntu-dev/ubuntu-dev-tools/trunk
|
||||
Build-Depends: dctrl-tools,
|
||||
debhelper (>= 7.0.50~),
|
||||
devscripts (>= 2.11.0~),
|
||||
distro-info (>= 0.2~),
|
||||
libwww-perl,
|
||||
lsb-release,
|
||||
pylint,
|
||||
python-all (>= 2.6.5-13~),
|
||||
python-apt (>= 0.7.93~),
|
||||
python-debian (>= 0.1.20~),
|
||||
python-distro-info (>= 0.4~),
|
||||
python-gnupginterface,
|
||||
python-launchpadlib (>= 1.5.7),
|
||||
python-mox,
|
||||
python-setuptools,
|
||||
python-soappy,
|
||||
python-unittest2
|
||||
X-Python-Version: >= 2.6
|
||||
Maintainer: Ubuntu Developers <ubuntu-dev-tools@packages.debian.org>
|
||||
Uploaders:
|
||||
Benjamin Drung <bdrung@debian.org>,
|
||||
Stefano Rivera <stefanor@debian.org>,
|
||||
Mattia Rizzolo <mattia@debian.org>,
|
||||
Simon Quigley <tsimonq2@debian.org>,
|
||||
Build-Depends:
|
||||
black <!nocheck>,
|
||||
dctrl-tools,
|
||||
debhelper-compat (= 13),
|
||||
devscripts (>= 2.11.0~),
|
||||
dh-make,
|
||||
dh-python,
|
||||
distro-info (>= 0.2~),
|
||||
flake8,
|
||||
isort <!nocheck>,
|
||||
lsb-release,
|
||||
pylint <!nocheck>,
|
||||
python3-all,
|
||||
python3-apt,
|
||||
python3-dateutil,
|
||||
python3-debian,
|
||||
python3-debianbts,
|
||||
python3-distro-info,
|
||||
python3-httplib2,
|
||||
python3-launchpadlib-desktop,
|
||||
python3-pytest,
|
||||
python3-requests <!nocheck>,
|
||||
python3-setuptools,
|
||||
python3-yaml <!nocheck>,
|
||||
Standards-Version: 4.7.2
|
||||
Rules-Requires-Root: no
|
||||
Vcs-Git: https://git.launchpad.net/ubuntu-dev-tools
|
||||
Vcs-Browser: https://git.launchpad.net/ubuntu-dev-tools
|
||||
Homepage: https://launchpad.net/ubuntu-dev-tools
|
||||
Standards-Version: 3.9.2
|
||||
|
||||
Package: ubuntu-dev-tools
|
||||
Architecture: all
|
||||
Depends: binutils,
|
||||
dctrl-tools,
|
||||
devscripts (>= 2.11.0~),
|
||||
diffstat,
|
||||
distro-info (>= 0.2~),
|
||||
dpkg-dev,
|
||||
lsb-release,
|
||||
python-apt (>= 0.7.93~),
|
||||
python-debian (>= 0.1.20~),
|
||||
python-distro-info (>= 0.4~),
|
||||
python-launchpadlib (>= 1.5.7),
|
||||
python-lazr.restfulclient,
|
||||
sudo,
|
||||
${misc:Depends},
|
||||
${perl:Depends},
|
||||
${python:Depends}
|
||||
Recommends: bzr,
|
||||
bzr-builddeb,
|
||||
ca-certificates,
|
||||
debian-archive-keyring,
|
||||
debian-keyring,
|
||||
debootstrap,
|
||||
genisoimage,
|
||||
libwww-perl,
|
||||
pbuilder | cowdancer | sbuild,
|
||||
perl-modules,
|
||||
python-dns,
|
||||
python-gnupginterface,
|
||||
python-soappy,
|
||||
reportbug (>= 3.39ubuntu1)
|
||||
Suggests: ipython, python-simplejson | python (>= 2.7), qemu-user-static
|
||||
Depends:
|
||||
binutils,
|
||||
dctrl-tools,
|
||||
devscripts (>= 2.11.0~),
|
||||
diffstat,
|
||||
distro-info (>= 0.2~),
|
||||
dpkg-dev,
|
||||
dput,
|
||||
lsb-release,
|
||||
python3,
|
||||
python3-apt,
|
||||
python3-debian,
|
||||
python3-debianbts,
|
||||
python3-distro-info,
|
||||
python3-httplib2,
|
||||
python3-launchpadlib-desktop,
|
||||
python3-lazr.restfulclient,
|
||||
python3-ubuntutools (= ${binary:Version}),
|
||||
python3-yaml,
|
||||
sensible-utils,
|
||||
sudo,
|
||||
tzdata,
|
||||
${misc:Depends},
|
||||
${perl:Depends},
|
||||
Recommends:
|
||||
arch-test,
|
||||
ca-certificates,
|
||||
debian-archive-keyring,
|
||||
debian-keyring,
|
||||
debootstrap,
|
||||
genisoimage,
|
||||
lintian,
|
||||
patch,
|
||||
sbuild | pbuilder | cowbuilder,
|
||||
python3-dns,
|
||||
quilt,
|
||||
reportbug (>= 3.39ubuntu1),
|
||||
ubuntu-keyring | ubuntu-archive-keyring,
|
||||
Suggests:
|
||||
bzr | brz,
|
||||
bzr-builddeb | brz-debian,
|
||||
qemu-user-static,
|
||||
Description: useful tools for Ubuntu developers
|
||||
This is a collection of useful tools that Ubuntu developers use to make their
|
||||
packaging work a lot easier.
|
||||
.
|
||||
Such tools include:
|
||||
.
|
||||
- 404main - used to check what components a package's deps are in, for
|
||||
doing a main inclusion report for example.
|
||||
- backportpackage - helper to test package backports
|
||||
- bitesize - add the 'bitesize' tag to a bug and comment that you are
|
||||
willing to help fix it.
|
||||
- check-mir - check support status of build/binary dependencies
|
||||
- check-symbols - will compare and give you a diff of the exported symbols of
|
||||
all .so files in a binary package.
|
||||
- dch-repeat - used to repeat a change log into an older release.
|
||||
- dgetlp - download a source package from the Launchpad library.
|
||||
- grab-merge - grabs a merge from merges.ubuntu.com easily.
|
||||
- grep-merges - search for pending merges from Debian.
|
||||
- harvest - grabs information about development opportunities from
|
||||
http://harvest.ubuntu.com
|
||||
- hugdaylist - compile HugDay lists from bug list URLs.
|
||||
- import-bug-from-debian - copy a bug from the Debian BTS to Launchpad
|
||||
- merge-changelog - manually merges two Debian changelogs with the same base
|
||||
version.
|
||||
@ -89,14 +105,22 @@ Description: useful tools for Ubuntu developers
|
||||
chroots (for different Ubuntu and Debian releases) on the same system.
|
||||
- pull-debian-debdiff - attempts to find and download a specific version of
|
||||
a Debian package and its immediate parent to generate a debdiff.
|
||||
- pull-debian-source - downloads the lastest source package available in
|
||||
- pull-debian-source - downloads the latest source package available in
|
||||
Debian of a package.
|
||||
- pull-lp-source - downloads lastest source package from Launchpad.
|
||||
- pull-revu-source - downloads the latest source package from REVU
|
||||
- pull-lp-source - downloads source package from Launchpad.
|
||||
- pull-lp-debs - downloads debs package(s) from Launchpad.
|
||||
- pull-lp-ddebs - downloads dbgsym/ddebs package(s) from Launchpad.
|
||||
- pull-lp-udebs - downloads udebs package(s) from Launchpad.
|
||||
- pull-debian-* - same as pull-lp-* but for Debian packages.
|
||||
- pull-uca-* - same as pull-lp-* but for Ubuntu Cloud Archive packages.
|
||||
- pull-pkg - common script that provides above pull-* functionality.
|
||||
- requestbackport - file a backporting request.
|
||||
- requestsync - files a sync request with Debian changelog and rationale.
|
||||
- reverse-depends - find the reverse dependencies (or build dependencies) of
|
||||
a package.
|
||||
- running-autopkgtests - lists the currently running and/or queued
|
||||
autopkgtests on the Ubuntu autopkgtest infrastructure
|
||||
- seeded-in-ubuntu - query if a package is safe to upload during a freeze.
|
||||
- setup-packaging-environment - assistant to get an Ubuntu installation
|
||||
ready for packaging work.
|
||||
- sponsor-patch - Downloads a patch from a Launchpad bug, patches the source
|
||||
@ -106,4 +130,26 @@ Description: useful tools for Ubuntu developers
|
||||
- ubuntu-build - give commands to the Launchpad build daemons from the
|
||||
command line.
|
||||
- ubuntu-iso - output information of an Ubuntu ISO image.
|
||||
- ubuntu-upload-permission - query / list the upload permissions for a
|
||||
package.
|
||||
- update-maintainer - script to update maintainer field in ubuntu packages.
|
||||
|
||||
Package: python3-ubuntutools
|
||||
Architecture: all
|
||||
Section: python
|
||||
Depends:
|
||||
python3-dateutil,
|
||||
python3-debian,
|
||||
python3-distro-info,
|
||||
python3-httplib2,
|
||||
python3-launchpadlib-desktop,
|
||||
python3-lazr.restfulclient,
|
||||
python3-requests,
|
||||
sensible-utils,
|
||||
${misc:Depends},
|
||||
${python3:Depends},
|
||||
Description: useful APIs for Ubuntu developer tools — Python 3 library
|
||||
This package ships a collection of APIs, helpers and wrappers used to
|
||||
develop useful utilities for Ubuntu developers.
|
||||
.
|
||||
This package installs the library for Python 3.
|
||||
|
80  debian/copyright (vendored)
@ -1,28 +1,32 @@
|
||||
Format: http://svn.debian.org/wsvn/dep/web/deps/dep5.mdwn?op=file&rev=166
|
||||
Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/
|
||||
Upstream-Name: Ubuntu Developer Tools
|
||||
Upstream-Contact: Ubuntu Developers <ubuntu-devel-discuss@lists.ubuntu.com>
|
||||
Source: https://launchpad.net/ubuntu-dev-tools
|
||||
|
||||
Files: *
|
||||
backportpackage
|
||||
Files: backportpackage
|
||||
bash_completion/pbuilder-dist
|
||||
check-symbols
|
||||
debian/*
|
||||
doc/backportpackage.1
|
||||
doc/check-symbols.1
|
||||
doc/requestsync.1
|
||||
doc/ubuntu-iso.1
|
||||
doc/running-autopkgtests.1
|
||||
GPL-2
|
||||
README.updates
|
||||
requestsync
|
||||
setup.py
|
||||
TODO
|
||||
ubuntu-iso
|
||||
ubuntutools/requestsync/lp.py
|
||||
ubuntutools/requestsync/mail.py
|
||||
ubuntutools/requestsync/*.py
|
||||
Copyright: 2007, Albert Damen <albrt@gmx.net>
|
||||
2010, Benjamin Drung <bdrung@ubuntu.com>
|
||||
2007-2010, Canonical Ltd.
|
||||
2010-2024, Benjamin Drung <bdrung@ubuntu.com>
|
||||
2007-2023, Canonical Ltd.
|
||||
2006-2007, Daniel Holbach <daniel.holbach@ubuntu.com>
|
||||
2010, Evan Broder <evan@ebroder.net>
|
||||
2006-2007, Luke Yelavich <themuso@ubuntu.com>
|
||||
2009-2010, Michael Bienia <geser@ubuntu.com>
|
||||
2024-2025, Simon Quigley <tsimonq2@debian.org>
|
||||
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
2008, Stephan Hermann <sh@sourcecode.de>
|
||||
2007, Steve Kowalik <stevenk@ubuntu.com>
|
||||
@ -39,11 +43,7 @@ License: GPL-2
|
||||
On Debian systems, the complete text of the GNU General Public License
|
||||
version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
|
||||
|
||||
Files: 404main
|
||||
dgetlp
|
||||
doc/404main.1
|
||||
doc/dgetlp.1
|
||||
doc/import-bug-from-debian.1
|
||||
Files: doc/import-bug-from-debian.1
|
||||
doc/pbuilder-dist-simple.1
|
||||
doc/pbuilder-dist.1
|
||||
doc/submittodebian.1
|
||||
@ -74,23 +74,28 @@ License: GPL-2+
|
||||
On Debian systems, the complete text of the GNU General Public License
|
||||
version 2 can be found in the /usr/share/common-licenses/GPL-2 file.
|
||||
|
||||
Files: doc/bitesize.1
|
||||
Files: doc/lp-bitesize.1
|
||||
doc/check-mir.1
|
||||
doc/grab-merge.1
|
||||
doc/harvest.1
|
||||
doc/hugdaylist.1
|
||||
doc/merge-changelog.1
|
||||
doc/pm-helper.1
|
||||
doc/setup-packaging-environment.1
|
||||
doc/syncpackage.1
|
||||
bitesize
|
||||
lp-bitesize
|
||||
check-mir
|
||||
GPL-3
|
||||
grab-merge
|
||||
harvest
|
||||
hugdaylist
|
||||
merge-changelog
|
||||
pm-helper
|
||||
pyproject.toml
|
||||
run-linters
|
||||
running-autopkgtests
|
||||
setup-packaging-environment
|
||||
syncpackage
|
||||
ubuntutools/harvest.py
|
||||
Copyright: 2010, Benjamin Drung <bdrung@ubuntu.com>
|
||||
2007-2011, Canonical Ltd.
|
||||
ubuntutools/running_autopkgtests.py
|
||||
ubuntutools/utils.py
|
||||
Copyright: 2010-2024, Benjamin Drung <bdrung@ubuntu.com>
|
||||
2007-2024, Canonical Ltd.
|
||||
2008, Jonathan Patrick Davies <jpds@ubuntu.com>
|
||||
2008-2010, Martin Pitt <martin.pitt@canonical.com>
|
||||
2009, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
|
||||
@ -112,17 +117,23 @@ Files: dch-repeat
|
||||
doc/dch-repeat.1
|
||||
doc/grep-merges.1
|
||||
doc/mk-sbuild.1
|
||||
doc/pull-lp-source.1
|
||||
doc/pull-revu-source.1
|
||||
doc/pull-pkg.1
|
||||
doc/ubuntu-build.1
|
||||
grep-merges
|
||||
mk-sbuild
|
||||
pull-lp-source
|
||||
pull-revu-source
|
||||
pull-pkg
|
||||
pull-*debs
|
||||
pull-*-source
|
||||
requirements.txt
|
||||
test-requirements.txt
|
||||
tox.ini
|
||||
ubuntu-build
|
||||
ubuntutools/lp/libsupport.py
|
||||
ubuntutools/__init__.py
|
||||
ubuntutools/lp/__init__.py
|
||||
ubuntutools/lp/lpapicache.py
|
||||
ubuntutools/lp/udtexceptions.py
|
||||
ubuntutools/misc.py
|
||||
ubuntutools/pullpkg.py
|
||||
Copyright: 2007-2010, Canonical Ltd.
|
||||
2008-2009, Iain Lane <iain@orangesquash.org.uk>
|
||||
2006, John Dong <jdong@ubuntu.com>
|
||||
@ -131,7 +142,8 @@ Copyright: 2007-2010, Canonical Ltd.
|
||||
2009-2010, Michael Bienia <geser@ubuntu.com>
|
||||
2009, Nathan Handler <nhandler@ubuntu.com>
|
||||
2007-2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
|
||||
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
2010-2012, Stefano Rivera <stefanor@ubuntu.com>
|
||||
2012, Steve Langasek <steve.langasek@ubuntu.com>
|
||||
License: GPL-3+
|
||||
This program is free software; you can redistribute it and/or modify
|
||||
it under the terms of the GNU General Public License as published by
|
||||
@ -147,18 +159,20 @@ License: GPL-3+
|
||||
version 3 can be found in the /usr/share/common-licenses/GPL-3 file.
|
||||
|
||||
Files: doc/pull-debian-debdiff.1
|
||||
doc/pull-debian-source.1
|
||||
doc/requestbackport.1
|
||||
doc/reverse-depends.1
|
||||
doc/seeded-in-ubuntu.1
|
||||
doc/sponsor-patch.1
|
||||
doc/ubuntu-dev-tools.5
|
||||
doc/ubuntu-upload-permission.1
|
||||
doc/update-maintainer.1
|
||||
enforced-editing-wrapper
|
||||
pull-debian-debdiff
|
||||
pull-debian-source
|
||||
requestbackport
|
||||
reverse-depends
|
||||
seeded-in-ubuntu
|
||||
sponsor-patch
|
||||
test-data/*
|
||||
ubuntu-upload-permission
|
||||
ubuntutools/archive.py
|
||||
ubuntutools/builder.py
|
||||
ubuntutools/config.py
|
||||
@ -167,11 +181,15 @@ Files: doc/pull-debian-debdiff.1
|
||||
ubuntutools/sponsor_patch/*
|
||||
ubuntutools/test/*
|
||||
ubuntutools/update_maintainer.py
|
||||
ubuntutools/version.py
|
||||
update-maintainer
|
||||
Copyright: 2009-2011, Benjamin Drung <bdrung@ubuntu.com>
|
||||
.pylintrc
|
||||
Copyright: 2009-2024, Benjamin Drung <bdrung@ubuntu.com>
|
||||
2010, Evan Broder <evan@ebroder.net>
|
||||
2008, Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>
|
||||
2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
2017-2021, Dan Streetman <ddstreet@canonical.com>
|
||||
2024, Canonical Ltd.
|
||||
License: ISC
|
||||
Permission to use, copy, modify, and/or distribute this software for any
|
||||
purpose with or without fee is hereby granted, provided that the above
|
||||
|
9  debian/gbp.conf (vendored, new file)
@@ -0,0 +1,9 @@
[DEFAULT]
debian-tag = %(version)s
debian-branch = master
sign-tags = True

[dch]
meta = True
auto = True
full = True
1  debian/python3-ubuntutools.install (vendored, new file)
@@ -0,0 +1 @@
/usr/lib/python3.*
16  debian/rules (vendored)
@@ -1,12 +1,14 @@
#!/usr/bin/make -f

%:
	dh $@ --with python2
override_dh_auto_clean:
	dh_auto_clean
	rm -f .coverage
	rm -rf .tox

ifeq (,$(filter nocheck,$(DEB_BUILD_OPTIONS)))
override_dh_auto_test:
	set -e; \
	for python in $(shell pyversions -r); do \
		$$python setup.py test; \
	done
ifeq (,$(filter nocheck,$(DEB_BUILD_OPTIONS)))
	python3 -m pytest -v ubuntutools
endif

%:
	dh $@ --with python3 --buildsystem=pybuild
3  debian/source/lintian-overrides (vendored, new file)
@@ -0,0 +1,3 @@
# pyc files are machine-generated; they're expected to have long lines and have unstated copyright
source: file-without-copyright-information *.pyc [debian/copyright]
source: very-long-line-length-in-source-file * > 512 [*.pyc:*]
7  debian/tests/control (vendored, new file)
@@ -0,0 +1,7 @@
Test-Command: python3 -m pytest -v ubuntutools
Depends:
 dh-make,
 python3-pytest,
 python3-setuptools,
 @,
Restrictions: allow-stderr
2  debian/ubuntu-dev-tools.install (vendored, new file)
@@ -0,0 +1,2 @@
/usr/bin
/usr/share
332  dgetlp
@ -1,332 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: UTF-8 -*-
|
||||
# Copyright (C) 2008 Terence Simpson <tsimpson@ubuntu.com>
|
||||
# License:
|
||||
# This program is free software; you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation; either version 2 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License along
|
||||
# with this program; if not, write to the Free Software Foundation, Inc.,
|
||||
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
|
||||
#
|
||||
# This script simulates «dget»'s behaviour for files hosted at
|
||||
# launchpadlibrarian.net.
|
||||
#
|
||||
# Detailed description:
|
||||
# This script attempts to download the source package in the same
|
||||
# way as dget does, but from launchpadlibrarian.net, which doesn't
|
||||
# store all the files in the same directory. It (the script) assumes
|
||||
# that the files are stored in sequential directories on Launchpad
|
||||
# Librarian and attempts to download and then unpack them.
|
||||
# This is a Python rewrite of the original bash script
|
||||
|
||||
import cStringIO
|
||||
import email.feedparser
|
||||
import hashlib
|
||||
import optparse
|
||||
import os
|
||||
import sys
|
||||
import urllib2
|
||||
|
||||
try:
|
||||
import GnuPGInterface
|
||||
except ImportError:
|
||||
print >> sys.stderr, ("Please install 'python-gnupginterface' in order to "
|
||||
"use this utility.")
|
||||
sys.exit(1)
|
||||
|
||||
from ubuntutools import subprocess
|
||||
|
||||
USAGE = u"""Usage: %prog [-d|(-v|-q)] <Launchpad URL>
|
||||
|
||||
This scripts simulates «dget»'s behaviour for files hosted at
|
||||
launchpadlibrarian.net.
|
||||
|
||||
If you specify the -d option then it won't do anything, except download the
|
||||
.dsc file, but just print the commands it would run otherwise.
|
||||
|
||||
Example:
|
||||
%prog http://launchpadlibrarian.net/10348157/coreutils_5.97-5.4ubuntu1.dsc
|
||||
"""
|
||||
|
||||
BASE_URL = "http://launchpadlibrarian.net/"
|
||||
|
||||
Debug = Verbose = Quiet = False
|
||||
|
||||
def unsign(data):
|
||||
if data.splitlines()[0] != "-----BEGIN PGP SIGNED MESSAGE-----":
|
||||
return data
|
||||
oldstdout = sys.stdout
|
||||
oldstderr = sys.stderr
|
||||
sys.stdout = sys.__stdout__
|
||||
sys.stderr = sys.__stderr__
|
||||
gpg = GnuPGInterface.GnuPG()
|
||||
proc = gpg.run(["--decrypt"], create_fhs=['stdin', 'stdout'])
|
||||
proc.handles['stdin'].write(data)
|
||||
proc.handles['stdin'].close()
|
||||
plain = proc.handles['stdout'].read()
|
||||
proc.handles['stdout'].close()
|
||||
try:
|
||||
proc.wait()
|
||||
except:
|
||||
pass
|
||||
sys.stdout = oldstdout
|
||||
sys.stderr = oldstderr
|
||||
return plain
|
||||
|
||||
def get_entries(data):
|
||||
parser = email.feedparser.FeedParser()
|
||||
parser.feed(data)
|
||||
return parser.close()
|
||||
|
||||
class DscParse(object):
|
||||
"""Attempt to get the file list from the .dsc file"""
|
||||
def __init__(self, data):
|
||||
"""
|
||||
__init__(data)
|
||||
Given the contents of a .dsc, parse it and extract it's content
|
||||
"""
|
||||
self.entries = get_entries(unsign(data))
|
||||
self.files = [x.strip().split() for x in
|
||||
self.entries['Files'].splitlines()]
|
||||
|
||||
def verify_all(self):
|
||||
"""
|
||||
verify_all()
|
||||
Verifies all the files, first checking the size, then the md5 sum.
|
||||
Currently not used in this utility.
|
||||
"""
|
||||
assert self.files, "I have no files"
|
||||
ret = []
|
||||
for f in self.files:
|
||||
ret.append(self.verify(f))
|
||||
return ret
|
||||
|
||||
def verify(self, name):
|
||||
"""
|
||||
verify(name)
|
||||
Verify the file 'name', first checking the size, then the md5 sum.
|
||||
"""
|
||||
assert self.files, "I have no files"
|
||||
f = None
|
||||
if isinstance(name, list):
|
||||
f = name
|
||||
else:
|
||||
for i in self.files:
|
||||
if i[2] == name:
|
||||
f = i
|
||||
if not f:
|
||||
raise ValueError, "%s is not in the .dsc" % name
|
||||
(md5sum, size, name) = tuple(f)
|
||||
stat = os.stat(name)
|
||||
if str(stat.st_size) != size:
|
||||
return (False, name, "Expected a size of %s, got %s" % \
|
||||
(size, stat.st_size))
|
||||
return self.getsum(name, md5sum)
|
||||
|
||||
def getsum(self, name, md5sum=None):
|
||||
"""
|
||||
getsum(name[, md5sum])
|
||||
Read the file 'name' (in 1MB chunks) and generate an md5 sum,
|
||||
then compares that to the md5 sum in the .dsc file.
|
||||
"""
|
||||
chunk_size = 1073741824
|
||||
fd = open(name, 'rb')
|
||||
res = hashlib.md5()
|
||||
if not md5sum:
|
||||
assert self.files, "I have no files"
|
||||
md5sum = [x[0] for x in self.files if x[2] == name][0]
|
||||
data = fd.read(chunk_size)
|
||||
while data:
|
||||
res.update(data)
|
||||
data = fd.read(chunk_size)
|
||||
if res.hexdigest() != md5sum:
|
||||
return (False, name, "Expected md5sum of %r, got %r" % \
|
||||
(md5sum, res.hexdigest()))
|
||||
return (True, name, None)
|
||||
|
||||
def is_native(self):
|
||||
"""
|
||||
is_native()
|
||||
Returns True if this .dsc describes a native debian package;
|
||||
else false.
|
||||
"""
|
||||
return len(self.files) == 1
|
||||
|
||||
# Access to fields in the .dsc via a dict-like interface
|
||||
def __getitem__(self, item):
|
||||
"""
|
||||
x.__getitem(item) -> x[item]
|
||||
"""
|
||||
return self.entries.__getitem__(item)
|
||||
|
||||
def __contains__(self, item):
|
||||
"""
|
||||
x.__contains__(item) -> item in x
|
||||
"""
|
||||
return self.entries.__contains__(item)
|
||||
|
||||
def __getattr__(self, attr):
|
||||
"""
|
||||
x.__getattr__(attr) -> item.attr
|
||||
"""
|
||||
return getattr(self.entries, attr)
|
||||
|
||||
def error(ret, msg, *args):
|
||||
"""Prints an error message, unless quiet is set, and exits with ret"""
|
||||
if not Quiet:
|
||||
print >> sys.stderr, msg % args
|
||||
sys.exit(ret)
|
||||
|
||||
def debug(msg, *args):
|
||||
"""If debugging is enabled, print a message"""
|
||||
if Debug:
|
||||
print >> sys.stderr, msg % args
|
||||
|
||||
def info(msg, *args):
|
||||
"""If verbose is enabled, print a message"""
|
||||
if Verbose:
|
||||
print msg % tuple(args)
|
||||
|
||||
def status(msg, *args):
|
||||
"""Prints a message, unless quiet is enabled"""
|
||||
if not Quiet:
|
||||
print msg % tuple(args)
|
||||
|
||||
def download(dscinfo, number, filename, verify=True):
|
||||
"""download filename"""
|
||||
ftype = filename.endswith(".diff.gz") and "diff.gz" or \
|
||||
filename.endswith(".orig.tar.gz") and "orig.tar.gz" or \
|
||||
filename.endswith(".dsc") and "dsc" or "tar.gz"
|
||||
if verify and os.path.exists(filename):
|
||||
info('Verifying "%s"', filename)
|
||||
res = dscinfo.verify(filename)
|
||||
if not res[0]:
|
||||
error(104, "Verification of %s failed: %s", filename, res[2])
|
||||
status("Getting %s", filename)
|
||||
debug("%s%s/%s", BASE_URL, number, filename)
|
||||
try:
|
||||
fd = urllib2.urlopen("%s%s/%s" % (BASE_URL, number, filename))
|
||||
outfd = open(filename, 'wb')
|
||||
outfd.write(fd.read())
|
||||
fd.close()
|
||||
outfd.close()
|
||||
except urllib2.HTTPError, err:
|
||||
status(u"Failed to fetch «%s» file, aborting.", ftype)
|
||||
error(106, "Error: (%d %s)", err.code, err.msg)
|
||||
except urllib2.URLError, err:
|
||||
status(u"Failed to fetch «%s» file, aborting.", ftype)
|
||||
error(105, "Error: %s", err)
|
||||
except IOError, err:
|
||||
status('Could not create "%s"', filename)
|
||||
error(107, "Error: %s", err)
|
||||
|
||||
def unpack(filename):
|
||||
out = open('/dev/null', 'w')
|
||||
err = open('/dev/null', 'w')
|
||||
cmd = ["dpkg-source", "-x", filename]
|
||||
ret = subprocess.call(cmd, stdout=out, stderr=err)
|
||||
out.close()
|
||||
err.close()
|
||||
if ret:
|
||||
status("Failed to unpack source, aborting.")
|
||||
sys.exit(108)
|
||||
|
||||
def get_host(url):
|
||||
return urllib2.splithost(urllib2.splittype(url)[1])[0]
|
||||
|
||||
def main():
|
||||
global Debug, Verbose, Quiet
|
||||
parser = optparse.OptionParser(usage=USAGE)
|
||||
parser.add_option("-d", "--debug", action="store_true", dest="debug",
|
||||
default=False, help="Enable debugging")
|
||||
parser.add_option("-v", "--verbose", action="store_true", dest="verbose",
|
||||
default=False, help="Enable verbose output")
|
||||
parser.add_option("-q", "--quiet", action="store_true", dest="quiet",
|
||||
default=False, help="Never print any output")
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
if len(args) != 1:
|
||||
parser.error("Missing URL")
|
||||
Debug = options.debug
|
||||
Verbose = options.verbose
|
||||
Quiet = options.quiet
|
||||
if Verbose and Quiet:
|
||||
error(4, "Specifying both --verbose and --quiet does not make sense")
|
||||
if Quiet:
|
||||
sys.stderr = cStringIO.StringIO()
|
||||
sys.stdout = cStringIO.StringIO()
|
||||
|
||||
url = args[0]
|
||||
|
||||
if url.startswith("https://"):
|
||||
url = url.replace("https://", "http://", 1)
|
||||
|
||||
if not url.startswith("http://"):
|
||||
url = "http://" + url
|
||||
|
||||
if get_host(url).startswith("www."):
|
||||
url = url.replace("www.", "", 1)
|
||||
|
||||
if get_host(url) != get_host(BASE_URL):
|
||||
error(1, "Error: This utility only works for files on %s.\n"
|
||||
"Maybe you want to try dget?", BASE_URL)
|
||||
|
||||
(number, filename) = url.split('/')[3:]
|
||||
|
||||
if not filename.endswith('.dsc'):
|
||||
error(2, "You have to provide the URL for the .dsc file.")
|
||||
|
||||
try:
|
||||
number = int(number)
|
||||
except:
|
||||
error(3, "Bad URL format")
|
||||
|
||||
if os.path.exists(filename):
|
||||
os.remove(filename)
|
||||
|
||||
download(None, number, filename, False)
|
||||
try:
|
||||
fd = open(filename)
|
||||
dsc_data = fd.read()
|
||||
fd.close()
|
||||
except Exception:
|
||||
status("Error: Please report this bug, providing the URL and attach"
|
||||
" the following backtrace")
|
||||
raise
|
||||
|
||||
dscinfo = DscParse(dsc_data)
|
||||
|
||||
# launchpadlibrarian.net seems to store in this order:
|
||||
# For native packages:
|
||||
# <number>/.changes
|
||||
# <number>+1/.tar.gz
|
||||
# <number>+2/.dsc
|
||||
# For non-native packages:
|
||||
# <number>/.changes
|
||||
# <number>+1/.orig.tar.gz
|
||||
# <number>+2/.diff.gz
|
||||
# <number>+3/.dsc
|
||||
##
|
||||
# *Assuming* this does not change, we can figure out where the files are on
|
||||
# launchpadlibrarian.net relative to the .dsc file we're given.
|
||||
|
||||
# Only one file listed in the .dsc means it's native package
|
||||
if len(dscinfo.files) == 1:
|
||||
download(dscinfo, number-1, dscinfo.files[0][-1]) # .tar.gz
|
||||
else:
|
||||
download(dscinfo, number-1, dscinfo.files[1][-1]) # .diff.gz
|
||||
download(dscinfo, number-2, dscinfo.files[0][-1]) # .orig.tar.gz
|
||||
|
||||
status("Unpacking")
|
||||
unpack(filename)
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
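The deleted dgetlp script relied on the librarian layout documented in its comments: the .changes, tarballs and .dsc of a single upload sit in consecutive numbered directories on launchpadlibrarian.net. A minimal Python 3 sketch of that URL arithmetic, purely illustrative since the tool has been removed:

    BASE_URL = "http://launchpadlibrarian.net/"

    def companion_urls(dsc_number, filenames):
        # filenames: the entries of the .dsc "Files" field, in order.
        # A native package lists one .tar.gz, stored one directory before the
        # .dsc; a non-native package lists the .orig.tar.gz two directories
        # before and the .diff.gz one before, matching the script above.
        offsets = [1] if len(filenames) == 1 else [2, 1]
        return ["%s%d/%s" % (BASE_URL, dsc_number - offset, name)
                for offset, name in zip(offsets, filenames)]

    # companion_urls(10348157, ["coreutils_5.97.orig.tar.gz",
    #                           "coreutils_5.97-5.4ubuntu1.diff.gz"])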
@ -1,29 +0,0 @@
|
||||
.TH 404main 1 "February 17, 2008" "ubuntu-dev-tools"
|
||||
|
||||
.SH NAME
|
||||
404main \- check if all build dependencies of a package are in main
|
||||
|
||||
.SH SYNOPSIS
|
||||
\fB404main\fP <\fIpackage name\fP> [<\fIdistribution\fP>]
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fB404main\fP is a script that can be used to check if a package and
|
||||
all its build dependencies are in Ubuntu's main component or not.
|
||||
|
||||
.SH CAVEATS
|
||||
\fB404main\fP will take the dependencies and build dependencies of the
|
||||
packages from the distribution you have first in your
|
||||
/etc/apt/sources.list file.
|
||||
.PP
|
||||
Also, because of this the <\fIdistribution\fP> option is NOT trustworthy; if
|
||||
the dependencies changed YOU WILL GET INCORRECT RESULTS.
|
||||
|
||||
.SH SEE ALSO
|
||||
.BR apt-cache (8)
|
||||
|
||||
.SH AUTHORS
|
||||
\fB404main\fP was written by Pete Savage <petesavage@ubuntu.com> and
|
||||
this manpage by Siegfried-Angel Gevatter Pujals <rainct@ubuntu.com>.
|
||||
.PP
|
||||
Both are released under the GNU General Public License, version 2 or
|
||||
later.
|
@ -39,10 +39,10 @@ development release for your distribution.
|
||||
.B \-S \fISUFFIX\fR, \fB\-\-suffix\fR=\fISUFFIX\fR
|
||||
Add the specified suffix to the version number when
|
||||
backporting. \fBbackportpackage\fR will always append
|
||||
~\fIDESTINATION\fR1 to the original version number, and if
|
||||
~ubuntu\fIDESTINATION\fR.1 to the original version number, and if
|
||||
\fISUFFIX\fR is specified, it is appended to that, to get version
|
||||
numbers of the form
|
||||
\fIORIGINAL_VERSION\fR~\fIDESTINATION\fR1\fISUFFIX\fR. If the
|
||||
\fIORIGINAL_VERSION\fR~ubuntu\fIDESTINATION\fR.1\fISUFFIX\fR. If the
|
||||
backported package is being uploaded to a PPA, then \fISUFFIX\fR
|
||||
defaults to \fB~ppa1\fR, otherwise the default is blank.
|
||||
.TP
|
||||
@ -65,6 +65,12 @@ Update the build environment before attempting to build.
|
||||
.B \-u \fIUPLOAD\fR, \fB\-\-upload\fR=\fIUPLOAD\fR
|
||||
Upload to \fIUPLOAD\fR with \fBdput\fR(1) (after confirmation).
|
||||
.TP
|
||||
.B \-k \fIKEYID\fR, \fB\-\-key\fR=\fIKEYID\fR
|
||||
Specify the key ID to be used for signing.
|
||||
.TP
|
||||
.B \-\-dont\-sign
|
||||
Do not sign the upload.
|
||||
.TP
|
||||
.B \-y\fR, \fB\-\-yes
|
||||
Do not prompt before uploading to a PPA. For everyone's safety, this
|
||||
option is ignored if \fIUPLOAD\fR is \fBubuntu\fR.
|
||||
@ -85,12 +91,23 @@ unpacked, built into, and otherwise manipulated in
|
||||
\fIWORKDIR\fR. Otherwise, a temporary directory is created, which is
|
||||
deleted before \fIbackportpackage\fR exits.
|
||||
.TP
|
||||
.B \-r\fR, \fB\-\-release\-pocket
|
||||
Target the upload at the release pocket, rather than the
|
||||
\fB\-backports\fR pocket.
|
||||
This is required for Launchpad PPAs, which are pocket-less (and the
|
||||
default, when the upload target is a PPA).
|
||||
.TP
|
||||
.B \-m \fIMIRROR\fR, \fB\-\-mirror\fR=\fIMIRROR\fR
|
||||
Use the specified mirror.
|
||||
Should be in the form \fBhttp://archive.ubuntu.com/ubuntu\fR.
|
||||
If the package isn't found on this mirror, \fBbackportpackage\fR
|
||||
will use Launchpad.
|
||||
.TP
|
||||
.B \-c \fIBUG\fR, \fB\-\-close\fR=\fIBUG\fR
|
||||
Include a Launchpad closer for the specified bug in the auto-generated
|
||||
changelog. In the future, this may actually close the bug, but
|
||||
currently does not.
|
||||
.TP
|
||||
.B \-l \fIINSTANCE\fR, \fB\-\-lpinstance\fR=\fIINSTANCE\fR
|
||||
Use the specified instance of Launchpad (e.g. "staging"), instead of
|
||||
the default of "production".
|
||||
@ -123,6 +140,9 @@ The default value for \fB\-\-builder\fR.
|
||||
.BR BACKPORTPACKAGE_UPDATE_BUILDER ", " UBUNTUTOOLS_UPDATE_BUILDER
|
||||
The default value for \fB--update\fR.
|
||||
.TP
|
||||
.B BACKPORTPACKAGE_UPLOAD
|
||||
The default value for \fB--upload\fR.
|
||||
.TP
|
||||
.BR BACKPORTPACKAGE_WORKDIR ", " UBUNTUTOOLS_WORKDIR
|
||||
The default value for \fB--workdir\fR.
|
||||
.TP
|
||||
|
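The suffix rules described above compose the backported version as ORIGINAL_VERSION~ubuntuDESTINATION.1SUFFIX. A small worked example, assuming DESTINATION is the numeric release (for instance 22.04) and using made-up package versions:

    def backport_version(original, destination, suffix=""):
        # backport_version("2.34-1", "22.04", "~ppa1") -> "2.34-1~ubuntu22.04.1~ppa1"
        return "%s~ubuntu%s.1%s" % (original, destination, suffix)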
38  doc/dgetlp.1
@ -1,38 +0,0 @@
|
||||
.TH DGETLP "1" "27 August 2008" "ubuntu-dev-tools"
|
||||
|
||||
.SH NAME
|
||||
dgetlp \- simulate ``dget'' behaviour for files hosted at librarian.launchpad.net
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B dgetlp [\fB\-d\fP|\fB(\fB\-v\fP|\fB\-q\fP)\fP] <\fBLaunchpad DSC URL\fP>
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBdgetlp\fR simulates dget behaviour by downloading and extracting the <\fBLaunchpad DSC URL\fP> from the Launchpad Librarian.
|
||||
|
||||
.SH OPTIONS
|
||||
Listed below are the command line options for dgetlp:
|
||||
.TP
|
||||
.B \-h, \-\-help
|
||||
show this help message and exit.
|
||||
.TP
|
||||
.B \-d, \-\-debug
|
||||
Enable debugging.
|
||||
.TP
|
||||
.B \-v, \-\-verbose
|
||||
Enable verbose output.
|
||||
.TP
|
||||
.B \-q, \-\-quiet
|
||||
Never print any output.
|
||||
.TP
|
||||
.B <Launchpad DSC URL>
|
||||
This is the source package that you would like to be downloaded from the Launchpad Librarian.
|
||||
|
||||
.SH EXAMPLE
|
||||
.B dgetlp http://launchpadlibrarian.net/10348157/coreutils_5.97-5.4ubuntu1.dsc
|
||||
|
||||
.SH AUTHOR
|
||||
\fBdgetlp\fR was written by Terence Simpson <tsimpson@ubuntu.com> and
|
||||
modified by Siegfried-A. Gevatter <rainct@ubuntu.com>. The python rewrite
|
||||
was written by Terence Simpson <tsimpson@ubuntu.com> based off the original.
|
||||
This man page was written by Ryan Kavanagh <ryanakca@kubuntu.org>.
|
||||
Both are released under the GNU General Public License, version 2 or later.
|
@ -1,19 +0,0 @@
|
||||
.TH harvest 1 "March 21, 2011" "ubuntu-dev-tools"
|
||||
|
||||
.SH NAME
|
||||
harvest \- grabs information about a given source package from harvest.ubuntu.com.
|
||||
|
||||
.SH SYNOPSIS
|
||||
\fBharvest\fP <\fIsource package name\fP>
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBharvest\fP is a script that downloads information about development
|
||||
opportunities from harvest.ubuntu.com and gives a summary of the types of
|
||||
opportunities.
|
||||
|
||||
.SH AUTHORS
|
||||
\fBharvest\fP and its manpage were written by Daniel Holbach
|
||||
<daniel.holbach@ubuntu.com>.
|
||||
.PP
|
||||
Both are released under the GNU General Public License, version 3 or
|
||||
later.
|
@ -1,26 +0,0 @@
|
||||
.TH HUGDAYLIST "1" "August 27, 2008" "ubuntu-dev-tools"
|
||||
|
||||
.SH NAME
|
||||
hugdaylist \- produce MoinMoin wiki formatted tables based on a Launchpad bug list
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B hugdaylist [\fB\-n\fP|\fB\-\-number <NUMBER>\fP] \fBlaunchpad-buglist-url\fP
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBhugdaylist\fP produces MoinMoin wiki formatted tables based on a
|
||||
Launchpad bug list
|
||||
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-\-number=<NUMBER>\fP
|
||||
This option allows you to specify the number of entries to output.
|
||||
.TP
|
||||
\fBlaunchpad-buglist-url\fP
|
||||
Required, this option is a URL pointing to a launchpad bug list.
|
||||
|
||||
.SH AUTHOR
|
||||
\fBhugdaylist\fP has been written by Canonical Ltd., Daniel Holbach
|
||||
<daniel.holbach@canonical.com> and Jonathan Patrick Davies <jpds@ubuntu.com>.
|
||||
This manual page was written by Ryan Kavanagh <ryanakca@kubuntu.org>.
|
||||
.PP
|
||||
Both are released under the GNU General Public License, version 3.
|
@ -1,21 +1,21 @@
|
||||
.TH bitesize "1" "May 9 2010" "ubuntu-dev-tools"
|
||||
.TH lp-bitesize "1" "May 9 2010" "ubuntu-dev-tools"
|
||||
.SH NAME
|
||||
bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
|
||||
lp-bitesize \- Add \fBbitesize\fR tag to bugs and add a comment.
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B bitesize \fR<\fIbug number\fR>
|
||||
.B lp-bitesize \fR<\fIbug number\fR>
|
||||
.br
|
||||
.B bitesize \-\-help
|
||||
.B lp-bitesize \-\-help
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBbitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
|
||||
\fBlp-bitesize\fR adds a bitesize tag to the bug, if it's not there yet. It
|
||||
also adds a comment to the bug indicating that you are willing to help with
|
||||
fixing it.
|
||||
It checks for permission to operate on a given bug first,
|
||||
then perform required tasks on Launchpad.
|
||||
|
||||
.SH OPTIONS
|
||||
Listed below are the command line options for \fBbitesize\fR:
|
||||
Listed below are the command line options for \fBlp-bitesize\fR:
|
||||
.TP
|
||||
.BR \-h ", " \-\-help
|
||||
Display a help message and exit.
|
||||
@ -48,7 +48,7 @@ The default value for \fB--lpinstance\fR.
|
||||
.BR ubuntu\-dev\-tools (5)
|
||||
|
||||
.SH AUTHORS
|
||||
\fBbitesize\fR and this manual page were written by Daniel Holbach
|
||||
\fBlp-bitesize\fR and this manual page were written by Daniel Holbach
|
||||
<daniel.holbach@canonical.com>.
|
||||
.PP
|
||||
Both are released under the terms of the GNU General Public License, version 3.
|
@ -15,6 +15,10 @@ Listed below are the command line options for mk\-sbuild:
|
||||
.B \-\-arch\fR=\fIARCH
|
||||
What architecture to select (defaults to the native architecture).
|
||||
.TP
|
||||
.B \-\-target\fR=\fRARCH
|
||||
Set up the chroot as a cross-building environment targeting the specified
|
||||
architecture.
|
||||
.TP
|
||||
.B \-\-name\fR=\fINAME
|
||||
Base name for the schroot (arch is appended).
|
||||
.TP
|
||||
@ -28,6 +32,10 @@ Turn on script debugging.
|
||||
Do not include the \fB\-updates\fR pocket in the installed
|
||||
\fBsources.list\fR.
|
||||
.TP
|
||||
.B \-\-skip\-proposed
|
||||
Do not include the \fB\-proposed\fR pocket in the installed
|
||||
\fBsources.list\fR.
|
||||
.TP
|
||||
.B \-\-source\-template\fR=\fIFILE
|
||||
Use \fIFILE\fR as the \fBsources.list\fR template (defaults to
|
||||
\fI$HOME\fB/.mk\-sbuild.sources\fR).
|
||||
@ -47,6 +55,24 @@ Pass along a comma separated list of packages to debootstrap's
|
||||
\fBWARNING:\fR be careful using this option as you can end up
|
||||
excluding essential package. See \fBdebootstrap \fR(8) for more details.
|
||||
.TP
|
||||
.B \-\-debootstrap\-keyring\fR=\fIkeyring
|
||||
Pass along the path to a gpg keyring file to debootsrap's
|
||||
\fB\-\-keyring\fR argument. See \fBdebootstrap\fR (8) for more details.
|
||||
.TP
|
||||
.B \-\-debootstrap\-no\-check\-gpg
|
||||
Disable checking gpg signatures of downloaded Release files by using
|
||||
debootstrap's \fB\-\-no\-check\-gpg\fR option. See \fBdebootstrap\fR (8)
|
||||
for more details.
|
||||
.TP
|
||||
.B \-\-debootstrap\-proxy\fR=\fIPROXY
|
||||
Use \fIPROXY\fR as apt proxy.
|
||||
.TP
|
||||
.B \-\-eatmydata
|
||||
Install and use eatmydata (default)
|
||||
.TP
|
||||
.B \-\-skip\-eatmydata
|
||||
Don't install and use eatmydata
|
||||
.TP
|
||||
.B \-\-distro\fR=\fIDISTRO
|
||||
Enable distro-specific logic.
|
||||
When not provided, the distribution is determined from \fIrelease\fR.
|
||||
@ -54,12 +80,34 @@ Currently known distros: "\fBdebian\fR" and "\fBubuntu\fR".
|
||||
.TP
|
||||
.B \-\-vg\fR=\fIVOLUME_GROUP
|
||||
Specify a volume group, and subsequently use a default \fBSCHROOT_TYPE\fR of
|
||||
"\fBlvm-snapshot\fR" rather than "\fBdirectory\fR" (via aufs) mounts.
|
||||
"\fBlvm-snapshot\fR" rather than "\fBdirectory\fR" (via overlayfs or
|
||||
aufs) mounts.
|
||||
.TP
|
||||
.B \-\-zfs-dataset=\fIDATASET
|
||||
Specify a zfs dataset, and subsequently use a default \fBSCHROOT_TYPE\fR of
|
||||
"\fBzfs-snapshot\fR" rather than "\fBdirectory\fR" (via overlayfs or
|
||||
aufs) mounts.
|
||||
.TP
|
||||
.B \-\-type\fR=\fISHROOT_TYPE
|
||||
Specify a \fBSCHROOT_TYPE\fR. Supported values are "\fBdirectory\fR"
|
||||
(default if \fB\-\-vg\fR not specified), "\fBlvm-snapshot\fR" (default
|
||||
if \fB\-\-vg\fR specified), "\fBbtrfs-snapshot\fR", and "\fBfile\fR".
|
||||
if \fB\-\-vg\fR specified), "\fBbtrfs-snapshot\fR", "\fBzfs-snapshot\fR"
|
||||
and "\fBfile\fR".
|
||||
.TP
|
||||
.B \-\-ccache
|
||||
Enable usage of \fBccache\fR by default. See \fBccache\fR (1) for
|
||||
more details.
|
||||
.TP
|
||||
.B \-\-ccache-dir=\fIPATH
|
||||
Use \fBPATH\fR as schroot ccache directory. This directory can be
|
||||
safely shared by multiple schroots, but they will all use the same
|
||||
\fBCCACHE_MAXSIZE\fR.
|
||||
Defaults to /var/cache/ccache-sbuild.
|
||||
See \fBccache\fR (1) for more details.
|
||||
.TP
|
||||
.B \-\-ccache-size=\fISIZE
|
||||
Sets \fBSIZE\fR as the schroot \fBCCACHE_DIR\fR max-size used by ccache.
|
||||
See \fBccache\fR (1) for more details.
|
||||
|
||||
.SH ENVIRONMENT VARIABLES
|
||||
.TP
|
||||
@ -72,10 +120,17 @@ Size of snapshot LVs (defaults to 4G).
|
||||
.B SCHROOT_CONF_SUFFIX
|
||||
Lines to append to schroot entries.
|
||||
.TP
|
||||
.B SCHROOT_PROFILE
|
||||
Profile to use with schroot. (defaults to sbuild)
|
||||
.TP
|
||||
.B SKIP_UPDATES
|
||||
Do not include the \fB\-updates\fR pocket (same as
|
||||
\fB\-\-skip\-updates\fR)
|
||||
.TP
|
||||
.B SKIP_PROPOSED
|
||||
Do not include the \fB\-proposed\fR pocket (same as
|
||||
\fB\-\-skip\-proposed\fR)
|
||||
.TP
|
||||
.B DEBOOTSTRAP_MIRROR
|
||||
Mirror location (same as \fB\-\-debootstrap-mirror\fR)
|
||||
.TP
|
||||
@ -87,6 +142,22 @@ Comma separated list of packages to include when bootstrapping (same as
|
||||
Comma separated list of packages to exclude when bootstrapping (same as
|
||||
\fB\-\-debootstrap-exclude\fR; see warning above)
|
||||
.TP
|
||||
.B DEBOOTSTRAP_KEYRING
|
||||
Keyring file to use for checking gpg signatures of retrieved release files
|
||||
(same as \fB\-\-debootstrap\-keyring\fR)
|
||||
.TP
|
||||
.B DEBOOTSTRAP_NO_CHECK_GPG
|
||||
Disable gpg verification of retrieved release files (same as
|
||||
\fB\-\-debootstrap\-no\-check\-gpg\fR)
|
||||
.TP
|
||||
.B DEBOOTSTRAP_PROXY
|
||||
Proxy to use for apt. (same as
|
||||
\fB\-\-debootstrap\-proxy\fR)
|
||||
.TP
|
||||
.B EATMYDATA
|
||||
Enable or disable eatmydata usage, see \fB\-\-eatmydata\fR
|
||||
and \fB\-\-skip\-eatmydata\fR
|
||||
.TP
|
||||
.B SOURCE_CHROOTS_DIR
|
||||
Use \fBSOURCE_CHROOTS_DIR\fR as home of schroot source directories.
|
||||
(default \fB/var/lib/schroot/chroots\fR)
|
||||
@ -98,6 +169,18 @@ Use \fBSOURCE_CHROOTS_TGZ\fR as home of schroot source tarballs.
|
||||
.B CHROOT_SNAPSHOT_DIR
|
||||
Use \fBCHROOT_SNAPSHOT_DIR\fR as home of mounted btrfs snapshots.
|
||||
(default \fB/var/lib/schroot/snapshots\fR)
|
||||
.TP
|
||||
.B CCACHE
|
||||
Enable \fBccache\fR (1) by default.
|
||||
(defaults to \fB0\fR)
|
||||
.TP
|
||||
.B CCACHE_DIR
|
||||
Use \fBCCACHE_DIR\fR as the \fBccache\fR (1) directory.
|
||||
(default \fB/var/cache/ccache-sbuild\fR)
|
||||
.TP
|
||||
.B CCACHE_SIZE
|
||||
Use \fBCCACHE_SIZE\fR as the \fBccache\fR (1) max-size.
|
||||
(defaults to \fB4G\fR)
|
||||
|
||||
|
||||
.SH FILES
|
||||
|
@ -20,7 +20,7 @@ like for example \fBpbuilder\-feisty\fP, \fBpbuilder\-sid\fP, \fBpbuilder\-gutsy
|
||||
.PP
|
||||
The same applies to \fBcowbuilder\-dist\fP, which uses cowbuilder. The main
|
||||
difference between both is that pbuilder compresses the created chroot as a
|
||||
a tarball, thus using less disc space but needing to uncompress (and possibly
|
||||
tarball, thus using less disc space but needing to uncompress (and possibly
|
||||
compress) its contents again on each run, and cowbuilder doesn't do this.
|
||||
|
||||
.SH USAGE
|
||||
@ -35,11 +35,13 @@ Replace this with the codename of the version of Ubuntu or Debian you want to us
|
||||
.TP
|
||||
\fBarchitecture\fP
|
||||
This optional parameter will attempt to construct a chroot in a foreign
|
||||
architecture. For some architecture pairs (e.g. i386 on an amd64 install),
|
||||
the chroot will be created natively. For others (e.g. armel on an i386
|
||||
install), qemu\-static and binfmt\-misc will be used. Note that some
|
||||
combinations (e.g. amd64 on an i386 install) require special separate
|
||||
kernel handling, and may break in unexpected ways.
|
||||
architecture.
|
||||
For some architecture pairs (e.g. i386 on an amd64 install), the chroot
|
||||
will be created natively.
|
||||
For others (e.g. arm64 on an amd64 install), qemu\-user\-static will be
|
||||
used.
|
||||
Note that some combinations (e.g. amd64 on an i386 install) require
|
||||
special separate kernel handling, and may break in unexpected ways.
|
||||
.TP
|
||||
\fBoperation\fP
|
||||
Replace this with the action you want \fBpbuilder\fP to do (create, update,
|
||||
@ -83,6 +85,9 @@ Suitable environment for preparing security updates.
|
||||
\fB\-\-updates\-only\fP
|
||||
Only use the release, security, and updates pocket.
|
||||
Not the proposed\-updates pocket.
|
||||
.TP
|
||||
\fB\-\-backports\fP
|
||||
Also use the backports archive.
|
||||
|
||||
.SH EXAMPLES
|
||||
.TP
|
||||
|
doc/pm-helper.1 (new file, 44 lines)
@ -0,0 +1,44 @@
|
||||
.\" Copyright (C) 2023, Canonical Ltd.
|
||||
.\"
|
||||
.\" This program is free software; you can redistribute it and/or
|
||||
.\" modify it under the terms of the GNU General Public License, version 3.
|
||||
.\"
|
||||
.\" This program is distributed in the hope that it will be useful,
|
||||
.\" but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
.\" MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
.\" General Public License for more details.
|
||||
.\"
|
||||
.\" You should have received a copy of the GNU General Public License
|
||||
.\" along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
.TH pm\-helper 1 "June 2023" ubuntu\-dev\-tools
|
||||
|
||||
.SH NAME
|
||||
pm\-helper \- helper to guide a developer through proposed\-migration work
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B pm\-helper \fR[\fIoptions\fR] [\fIpackage\fR]
|
||||
|
||||
.SH DESCRIPTION
|
||||
Claim a package from proposed\-migration to work on and get additional
|
||||
information (such as the state of the package in Debian) that may be helpful
|
||||
in unblocking it.
|
||||
.PP
|
||||
This tool is incomplete and under development.
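A minimal usage sketch (assumes pm-helper from this package is installed; the package name and the staging instance are only examples):

# Sketch: claim a proposed-migration item to work on, or inspect a specific package.
import subprocess

subprocess.run(["pm-helper", "--verbose"], check=True)               # claim a package to work on
subprocess.run(["pm-helper", "-l", "staging", "hello"], check=True)  # query "hello" against staging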
|
||||
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.B \-l \fIINSTANCE\fR, \fB\-\-launchpad\fR=\fIINSTANCE\fR
|
||||
Use the specified instance of Launchpad (e.g. "staging"), instead of
|
||||
the default of "production".
|
||||
.TP
|
||||
.B \-v\fR, \fB--verbose\fR
|
||||
be more verbose
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
Display a help message and exit
|
||||
|
||||
.SH AUTHORS
|
||||
\fBpm\-helper\fR and this manpage were written by Steve Langasek
|
||||
<steve.langasek@ubuntu.com>.
|
||||
.PP
|
||||
Both are released under the GPLv3 license.
|
doc/pull-debian-ddebs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-debian-debs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-debian-source.1 (deleted, 89 lines)
@ -1,89 +0,0 @@
|
||||
.\" Copyright (C) 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
.\"
|
||||
.\" Permission to use, copy, modify, and/or distribute this software for any
|
||||
.\" purpose with or without fee is hereby granted, provided that the above
|
||||
.\" copyright notice and this permission notice appear in all copies.
|
||||
.\"
|
||||
.\" THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
.\" REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
.\" AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
.\" INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
.\" LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
.\" OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
.\" PERFORMANCE OF THIS SOFTWARE.
|
||||
.TH PULL\-DEBIAN\-SOURCE "1" "22 January 2011" "ubuntu\-dev\-tools"
|
||||
|
||||
.SH NAME
|
||||
pull\-debian\-source \- download and extract a source package from Debian
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B pull\-debian\-source \fR[\fIoptions\fR] <\fIsource package\fR>
|
||||
[\fIrelease\fR|\fIversion\fR]
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBpull\-debian\-source\fR downloads and extracts the specified
|
||||
\fIversion\fR of \fIsource package\fR, or the latest version in the
|
||||
specified Debian \fIrelease\fR.
|
||||
.P
|
||||
\fBpull\-debian\-source\fR will try the preferred mirror, default
|
||||
mirror, security mirror, and fall back to \fBLaunchpad\fR or
|
||||
\fBsnapshot.debian.org\fR, in search of the requested version.
|
||||
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
.I source package
|
||||
The source package to download from Debian.
|
||||
.TP
|
||||
.I release
|
||||
The release to download the source package from. Defaults to
|
||||
\fBunstable\fR.
|
||||
.TP
|
||||
.I version
|
||||
The specific version of the package to download.
|
||||
.TP
|
||||
.BR \-d ", " \-\-download\-only
|
||||
Do not extract the source package.
|
||||
.TP
|
||||
.B \-m \fIDEBIAN_MIRROR\fR, \fB\-\-mirror\fR=\fIDEBIAN_MIRROR\fR
|
||||
Use the specified mirror.
|
||||
Should be in the form \fBhttp://ftp.debian.org/debian\fR.
|
||||
If the package isn't found on this mirror, \fBpull\-debian\-source\fR
|
||||
will fall back to the default mirror.
|
||||
.TP
|
||||
.B \-s \fIDEBSEC_MIRROR\fR, \fB\-\-security\-mirror\fR=\fIDEBSEC_MIRROR\fR
|
||||
Use the specified mirror.
|
||||
Should be in the form \fBhttp://security.debian.org\fR.
|
||||
If the package isn't found on this mirror, \fBpull\-debian\-source\fR
|
||||
will fall back to the default mirror.
|
||||
.TP
|
||||
.B \-\-no\-conf
|
||||
Do not read any configuration files, or configuration from environment
|
||||
variables.
|
||||
.TP
|
||||
.BR \-h ", " \-\-help
|
||||
Display the usage instructions and exit.
|
||||
|
||||
.SH ENVIRONMENT
|
||||
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
|
||||
environment variables.
|
||||
Variables in the environment take precedence to those in configuration
|
||||
files.
|
||||
|
||||
.SH CONFIGURATION VARIABLES
|
||||
The following variables can be set in the environment or in
|
||||
.BR ubuntu\-dev\-tools (5)
|
||||
configuration files.
|
||||
In each case, the script\-specific variable takes precedence over the
|
||||
package\-wide variable.
|
||||
.TP
|
||||
.BR PULL_DEBIAN_SOURCE_DEBIAN_MIRROR ", " UBUNTUTOOLS_DEBIAN_MIRROR
|
||||
The default value for \fB\-\-mirror\fR.
|
||||
.TP
|
||||
.BR PULL_DEBIAN_SOURCE_DEBSEC_MIRROR ", " UBUNTUTOOLS_DEBSEC_MIRROR
|
||||
The default value for \fB\-\-security\-mirror\fR.
|
||||
|
||||
.SH SEE ALSO
|
||||
.BR dget (1),
|
||||
.BR pull\-debian\-debdiff (1),
|
||||
.BR pull\-lp\-source (1),
|
||||
.BR ubuntu\-dev\-tools (5)
|
doc/pull-debian-source.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-debian-udebs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-lp-ddebs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-lp-debs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-lp-source.1 (deleted, 76 lines)
@ -1,76 +0,0 @@
|
||||
.TH PULL\-LP\-SOURCE "1" "4 August 2008" "ubuntu-dev-tools"
|
||||
|
||||
.SH NAME
|
||||
pull\-lp\-source \- download a source package from Launchpad
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B pull\-lp\-source \fR[\fIoptions\fR]\fB \fBsource package\fR
|
||||
[\fIrelease\fR|\fIversion\fR]
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBpull\-lp\-source\fR downloads and extracts the specified
|
||||
\fIversion\fR of <\fBsource package\fR> from Launchpad, or the latest
|
||||
version of the specified \fIrelease\fR.
|
||||
If no \fIversion\fR or \fIrelease\fR is specified, the latest version in
|
||||
the development release will be downloaded.
|
||||
|
||||
.SH OPTIONS
|
||||
Listed below are the command line options for pull\-lp\-source:
|
||||
.TP
|
||||
.B source package
|
||||
This is the source package that you would like to be downloaded from Launchpad.
|
||||
.TP
|
||||
.B version
|
||||
This is the version of the source package to be downloaded.
|
||||
.TP
|
||||
.B release
|
||||
This is the release that you would like the source package to be downloaded from.
|
||||
This value defaults to the current development release.
|
||||
.TP
|
||||
.BR \-h ", " \-\-help
|
||||
Display a help message and exit.
|
||||
.TP
|
||||
.BR \-d ", " \-\-download\-only
|
||||
Do not extract the source package.
|
||||
.TP
|
||||
.B \-m \fIUBUNTU_MIRROR\fR, \fB\-\-mirror\fR=\fIUBUNTU_MIRROR\fR
|
||||
Use the specified Ubuntu mirror.
|
||||
Should be in the form \fBhttp://archive.ubuntu.com/ubuntu\fR.
|
||||
If the package isn't found on this mirror, \fBpull\-lp\-source\fR will
|
||||
fall back to Launchpad, as its name implies.
|
||||
.TP
|
||||
.B \-\-no\-conf
|
||||
Do not read any configuration files, or configuration from environment
|
||||
variables.
|
||||
|
||||
.SH ENVIRONMENT
|
||||
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
|
||||
environment variables.
|
||||
Variables in the environment take precedence to those in configuration
|
||||
files.
|
||||
.TP
|
||||
.B
|
||||
DIST
|
||||
Specifies the default target.
|
||||
|
||||
.SH CONFIGURATION VARIABLES
|
||||
The following variables can be set in the environment or in
|
||||
.BR ubuntu\-dev\-tools (5)
|
||||
configuration files.
|
||||
In each case, the script\-specific variable takes precedence over the
|
||||
package\-wide variable.
|
||||
.TP
|
||||
.BR PULL_LP_SOURCE_UBUNTU_MIRROR ", " UBUNTUTOOLS_UBUNTU_MIRROR
|
||||
The default value for \fB\-\-mirror\fR.
|
||||
|
||||
.SH SEE ALSO
|
||||
.BR dget (1),
|
||||
.BR pull\-debian\-source (1),
|
||||
.BR pull\-debian\-debdiff (1),
|
||||
.BR ubuntu\-dev\-tools (5)
|
||||
|
||||
.SH AUTHOR
|
||||
.PP
|
||||
\fBpull\-lp\-source\fR and this manual page were written by Iain Lane
|
||||
<iain@orangesquash.org.uk>.
|
||||
Both are released under the GNU General Public License, version 3 or later.
|
doc/pull-lp-source.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-lp-udebs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1
doc/pull-pkg.1 (new file, 147 lines)
@ -0,0 +1,147 @@
|
||||
.TH PULL\-PKG "1" "28 August 2017" "ubuntu-dev-tools"
|
||||
|
||||
.SH NAME
|
||||
pull\-pkg \- download a package for Debian, Ubuntu, UCA, or a PPA
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B pull\-pkg \fR[\fIoptions\fR]\fR <\fIpackage name\fR>
|
||||
[\fIrelease\fR|\fIversion\fR]
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBpull\-pkg\fR downloads the specified \fIversion\fR of
|
||||
<\fIpackage name\fR>, or the latest version from the
|
||||
specified \fIrelease\fR. To request a version from
|
||||
a particular pocket say \fIrelease\fB\-\fIpocket\fR (with a magic
|
||||
\fB\-release\fR for only the release pocket). If no \fIpocket\fR is
|
||||
specified, all pockets will be searched except -backports.
|
||||
If no \fIversion\fR or \fIrelease\fR is specified, the latest version in
|
||||
the development release will be downloaded.
|
||||
|
||||
There are convenience scripts that set pull type and distribution
|
||||
appropriately: these are
|
||||
\fBpull\-lp\-source\fR, \fBpull\-lp\-debs\fR, \fBpull\-lp\-ddebs\fR,
|
||||
and \fBpull\-lp\-udebs\fR, which all pull Ubuntu packages;
|
||||
\fBpull\-debian\-source\fR, \fBpull\-debian\-debs\fR, \fBpull\-debian\-ddebs\fR,
|
||||
and \fBpull\-debian\-udebs\fR, which all pull Debian packages;
|
||||
\fBpull\-uca\-source\fR, \fBpull\-uca\-debs\fR, \fBpull\-uca\-ddebs\fR,
|
||||
and \fBpull\-uca\-udebs\fR, which all pull Ubuntu Cloud Archive packages;
|
||||
and \fBpull\-ppa\-source\fR, \fBpull\-ppa\-debs\fR, \fBpull\-ppa\-ddebs\fR,
|
||||
and \fBpull\-ppa\-udebs\fR, which all pull from a specified Personal Package
|
||||
Archive on Launchpad. Each script pulls the file type in its name, i.e.
|
||||
\fIsource\fR, \fIdebs\fR, \fIddebs\fR, or \fIudebs\fR.
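The division of labour encoded in these script names can be sketched as follows (a hypothetical helper for illustration, not part of pull-pkg itself):

# Sketch: derive distribution and pull type from a convenience-script name.
def parse_script_name(name: str) -> tuple[str, str]:
    # "pull-lp-source" -> ("ubuntu", "source"); "pull-debian-ddebs" -> ("debian", "ddebs")
    _, distro, pull = name.split("-")
    return {"lp": "ubuntu"}.get(distro, distro), pull

assert parse_script_name("pull-lp-source") == ("ubuntu", "source")
assert parse_script_name("pull-uca-udebs") == ("uca", "udebs")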
|
||||
|
||||
.SH OPTIONS
|
||||
Listed below are the command line options for pull\-pkg:
|
||||
.TP
|
||||
.I package name
|
||||
This is the name of the package to download.
|
||||
You can use either the source package name, or binary package name.
|
||||
.TP
|
||||
.I version
|
||||
This is the version of the package to download.
|
||||
.TP
|
||||
.I release
|
||||
This is the release to download from.
|
||||
For debian, you can use either the release name like \fBjessie\fR
|
||||
or \fBsid\fR, or you can use the special release names \fBunstable\fR,
|
||||
\fBstable\fR, or \fBtesting\fR.
|
||||
For ubuntu, you can use either the release name like \fBxenial\fR
|
||||
or the release-pocket like \fBxenial-proposed\fR.
|
||||
For ubuntu cloud archive (uca) you can use either the uca release
|
||||
name like \fBmitaka\fR or the ubuntu and uca release names like
|
||||
\fBtrusty-mitaka\fR. Defaults to the current development release.
|
||||
.TP
|
||||
.BR \-h ", " \-\-help
|
||||
Display a help message and exit.
|
||||
.TP
|
||||
.BR \-v ", " \-\-verbose
|
||||
Be verbose about what is being done.
|
||||
.TP
|
||||
.BR \-d ", " \-\-download\-only
|
||||
Do not extract the source package (applies only to source packages).
|
||||
.TP
|
||||
.B \-m \fIMIRROR\fR, \fB\-\-mirror\fR=\fIMIRROR\fR
|
||||
Use the specified mirror server.
|
||||
Should be in the form \fBhttp://archive.ubuntu.com/ubuntu\fR or
|
||||
\fBhttp://deb.debian.org/debian\fR. If not specified or if the
|
||||
package is not found on the specified mirror, this will fall
|
||||
back to the default mirror(s) and/or mirror(s) from environment
|
||||
variables, and then will fall back to Launchpad or Debian Snapshot.
|
||||
This can be specified multiple times to try multiple mirrors.
|
||||
.TP
|
||||
.B \-\-no\-conf
|
||||
Do not use mirrors from the default configuration, or from
|
||||
any environment variables.
|
||||
.TP
|
||||
.B \-a \fIARCH\fR, \fB\-\-arch\fR=\fIARCH\fR
|
||||
Get binary packages from the \fIARCH\fR architecture.
|
||||
Defaults to the local architecture, if it can be detected.
|
||||
.TP
|
||||
.B \-p \fIPULL\fR, \fB\-\-pull\fR=\fIPULL\fR
|
||||
What to pull: \fBsource\fR, \fBdebs\fR, \fBddebs\fR, \fBudebs\fR,
|
||||
or \fBlist\fR. The \fBlist\fR action only lists all of a package's
|
||||
source and binary files, but does not actually download any.
|
||||
Defaults to \fBsource\fR.
|
||||
.TP
|
||||
.B \-D \fIDISTRO\fR, \fB\-\-distro\fR=\fIDISTRO\fR
|
||||
Pull from: \fBdebian\fR, \fBuca\fR, \fBubuntu\fR, or a \fBppa\fR.
|
||||
\fBlp\fR can be used instead of \fBubuntu\fR.
|
||||
Any string containing \fBcloud\fR can be used instead of \fBuca\fR.
|
||||
If pulling from a ppa, you must specify the PPA. Defaults to \fBubuntu\fR.
|
||||
.TP
|
||||
.B \-\-ppa\fR=ppa:\fIUSER/NAME\fR
|
||||
Applies only when \fBdistro\fR is \fIppa\fR. Can be provided either as
|
||||
a value to the \fB\-\-ppa\fR option parameter, or as a plain option
|
||||
(like \fIrelease\fR or \fIversion\fR). When specified as a plain option,
|
||||
the form must be \fBppa:USER/NAME\fR; when specified as a value to the
|
||||
\fB\-\-ppa\fR option parameter, the leading \fBppa:\fR is optional.
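A small sketch of the accepted spellings (illustrative only; pull-pkg's actual option handling may differ in detail):

# Sketch: "USER/NAME" and "ppa:USER/NAME" are both accepted as --ppa values.
def normalize_ppa_option(value: str) -> str:
    return value if value.startswith("ppa:") else f"ppa:{value}"

assert normalize_ppa_option("someuser/some-ppa") == "ppa:someuser/some-ppa"
assert normalize_ppa_option("ppa:someuser/some-ppa") == "ppa:someuser/some-ppa"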
|
||||
|
||||
.SH ENVIRONMENT
|
||||
All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
|
||||
environment variables.
|
||||
Variables in the environment take precedence over those in configuration
|
||||
files.
|
||||
|
||||
.SH CONFIGURATION VARIABLES
|
||||
The following variables can be set in the environment or in
|
||||
.BR ubuntu\-dev\-tools (5)
|
||||
configuration files.
|
||||
In each case, the script\-specific variable takes precedence over the
|
||||
package\-wide variable.
|
||||
.TP
|
||||
.BR UBUNTUTOOLS_UBUNTU_MIRROR
|
||||
The default mirror.
|
||||
.TP
|
||||
.BR PULL_PKG_UBUNTU_MIRROR
|
||||
The default mirror when using the \fBpull\-pkg\fR script.
|
||||
.TP
|
||||
.BR PULL_[LP|DEBIAN|PPA|UCA]_[SOURCE|DEBS|DDEBS|UDEBS]_MIRROR
|
||||
The default mirror when using the associated script.
|
||||
|
||||
.SH SEE ALSO
|
||||
.BR dget (1),
|
||||
.BR pull\-lp\-source (1),
|
||||
.BR pull\-lp\-debs (1),
|
||||
.BR pull\-lp\-ddebs (1),
|
||||
.BR pull\-lp\-udebs (1),
|
||||
.BR pull\-debian\-source (1),
|
||||
.BR pull\-debian\-debs (1),
|
||||
.BR pull\-debian\-ddebs (1),
|
||||
.BR pull\-debian\-udebs (1),
|
||||
.BR pull\-ppa\-source (1),
|
||||
.BR pull\-ppa\-debs (1),
|
||||
.BR pull\-ppa\-ddebs (1),
|
||||
.BR pull\-ppa\-udebs (1),
|
||||
.BR pull\-uca\-source (1),
|
||||
.BR pull\-uca\-debs (1),
|
||||
.BR pull\-uca\-ddebs (1),
|
||||
.BR pull\-uca\-udebs (1),
|
||||
.BR pull\-debian\-debdiff (1),
|
||||
.BR ubuntu\-dev\-tools (5)
|
||||
|
||||
.SH AUTHOR
|
||||
.PP
|
||||
\fBpull\-pkg\fR was written by Dan Streetman <ddstreet@canonical.com>,
|
||||
based on the original \fBpull\-lp\-source\fR; it and this manual page
|
||||
were written by Iain Lane <iain@orangesquash.org.uk>.
|
||||
All are released under the GNU General Public License, version 3 or later.
|
doc/pull-ppa-ddebs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-ppa-debs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-ppa-source.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-ppa-udebs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-revu-source.1 (deleted, 27 lines)
@ -1,27 +0,0 @@
|
||||
.TH PULL\-REVU\-SOURCE "1" "30 August 2009" "ubuntu-dev-tools"
|
||||
|
||||
.SH NAME
|
||||
pull\-revu\-source \- download a source package from REVU
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B pull\-revu\-source \fR[\fB\-h\fR]\fB <\fBsource package\fR>
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBpull\-revu\-source\fR downloads and extracts the latest version of
|
||||
<\fBsource package\fR> from REVU.
|
||||
|
||||
.SH OPTIONS
|
||||
Listed below are the command line options for pull\-revu\-source:
|
||||
.TP
|
||||
.B \-h, \-\-help
|
||||
Display the usage instructions and exit.
|
||||
.TP
|
||||
.B <source package>
|
||||
This is the source package that you would like to be downloaded from Debian.
|
||||
|
||||
.SH AUTHOR
|
||||
.PP
|
||||
\fBpull\-revu\-source\fR and this manual page were written by Nathan Handler
|
||||
<nhandler@ubuntu.com>. \fBpull\-revu\-source\fR is based on \fBrevupull\fR in
|
||||
\fBkubuntu\-dev\-tools\fR, written by Harald Sitter <apachelogger@ubuntu.com>.
|
||||
Both are released under the GNU General Public License, version 3 or later.
|
doc/pull-uca-ddebs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-uca-debs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-uca-source.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1

doc/pull-uca-udebs.1 (new symbolic link)
@ -0,0 +1 @@
pull-pkg.1
@ -47,10 +47,11 @@ Display a help message and exit.
|
||||
|
||||
.SH SEE ALSO
|
||||
.BR backportpackage (1),
|
||||
.BR reverse\-depends (1).
|
||||
.BR reverse\-depends (1),
|
||||
.BR https://wiki.ubuntu.com/UbuntuBackports .
|
||||
|
||||
.SH AUTHORS
|
||||
\fBreverse\-depends\fR and this manpage were written by Stefano Rivera
|
||||
\fBrequestbackport\fR and this manpage were written by Stefano Rivera
|
||||
<stefanor@ubuntu.com>.
|
||||
.PP
|
||||
Both are released under the terms of the ISC License.
|
||||
|
@ -44,7 +44,7 @@ Display a help message and exit.
|
||||
.TP
|
||||
.B \-d
|
||||
Specifies which Debian distribution a package should be synced from.
|
||||
Default is \fItesting\fR in LTS cycles, otherwise \fIunstable\fR.
|
||||
Default is \fIunstable\fR.
|
||||
.TP
|
||||
.B \-n
|
||||
Specifies that the package is a new package, and requestsync should not
|
||||
@ -125,6 +125,9 @@ Setting this to \fIno\fR is equivalent to running with \fB--email\fR.
|
||||
.TP
|
||||
.BR REQUESTSYNC_LPINSTANCE ", " UBUNTUTOOLS_LPINSTANCE
|
||||
The default value for \fB--lpinstance\fR.
|
||||
.TP
|
||||
.BR REQUESTSYNC_KEYID ", " UBUNTUTOOLS_KEYID
|
||||
The default value for \fB-k\fR.
|
||||
|
||||
.SH SEE ALSO
|
||||
.BR rmadison (1),
|
||||
|
doc/running-autopkgtests.1 (new file, 15 lines)
@ -0,0 +1,15 @@
.TH running\-autopkgtests "1" "18 January 2024" "ubuntu-dev-tools"
.SH NAME
running\-autopkgtests \- dumps a list of currently running autopkgtests

.SH SYNOPSIS
.B running\-autopkgtests

.SH DESCRIPTION
Dumps a list of currently running and queued tests in Autopkgtest.
Pass --running to only see running tests, or --queued to only see
queued tests. Passing both will print both, which is the default behavior.

.SH AUTHOR
.B running\-autopkgtests
was written by Chris Peterson <chris.peterson@canonical.com>.
doc/seeded-in-ubuntu.1 (new file, 60 lines)
@ -0,0 +1,60 @@
|
||||
.\" Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
.\"
|
||||
.\" Permission to use, copy, modify, and/or distribute this software for any
|
||||
.\" purpose with or without fee is hereby granted, provided that the above
|
||||
.\" copyright notice and this permission notice appear in all copies.
|
||||
.\"
|
||||
.\" THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
.\" REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
.\" AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
.\" INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
.\" LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
.\" OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
.\" PERFORMANCE OF THIS SOFTWARE.
|
||||
.TH seeded\-in\-ubuntu 1 "December 2011" ubuntu\-dev\-tools
|
||||
|
||||
.SH NAME
|
||||
seeded\-in\-ubuntu \- Determine whether a package is safe to upload
|
||||
during a freeze
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B seeded\-in\-ubuntu \fR[\fIoptions\fR] \fIpackage\fR...
|
||||
|
||||
.SH DESCRIPTION
|
||||
Lists all the current daily images containing the specified packages,
or reports whether each package is part of the supported seed.
|
||||
.PP
|
||||
If it isn't on an image, it should be safe to upload.
|
||||
During the final freeze, one should avoid packages in the supported seed
|
||||
too.
|
||||
.PP
|
||||
An index of the current manifests is downloaded from UbuntuWire.
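A minimal sketch of fetching that index directly (the URL is the documented default below; the layout of the decoded JSON is not specified here, so the script itself should be used for real queries):

# Sketch: download and decode UbuntuWire's seeded-packages index.
import gzip
import json
import urllib.request

URL = "http://qa.ubuntuwire.org/ubuntu-seeded-packages/seeded.json.gz"
with urllib.request.urlopen(URL) as response:
    index = json.loads(gzip.decompress(response.read()))
print(f"index loaded: {type(index).__name__} with {len(index)} top-level entries")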
|
||||
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-b\fR, \fB\-\-binary\fR
|
||||
The packages specified are binary packages.
|
||||
This is faster than source packages, as otherwise we must query LP to
|
||||
determine the binary packages that every specified source package
|
||||
builds.
|
||||
.TP
|
||||
\fB\-u\fR \fIURL\fR, \fB\-\-data\-url\fR=\fIURL\fR
|
||||
URL for index of seeded packages.
|
||||
Default: UbuntuWire's service at
|
||||
\fBhttp://qa.ubuntuwire.org/ubuntu-seeded-packages/seeded.json.gz\fR.
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
Display a help message and exit
|
||||
|
||||
.SH EXAMPLES
|
||||
All the images that contain unity:
|
||||
.IP
|
||||
.nf
|
||||
.B seeded\-in\-ubuntu -b unity
|
||||
.fi
|
||||
|
||||
.SH AUTHORS
|
||||
\fBseeded\-in\-ubuntu\fR and this manpage were written by Stefano Rivera
|
||||
<stefanor@ubuntu.com>.
|
||||
.PP
|
||||
Both are released under the terms of the ISC License.
|
@ -6,7 +6,13 @@
|
||||
\fBsetup-packaging-environment\fR
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBsetup-packaging-environment\fR aims to make it more straightforward for new contributors to get their Ubuntu installation ready for packaging work. It ensures that all four components from Ubuntu's official repositories are enabled along with their corresponding source repositories. It also installs a minimal set of packages needed for Ubuntu packaging work (ubuntu-dev-tools, devscripts, debhelper, cdbs, patchutils, pbuilder, and build-essential). Finally, it assists in defining the DEBEMAIL and DEBFULLNAME environment variables.
|
||||
\fBsetup-packaging-environment\fR aims to make it more straightforward for new
|
||||
contributors to get their Ubuntu installation ready for packaging work. It
|
||||
ensures that all four components from Ubuntu's official repositories are enabled
|
||||
along with their corresponding source repositories. It also installs a minimal
|
||||
set of packages needed for Ubuntu packaging work (ubuntu-dev-tools, devscripts,
|
||||
debhelper, patchutils, pbuilder, and build-essential). Finally, it assists
|
||||
in defining the DEBEMAIL and DEBFULLNAME environment variables.
|
||||
|
||||
.SH AUTHORS
|
||||
\fBsetup-packaging-environment\fR was written by Siegfried-A. Gevatter <rainct@ubuntu.com>.
|
||||
|
@ -14,9 +14,6 @@ builds it, runs
|
||||
.BR lintian (1)
|
||||
and, after review and confirmation, can upload it.
|
||||
|
||||
For syncs, sponsor\-patch will follow the usual procedure of commenting
|
||||
on \fIbug\fR, marking it confirmed, and subscribing \fBubuntu-archive\fR.
|
||||
|
||||
\fBsponsor\-patch\fR can be used for sponsoring patches, syncs and
|
||||
merges from Debian, SRUs, and creating debdiffs from patches.
|
||||
If \fIbug\fR has multiple patches or branches linked, it will prompt the
|
||||
@ -45,7 +42,7 @@ The changelog target must be valid.
|
||||
The changelog timestamp is touched.
|
||||
|
||||
.PP
|
||||
Should any checks (or the build fail), the user has an option to edit
|
||||
Should any checks (or the build) fail, the user has an option to edit
|
||||
the patched source and try building it again.
|
||||
.PP
|
||||
Unless a working directory is specified, the sources and patches will be
|
||||
@ -129,6 +126,9 @@ The default value for \fB--update\fR.
|
||||
.TP
|
||||
.BR SPONSOR_PATCH_WORKDIR ", " UBUNTUTOOLS_WORKDIR
|
||||
The default value for \fB--workdir\fR.
|
||||
.TP
|
||||
.BR SPONSOR_PATCH_KEYID ", " UBUNTUTOOLS_KEYID
|
||||
The default value for \fB--key\fR.
|
||||
|
||||
.SH EXAMPLES
|
||||
Test-building and sponsoring an upload of bug \fB1234\fR:
|
||||
|
@ -1,12 +1,14 @@
|
||||
.TH SYNCPACKAGE "1" "June 2010" "ubuntu-dev-tools"
|
||||
.SH NAME
|
||||
syncpackage \- copy source packages from Debian to Ubuntu
|
||||
.\"
|
||||
.SH SYNOPSIS
|
||||
.B syncpackage
|
||||
[\fIoptions\fR] \fI<.dsc URL/path or package name>\fR
|
||||
[\fIoptions\fR] \fI<.dsc URL/path or package name(s)>\fR
|
||||
.\"
|
||||
.SH DESCRIPTION
|
||||
\fBsyncpackage\fR causes a source package to be copied from Debian to
|
||||
Ubuntu.
|
||||
\fBsyncpackage\fR causes one or more source package(s) to be copied from Debian
|
||||
to Ubuntu.
|
||||
.PP
|
||||
\fBsyncpackage\fR allows you to upload files with the same checksums of the
|
||||
Debian ones, as the common script used by Ubuntu archive administrators does,
|
||||
@ -14,24 +16,26 @@ this way you can preserve source files integrity between the two distributions.
|
||||
.PP
|
||||
\fBsyncpackage\fR will detect source tarballs with mismatching
|
||||
checksums, and can perform fake syncs.
|
||||
.\"
|
||||
.SH WARNING
|
||||
The use of \fBsyncpackage \-\-no\-lp\fR, which generates a changes file to
|
||||
be directly uploaded to the Ubuntu primary archive or a PPA, is discouraged
|
||||
by the Ubuntu Archive Administrators, as it introduces an unnecessary window
|
||||
for error.
|
||||
This only exists for backward compatibility, for unusual corner cases, and
|
||||
for uploads to archives other than the Ubuntu primary archive.
|
||||
This only exists for backward compatibility, for unusual corner cases
|
||||
(such as fakesyncs), and for uploads to archives other than the Ubuntu
|
||||
primary archive.
|
||||
Omitting this option will cause Launchpad to perform the sync request
|
||||
directly, which is the preferred method for uploads to the Ubuntu primary
|
||||
archive.
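As an illustration of the two modes (the package name is an example; assumes syncpackage is installed and you have the relevant permissions):

# Sketch: preferred Launchpad-side copy vs. the discouraged local mode.
import subprocess

subprocess.run(["syncpackage", "hello"], check=True)             # Launchpad performs the copy
subprocess.run(["syncpackage", "--no-lp", "hello"], check=True)  # builds a local .changes file; see WARNING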
|
||||
.\"
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
Show help message and exit
|
||||
.TP
|
||||
\fB\-d\fI DIST\fR, \fB\-\-distribution\fR=\fIDIST\fR
|
||||
Debian distribution to sync from. Default is \fItesting\fR during LTS
|
||||
cycles, and \fIunstable\fR otherwise.
|
||||
Debian distribution to sync from. Default is \fIunstable\fR.
|
||||
.TP
|
||||
\fB\-r\fI RELEASE\fR, \fB\-\-release\fR=\fIRELEASE\fR
|
||||
Specify target Ubuntu release. Default: current development release.
|
||||
@ -42,20 +46,45 @@ Specify the version to sync from.
|
||||
\fB\-c\fI COMPONENT\fR, \fB\-\-component\fR=\fICOMPONENT\fR
|
||||
Specify the component to sync from.
|
||||
.TP
|
||||
\fB\-b\fI BUG\fR, \fB\-\-bug\fR=\fIBUG\fR
|
||||
Mark a Launchpad bug as being fixed by this upload.
|
||||
.TP
|
||||
\fB\-s\fI USERNAME\fR, \fB\-\-sponsor\fR=\fIUSERNAME\fR
|
||||
Sponsor the sync for \fIUSERNAME\fR (a Launchpad username).
|
||||
.TP
|
||||
\fB\-v\fR, \fB\-\-verbose\fR
|
||||
Display more progress information.
|
||||
.TP
|
||||
.B \-\-no\-lp
|
||||
Construct sync locally rather than letting Launchpad copy the package
|
||||
directly (not recommended).
|
||||
.TP
|
||||
\fB\-F\fR, \fB\-\-fakesync\fR
|
||||
Perform a fakesync, to work around a tarball mismatch between Debian and
|
||||
Ubuntu. This option ignores blacklisting, and performs a local sync.
|
||||
Ubuntu.
|
||||
This option ignores blocklisting, and performs a local sync.
|
||||
It implies \fB\-\-no\-lp\fR, and will leave a signed \fB.changes\fR file
|
||||
for you to upload.
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
Force sync over the top of Ubuntu changes.
|
||||
.TP
|
||||
.B \-\-no\-conf
|
||||
Do not read any configuration files, or configuration from environment
|
||||
variables.
|
||||
.TP
|
||||
\fB\-l\fI INSTANCE\fR, \fB\-\-lpinstance\fR=\fIINSTANCE\fR
|
||||
Launchpad instance to connect to (default: production).
|
||||
.TP
|
||||
.B \-\-simulate
|
||||
Show what would be done, but don't actually do it.
|
||||
.\"
|
||||
.SH LOCAL SYNC PREPARATION OPTIONS
|
||||
.TP
|
||||
Options that only apply when using \fB\-\-no\-lp\fR:
|
||||
.TP
|
||||
.B \-\-no\-lp
|
||||
Construct sync locally, rather than letting Launchpad copy the package
|
||||
directly.
|
||||
It will leave a signed \fB.changes\fR file for you to upload.
|
||||
See the \fBWARNING\fR above.
|
||||
.TP
|
||||
\fB\-n\fI UPLOADER_NAME\fR, \fB\-\-uploader\-name\fR=\fIUPLOADER_NAME\fR
|
||||
Use UPLOADER_NAME as the name of the maintainer for this upload instead
|
||||
of evaluating DEBFULLNAME and UBUMAIL.
|
||||
@ -72,12 +101,6 @@ Specify the key ID to be used for signing.
|
||||
\fB\-\-dont-sign\fR
|
||||
Do not sign the upload.
|
||||
.TP
|
||||
\fB\-b\fI BUG\fR, \fB\-\-bug\fR=\fIBUG\fR
|
||||
Mark a Launchpad bug as being fixed by this upload.
|
||||
.TP
|
||||
\fB\-f\fR, \fB\-\-force\fR
|
||||
Force sync over the top of Ubuntu changes.
|
||||
.TP
|
||||
.B \-d \fIDEBIAN_MIRROR\fR, \fB\-\-debian\-mirror\fR=\fIDEBIAN_MIRROR\fR
|
||||
Use the specified mirror.
|
||||
Should be in the form \fBhttp://ftp.debian.org/debian\fR.
|
||||
@ -89,13 +112,7 @@ Use the specified Debian security mirror.
|
||||
Should be in the form \fBhttp://archive.ubuntu.com/ubuntu\fR.
|
||||
If the package isn't found on this mirror, \fBsyncpackage\fR will fall
|
||||
back to the default mirror.
|
||||
.TP
|
||||
.B \-\-no\-conf
|
||||
Do not read any configuration files, or configuration from environment
|
||||
variables.
|
||||
.TP
|
||||
.B \-\-simulate
|
||||
Show what would be done, but don't actually do it.
|
||||
.\"
|
||||
.SH ENVIRONMENT
|
||||
.TP
|
||||
.BR DEBFULLNAME ", " DEBEMAIL ", " UBUMAIL
|
||||
@ -108,6 +125,7 @@ All of the \fBCONFIGURATION VARIABLES\fR below are also supported as
|
||||
environment variables.
|
||||
Variables in the environment take precedence over those in configuration
|
||||
files.
|
||||
.\"
|
||||
.SH CONFIGURATION VARIABLES
|
||||
The following variables can be set in the environment or in
|
||||
.BR ubuntu\-dev\-tools (5)
|
||||
@ -120,12 +138,17 @@ The default value for \fB\-\-debian\-mirror\fR.
|
||||
.TP
|
||||
.BR SYNCPACKAGE_UBUNTU_MIRROR ", " UBUNTUTOOLS_DEBSEC_MIRROR
|
||||
The default value for \fB\-\-ubuntu\-mirror\fR.
|
||||
.TP
|
||||
.BR SYNCPACKAGE_KEYID ", " UBUNTUTOOLS_KEYID
|
||||
The default value for \fB\-\-key\fR.
|
||||
.\"
|
||||
.SH SEE ALSO
|
||||
.BR requestsync (1),
|
||||
.BR ubuntu\-dev\-tools (5)
|
||||
.\"
|
||||
.SH AUTHOR
|
||||
\fBsyncpackage\fR was written by Martin Pitt <martin.pitt@canonical.com> and Benjamin Drung <bdrung@ubuntu.com>.
|
||||
.PP
|
||||
This manual page were written by Luca Falavigna <dktrkranz@ubuntu.com>
|
||||
This manual page was written by Luca Falavigna <dktrkranz@ubuntu.com>
|
||||
.PP
|
||||
Both are released under GNU General Public License, version 3.
|
||||
|
@ -1,20 +1,19 @@
|
||||
.TH UBUNTU-BUILD "1" "June 2010" "ubuntu-dev-tools"
|
||||
.TH UBUNTU-BUILD "1" "Mar 2024" "ubuntu-dev-tools"
|
||||
.SH NAME
|
||||
ubuntu-build \- command-line interface to Launchpad build operations
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B ubuntu-build <srcpackage> <release> <operation>
|
||||
.nf
|
||||
\fBubuntu-build\fR <srcpackage> <release> <operation>
|
||||
\fBubuntu-build\fR --batch [--retry] [--rescore \fIPRIORITY\fR] [--arch \fIARCH\fR [...]]
|
||||
[--series \fISERIES\fR] [--state \fIBUILD-STATE\fR]
|
||||
[-A \fIARCHIVE\fR] [pkg]...
|
||||
.fi
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBubuntu-build\fR provides a command line interface to the Launchpad build
|
||||
operations.
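For example, the batch mode shown in the synopsis can be driven from a script like this (the archive and package names are examples only; assumes you are allowed to retry these builds):

# Sketch: retry failed amd64 builds of a couple of packages in a PPA.
import subprocess

subprocess.run(
    ["ubuntu-build", "--batch", "--retry", "--arch", "amd64",
     "-A", "ppa:someuser/some-ppa", "hello", "coreutils"],
    check=True,
)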
|
||||
|
||||
.PP
|
||||
\fBubuntu-build\fR uses a cookie file stored at \fI~/.lpcookie.txt\fR to authenticate
|
||||
to Launchpad.
|
||||
This cookie is created on run from the Mozilla Firefox cookie
|
||||
file at \fI~/.mozilla/*/*/cookies.sqlite\fR.
|
||||
|
||||
.SH OPERATIONS
|
||||
Listed below are the available operations for \fBubuntu-build\fR:
|
||||
.TP
|
||||
@ -43,8 +42,8 @@ operations.
|
||||
.IP
|
||||
\fB\-a\fR ARCHITECTURE, \fB\-\-arch\fR=\fIARCHITECTURE\fR
|
||||
Rebuild or rescore a specific architecture. Valid
|
||||
architectures include: amd64, sparc, powerpc, i386,
|
||||
armel, ia64, lpia, hppa.
|
||||
architectures are:
|
||||
armhf, arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
|
||||
.TP
|
||||
Batch processing:
|
||||
.IP
|
||||
@ -64,14 +63,16 @@ Retry builds (give\-back).
|
||||
\fB\-\-rescore\fR=\fIPRIORITY\fR
|
||||
Rescore builds to <priority>.
|
||||
.IP
|
||||
\fB\-\-arch2\fR=\fIARCHITECTURE\fR
|
||||
\fB\-\-arch\fR=\fIARCHITECTURE\fR
|
||||
Affect only 'architecture' (can be used several
|
||||
times). Valid architectures are: amd64, sparc,
|
||||
powerpc, i386, armel, ia64, lpia, hppa.
|
||||
times). Valid architectures are:
|
||||
arm64, amd64, i386, powerpc, ppc64el, riscv64, s390x.
|
||||
.IP
|
||||
\fB\-A=\fIARCHIVE\fR
|
||||
Act on the named archive (ppa) instead of on the main Ubuntu archive.
|
||||
|
||||
.SH AUTHORS
|
||||
\fBubuntu-build\fR was written by Martin Pitt <martin.pitt@canonical.com>, and
|
||||
this manual page was written by Jonathan Patrick Davies <jpds@ubuntu.com>.
|
||||
.PP
|
||||
Both are released under the terms of the GNU General Public License, version 3
|
||||
or (at your option) any later version.
|
||||
Both are released under the terms of the GNU General Public License, version 3.
|
||||
|
doc/ubuntu-upload-permission.1 (new file, 60 lines)
@ -0,0 +1,60 @@
|
||||
.\" Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
.\"
|
||||
.\" Permission to use, copy, modify, and/or distribute this software for any
|
||||
.\" purpose with or without fee is hereby granted, provided that the above
|
||||
.\" copyright notice and this permission notice appear in all copies.
|
||||
.\"
|
||||
.\" THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
.\" REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
.\" AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
.\" INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
.\" LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
.\" OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
.\" PERFORMANCE OF THIS SOFTWARE.
|
||||
.TH ubuntu\-upload\-permission 1 "November 2011" ubuntu\-dev\-tools
|
||||
|
||||
.SH NAME
|
||||
ubuntu\-upload\-permission \- Query upload rights and (optionally) list
|
||||
the people and teams with upload rights for a package
|
||||
|
||||
.SH SYNOPSIS
|
||||
.B ubuntu\-upload\-permission \fR[\fIoptions\fR] \fIpackage
|
||||
|
||||
.SH DESCRIPTION
|
||||
\fBubuntu\-upload\-permission\fR checks if the user has upload
|
||||
permissions for \fIpackage\fR.
|
||||
If the \fB\-\-list\-uploaders\fR option is provided, all the people and
|
||||
teams that do have upload rights for \fIpackage\fR will be listed.
|
||||
|
||||
.SH OPTIONS
|
||||
.TP
|
||||
\fB\-r\fR \fIRELEASE\fR, \fB\-\-release\fR=\fIRELEASE\fR
|
||||
Query permissions in \fIRELEASE\fR.
|
||||
Default: current development release.
|
||||
.TP
|
||||
\fB\-a\fR, \fB\-\-list\-uploaders\fR
|
||||
List all the people and teams who have upload rights for \fIpackage\fR.
|
||||
.TP
|
||||
\fB\-t\fR, \fB\-\-list\-team\-members\fR
|
||||
List all the members of every team with rights. (Implies
|
||||
\fB\-\-list\-uploaders\fR)
|
||||
.TP
|
||||
\fB\-h\fR, \fB\-\-help\fR
|
||||
Display a help message and exit
|
||||
|
||||
.SH EXIT STATUS
|
||||
.TP
|
||||
.B 0
|
||||
You have the necessary upload rights.
|
||||
.TP
|
||||
.B 1
|
||||
You don't have the necessary upload rights.
|
||||
.TP
|
||||
.B 2
|
||||
There was an error.
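The exit codes make the tool easy to drive from other scripts, for example (release and package names are examples only):

# Sketch: act on ubuntu-upload-permission's documented exit status.
import subprocess

result = subprocess.run(["ubuntu-upload-permission", "-r", "noble", "hello"])
if result.returncode == 0:
    print("You have the necessary upload rights.")
elif result.returncode == 1:
    print("You need a sponsor for this upload.")
else:
    print("The permission query failed.")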
|
||||
|
||||
.SH AUTHORS
|
||||
\fBubuntu\-upload\-permission\fR and this manpage were written by
|
||||
Stefano Rivera <stefanor@ubuntu.com>.
|
||||
.PP
|
||||
Both are released under the terms of the ISC License.
|
enforced-editing-wrapper (new executable file, 62 lines)
@ -0,0 +1,62 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
#
|
||||
# Permission to use, copy, modify, and/or distribute this software for any
|
||||
# purpose with or without fee is hereby granted, provided that the above
|
||||
# copyright notice and this permission notice appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
|
||||
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
|
||||
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
|
||||
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
|
||||
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
#
|
||||
#
|
||||
# Wraps sensible-editor in checks for remaining boilerplate.
|
||||
# Configured through environment variables:
|
||||
# UDT_EDIT_WRAPPER_EDITOR: The user's usual $EDITOR
|
||||
# UDT_EDIT_WRAPPER_VISUAL: The user's usual $VISUAL
|
||||
# UDT_EDIT_WRAPPER_TEMPLATE_RE: An extra boilerplate-detecting regex.
|
||||
# UDT_EDIT_WRAPPER_FILE_DESCRIPTION: The type of file being edited.
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
import argparse
|
||||
import os
|
||||
import re
|
||||
|
||||
from ubuntutools.question import EditFile
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(usage="%(prog)s [options] filename")
|
||||
parser.add_argument("filename", help=argparse.SUPPRESS)
|
||||
args = parser.parse_args()
|
||||
if not os.path.isfile(args.filename):
|
||||
parser.error(f"File {args.filename} does not exist")
|
||||
|
||||
if "UDT_EDIT_WRAPPER_EDITOR" in os.environ:
|
||||
os.environ["EDITOR"] = os.environ["UDT_EDIT_WRAPPER_EDITOR"]
|
||||
else:
|
||||
del os.environ["EDITOR"]
|
||||
|
||||
if "UDT_EDIT_WRAPPER_VISUAL" in os.environ:
|
||||
os.environ["VISUAL"] = os.environ["UDT_EDIT_WRAPPER_VISUAL"]
|
||||
else:
|
||||
del os.environ["VISUAL"]
|
||||
|
||||
placeholders = []
|
||||
if "UDT_EDIT_WRAPPER_TEMPLATE_RE" in os.environ:
|
||||
placeholders.append(re.compile(os.environ["UDT_EDIT_WRAPPER_TEMPLATE_RE"]))
|
||||
|
||||
description = os.environ.get("UDT_EDIT_WRAPPER_FILE_DESCRIPTION", "file")
|
||||
|
||||
EditFile(args.filename, description, placeholders).edit()
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
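A minimal sketch of how a caller might hand the user's editor preferences to this wrapper (the file name and fallback editor are examples; which tool sets these variables in practice is not shown here):

# Sketch: run the wrapper on a file, forwarding the user's usual editor.
import os
import subprocess

env = dict(os.environ)
env["UDT_EDIT_WRAPPER_EDITOR"] = env.get("EDITOR", "nano")
env["UDT_EDIT_WRAPPER_FILE_DESCRIPTION"] = "changelog"
subprocess.run(["enforced-editing-wrapper", "debian/changelog"], env=env, check=True)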
|
grep-merges (83 lines changed)
@ -1,4 +1,4 @@
|
||||
#! /usr/bin/python
|
||||
#! /usr/bin/python3
|
||||
#
|
||||
# grep-merges - search for pending merges from Debian
|
||||
#
|
||||
@ -19,35 +19,70 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import sys
|
||||
import urllib2
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
import argparse
|
||||
import json
|
||||
import sys
|
||||
|
||||
from httplib2 import Http, HttpLib2Error
|
||||
|
||||
import ubuntutools.misc
|
||||
from ubuntutools import getLogger
|
||||
|
||||
Logger = getLogger()
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(
|
||||
usage="%(prog)s [options] [string]",
|
||||
description="List pending merges from Debian matching string",
|
||||
)
|
||||
parser.add_argument("string", nargs="?", help=argparse.SUPPRESS)
|
||||
args = parser.parse_args()
|
||||
|
||||
ubuntutools.misc.require_utf8()
|
||||
if len(sys.argv) > 1:
|
||||
match = sys.argv[1]
|
||||
else:
|
||||
match = None
|
||||
|
||||
for component in ('main', 'main-manual',
|
||||
'restricted', 'restricted-manual',
|
||||
'universe', 'universe-manual',
|
||||
'multiverse', 'multiverse-manual'):
|
||||
page = urllib2.urlopen('http://merges.ubuntu.com/%s.json' % component)
|
||||
for merge in json.load(page):
|
||||
package = merge['source_package']
|
||||
author, uploader = '', ''
|
||||
if 'user' in merge:
|
||||
author = merge['user']
|
||||
if 'uploader' in merge:
|
||||
uploader = '(%s)' % merge['uploader']
|
||||
pretty_uploader = ' '.join((author, uploader)).strip()
|
||||
if (match is None or
|
||||
match in package or match in author or match in uploader):
|
||||
print '%s\t%s' % (package, pretty_uploader)
|
||||
for component in (
|
||||
"main",
|
||||
"main-manual",
|
||||
"restricted",
|
||||
"restricted-manual",
|
||||
"universe",
|
||||
"universe-manual",
|
||||
"multiverse",
|
||||
"multiverse-manual",
|
||||
):
|
||||
url = f"https://merges.ubuntu.com/{component}.json"
|
||||
try:
|
||||
headers, page = Http().request(url)
|
||||
except HttpLib2Error as e:
|
||||
Logger.exception(e)
|
||||
sys.exit(1)
|
||||
if headers.status != 200:
|
||||
Logger.error("%s: %s %s", url, headers.status, headers.reason)
|
||||
sys.exit(1)
|
||||
|
||||
if __name__ == '__main__':
|
||||
for merge in json.loads(page):
|
||||
package = merge["source_package"]
|
||||
author, uploader = "", ""
|
||||
if merge.get("user"):
|
||||
author = merge["user"]
|
||||
if merge.get("uploader"):
|
||||
uploader = f"({merge['uploader']})"
|
||||
teams = merge.get("teams", [])
|
||||
|
||||
pretty_uploader = f"{author} {uploader}"
|
||||
if (
|
||||
args.string is None
|
||||
or args.string in package
|
||||
or args.string in author
|
||||
or args.string in uploader
|
||||
or args.string in teams
|
||||
):
|
||||
Logger.info("%s\t%s", package, pretty_uploader)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
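For reference, a merges.ubuntu.com record as consumed above carries at least the fields used in the loop; a made-up example (field values are invented):

# Sketch: the fields grep-merges reads from each merge record.
example_record = {
    "source_package": "hello",
    "user": "Some Developer",
    "uploader": "Another Person",
    "teams": ["desktop-packages"],
}
author = example_record.get("user") or ""
uploader = f"({example_record['uploader']})" if example_record.get("uploader") else ""
print(f"{example_record['source_package']}\t{author} {uploader}".strip())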
|
||||
|
harvest (deleted, 51 lines)
@ -1,51 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
|
||||
# Copyright (C) 2011 Canonical Ltd., Daniel Holbach
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; version 3.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# See file /usr/share/common-licenses/GPL-3 for more details.
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
#
|
||||
# harvest - grabs information about development opportunities from
|
||||
# harvest.ubuntu.com
|
||||
#
|
||||
#
|
||||
# Daniel Holbach
|
||||
# (c) 2011 Canonical
|
||||
|
||||
from optparse import OptionParser
|
||||
import sys
|
||||
|
||||
from devscripts.logger import Logger
|
||||
|
||||
from ubuntutools.harvest import Harvest
|
||||
|
||||
def main():
|
||||
usage = "usage: %prog source-package-name"
|
||||
opt_parser = OptionParser(usage)
|
||||
args = opt_parser.parse_args()[1]
|
||||
if len(args) != 1:
|
||||
opt_parser.print_help()
|
||||
sys.exit(1)
|
||||
pkg = args[0].strip()
|
||||
|
||||
print Harvest(pkg).report()
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
Logger.error("Aborted.")
|
||||
sys.exit(1)
|
hugdaylist (deleted, 136 lines)
@ -1,136 +0,0 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007 Canonical Ltd., Daniel Holbach
|
||||
# Copyright (C) 2008 Jonathan Patrick Davies <jpds@ubuntu.com>
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; version 3.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# See file /usr/share/common-licenses/GPL-3 for more details.
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
#
|
||||
# hugdaylist - produces MoinMoin wiki formatted tables based on a Launchpad bug
|
||||
# list.
|
||||
#
|
||||
# hugdaylist <url>
|
||||
# - produces lists like https://wiki.ubuntu.com/UbuntuBugDay/20070912?action=raw
|
||||
#
|
||||
# hugdaylist -n <howmany> <url>
|
||||
# - will only list <howmany> URLs.
|
||||
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
from ubuntutools.lp.libsupport import translate_web_api
|
||||
|
||||
def check_args():
|
||||
howmany = -1
|
||||
url = ""
|
||||
|
||||
# Our usage options.
|
||||
usage = "usage: %prog [-n <number>] launchpad-buglist-url"
|
||||
opt_parser = OptionParser(usage)
|
||||
|
||||
# Options - namely just the number of bugs to output.
|
||||
opt_parser.add_option("-n", "--number", type="int",
|
||||
dest="number", help="Number of entries to output.")
|
||||
|
||||
# Parse arguments.
|
||||
(options, args) = opt_parser.parse_args()
|
||||
|
||||
# Check if we want a number other than the default.
|
||||
howmany = options.number
|
||||
|
||||
# Check that we have an URL.
|
||||
if not args:
|
||||
print >> sys.stderr, "An URL pointing to a Launchpad bug list is " \
|
||||
"required."
|
||||
opt_parser.print_help()
|
||||
sys.exit(1)
|
||||
else:
|
||||
url = args[0]
|
||||
|
||||
return (howmany, url)
|
||||
|
||||
def filter_unsolved(task):
|
||||
# TODO: don't use this filter here, only check status and assignee of
|
||||
# the given task
|
||||
# Filter out special types of bugs:
|
||||
# - https://wiki.ubuntu.com/Bugs/HowToTriage#Special%20types%20of%20bugs
|
||||
# this is expensive, parse name out of self_link instead?
|
||||
subscriptions = set(s.person.name for s in task.bug.subscriptions)
|
||||
if (task.status != "Fix Committed" and
|
||||
(not task.assignee or task.assignee.name in ['motu','desktop-bugs']) and
|
||||
'ubuntu-sponsors' not in subscriptions and
|
||||
'ubuntu-archive' not in subscriptions):
|
||||
return True
|
||||
return False
|
||||
|
||||
def main():
|
||||
(howmany, url) = check_args()
|
||||
if len(url.split("?", 1)) == 2:
|
||||
# search options not supported, because there is no mapping web ui
|
||||
# options <-> API options
|
||||
print >> sys.stderr, "Options in url are not supported, url: %s" % url
|
||||
sys.exit(1)
|
||||
|
||||
launchpad = None
|
||||
try:
|
||||
launchpad = Launchpad.login_with("ubuntu-dev-tools", 'production')
|
||||
except IOError, error:
|
||||
print error
|
||||
sys.exit(1)
|
||||
|
||||
api_url = translate_web_api(url, launchpad)
|
||||
try:
|
||||
product = launchpad.load(api_url)
|
||||
except Exception, error:
|
||||
response = getattr(error, "response", {})
|
||||
if response.get("status", None) == "404":
|
||||
print >> sys.stderr, ("The URL at '%s' does not appear to be a "
|
||||
"valid url to a product") % url
|
||||
sys.exit(1)
|
||||
else:
|
||||
raise
|
||||
|
||||
bug_list = [b for b in product.searchTasks() if filter_unsolved(b)]
|
||||
|
||||
if not bug_list:
|
||||
print "Bug list of %s is empty." % url
|
||||
sys.exit(0)
|
||||
if howmany == -1:
|
||||
howmany = len(bug_list)
|
||||
|
||||
print """
|
||||
## ||<rowbgcolor="#CCFFCC"> This task is done || somebody || ||
|
||||
## ||<rowbgcolor="#FFFFCC"> This task is assigned || somebody || <status> ||
|
||||
## ||<rowbgcolor="#FFEBBB"> This task isn't || ... || ||
|
||||
## ||<rowbgcolor="#FFCCCC"> This task is blocked on something || somebody || <explanation> ||
|
||||
|
||||
|| Bug || Subject || Triager ||"""
|
||||
|
||||
for i in list(bug_list)[:howmany]:
|
||||
bug = i.bug
|
||||
print '||<rowbgcolor="#FFEBBB"> [%s %s] || %s || ||' % \
|
||||
(bug.web_link, bug.id, bug.title)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
print >> sys.stderr, "Aborted."
|
||||
sys.exit(1)
|
@ -1,5 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
# -*- coding: UTF-8 -*-
|
||||
#!/usr/bin/python3
|
||||
|
||||
# Copyright © 2009 James Westby <james.westby@ubuntu.com>,
|
||||
# 2010, 2011 Stefano Rivera <stefanor@ubuntu.com>
|
||||
@ -22,66 +21,66 @@
|
||||
#
|
||||
# ##################################################################
|
||||
|
||||
from optparse import OptionParser, SUPPRESS_HELP
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
import argparse
|
||||
import logging
|
||||
import re
|
||||
import sys
|
||||
import webbrowser
|
||||
from collections.abc import Iterable
|
||||
from email.message import EmailMessage
|
||||
|
||||
from devscripts.logger import Logger
|
||||
try:
|
||||
import SOAPpy
|
||||
except ImportError:
|
||||
Logger.error("Please install 'python-soappy' in order to use this utility.")
|
||||
sys.exit(1)
|
||||
|
||||
import debianbts
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.config import UDTConfig
|
||||
|
||||
def main():
|
||||
Logger = getLogger()
|
||||
ATTACHMENT_MAX_SIZE = 2000
|
||||
|
||||
|
||||
def parse_args() -> argparse.Namespace:
|
||||
parser = argparse.ArgumentParser()
|
||||
parser.add_argument(
|
||||
"-b",
|
||||
"--browserless",
|
||||
action="store_true",
|
||||
help="Don't open the bug in the browser at the end",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l",
|
||||
"--lpinstance",
|
||||
metavar="INSTANCE",
|
||||
help="LP instance to connect to (default: production)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-v", "--verbose", action="store_true", help="Print info about the bug being imported"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
"--dry-run",
|
||||
action="store_true",
|
||||
help="Don't actually open a bug (also sets verbose)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-p", "--package", help="Launchpad package to file bug against (default: Same as Debian)"
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-conf", action="store_true", help="Don't read config files or environment variables."
|
||||
)
|
||||
parser.add_argument("bugs", nargs="+", help="Bug number(s) or URL(s)")
|
||||
return parser.parse_args()
|
||||
|
||||
|
||||
def get_bug_numbers(bug_list: Iterable[str]) -> list[int]:
|
||||
bug_re = re.compile(r"bug=(\d+)")
|
||||
|
||||
url = 'http://bugs.debian.org/cgi-bin/soap.cgi'
|
||||
namespace = 'Debbugs/SOAP'
|
||||
debbugs = SOAPpy.SOAPProxy(url, namespace)
|
||||
|
||||
# debug
|
||||
#debbugs.config.dumpSOAPOut = 1
|
||||
#debbugs.config.dumpSOAPIn = 1
|
||||
|
||||
parser = OptionParser(usage="%prog [option] bug ...")
|
||||
parser.add_option("-b", "--browserless",
|
||||
help="Don't open the bug in the browser at the end",
|
||||
dest="browserless", action="store_true")
|
||||
parser.add_option("-l", "--lpinstance", metavar="INSTANCE",
|
||||
help="Launchpad instance to connect to "
|
||||
"(default: production)",
|
||||
dest="lpinstance", default=None)
|
||||
parser.add_option("-n", "--dry-run",
|
||||
help=SUPPRESS_HELP,
|
||||
dest="lpinstance", action="store_const", const="staging")
|
||||
parser.add_option("-p", "--package", metavar="PACKAGE",
|
||||
help="Launchpad package to file bug against "
|
||||
"(default: Same as Debian)",
|
||||
dest="package", default=None)
|
||||
parser.add_option("--no-conf", dest="no_conf", default=False,
|
||||
help="Don't read config files or environment variables.",
|
||||
action="store_true")
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
config = UDTConfig(options.no_conf)
|
||||
if options.lpinstance is None:
|
||||
options.lpinstance = config.get_value("LPINSTANCE")
|
||||
|
||||
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
|
||||
|
||||
debian = launchpad.distributions['debian']
|
||||
ubuntu = launchpad.distributions['ubuntu']
|
||||
lp_debbugs = launchpad.bug_trackers.getByName(name='debbugs')
|
||||
|
||||
bug_nums = []
|
||||
|
||||
for bug_num in args:
|
||||
for bug_num in bug_list:
|
||||
if bug_num.startswith("http"):
|
||||
# bug URL
|
||||
match = bug_re.search(bug_num)
|
||||
@ -90,48 +89,168 @@ def main():
|
||||
sys.exit(1)
|
||||
bug_num = match.groups()[0]
|
||||
bug_num = bug_num.lstrip("#")
|
||||
bug_num = int(bug_num)
|
||||
bug_nums.append(bug_num)
|
||||
bug_nums.append(int(bug_num))
|
||||
|
||||
bugs = debbugs.get_status(*bug_nums)
|
||||
return bug_nums
|
||||
|
||||
if len(bug_nums) > 1:
|
||||
bugs = bugs[0]
|
||||
|
||||
def walk_multipart_message(message: EmailMessage) -> tuple[str, list[tuple[int, EmailMessage]]]:
|
||||
summary = ""
|
||||
attachments = []
|
||||
i = 1
|
||||
for part in message.walk():
|
||||
content_type = part.get_content_type()
|
||||
|
||||
if content_type.startswith("multipart/"):
|
||||
# we're already iterating on multipart items
|
||||
# let's just skip the multipart extra metadata
|
||||
continue
|
||||
if content_type == "application/pgp-signature":
|
||||
# we're not interested in importing pgp signatures
|
||||
continue
|
||||
|
||||
if part.is_attachment():
|
||||
attachments.append((i, part))
|
||||
elif content_type.startswith("image/"):
|
||||
# images here are not attachment, they are inline, but Launchpad can't handle that,
|
||||
# so let's add them as attachments
|
||||
summary += f"Message part #{i}\n"
|
||||
summary += f"[inline image '{part.get_filename()}']\n\n"
|
||||
attachments.append((i, part))
|
||||
elif content_type.startswith("text/html"):
|
||||
summary += f"Message part #{i}\n"
|
||||
summary += "[inline html]\n\n"
|
||||
attachments.append((i, part))
|
||||
elif content_type == "text/plain":
|
||||
summary += f"Message part #{i}\n"
|
||||
summary += part.get_content() + "\n"
|
||||
else:
|
||||
raise RuntimeError(
|
||||
f"""Unknown message part
|
||||
Your Debian bug is too weird to be imported in Launchpad, sorry.
|
||||
You can fix that by patching this script in ubuntu-dev-tools.
|
||||
Faulty message part:
|
||||
{part}"""
|
||||
)
|
||||
i += 1
|
||||
|
||||
return summary, attachments
|
||||
|
||||
|
||||
def process_bugs(
|
||||
bugs: Iterable[debianbts.Bugreport],
|
||||
launchpad: Launchpad,
|
||||
package: str,
|
||||
dry_run: bool = True,
|
||||
browserless: bool = False,
|
||||
) -> bool:
|
||||
debian = launchpad.distributions["debian"]
|
||||
ubuntu = launchpad.distributions["ubuntu"]
|
||||
lp_debbugs = launchpad.bug_trackers.getByName(name="debbugs")
|
||||
|
||||
err = False
|
||||
for bug in bugs:
|
||||
ubupackage = bug.source
|
||||
if package:
|
||||
ubupackage = package
|
||||
bug_num = bug.bug_num
|
||||
subject = bug.subject
|
||||
log = debianbts.get_bug_log(bug_num)
|
||||
message = log[0]["message"]
|
||||
assert isinstance(message, EmailMessage)
|
||||
attachments: list[tuple[int, EmailMessage]] = []
|
||||
if message.is_multipart():
|
||||
summary, attachments = walk_multipart_message(message)
|
||||
else:
|
||||
summary = str(message.get_payload())
|
||||
|
||||
target = ubuntu.getSourcePackage(name=ubupackage)
|
||||
if target is None:
|
||||
Logger.error(
|
||||
"Source package '%s' is not in Ubuntu. Please specify "
|
||||
"the destination source package with --package",
|
||||
ubupackage,
|
||||
)
|
||||
err = True
|
||||
continue
|
||||
|
||||
description = f"Imported from Debian bug http://bugs.debian.org/{bug_num}:\n\n{summary}"
|
||||
# LP limits descriptions to 50K chars
|
||||
description = (description[:49994] + " [...]") if len(description) > 50000 else description
|
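# 49994 characters plus the 6-character " [...]" suffix lands exactly on the 50000-character cap.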
||||
|
||||
Logger.debug("Target: %s", target)
|
||||
Logger.debug("Subject: %s", subject)
|
||||
Logger.debug("Description: ")
|
||||
Logger.debug(description)
|
||||
for i, attachment in attachments:
|
||||
Logger.debug("Attachment #%s (%s)", i, attachment.get_filename() or "inline")
|
||||
Logger.debug("Content:")
|
||||
if attachment.get_content_type() == "text/plain":
|
||||
content = attachment.get_content()
|
||||
if len(content) > ATTACHMENT_MAX_SIZE:
|
||||
content = (
|
||||
content[:ATTACHMENT_MAX_SIZE]
|
||||
+ f" [attachment cropped after {ATTACHMENT_MAX_SIZE} characters...]"
|
||||
)
|
||||
Logger.debug(content)
|
||||
else:
|
||||
Logger.debug("[data]")
|
||||
|
||||
if dry_run:
|
||||
Logger.info("Dry-Run: not creating Ubuntu bug.")
|
||||
continue
|
||||
|
||||
u_bug = launchpad.bugs.createBug(target=target, title=subject, description=description)
|
||||
for i, attachment in attachments:
|
||||
name = f"#{i}-{attachment.get_filename() or "inline"}"
|
||||
content = attachment.get_content()
|
||||
if isinstance(content, str):
|
||||
# Launchpad only wants bytes
|
||||
content = content.encode()
|
||||
u_bug.addAttachment(
|
||||
filename=name,
|
||||
data=content,
|
||||
comment=f"Imported from Debian bug http://bugs.debian.org/{bug_num}",
|
||||
)
|
||||
d_sp = debian.getSourcePackage(name=package)
|
||||
if d_sp is None and package:
|
||||
d_sp = debian.getSourcePackage(name=package)
|
||||
d_task = u_bug.addTask(target=d_sp)
|
||||
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
|
||||
d_task.bug_watch = d_watch
|
||||
d_task.lp_save()
|
||||
Logger.info("Opened %s", u_bug.web_link)
|
||||
if not browserless:
|
||||
webbrowser.open(u_bug.web_link)
|
||||
|
||||
return err
|
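For context on the calls used above, this is a hedged sketch of the python-debianbts entry points the script relies on (requires the debianbts module and network access; the bug number is an arbitrary example):

#!/usr/bin/python3
# Illustration only; mirrors the calls made by process_bugs() and main().
import debianbts

bug_nums = [123456]                        # arbitrary example bug number
for report in debianbts.get_status(bug_nums):
    print(report.bug_num, report.source, report.subject)
    log = debianbts.get_bug_log(report.bug_num)
    first = log[0]["message"]              # email message of the initial report
    print(first.get_content_type())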
||||
|
||||
|
||||
def main() -> None:
|
||||
options = parse_args()
|
||||
|
||||
config = UDTConfig(options.no_conf)
|
||||
if options.lpinstance is None:
|
||||
options.lpinstance = config.get_value("LPINSTANCE")
|
||||
|
||||
if options.dry_run:
|
||||
launchpad = Launchpad.login_anonymously("ubuntu-dev-tools")
|
||||
options.verbose = True
|
||||
else:
|
||||
launchpad = Launchpad.login_with("ubuntu-dev-tools", options.lpinstance)
|
||||
|
||||
if options.verbose:
|
||||
Logger.setLevel(logging.DEBUG)
|
||||
|
||||
bugs = debianbts.get_status(get_bug_numbers(options.bugs))
|
||||
|
||||
if not bugs:
|
||||
Logger.error("Cannot find any of the listed bugs")
|
||||
sys.exit(1)
|
||||
|
||||
for bug in bugs:
|
||||
bug = bug.value
|
||||
ubupackage = package = bug.source
|
||||
if options.package:
|
||||
ubupackage = options.package
|
||||
bug_num = bug.bug_num
|
||||
subject = bug.subject
|
||||
log = debbugs.get_bug_log(bug_num)
|
||||
summary = log[0][0]
|
||||
target = ubuntu.getSourcePackage(name=ubupackage)
|
||||
if target is None:
|
||||
Logger.error("Source package '%s' is not in Ubuntu. Please specify "
|
||||
"the destination source package with --package",
|
||||
ubupackage)
|
||||
sys.exit(1)
|
||||
if process_bugs(bugs, launchpad, options.package, options.dry_run, options.browserless):
|
||||
sys.exit(1)
|
||||
|
||||
u_bug = launchpad.bugs.createBug(target=target, title=subject,
|
||||
description='Imported from Debian bug '
|
||||
'http://bugs.debian.org/%d:\n\n%s'
|
||||
% (bug_num, summary))
|
||||
d_sp = debian.getSourcePackage(name=package)
|
||||
if d_sp is None and options.package:
|
||||
d_sp = debian.getSourcePackage(name=options.package)
|
||||
d_task = u_bug.addTask(target=d_sp)
|
||||
d_watch = u_bug.addWatch(remote_bug=bug_num, bug_tracker=lp_debbugs)
|
||||
d_task.bug_watch = d_watch
|
||||
d_task.lp_save()
|
||||
Logger.normal("Opened %s", u_bug.web_link)
|
||||
if not options.browserless:
|
||||
webbrowser.open(u_bug.web_link)
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
105
lp-bitesize
Executable file
@ -0,0 +1,105 @@
|
||||
#!/usr/bin/python3
|
||||
"""Add 'bitesize' tag to bugs and add a comment."""
|
||||
|
||||
# Copyright (c) 2011 Canonical Ltd.
|
||||
#
|
||||
# bitesize is free software; you can redistribute it and/or modify it
|
||||
# under the terms of the GNU General Public License as published by the
|
||||
# Free Software Foundation; either version 3, or (at your option) any
|
||||
# later version.
|
||||
#
|
||||
# bitesize is distributed in the hope that it will be useful, but
|
||||
# WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
#
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with bitesize; see the file COPYING. If not, write to the Free
|
||||
# Software Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
|
||||
# 02110-1301, USA.
|
||||
#
|
||||
# Authors:
|
||||
# Daniel Holbach <daniel.holbach@canonical.com>
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
from launchpadlib.errors import HTTPError
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.config import UDTConfig
|
||||
|
||||
Logger = getLogger()
|
||||
|
||||
|
||||
def error_out(msg, *args):
|
||||
Logger.error(msg, *args)
|
||||
sys.exit(1)
|
||||
|
||||
|
||||
def save_entry(entry):
|
||||
try:
|
||||
entry.lp_save()
|
||||
except HTTPError as error:
|
||||
error_out("%s", error.content)
|
||||
|
||||
|
||||
def tag_bug(bug):
|
||||
bug.tags = bug.tags + ["bitesize"] # LP: #254901 workaround
|
||||
save_entry(bug)
|
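The reassignment above, rather than bug.tags.append(), is deliberate: launchpadlib only notices the change when the attribute is assigned a new value, so an in-place append would be silently dropped by lp_save() (the LP: #254901 workaround noted in the comment). A hedged restatement of the pattern, assuming an authenticated bug object:

bug.tags = bug.tags + ["bitesize"]  # build a new list and assign it
bug.lp_save()                       # the assignment is what marks the field dirty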
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(usage="%(prog)s [options] <bug number>")
|
||||
parser.add_argument(
|
||||
"-l",
|
||||
"--lpinstance",
|
||||
metavar="INSTANCE",
|
||||
help="Launchpad instance to connect to (default: production)",
|
||||
dest="lpinstance",
|
||||
default=None,
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-conf",
|
||||
help="Don't read config files or environment variables.",
|
||||
dest="no_conf",
|
||||
default=False,
|
||||
action="store_true",
|
||||
)
|
||||
parser.add_argument("bug_number", help=argparse.SUPPRESS)
|
||||
args = parser.parse_args()
|
||||
config = UDTConfig(args.no_conf)
|
||||
if args.lpinstance is None:
|
||||
args.lpinstance = config.get_value("LPINSTANCE")
|
||||
|
||||
launchpad = Launchpad.login_with("ubuntu-dev-tools", args.lpinstance)
|
||||
if launchpad is None:
|
||||
error_out("Couldn't authenticate to Launchpad.")
|
||||
|
||||
# check that the new main bug isn't a duplicate
|
||||
try:
|
||||
bug = launchpad.bugs[args.bug_number]
|
||||
except HTTPError as error:
|
||||
if error.response.status == 401:
|
||||
error_out(
|
||||
"Don't have enough permissions to access bug %s. %s",
|
||||
args.bug_number,
|
||||
error.content,
|
||||
)
|
||||
else:
|
||||
raise
|
||||
if "bitesize" in bug.tags:
|
||||
error_out("Bug is already marked as 'bitesize'.")
|
||||
bug.newMessage(
|
||||
content="I'm marking this bug as 'bitesize' as it looks "
|
||||
"like an issue that is easy to fix and suitable "
|
||||
"for newcomers in Ubuntu development. If you need "
|
||||
"any help with fixing it, talk to me about it."
|
||||
)
|
||||
bug.subscribe(person=launchpad.me)
|
||||
tag_bug(launchpad.bugs[bug.id]) # fresh bug object, LP: #336866 workaround
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
258
merge-changelog
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
# Copyright © 2008 Canonical Ltd.
|
||||
# Author: Scott James Remnant <scott at ubuntu.com>.
|
||||
@ -18,248 +18,67 @@
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import re
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
import sys
|
||||
|
||||
from debian.changelog import Changelog
|
||||
|
||||
from ubuntutools import getLogger
|
||||
|
||||
Logger = getLogger()
|
||||
|
||||
|
||||
def usage(exit_code=1):
|
||||
print '''Usage: merge-changelog <left changelog> <right changelog>
|
||||
Logger.info(
|
||||
"""Usage: merge-changelog <left changelog> <right changelog>
|
||||
|
||||
merge-changelog takes two changelogs that once shared a common source,
|
||||
merges them back together, and prints the merged result to stdout. This
|
||||
is useful if you need to manually merge a ubuntu package with a new
|
||||
Debian release of the package.
|
||||
'''
|
||||
"""
|
||||
)
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
########################################################################
|
||||
# Changelog Management
|
||||
########################################################################
|
||||
|
||||
# Regular expression for top of debian/changelog
|
||||
CL_RE = re.compile(r'^(\w[-+0-9a-z.]*) \(([^\(\) \t]+)\)((\s+[-0-9a-z]+)+)\;',
|
||||
re.IGNORECASE)
|
||||
|
||||
def merge_changelog(left_changelog, right_changelog):
|
||||
"""Merge a changelog file."""
|
||||
|
||||
left_cl = read_changelog(left_changelog)
|
||||
right_cl = read_changelog(right_changelog)
|
||||
with open(left_changelog, encoding="utf-8") as f:
|
||||
left_cl = Changelog(f)
|
||||
with open(right_changelog, encoding="utf-8") as f:
|
||||
right_cl = Changelog(f)
|
||||
|
||||
for right_ver, right_text in right_cl:
|
||||
while len(left_cl) and left_cl[0][0] > right_ver:
|
||||
(left_ver, left_text) = left_cl.pop(0)
|
||||
print left_text
|
||||
left_versions = set(left_cl.versions)
|
||||
right_versions = set(right_cl.versions)
|
||||
left_blocks = iter(left_cl)
|
||||
right_blocks = iter(right_cl)
|
||||
|
||||
while len(left_cl) and left_cl[0][0] == right_ver:
|
||||
(left_ver, left_text) = left_cl.pop(0)
|
||||
clist = sorted(left_versions | right_versions, reverse=True)
|
||||
remaining = len(clist)
|
||||
for version in clist:
|
||||
remaining -= 1
|
||||
if version in left_versions:
|
||||
block = next(left_blocks)
|
||||
if version in right_versions:
|
||||
next(right_blocks)
|
||||
else:
|
||||
block = next(right_blocks)
|
||||
|
||||
print right_text
|
||||
assert block.version == version
|
||||
|
||||
for _, left_text in left_cl:
|
||||
print left_text
|
||||
|
||||
return False
|
||||
|
||||
def read_changelog(filename):
|
||||
"""Return a parsed changelog file."""
|
||||
entries = []
|
||||
|
||||
changelog_file = open(filename)
|
||||
try:
|
||||
(ver, text) = (None, "")
|
||||
for line in changelog_file:
|
||||
match = CL_RE.search(line)
|
||||
if match:
|
||||
try:
|
||||
ver = Version(match.group(2))
|
||||
except ValueError:
|
||||
ver = None
|
||||
|
||||
text += line
|
||||
elif line.startswith(" -- "):
|
||||
if ver is None:
|
||||
ver = Version("0")
|
||||
|
||||
text += line
|
||||
entries.append((ver, text))
|
||||
(ver, text) = (None, "")
|
||||
elif len(line.strip()) or ver is not None:
|
||||
text += line
|
||||
finally:
|
||||
changelog_file.close()
|
||||
|
||||
if len(text):
|
||||
entries.append((ver, text))
|
||||
|
||||
return entries
|
||||
|
||||
########################################################################
|
||||
# Version parsing code
|
||||
########################################################################
|
||||
# Regular expressions make validating things easy
|
||||
VALID_EPOCH = re.compile(r'^[0-9]+$')
|
||||
VALID_UPSTREAM = re.compile(r'^[A-Za-z0-9+:.~-]*$')
|
||||
VALID_REVISION = re.compile(r'^[A-Za-z0-9+.~]+$')
|
||||
|
||||
# Character comparison table for upstream and revision components
|
||||
CMP_TABLE = "~ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz+-.:"
|
||||
|
||||
|
||||
class Version(object):
|
||||
"""Debian version number.
|
||||
|
||||
This class is designed to be reasonably transparent and allow you
|
||||
to write code like:
|
||||
|
||||
| s.version >= '1.100-1'
|
||||
|
||||
The comparison will be done according to Debian rules, so '1.2' will
|
||||
compare lower than '1.100-1'.
|
||||
|
||||
Properties:
|
||||
epoch Epoch
|
||||
upstream Upstream version
|
||||
revision Debian/local revision
|
||||
"""
|
||||
|
||||
def __init__(self, ver):
|
||||
"""Parse a string or number into the three components."""
|
||||
self.epoch = 0
|
||||
self.upstream = None
|
||||
self.revision = None
|
||||
|
||||
ver = str(ver)
|
||||
if not len(ver):
|
||||
raise ValueError
|
||||
|
||||
# Epoch is component before first colon
|
||||
idx = ver.find(":")
|
||||
if idx != -1:
|
||||
self.epoch = ver[:idx]
|
||||
if not len(self.epoch):
|
||||
raise ValueError
|
||||
if not VALID_EPOCH.search(self.epoch):
|
||||
raise ValueError
|
||||
ver = ver[idx+1:]
|
||||
|
||||
# Revision is component after last hyphen
|
||||
idx = ver.rfind("-")
|
||||
if idx != -1:
|
||||
self.revision = ver[idx+1:]
|
||||
if not len(self.revision):
|
||||
raise ValueError
|
||||
if not VALID_REVISION.search(self.revision):
|
||||
raise ValueError
|
||||
ver = ver[:idx]
|
||||
|
||||
# Remaining component is upstream
|
||||
self.upstream = ver
|
||||
if not len(self.upstream):
|
||||
raise ValueError
|
||||
if not VALID_UPSTREAM.search(self.upstream):
|
||||
raise ValueError
|
||||
|
||||
self.epoch = int(self.epoch)
|
||||
|
||||
def get_without_epoch(self):
|
||||
"""Return the version without the epoch."""
|
||||
string = self.upstream
|
||||
if self.revision is not None:
|
||||
string += "-%s" % (self.revision,)
|
||||
return string
|
||||
|
||||
without_epoch = property(get_without_epoch)
|
||||
|
||||
def __str__(self):
|
||||
"""Return the class as a string for printing."""
|
||||
string = ""
|
||||
if self.epoch > 0:
|
||||
string += "%d:" % (self.epoch,)
|
||||
string += self.upstream
|
||||
if self.revision is not None:
|
||||
string += "-%s" % (self.revision,)
|
||||
return string
|
||||
|
||||
def __repr__(self):
|
||||
"""Return a debugging representation of the object."""
|
||||
return "<%s epoch: %d, upstream: %r, revision: %r>" \
|
||||
% (self.__class__.__name__, self.epoch,
|
||||
self.upstream, self.revision)
|
||||
|
||||
def __cmp__(self, other):
|
||||
"""Compare two Version classes."""
|
||||
other = Version(other)
|
||||
|
||||
result = cmp(self.epoch, other.epoch)
|
||||
if result != 0:
|
||||
return result
|
||||
|
||||
result = deb_cmp(self.upstream, other.upstream)
|
||||
if result != 0:
|
||||
return result
|
||||
|
||||
result = deb_cmp(self.revision or "", other.revision or "")
|
||||
if result != 0:
|
||||
return result
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
def strcut(string, idx, accept):
|
||||
"""Cut characters from string that are entirely in accept."""
|
||||
ret = ""
|
||||
while idx < len(string) and string[idx] in accept:
|
||||
ret += string[idx]
|
||||
idx += 1
|
||||
|
||||
return (ret, idx)
|
||||
|
||||
def deb_order(string, idx):
|
||||
"""Return the comparison order of two characters."""
|
||||
if idx >= len(string):
|
||||
return 0
|
||||
elif string[idx] == "~":
|
||||
return -1
|
||||
else:
|
||||
return CMP_TABLE.index(string[idx])
|
||||
|
||||
def deb_cmp_str(x, y):
|
||||
"""Compare two strings in a deb version."""
|
||||
idx = 0
|
||||
while (idx < len(x)) or (idx < len(y)):
|
||||
result = deb_order(x, idx) - deb_order(y, idx)
|
||||
if result < 0:
|
||||
return -1
|
||||
elif result > 0:
|
||||
return 1
|
||||
|
||||
idx += 1
|
||||
|
||||
return 0
|
||||
|
||||
def deb_cmp(x, y):
|
||||
"""Implement the string comparison outlined by Debian policy."""
|
||||
x_idx = y_idx = 0
|
||||
while x_idx < len(x) or y_idx < len(y):
|
||||
# Compare strings
|
||||
(x_str, x_idx) = strcut(x, x_idx, CMP_TABLE)
|
||||
(y_str, y_idx) = strcut(y, y_idx, CMP_TABLE)
|
||||
result = deb_cmp_str(x_str, y_str)
|
||||
if result != 0:
|
||||
return result
|
||||
|
||||
# Compare numbers
|
||||
(x_str, x_idx) = strcut(x, x_idx, "0123456789")
|
||||
(y_str, y_idx) = strcut(y, y_idx, "0123456789")
|
||||
result = cmp(int(x_str or "0"), int(y_str or "0"))
|
||||
if result != 0:
|
||||
return result
|
||||
|
||||
return 0
|
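The hand-rolled parser and comparison routines removed above are what python-debian now supplies. As a standalone illustration of the ordering rules the old docstring describes (assuming debian.debian_support from the python3-debian package; the version strings are arbitrary examples):

#!/usr/bin/python3
# Illustration only; requires python3-debian.
from debian.debian_support import Version

assert Version("1.2") < Version("1.100-1")  # digit runs compare numerically: 2 < 100
assert Version("1:0.5") > Version("2.0")    # a non-zero epoch outranks the upstream part
assert Version("1.0~rc1") < Version("1.0")  # '~' sorts before the end of the string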
||||
Logger.info("%s%s", str(block).strip(), "\n" if remaining else "")
|
||||
|
||||
|
||||
def main():
|
||||
if len(sys.argv) > 1 and sys.argv[1] in ('-h', '--help'):
|
||||
if len(sys.argv) > 1 and sys.argv[1] in ("-h", "--help"):
|
||||
usage(0)
|
||||
if len(sys.argv) != 3:
|
||||
usage(1)
|
||||
@ -270,5 +89,6 @@ def main():
|
||||
merge_changelog(left_changelog, right_changelog)
|
||||
sys.exit(0)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
547
mk-sbuild
@ -1,11 +1,13 @@
|
||||
#!/bin/bash
|
||||
#
|
||||
# Copyright 2006-2011 (C) Canonical Ltd.
|
||||
# Copyright 2006-2013 (C) Canonical Ltd.
|
||||
# Authors:
|
||||
# Kees Cook <kees@ubuntu.com>
|
||||
# Emmet Hikory <persia@ubuntu.com>
|
||||
# Scott Moser <smoser@ubuntu.com>
|
||||
# Stefano Rivera <stefanor@ubuntu.com>
|
||||
# Steve Langasek <steve.langasek@ubuntu.com>
|
||||
# Marc Deslauriers <marc.deslauriers@ubuntu.com>
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
@ -24,7 +26,7 @@
|
||||
# ##################################################################
|
||||
#
|
||||
# This script creates chroots designed to be used in a snapshot mode
|
||||
# (either with LVM or aufs) with schroot and sbuild.
|
||||
# (with LVM, btrfs, zfs, overlay, overlayfs or aufs) with schroot and sbuild.
|
||||
# Much love to "man sbuild-setup", https://wiki.ubuntu.com/PbuilderHowto,
|
||||
# and https://help.ubuntu.com/community/SbuildLVMHowto.
|
||||
#
|
||||
@ -37,29 +39,47 @@ SNAPSHOT_SIZE="4G"
|
||||
SOURCE_CHROOTS_DIR="/var/lib/schroot/chroots"
|
||||
SOURCE_CHROOTS_TGZ="/var/lib/schroot/tarballs"
|
||||
CHROOT_SNAPSHOT_DIR="/var/lib/schroot/snapshots"
|
||||
SCHROOT_PROFILE="sbuild"
|
||||
CCACHE_DIR="/var/cache/ccache-sbuild"
|
||||
CCACHE_SIZE="4G"
|
||||
|
||||
function usage()
|
||||
{
|
||||
echo "Usage: $0 [OPTIONS] Release" >&2
|
||||
echo "Usage: $0 [OPTIONS] Release"
|
||||
echo "Options:"
|
||||
echo " --arch=ARCH What architecture to select"
|
||||
echo " --name=NAME Base name for the schroot (arch is appended)"
|
||||
echo " --personality=PERSONALITY What personality to use (defaults to match --arch)"
|
||||
echo " --vg=VG use LVM snapshots, with group VG"
|
||||
echo " --zfs-dataset=DATASET use ZFS snapshots, with parent dataset DATASET"
|
||||
echo " --debug Turn on script debugging"
|
||||
echo " --skip-updates Do not include -updates pocket in sources.list"
|
||||
echo " --skip-security Do not include -security pocket in sources.list"
|
||||
echo " --skip-proposed Do not include -proposed pocket in sources.list"
|
||||
echo " --source-template=FILE Use FILE as the sources.list template"
|
||||
echo " --debootstrap-mirror=URL Use URL as the debootstrap source"
|
||||
echo " --debootstrap-include=list Comma separated list of packages to include"
|
||||
echo " --debootstrap-exclude=list Comma separated list of packages to exclude"
|
||||
echo " --debootstrap-opts=OPTS Extra options passed to debootstrap"
|
||||
echo " --debootstrap-proxy=URL Use PROXY as apt proxy"
|
||||
echo " --eatmydata Install and use eatmydata"
|
||||
echo " --debootstrap-keyring=KEYRING"
|
||||
echo " Use KEYRING to check signatures of retrieved Release files"
|
||||
echo " --debootstrap-no-check-gpg Disables checking gpg signatures of retrieved Release files"
|
||||
echo " --skip-eatmydata Don't install and use eatmydata"
|
||||
echo " --eatmydata Install and use eatmydata (default)"
|
||||
echo " --ccache Install configure and use ccache as default"
|
||||
echo " --ccache-dir=PATH Sets the CCACHE_DIR to PATH"
|
||||
echo " (can be shared between all schroots, defaults to ${CCACHE_DIR})"
|
||||
echo " --ccache-size=SIZE Sets the ccache max-size to SIZE"
|
||||
echo " (shared by each CCACHE_DIR, defaults to ${CCACHE_SIZE})"
|
||||
echo " --distro=DISTRO Install specific distro:"
|
||||
echo " 'ubuntu' or 'debian' "
|
||||
echo " (defaults to determining from release name)"
|
||||
echo " --target=ARCH Target architecture for cross-building"
|
||||
echo " --type=SCHROOT_TYPE Define the schroot type:"
|
||||
echo " 'directory'(default), 'file', or 'btrfs-snapshot'"
|
||||
echo " 'directory' (default), 'file', or 'btrfs-snapshot'."
|
||||
echo " 'lvm-snapshot' is selected via --vg"
|
||||
echo " 'zfs-snapshot' is selected via --zfs-dataset"
|
||||
echo ""
|
||||
echo "Configuration (via ~/.mk-sbuild.rc)"
|
||||
echo " LV_SIZE Size of source LVs (default ${LV_SIZE})"
|
||||
@ -68,12 +88,23 @@ function usage()
|
||||
echo " SOURCE_CHROOTS_TGZ Directory to store file source chroots"
|
||||
echo " CHROOT_SNAPSHOT_DIR Directory to mount open btrfs snaphshot chroots (default ${CHROOT_SNAPSHOT_DIR})"
|
||||
echo " SCHROOT_CONF_SUFFIX Lines to append to schroot.conf entries"
|
||||
echo " SCHROOT_PROFILE Profile to use with schroot (default ${SCHROOT_PROFILE})"
|
||||
echo " SKIP_UPDATES Enable --skip-updates"
|
||||
echo " SKIP_PROPOSED Enable --skip-proposed"
|
||||
echo " SKIP_SECURITY Enable --skip-security"
|
||||
echo " DEBOOTSTRAP_MIRROR Mirror location (same as --debootstrap-mirror)"
|
||||
echo " DEBOOTSTRAP_INCLUDE Included packages (same as --debootstrap-include)"
|
||||
echo " DEBOOTSTRAP_EXCLUDE Excluded packages (same as --debootstrap-exclude)"
|
||||
echo " DEBOOTSTRAP_OPTS Extra options passed to debootstrap (same as --debootstrap-opts)"
|
||||
echo " DEBOOTSTRAP_PROXY Apt proxy (same as --debootstrap-proxy)"
|
||||
echo " EATMYDATA Enable --eatmydata"
|
||||
echo " DEBOOTSTRAP_KEYRING GPG keyring (same as --debootstrap-keyring)"
|
||||
echo " DEBOOTSTRAP_NO_CHECK_GPG Disable GPG verification (same as --debootstrap-no-check-gpg)"
|
||||
echo " EATMYDATA Enable or disable eatmydata usage, see --eatmydata and --skip-eatmydata"
|
||||
echo " CCACHE Enable --ccache"
|
||||
echo " CCACHE_DIR Path for ccache (can be shared between all schroots, "
|
||||
echo " same as --ccache-dir, default ${CCACHE_DIR})"
|
||||
echo " CCACHE_SIZE Sets the ccache max-size (shared by each CCACHE_DIR, "
|
||||
echo " same as --ccache-size, default ${CCACHE_SIZE})"
|
||||
echo " TEMPLATE_SOURCES A template for sources.list"
|
||||
echo " TEMPLATE_SCHROOTCONF A template for schroot.conf stanza"
|
||||
if [ -z "$1" ]; then
|
||||
@ -86,27 +117,62 @@ function usage()
|
||||
if [ -z "$1" ]; then
|
||||
usage
|
||||
fi
|
||||
OPTS=`getopt -o 'h' --long "help,debug,skip-updates,arch:,name:,source-template:,debootstrap-mirror:,debootstrap-include:,debootstrap-exclude:,debootstrap-proxy:,personality:,distro:,vg:,type:" -- "$@"`
|
||||
supported_options=(
|
||||
help
|
||||
debug
|
||||
skip-updates
|
||||
skip-security
|
||||
skip-proposed
|
||||
skip-eatmydata
|
||||
ccache
|
||||
arch:
|
||||
name:
|
||||
source-template:
|
||||
debootstrap-mirror:
|
||||
debootstrap-include:
|
||||
debootstrap-exclude:
|
||||
debootstrap-opts:
|
||||
debootstrap-proxy:
|
||||
debootstrap-no-check-gpg
|
||||
debootstrap-keyring:
|
||||
personality:
|
||||
distro:
|
||||
vg:
|
||||
zfs-dataset:
|
||||
type:
|
||||
target:
|
||||
ccache-dir:
|
||||
ccache-size:
|
||||
)
|
||||
OPTS=$(getopt -o 'h' --long "$(IFS=, && echo "${supported_options[*]}")" -- "$@")
|
||||
eval set -- "$OPTS"
|
||||
|
||||
VG=""
|
||||
DISTRO=""
|
||||
COMMAND_PREFIX=""
|
||||
name=""
|
||||
proxy="_unset_"
|
||||
EATMYDATA=0
|
||||
DEBOOTSTRAP_NO_CHECK_GPG=0
|
||||
EATMYDATA=1
|
||||
CCACHE=0
|
||||
USE_PKGBINARYMANGLER=0
|
||||
|
||||
while :; do
|
||||
case "$1" in
|
||||
--debug)
|
||||
DEBUG=1
|
||||
set -x
|
||||
shift
|
||||
;;
|
||||
--arch)
|
||||
CHROOT_ARCH="$2"
|
||||
if [ "$2" = "i386" ] || [ "$2" = "lpia" ] && [ -z "$personality" ];
|
||||
then
|
||||
personality="linux32"
|
||||
fi
|
||||
case $2 in
|
||||
armhf|i386)
|
||||
if [ -z "$personality" ]; then
|
||||
personality="linux32"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
shift 2
|
||||
;;
|
||||
--personality)
|
||||
@ -117,6 +183,14 @@ while :; do
|
||||
SKIP_UPDATES="1"
|
||||
shift
|
||||
;;
|
||||
--skip-proposed)
|
||||
SKIP_PROPOSED="1"
|
||||
shift
|
||||
;;
|
||||
--skip-security)
|
||||
SKIP_SECURITY="1"
|
||||
shift
|
||||
;;
|
||||
--name)
|
||||
name="$2"
|
||||
shift 2
|
||||
@ -141,12 +215,29 @@ while :; do
|
||||
DEBOOTSTRAP_EXCLUDE="$2"
|
||||
shift 2
|
||||
;;
|
||||
--debootstrap-opts)
|
||||
DEBOOTSTRAP_OPTS="$2"
|
||||
shift 2
|
||||
;;
|
||||
--debootstrap-proxy)
|
||||
proxy="$2"
|
||||
shift 2
|
||||
;;
|
||||
--eatmydata)
|
||||
EATMYDATA=1
|
||||
--debootstrap-keyring)
|
||||
# Store the absolute path because we cd to the root directory later.
|
||||
DEBOOTSTRAP_KEYRING=$(readlink -f "$2")
|
||||
shift 2
|
||||
;;
|
||||
--debootstrap-no-check-gpg)
|
||||
DEBOOTSTRAP_NO_CHECK_GPG=1
|
||||
shift
|
||||
;;
|
||||
--skip-eatmydata)
|
||||
EATMYDATA=0
|
||||
shift
|
||||
;;
|
||||
--ccache)
|
||||
CCACHE=1
|
||||
shift
|
||||
;;
|
||||
--distro)
|
||||
@ -157,10 +248,26 @@ while :; do
|
||||
VG="$2"
|
||||
shift 2
|
||||
;;
|
||||
--zfs-dataset)
|
||||
ZFS_PARENT_DATASET="$2"
|
||||
shift 2
|
||||
;;
|
||||
--type)
|
||||
SCHROOT_TYPE="$2"
|
||||
shift 2
|
||||
;;
|
||||
--target)
|
||||
TARGET_ARCH="$2"
|
||||
shift 2
|
||||
;;
|
||||
--ccache-dir)
|
||||
CCACHE_DIR="$2"
|
||||
shift 2
|
||||
;;
|
||||
--ccache-size)
|
||||
CCACHE_SIZE="$2"
|
||||
shift 2
|
||||
;;
|
||||
--)
|
||||
shift
|
||||
break
|
||||
@ -175,9 +282,15 @@ done
|
||||
# will not exist in the chroot.
|
||||
cd /
|
||||
|
||||
# Make sure we've got a regular user
|
||||
if [ -w /etc/passwd ]; then
|
||||
echo "Please run this script as a regular user, not root." >&2
|
||||
if [ -w /etc/passwd -a ! -e ~/.sbuildrc -a ! -e ~/.mk-sbuild.rc ]; then
|
||||
cat >&2 <<EOF
|
||||
It's recommended to run this script as a regular user, not root, so that it
|
||||
uses the configuration files in your home directory.
|
||||
It will use sudo to escalate to root as necessary.
|
||||
|
||||
If you really do want to use it as root, create a .sbuildrc or .mk-sbuild.rc
|
||||
in root's home.
|
||||
EOF
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@ -191,10 +304,26 @@ if [ ! -w /var/lib/sbuild ]; then
|
||||
# Prepare a usable default .sbuildrc
|
||||
if [ ! -e ~/.sbuildrc ]; then
|
||||
cat > ~/.sbuildrc <<EOM
|
||||
# *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW ***
|
||||
# *** THIS COMMAND IS DEPRECATED ***
|
||||
#
|
||||
# In sbuild 0.87.0 and later, the unshare backend is available. This is
|
||||
# expected to become the default in a future release.
|
||||
#
|
||||
# This is the new preferred way of building Debian packages, making the manual
|
||||
# creation of schroots no longer necessary. To retain the default behavior,
|
||||
# you may remove this comment block and continue.
|
||||
#
|
||||
# To test the unshare backend while retaining the default settings, run sbuild
|
||||
# with --chroot-mode=unshare like this:
|
||||
# $ sbuild --chroot-mode=unshare --dist=unstable hello
|
||||
#
|
||||
# To switch to the unshare backend by default (recommended), uncomment the
|
||||
# following lines and delete the rest of the file (with the exception of the
|
||||
# last two lines):
|
||||
#\$chroot_mode = 'unshare';
|
||||
#\$unshare_mmdebstrap_keep_tarball = 1;
|
||||
|
||||
# Mail address where logs are sent to (mandatory, no default!)
|
||||
\$mailto = '$USER';
|
||||
# *** VERIFY AND UPDATE \$mailto and \$maintainer_name BELOW ***
|
||||
|
||||
# Name to use as override in .changes files for the Maintainer: field
|
||||
#\$maintainer_name='$USER <$USER@localhost>';
|
||||
@ -282,6 +411,48 @@ elif [ -z "$DISTRO" ]; then
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# By default DEBOOTSTRAP_SCRIPT must match RELEASE
|
||||
DEBOOTSTRAP_SCRIPT="$RELEASE"
|
||||
|
||||
dist_ge() {
|
||||
local releases="$($3-distro-info --all)"
|
||||
local left=999
|
||||
local right=0
|
||||
local seq=1
|
||||
|
||||
for i in $releases; do
|
||||
if [ $1 = $i ]; then
|
||||
local left=$seq
|
||||
break
|
||||
fi
|
||||
seq=$((seq+1))
|
||||
done
|
||||
|
||||
seq=1
|
||||
for i in $releases; do
|
||||
if [ $2 = $i ]; then
|
||||
local right=$seq
|
||||
break
|
||||
fi
|
||||
seq=$((seq+1))
|
||||
done
|
||||
|
||||
[ $left -ge $right ] && return 0 || return 1
|
||||
}
|
||||
|
||||
ubuntu_dist_ge () {
|
||||
dist_ge $1 $2 ubuntu
|
||||
}
|
||||
|
||||
debian_dist_ge () {
|
||||
dist_ge $1 $2 debian
|
||||
}
|
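dist_ge() above derives release ordering from the distro-info release list. The same check can be sketched in Python with the distro-info data that pbuilder-dist below already uses (assuming python3-distro-info; unknown codenames raise ValueError here rather than falling back like the shell version):

#!/usr/bin/python3
# Illustration only; requires python3-distro-info and its data files.
from distro_info import UbuntuDistroInfo

RELEASES = UbuntuDistroInfo().all  # codenames ordered oldest to newest


def ubuntu_dist_ge(release, baseline):
    """Return True if release is the same as, or newer than, baseline."""
    return RELEASES.index(release) >= RELEASES.index(baseline)


print(ubuntu_dist_ge("jammy", "focal"))  # True: jammy came after focal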
||||
|
||||
if [ "$DISTRO" = "ubuntu" ]; then
|
||||
# On Ubuntu, set DEBOOTSTRAP_SCRIPT to gutsy to allow building new RELEASES without new debootstrap
|
||||
DEBOOTSTRAP_SCRIPT=gutsy
|
||||
fi
|
||||
|
||||
# By default, name the schroot the same as the release
|
||||
if [ -z "$name" ]; then
|
||||
name="$RELEASE"
|
||||
@ -296,7 +467,11 @@ if [ -z "$CHROOT_ARCH" ]; then
|
||||
CHROOT_ARCH="$HOST_ARCH"
|
||||
fi
|
||||
|
||||
CHROOT_NAME="${name}-${CHROOT_ARCH}"
|
||||
if [ -z "$TARGET_ARCH" ]; then
|
||||
CHROOT_NAME="${name}-${CHROOT_ARCH}"
|
||||
else
|
||||
CHROOT_NAME="${name}-${CHROOT_ARCH}-${TARGET_ARCH}"
|
||||
fi
|
||||
|
||||
if [ -z "$synonym" ]; then
|
||||
CHROOT_SYNONYM=""
|
||||
@ -322,10 +497,58 @@ if [ $EATMYDATA -eq 1 ]; then
|
||||
esac
|
||||
fi
|
||||
|
||||
if [ $CCACHE -eq 1 ]; then
|
||||
if [ -z "$CCACHE_DIR" ] || [[ "$(dirname "$CCACHE_DIR")" == '/' ]]; then
|
||||
echo "Invalid ccache dir: ${CCACHE_DIR}" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
# We can safely use a global cache path, in such case changing size applies
|
||||
# to all the schroots
|
||||
setup_script="$CCACHE_DIR"/mk-sbuild-setup
|
||||
if [ -d "$CCACHE_DIR" ]; then
|
||||
echo "Reusing $CCACHE_DIR as CCACHE_DIR, will be configured to use max-size=${CCACHE_SIZE}"
|
||||
rm -f "$setup_script"
|
||||
else
|
||||
echo "Configuring $CCACHE_DIR as CCACHE_DIR with max-size=${CCACHE_SIZE}"
|
||||
sudo install --group=sbuild --mode=2775 -d "$CCACHE_DIR"
|
||||
fi
|
||||
|
||||
if [ ! -x "$setup_script" ]; then
|
||||
cat <<END | sudo tee "$setup_script" 1>/dev/null
|
||||
#!/bin/sh
|
||||
export CCACHE_DIR="$CCACHE_DIR"
|
||||
export CCACHE_MAXSIZE="${CCACHE_SIZE}"
|
||||
export CCACHE_UMASK=002
|
||||
export CCACHE_COMPRESS=1
|
||||
unset CCACHE_HARDLINK
|
||||
export CCACHE_NOHARDLINK=1
|
||||
export PATH="/usr/lib/ccache:\$PATH"
|
||||
exec "\$@"
|
||||
END
|
||||
sudo chmod a+rx "$setup_script"
|
||||
fi
|
||||
|
||||
if ! sudo grep -qs "$CCACHE_DIR" /etc/schroot/sbuild/fstab; then
|
||||
# This acts on host configuration, but there is no other way to handle
|
||||
# this; however, it won't affect anything
|
||||
cat <<END | sudo tee -a /etc/schroot/sbuild/fstab 1>/dev/null
|
||||
${CCACHE_DIR} ${CCACHE_DIR} none rw,bind 0 0
|
||||
END
|
||||
fi
|
||||
|
||||
DEBOOTSTRAP_INCLUDE="${DEBOOTSTRAP_INCLUDE:+$DEBOOTSTRAP_INCLUDE,}ccache"
|
||||
BUILD_PKGS="$BUILD_PKGS ccache"
|
||||
COMMAND_PREFIX="${COMMAND_PREFIX:+$COMMAND_PREFIX,}$setup_script"
|
||||
fi
|
||||
|
||||
if [ -z "$SCHROOT_TYPE" ]; then
|
||||
# To build the LV, we need to know which volume group to use
|
||||
if [ -n "$VG" ]; then
|
||||
SCHROOT_TYPE="lvm-snapshot"
|
||||
# To build the ZFS dataset, we need to know which parent to use
|
||||
elif [ -n "$ZFS_PARENT_DATASET" ]; then
|
||||
SCHROOT_TYPE="zfs-snapshot"
|
||||
else
|
||||
SCHROOT_TYPE="directory"
|
||||
fi
|
||||
@ -340,7 +563,11 @@ case "$SCHROOT_TYPE" in
|
||||
fi
|
||||
|
||||
# Set up some variables for use in the paths and names
|
||||
CHROOT_LV="${name}_${CHROOT_ARCH}_chroot"
|
||||
if [ -z "$TARGET_ARCH" ]; then
|
||||
CHROOT_LV="${name}_${CHROOT_ARCH}_chroot"
|
||||
else
|
||||
CHROOT_LV="${name}_${CHROOT_ARCH}_${TARGET_ARCH}_chroot"
|
||||
fi
|
||||
CHROOT_PATH="/dev/$VG/$CHROOT_LV"
|
||||
|
||||
# Install lvm2 if missing
|
||||
@ -368,7 +595,7 @@ case "$SCHROOT_TYPE" in
|
||||
# Set up some variables for use in the paths and names
|
||||
CHROOT_PATH="${SOURCE_CHROOTS_TGZ}/${CHROOT_NAME}.tgz"
|
||||
;;
|
||||
"btrfs-snapshot")
|
||||
"btrfs-snapshot" | "zfs-snapshot")
|
||||
if [ ! -d "${SOURCE_CHROOTS_DIR}" ]; then
|
||||
sudo mkdir -p "${SOURCE_CHROOTS_DIR}"
|
||||
fi
|
||||
@ -385,12 +612,12 @@ esac
|
||||
|
||||
# Is the specified release known to debootstrap?
|
||||
variant_opt="--variant=buildd"
|
||||
if [ ! -r "/usr/share/debootstrap/scripts/$RELEASE" ]; then
|
||||
echo "Specified release ($RELEASE) not known to debootstrap" >&2
|
||||
if [ ! -r "/usr/share/debootstrap/scripts/$DEBOOTSTRAP_SCRIPT" ]; then
|
||||
echo "Specified release ($DEBOOTSTRAP_SCRIPT) not known to debootstrap" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
BUILD_PKGS="build-essential fakeroot devscripts apt-utils"
|
||||
BUILD_PKGS="build-essential fakeroot apt-utils"
|
||||
# Handle distro-specific logic, unknown to debootstrap
|
||||
case "$DISTRO" in
|
||||
ubuntu)
|
||||
@ -399,21 +626,17 @@ ubuntu)
|
||||
amd64 | i386)
|
||||
DEBOOTSTRAP_MIRROR="http://archive.ubuntu.com/ubuntu"
|
||||
;;
|
||||
armel | hppa | ia64 | lpia | sparc)
|
||||
*)
|
||||
DEBOOTSTRAP_MIRROR="http://ports.ubuntu.com/ubuntu-ports"
|
||||
;;
|
||||
powerpc)
|
||||
if [ "$RELEASE" != "dapper" ]; then
|
||||
DEBOOTSTRAP_MIRROR="http://ports.ubuntu.com/ubuntu-ports"
|
||||
else
|
||||
DEBOOTSTRAP_MIRROR="http://archive.ubuntu.com/ubuntu"
|
||||
fi
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
if [ -z "$COMPONENTS" ]; then
|
||||
COMPONENTS="main restricted universe multiverse"
|
||||
fi
|
||||
if [ -z "$SOURCES_PROPOSED_SUITE" ]; then
|
||||
SOURCES_PROPOSED_SUITE="RELEASE-proposed"
|
||||
fi
|
||||
if [ -z "$SOURCES_SECURITY_SUITE" ]; then
|
||||
SOURCES_SECURITY_SUITE="RELEASE-security"
|
||||
fi
|
||||
@ -422,47 +645,75 @@ ubuntu)
|
||||
amd64 | i386)
|
||||
SOURCES_SECURITY_URL="http://security.ubuntu.com/ubuntu"
|
||||
;;
|
||||
armel | hppa | ia64 | lpia | sparc)
|
||||
*)
|
||||
SOURCES_SECURITY_URL="http://ports.ubuntu.com/ubuntu-ports"
|
||||
;;
|
||||
powerpc)
|
||||
if [ "$RELEASE" != "dapper" ]; then
|
||||
SOURCES_SECURITY_URL="http://ports.ubuntu.com/ubuntu-ports"
|
||||
else
|
||||
SOURCES_SECURITY_URL="http://security.ubuntu.com/ubuntu"
|
||||
fi
|
||||
esac
|
||||
fi
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
# target chroots only supported in precise and later, so ignore
|
||||
# the fact that powerpc was once not on ports.
|
||||
case "$TARGET_ARCH" in
|
||||
amd64 | i386)
|
||||
TARGET_MIRROR="http://archive.ubuntu.com/ubuntu"
|
||||
TARGET_SOURCES_SECURITY_URL="http://security.ubuntu.com/ubuntu"
|
||||
;;
|
||||
*)
|
||||
TARGET_MIRROR="http://ports.ubuntu.com/ubuntu-ports"
|
||||
TARGET_SOURCES_SECURITY_URL="http://ports.ubuntu.com/ubuntu-ports"
|
||||
;;
|
||||
esac
|
||||
fi
|
||||
# Add edgy+ buildd tools
|
||||
if [ "$RELEASE" != "breezy" ] && [ "$RELEASE" != "dapper" ]; then
|
||||
if ubuntu_dist_ge "$RELEASE" "edgy"; then
|
||||
# Add pkgbinarymangler (edgy and later)
|
||||
BUILD_PKGS="$BUILD_PKGS pkgbinarymangler"
|
||||
USE_PKGBINARYMANGLER=1
|
||||
# Disable recommends for a smaller chroot (gutsy and later only)
|
||||
BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
|
||||
# Add buildd tools
|
||||
BUILD_PKGS="$BUILD_PKGS pkg-create-dbgsym pkgbinarymangler"
|
||||
if ubuntu_dist_ge "$RELEASE" "gutsy"; then
|
||||
BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
|
||||
SKIP_RECOMMENDS=1
|
||||
fi
|
||||
# Add pkg-create-dbgsym (edgy through zesty)
|
||||
if ! ubuntu_dist_ge "$RELEASE" "artful"; then
|
||||
BUILD_PKGS="$BUILD_PKGS pkg-create-dbgsym"
|
||||
fi
|
||||
fi
|
||||
;;
|
||||
debian)
|
||||
if [ -z "$DEBOOTSTRAP_MIRROR" ]; then
|
||||
DEBOOTSTRAP_MIRROR="http://ftp.debian.org/debian"
|
||||
DEBOOTSTRAP_MIRROR="http://deb.debian.org/debian"
|
||||
fi
|
||||
if [ -z "$COMPONENTS" ]; then
|
||||
COMPONENTS="main non-free contrib"
|
||||
COMPONENTS="main non-free non-free-firmware contrib"
|
||||
fi
|
||||
if [ -z "$SOURCES_PROPOSED_SUITE" ]; then
|
||||
SOURCES_PROPOSED_SUITE="RELEASE-proposed-updates"
|
||||
fi
|
||||
# Debian only performs security updates
|
||||
SKIP_UPDATES=1
|
||||
if [ -z "$SOURCES_SECURITY_SUITE" ]; then
|
||||
SOURCES_SECURITY_SUITE="RELEASE/updates"
|
||||
if debian_dist_ge "$RELEASE" "bullseye"; then
|
||||
SOURCES_SECURITY_SUITE="RELEASE-security"
|
||||
else
|
||||
SOURCES_SECURITY_SUITE="RELEASE/updates"
|
||||
fi
|
||||
fi
|
||||
if [ -z "$SOURCES_SECURITY_URL" ]; then
|
||||
SOURCES_SECURITY_URL="http://security.debian.org/"
|
||||
fi
|
||||
# Unstable (aka "sid") does not have a security repository
|
||||
if [ "$RELEASE" = 'unstable' ] || [ "$RELEASE" = 'sid' ]; then
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
TARGET_MIRROR="$DEBOOTSTRAP_MIRROR"
|
||||
TARGET_SOURCES_SECURITY_URL="$SOURCES_SECURITY_URL"
|
||||
fi
|
||||
# Unstable and Experimental do not have security or proposed repositories
|
||||
if [ "$RELEASE" = 'unstable' ] || [ "$RELEASE" = 'sid' ] || [ "$RELEASE" = 'experimental' ]; then
|
||||
SKIP_SECURITY=1
|
||||
SKIP_PROPOSED=1
|
||||
fi
|
||||
# Keep the chroot as minimal as possible
|
||||
BUILD_PKGS="--no-install-recommends $BUILD_PKGS"
|
||||
SKIP_RECOMMENDS=1
|
||||
;;
|
||||
*)
|
||||
echo "Unknown --distro '$DISTRO': aborting" >&2
|
||||
@ -470,18 +721,41 @@ debian)
|
||||
;;
|
||||
esac
|
||||
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
# Ultimately we would like there to be a "cross-build-essential-$arch"
|
||||
# package. In practice, the cross-g++ package is sufficient to pull in
|
||||
# everything we need.
|
||||
if ! target_tuple=$(dpkg-architecture -a"$TARGET_ARCH" -qDEB_HOST_GNU_TYPE 2>/dev/null)
|
||||
then
|
||||
echo "Unknown target architecture $TARGET_ARCH" >&2
|
||||
exit 1
|
||||
fi
|
||||
BUILD_PKGS="$BUILD_PKGS g++-$target_tuple pkg-config dpkg-cross libc-dev:$TARGET_ARCH"
|
||||
fi
|
||||
|
||||
debootstrap_opts="--components=$(echo $COMPONENTS | tr ' ' ,)"
|
||||
if [ -n "$DEBOOTSTRAP_INCLUDE" ] ; then
|
||||
debootstrap_opts="--include=$DEBOOTSTRAP_INCLUDE"
|
||||
debootstrap_opts="$debootstrap_opts --include=$DEBOOTSTRAP_INCLUDE"
|
||||
fi
|
||||
|
||||
if [ -n "$DEBOOTSTRAP_EXCLUDE" ] ; then
|
||||
debootstrap_opts="$debootstrap_opts --exclude=$DEBOOTSTRAP_EXCLUDE"
|
||||
fi
|
||||
|
||||
if [ $DEBOOTSTRAP_NO_CHECK_GPG -eq 1 ]; then
|
||||
debootstrap_opts="$debootstrap_opts --no-check-gpg"
|
||||
elif [ -n "$DEBOOTSTRAP_KEYRING" ]; then
|
||||
debootstrap_opts="$debootstrap_opts --keyring=$DEBOOTSTRAP_KEYRING"
|
||||
fi
|
||||
|
||||
if [ -n "$DEBOOTSTRAP_OPTS" ] ; then
|
||||
debootstrap_opts="$debootstrap_opts $DEBOOTSTRAP_OPTS"
|
||||
fi
|
||||
|
||||
# if http_proxy is set in the environment (even empty) set 'proxy' to it
|
||||
[ "$proxy" = "_unset_" -a "${DEBOOTSTRAP_PROXY-xx}" = "xx" ] &&
|
||||
[ "$proxy" = "_unset_" -a "${DEBOOTSTRAP_PROXY-xx}" != "xx" ] &&
|
||||
proxy=${DEBOOTSTRAP_PROXY}
|
||||
[ "$proxy" = "_unset_" -a "${http_proxy-xx}" = "xx" ] && proxy=${http_proxy}
|
||||
[ "$proxy" = "_unset_" -a "${http_proxy-xx}" != "xx" ] && proxy=${http_proxy}
|
||||
if [ "$proxy" = "_unset_" ]; then
|
||||
_out=$(apt-config shell x Acquire::HTTP::Proxy) &&
|
||||
_out=$(sh -c 'eval $1 && echo $x' -- "$_out") && [ -n "$_out" ] &&
|
||||
@ -494,12 +768,12 @@ DEBOOTSTRAP_COMMAND=debootstrap
|
||||
if [ "$CHROOT_ARCH" != "$HOST_ARCH" ] ; then
|
||||
case "$CHROOT_ARCH-$HOST_ARCH" in
|
||||
# Sometimes we don't need qemu
|
||||
amd64-i386|amd64-lpia|arm-armel|armel-arm|i386-amd64|i386-lpia|lpia-i386|powerpc-ppc64|ppc64-powerpc|sparc-sparc64|sparc64-sparc)
|
||||
amd64-i386|arm64-armhf|armhf-arm64|i386-amd64|powerpc-ppc64|ppc64-powerpc)
|
||||
;;
|
||||
# Sometimes we do
|
||||
*)
|
||||
DEBOOTSTRAP_COMMAND=qemu-debootstrap
|
||||
if ! which "$DEBOOTSTRAP_COMMAND"; then
|
||||
DEBOOTSTRAP_COMMAND=debootstrap
|
||||
if ! which "qemu-x86_64-static"; then
|
||||
sudo apt-get install qemu-user-static
|
||||
fi
|
||||
;;
|
||||
@ -532,53 +806,130 @@ case "$SCHROOT_TYPE" in
|
||||
fi
|
||||
sudo btrfs subvolume create "${MNT}"
|
||||
;;
|
||||
|
||||
"zfs-snapshot")
|
||||
ZFS_DATASET="${ZFS_PARENT_DATASET}/${CHROOT_NAME}"
|
||||
if sudo zfs list "${ZFS_DATASET}" >/dev/null 2>&1; then
|
||||
echo "E: ZFS dataset ${ZFS_DATASET} already exists; aborting" >&2
|
||||
exit 1
|
||||
fi
|
||||
sudo zfs create -p -o mountpoint=legacy "${ZFS_DATASET}"
|
||||
|
||||
# Mount
|
||||
MNT=`mktemp -d -t schroot-XXXXXX`
|
||||
sudo mount -t zfs "${ZFS_DATASET}" "${MNT}"
|
||||
;;
|
||||
"file")
|
||||
MNT=`mktemp -d -t schroot-XXXXXX`
|
||||
esac
|
||||
|
||||
case "$SCHROOT_TYPE" in
|
||||
directory|file)
|
||||
if grep -q '\soverlay$' /proc/filesystems \
|
||||
|| /sbin/modprobe -q --dry-run overlay; then
|
||||
OVERLAY_FS=overlay
|
||||
elif grep -q '\soverlayfs$' /proc/filesystems \
|
||||
|| /sbin/modprobe -q --dry-run overlayfs; then
|
||||
OVERLAY_FS=overlayfs
|
||||
else
|
||||
OVERLAY_FS=aufs
|
||||
fi
|
||||
esac
|
||||
|
||||
# work around apt's GPG invocation that fails without root's .gnupg directory
|
||||
sudo mkdir -p -m 0700 "$MNT"/root/.gnupg
|
||||
|
||||
# debootstrap the chroot
|
||||
sudo ${proxy:+"http_proxy=${proxy}"} "$DEBOOTSTRAP_COMMAND" --arch="$CHROOT_ARCH" $variant_opt $debootstrap_opts "$RELEASE" "$MNT" "${DEBOOTSTRAP_MIRROR:-http://archive.ubuntu.com/ubuntu}"
|
||||
sudo ${proxy:+"http_proxy=${proxy}"} "$DEBOOTSTRAP_COMMAND" --arch="$CHROOT_ARCH" $variant_opt $debootstrap_opts "$RELEASE" "$MNT" "${DEBOOTSTRAP_MIRROR:-http://archive.ubuntu.com/ubuntu}" "$DEBOOTSTRAP_SCRIPT"
|
||||
|
||||
if [ $EATMYDATA -eq 1 ]; then
|
||||
sudo mkdir -p "${MNT}/usr/local/libexec/mk-sbuild"
|
||||
sudo ln -s /usr/bin/eatmydata "${MNT}/usr/local/libexec/mk-sbuild/dpkg"
|
||||
echo 'Dir::Bin::dpkg "/usr/local/libexec/mk-sbuild/dpkg";' \
|
||||
| sudo tee "${MNT}/etc/apt/apt.conf.d/00mk-sbuild-eatmydata" > /dev/null
|
||||
fi
|
||||
|
||||
# Update the package sources
|
||||
TEMP_SOURCES=`mktemp -t sources-XXXXXX`
|
||||
if [ -z "$TEMPLATE_SOURCES" ]; then
|
||||
TEMPLATE_SOURCES=~/.mk-sbuild.sources
|
||||
fi
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
MIRROR_ARCHS="[arch=$CHROOT_ARCH] "
|
||||
fi
|
||||
if [ -r "$TEMPLATE_SOURCES" ]; then
|
||||
cat "$TEMPLATE_SOURCES" > "$TEMP_SOURCES"
|
||||
else
|
||||
cat > "$TEMP_SOURCES" <<EOM
|
||||
deb ${DEBOOTSTRAP_MIRROR} RELEASE ${COMPONENTS}
|
||||
deb ${MIRROR_ARCHS}${DEBOOTSTRAP_MIRROR} RELEASE ${COMPONENTS}
|
||||
deb-src ${DEBOOTSTRAP_MIRROR} RELEASE ${COMPONENTS}
|
||||
EOM
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb [arch=$TARGET_ARCH] $TARGET_MIRROR RELEASE $COMPONENTS
|
||||
EOM
|
||||
fi
|
||||
if [ "$EXPERIMENTAL" -eq 1 ]; then
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb ${DEBOOTSTRAP_MIRROR} experimental ${COMPONENTS}
|
||||
deb ${MIRROR_ARCHS}${DEBOOTSTRAP_MIRROR} experimental ${COMPONENTS}
|
||||
deb-src ${DEBOOTSTRAP_MIRROR} experimental ${COMPONENTS}
|
||||
EOM
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb [arch=$TARGET_ARCH] $TARGET_MIRROR experimental $COMPONENTS
|
||||
EOM
|
||||
fi
|
||||
fi
|
||||
if [ -z "$SKIP_UPDATES" ]; then
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb ${DEBOOTSTRAP_MIRROR} RELEASE-updates ${COMPONENTS}
|
||||
deb ${MIRROR_ARCHS}${DEBOOTSTRAP_MIRROR} RELEASE-updates ${COMPONENTS}
|
||||
deb-src ${DEBOOTSTRAP_MIRROR} RELEASE-updates ${COMPONENTS}
|
||||
EOM
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb [arch=$TARGET_ARCH] $TARGET_MIRROR RELEASE-updates $COMPONENTS
|
||||
EOM
|
||||
fi
|
||||
fi
|
||||
if [ -z "$SKIP_PROPOSED" ]; then
|
||||
TEMP_PREFERENCES=`mktemp -t preferences-XXXXXX`
|
||||
cat >> "$TEMP_PREFERENCES" <<EOM
|
||||
# override for NotAutomatic: yes
|
||||
Package: *
|
||||
Pin: release a=*-proposed
|
||||
Pin-Priority: 500
|
||||
EOM
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb ${MIRROR_ARCHS}${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS}
|
||||
deb-src ${DEBOOTSTRAP_MIRROR} $SOURCES_PROPOSED_SUITE ${COMPONENTS}
|
||||
EOM
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb [arch=$TARGET_ARCH] $TARGET_MIRROR $SOURCES_PROPOSED_SUITE $COMPONENTS
|
||||
EOM
|
||||
fi
|
||||
fi
|
||||
if [ -z "$SKIP_SECURITY" ]; then
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb ${SOURCES_SECURITY_URL} ${SOURCES_SECURITY_SUITE} ${COMPONENTS}
|
||||
deb ${MIRROR_ARCHS}${SOURCES_SECURITY_URL} ${SOURCES_SECURITY_SUITE} ${COMPONENTS}
|
||||
deb-src ${SOURCES_SECURITY_URL} ${SOURCES_SECURITY_SUITE} ${COMPONENTS}
|
||||
EOM
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
cat >> "$TEMP_SOURCES" <<EOM
|
||||
deb [arch=$TARGET_ARCH] $TARGET_SOURCES_SECURITY_URL $SOURCES_SECURITY_SUITE $COMPONENTS
|
||||
EOM
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
cat "$TEMP_SOURCES" | sed -e "s|RELEASE|$RELEASE|g" | \
|
||||
sudo bash -c "cat > $MNT/etc/apt/sources.list"
|
||||
rm -f "$TEMP_SOURCES"
|
||||
if [ -n "$TEMP_PREFERENCES" ]; then
|
||||
sudo mv "$TEMP_PREFERENCES" $MNT/etc/apt/preferences.d/proposed.pref
|
||||
fi
|
||||
|
||||
# Copy the timezone (comment this out if you want to leave the chroot at UTC)
|
||||
sudo cp /etc/localtime /etc/timezone "$MNT"/etc/
|
||||
# Copy the timezone (uncomment this if you want to use your local time zone)
|
||||
#sudo cp -P --remove-destination /etc/localtime /etc/timezone "$MNT"/etc/
|
||||
# Create a schroot entry for this chroot
|
||||
TEMP_SCHROOTCONF=`mktemp -t schrootconf-XXXXXX`
|
||||
TEMPLATE_SCHROOTCONF=~/.mk-sbuild.schroot.conf
|
||||
@ -603,10 +954,11 @@ root-groups=$ADMIN_GROUPS
|
||||
#source-root-users=$ADMIN_GROUPS
|
||||
#source-root-groups=$ADMIN_GROUPS
|
||||
type=SCHROOT_TYPE
|
||||
profile=$SCHROOT_PROFILE
|
||||
EOM
|
||||
if [ $EATMYDATA -eq 1 ]; then
|
||||
if [ -n "$COMMAND_PREFIX" ]; then
|
||||
cat >> "$TEMP_SCHROOTCONF" <<EOM
|
||||
command-prefix=eatmydata
|
||||
command-prefix=${COMMAND_PREFIX}
|
||||
EOM
|
||||
fi
|
||||
case "$SCHROOT_TYPE" in
|
||||
@ -619,7 +971,7 @@ EOM
|
||||
;;
|
||||
directory|file)
|
||||
cat >> "${TEMP_SCHROOTCONF}" <<EOM
|
||||
union-type=aufs
|
||||
union-type=$OVERLAY_FS
|
||||
${SCHROOT_TYPE}=CHROOT_PATH
|
||||
EOM
|
||||
;;
|
||||
@ -629,6 +981,12 @@ btrfs-source-subvolume=CHROOT_PATH
|
||||
btrfs-snapshot-directory=CHROOT_SNAPSHOT_DIR
|
||||
EOM
|
||||
;;
|
||||
zfs-snapshot)
|
||||
cat >> "${TEMP_SCHROOTCONF}" <<EOM
|
||||
zfs-dataset=ZFS_DATASET
|
||||
EOM
|
||||
;;
|
||||
|
||||
esac
|
||||
fi
|
||||
if [ ! -z "$personality" ]; then
|
||||
@ -645,6 +1003,7 @@ sed -e "s|CHROOT_NAME|$CHROOT_NAME|g" \
|
||||
-e "s|SNAPSHOT_SIZE|$SNAPSHOT_SIZE|g" \
|
||||
-e "s|SCHROOT_TYPE|$SCHROOT_TYPE|g" \
|
||||
-e "s|CHROOT_SNAPSHOT_DIR|$CHROOT_SNAPSHOT_DIR|g" \
|
||||
-e "s|ZFS_DATASET|$ZFS_DATASET|g" \
|
||||
"$TEMP_SCHROOTCONF" \
|
||||
| sudo tee "/etc/schroot/chroot.d/sbuild-$CHROOT_NAME" > /dev/null
|
||||
rm -f "$TEMP_SCHROOTCONF"
|
||||
@ -666,15 +1025,56 @@ sudo chmod a+x "$MNT"/usr/sbin/policy-rc.d
|
||||
# Create image finalization script
|
||||
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
|
||||
#!/bin/bash
|
||||
#set -x
|
||||
if [ "$DEBUG" = 1 ]; then
|
||||
set -x
|
||||
fi
|
||||
set -e
|
||||
if [ -n "$proxy" ]; then
|
||||
mkdir -p /etc/apt/apt.conf.d/
|
||||
cat > /etc/apt/apt.conf.d/99mk-sbuild-proxy <<EOF
|
||||
## proxy settings copied from mk-sbuild
|
||||
// proxy settings copied from mk-sbuild
|
||||
Acquire { HTTP { Proxy "$proxy"; }; };
|
||||
EOF
|
||||
fi
|
||||
EOM
|
||||
|
||||
if [ -n "$SKIP_RECOMMENDS" ]; then
|
||||
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
|
||||
mkdir -p /etc/apt/apt.conf.d/
|
||||
cat > /etc/apt/apt.conf.d/99mk-sbuild-no-recommends <<EOF
|
||||
// disable install recommends
|
||||
APT::Install-Recommends "0";
|
||||
EOF
|
||||
EOM
|
||||
fi
|
||||
|
||||
if [ "$USE_PKGBINARYMANGLER" = 1 ]; then
|
||||
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
|
||||
mkdir -p /etc/pkgbinarymangler/
|
||||
cat > /etc/pkgbinarymangler/maintainermangler.conf <<EOF
|
||||
# pkgmaintainermangler configuration file
|
||||
|
||||
# pkgmaintainermangler will do nothing unless enable is set to "true"
|
||||
enable: true
|
||||
|
||||
# Configure what happens if /CurrentlyBuilding is present, but invalid
|
||||
# (i. e. it does not contain a Package: field). If "ignore" (default),
|
||||
# the file is ignored (i. e. the Maintainer field is mangled) and a
|
||||
# warning is printed. If "fail" (or any other value), pkgmaintainermangler
|
||||
# exits with an error, which causes a package build to fail.
|
||||
invalid_currentlybuilding: ignore
|
||||
EOF
|
||||
EOM
|
||||
fi
|
||||
|
||||
if [ -n "$TARGET_ARCH" ]; then
|
||||
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
|
||||
# Configure target architecture
|
||||
dpkg --add-architecture "$TARGET_ARCH"
|
||||
EOM
|
||||
fi
|
||||
|
||||
sudo bash -c "cat >> $MNT/finish.sh" <<EOM
|
||||
# Reload package lists
|
||||
apt-get update || true
|
||||
# Pull down signature requirements
|
||||
@ -685,7 +1085,7 @@ apt-get update || true
|
||||
echo set debconf/frontend Noninteractive | debconf-communicate
|
||||
echo set debconf/priority critical | debconf-communicate
|
||||
# Install basic build tool set, trying to match buildd
|
||||
apt-get -y --force-yes install $BUILD_PKGS
|
||||
apt-get -y --force-yes -o Dpkg::Options::="--force-confold" install $BUILD_PKGS
|
||||
# Set up expected /dev entries
|
||||
if [ ! -r /dev/stdin ]; then ln -s /proc/self/fd/0 /dev/stdin; fi
|
||||
if [ ! -r /dev/stdout ]; then ln -s /proc/self/fd/1 /dev/stdout; fi
|
||||
@ -697,7 +1097,7 @@ EOM
|
||||
sudo chmod a+x "$MNT"/finish.sh
|
||||
|
||||
case "$SCHROOT_TYPE" in
|
||||
"lvm-snapshot")
|
||||
"lvm-snapshot"|"zfs-snapshot")
|
||||
sudo umount "$MNT"
|
||||
rmdir "$MNT"
|
||||
;;
|
||||
@ -712,15 +1112,18 @@ case "$SCHROOT_TYPE" in
|
||||
esac
|
||||
|
||||
# Run finalization script on the "golden" copy via schroot.
|
||||
sudo schroot -c "$CHROOT_NAME"-source -u root /finish.sh
|
||||
sudo schroot -c source:$CHROOT_NAME -u root /finish.sh
|
||||
|
||||
# Finished
|
||||
echo ""
|
||||
echo "Done building $CHROOT_NAME."
|
||||
echo ""
|
||||
echo " To CHANGE the golden image: sudo schroot -c ${CHROOT_NAME}-source -u root"
|
||||
echo " To CHANGE the golden image: sudo schroot -c source:${CHROOT_NAME} -u root"
|
||||
echo " To ENTER an image snapshot: schroot -c ${CHROOT_NAME}"
|
||||
echo " To BUILD within a snapshot: sbuild -A -d ${CHROOT_NAME} PACKAGE*.dsc"
|
||||
if [ -n "$TARGET_ARCH" ] && [ "$CHROOT_ARCH" != "$TARGET_ARCH" ] ; then
|
||||
echo " To BUILD for ${TARGET_ARCH}: sbuild -A -d ${CHROOT_NAME} --host ${TARGET_ARCH} PACKAGE*.dsc"
|
||||
fi
|
||||
echo ""
|
||||
|
||||
# vi: set et:
|
||||
|
440
pbuilder-dist
@ -1,4 +1,4 @@
|
||||
#! /usr/bin/env python
|
||||
#! /usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Copyright (C) 2007-2010, Siegfried-A. Gevatter <rainct@ubuntu.com>,
|
||||
@ -29,17 +29,27 @@
|
||||
# configurations. For example, a symlink called pbuilder-hardy will assume
|
||||
# that the target distribution is always meant to be Ubuntu Hardy.
|
||||
|
||||
import os
|
||||
import sys
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from devscripts.logger import Logger
|
||||
from distro_info import DebianDistroInfo, UbuntuDistroInfo
|
||||
import os
|
||||
import os.path
|
||||
import shutil
|
||||
import subprocess
|
||||
import sys
|
||||
from contextlib import suppress
|
||||
|
||||
import debian.deb822
|
||||
from distro_info import DebianDistroInfo, DistroDataOutdated, UbuntuDistroInfo
|
||||
|
||||
import ubuntutools.misc
|
||||
import ubuntutools.version
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.config import UDTConfig
|
||||
from ubuntutools import subprocess
|
||||
from ubuntutools.question import YesNoQuestion
|
||||
|
||||
Logger = getLogger()
|
||||
|
||||
|
||||
class PbuilderDist:
|
||||
def __init__(self, builder):
|
||||
@ -58,6 +68,7 @@ class PbuilderDist:
|
||||
self.enable_security = True
|
||||
self.enable_updates = True
|
||||
self.enable_proposed = True
|
||||
self.enable_backports = False
|
||||
|
||||
# File where the log of the last operation will be saved.
|
||||
self.logfile = None
|
||||
@ -79,32 +90,36 @@ class PbuilderDist:
|
||||
self.chroot_string = None
|
||||
|
||||
# Authentication method
|
||||
self.auth = 'sudo'
|
||||
self.auth = "sudo"
|
||||
|
||||
# Builder
|
||||
self.builder = builder
|
||||
|
||||
self._debian_distros = DebianDistroInfo().all + \
|
||||
['stable', 'testing', 'unstable']
|
||||
# Distro info
|
||||
self.debian_distro_info = DebianDistroInfo()
|
||||
self.ubuntu_distro_info = UbuntuDistroInfo()
|
||||
|
||||
self._debian_distros = self.debian_distro_info.all + ["stable", "testing", "unstable"]
|
||||
|
||||
# Ensure that the used builder is installed
|
||||
paths = set(os.environ['PATH'].split(':'))
|
||||
paths |= set(('/sbin', '/usr/sbin', '/usr/local/sbin'))
|
||||
paths = set(os.environ["PATH"].split(":"))
|
||||
paths |= set(("/sbin", "/usr/sbin", "/usr/local/sbin"))
|
||||
if not any(os.path.exists(os.path.join(p, builder)) for p in paths):
|
||||
Logger.error('Could not find "%s".', builder)
|
||||
sys.exit(1)
|
||||
|
||||
##############################################################
|
||||
|
||||
self.base = os.path.expanduser(os.environ.get('PBUILDFOLDER',
|
||||
'~/pbuilder/'))
|
||||
self.base = os.path.expanduser(os.environ.get("PBUILDFOLDER", "~/pbuilder/"))
|
||||
|
||||
if 'SUDO_USER' in os.environ:
|
||||
Logger.warn('Running under sudo. '
|
||||
'This is probably not what you want. '
|
||||
'pbuilder-dist will use sudo itself, when necessary.')
|
||||
if os.stat(os.environ['HOME']).st_uid != os.getuid():
|
||||
if "SUDO_USER" in os.environ:
|
||||
Logger.warning(
|
||||
"Running under sudo. "
|
||||
"This is probably not what you want. "
|
||||
"pbuilder-dist will use sudo itself, "
|
||||
"when necessary."
|
||||
)
|
||||
if os.stat(os.environ["HOME"]).st_uid != os.getuid():
|
||||
Logger.error("You don't own $HOME")
|
||||
sys.exit(1)
|
||||
|
||||
@ -115,8 +130,8 @@ class PbuilderDist:
|
||||
Logger.error('Cannot create base directory "%s"', self.base)
|
||||
sys.exit(1)
|
||||
|
||||
if 'PBUILDAUTH' in os.environ:
|
||||
self.auth = os.environ['PBUILDAUTH']
|
||||
if "PBUILDAUTH" in os.environ:
|
||||
self.auth = os.environ["PBUILDAUTH"]
|
||||
|
||||
self.system_architecture = ubuntutools.misc.host_architecture()
|
||||
self.system_distro = ubuntutools.misc.system_distribution()
|
||||
@ -126,7 +141,7 @@ class PbuilderDist:
|
||||
self.target_distro = self.system_distro
|
||||
|
||||
def set_target_distro(self, distro):
|
||||
""" PbuilderDist.set_target_distro(distro) -> None
|
||||
"""PbuilderDist.set_target_distro(distro) -> None
|
||||
|
||||
Check if the given target distribution name is correct; if it
|
||||
isn't known to the system, ask the user for confirmation before
|
||||
@ -137,16 +152,17 @@ class PbuilderDist:
|
||||
Logger.error('"%s" is an invalid distribution codename.', distro)
|
||||
sys.exit(1)
|
||||
|
||||
if not os.path.isfile(os.path.join('/usr/share/debootstrap/scripts/',
|
||||
distro)):
|
||||
if os.path.isdir('/usr/share/debootstrap/scripts/'):
|
||||
if not os.path.isfile(os.path.join("/usr/share/debootstrap/scripts/", distro)):
|
||||
if os.path.isdir("/usr/share/debootstrap/scripts/"):
|
||||
# Debian experimental doesn't have a debootstrap file but
|
||||
# should work nevertheless.
|
||||
if distro not in self._debian_distros:
|
||||
answer = YesNoQuestion().ask(
|
||||
'Warning: Unknown distribution "%s". '
|
||||
'Do you want to continue' % distro, 'no')
|
||||
if answer == 'yes':
|
||||
# should work nevertheless. Ubuntu releases automatically use
|
||||
# the gutsy script as of debootstrap 1.0.128+nmu2ubuntu1.1.
|
||||
if distro not in (self._debian_distros + self.ubuntu_distro_info.all):
|
||||
question = (
|
||||
f'Warning: Unknown distribution "{distro}". ' "Do you want to continue"
|
||||
)
|
||||
answer = YesNoQuestion().ask(question, "no")
|
||||
if answer == "no":
|
||||
sys.exit(0)
|
||||
else:
|
||||
Logger.error('Please install package "debootstrap".')
|
||||
@ -155,55 +171,72 @@ class PbuilderDist:
|
||||
self.target_distro = distro
|
||||
|
||||
def set_operation(self, operation):
|
||||
""" PbuilderDist.set_operation -> None
|
||||
"""PbuilderDist.set_operation -> None
|
||||
|
||||
Check if the given string is a valid pbuilder operation and
|
||||
depending on this either save it into the appropriate variable
|
||||
or finalize pbuilder-dist's execution.
|
||||
"""
|
||||
arguments = ('create', 'update', 'build', 'clean', 'login', 'execute')
|
||||
arguments = ("create", "update", "build", "clean", "login", "execute")
|
||||
|
||||
if operation not in arguments:
|
||||
if operation.endswith('.dsc'):
|
||||
if operation.endswith(".dsc"):
|
||||
if os.path.isfile(operation):
|
||||
self.operation = 'build'
|
||||
self.operation = "build"
|
||||
return [operation]
|
||||
else:
|
||||
Logger.error('Could not find file "%s".', operation)
|
||||
sys.exit(1)
|
||||
else:
|
||||
Logger.error('"%s" is not a recognized argument.\n'
|
||||
'Please use one of these: %s.',
|
||||
operation, ', '.join(arguments))
|
||||
Logger.error('Could not find file "%s".', operation)
|
||||
sys.exit(1)
|
||||
else:
|
||||
self.operation = operation
|
||||
return []
|
||||
|
||||
def get_command(self, remaining_arguments = None):
|
||||
""" PbuilderDist.get_command -> string
|
||||
Logger.error(
|
||||
'"%s" is not a recognized argument.\nPlease use one of these: %s.',
|
||||
operation,
|
||||
", ".join(arguments),
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
self.operation = operation
|
||||
return []
|
||||
|
||||
def get_command(self, remaining_arguments=None):
|
||||
"""PbuilderDist.get_command -> string
|
||||
|
||||
Generate the pbuilder command which matches the given configuration
|
||||
and return it as a string.
|
||||
"""
|
||||
if not self.build_architecture:
|
||||
self.chroot_string = self.target_distro
|
||||
self.build_architecture = self.system_architecture
|
||||
|
||||
if self.build_architecture == self.system_architecture:
|
||||
self.chroot_string = self.target_distro
|
||||
else:
|
||||
self.chroot_string = (self.target_distro + '-'
|
||||
+ self.build_architecture)
|
||||
self.chroot_string = self.target_distro + "-" + self.build_architecture
|
||||
|
||||
prefix = os.path.join(self.base, self.chroot_string)
|
||||
if '--buildresult' not in remaining_arguments:
|
||||
result = '%s_result/' % prefix
|
||||
if "--buildresult" not in remaining_arguments:
|
||||
result = os.path.normpath(f"{prefix}_result/")
|
||||
else:
|
||||
location_of_arg = remaining_arguments.index('--buildresult')
|
||||
result = remaining_arguments[location_of_arg+1]
|
||||
remaining_arguments.pop(location_of_arg+1)
|
||||
location_of_arg = remaining_arguments.index("--buildresult")
|
||||
result = os.path.normpath(remaining_arguments[location_of_arg + 1])
|
||||
remaining_arguments.pop(location_of_arg + 1)
|
||||
remaining_arguments.pop(location_of_arg)
|
||||
|
||||
if not self.logfile and self.operation != 'login':
|
||||
self.logfile = os.path.normpath('%s/last_operation.log' % result)
|
||||
if not self.logfile and self.operation != "login":
|
||||
if self.operation == "build":
|
||||
dsc_files = [a for a in remaining_arguments if a.strip().endswith(".dsc")]
|
||||
assert len(dsc_files) == 1
|
||||
dsc = debian.deb822.Dsc(open(dsc_files[0], encoding="utf-8"))
|
||||
version = ubuntutools.version.Version(dsc["Version"])
|
||||
name = (
|
||||
dsc["Source"]
|
||||
+ "_"
|
||||
+ version.strip_epoch()
|
||||
+ "_"
|
||||
+ self.build_architecture
|
||||
+ ".build"
|
||||
)
|
||||
self.logfile = os.path.join(result, name)
|
||||
else:
|
||||
self.logfile = os.path.join(result, "last_operation.log")
|
||||
|
||||
if not os.path.isdir(result):
|
||||
try:
|
||||
@ -213,126 +246,151 @@ class PbuilderDist:
|
||||
sys.exit(1)
|
||||
|
||||
arguments = [
|
||||
'--%s' % self.operation,
|
||||
'--distribution', self.target_distro,
|
||||
'--buildresult', result,
|
||||
'--aptcache', '/var/cache/apt/archives/',
|
||||
'--override-config',
|
||||
f"--{self.operation}",
|
||||
"--distribution",
|
||||
self.target_distro,
|
||||
"--buildresult",
|
||||
result,
|
||||
]
|
||||
|
||||
if self.builder == 'pbuilder':
|
||||
arguments += ['--basetgz', prefix + '-base.tgz']
|
||||
elif self.builder == 'cowbuilder':
|
||||
arguments += ['--basepath', prefix + '-base.cow']
|
||||
if self.operation == "update":
|
||||
arguments += ["--override-config"]
|
||||
|
||||
if self.builder == "pbuilder":
|
||||
arguments += ["--basetgz", prefix + "-base.tgz"]
|
||||
elif self.builder == "cowbuilder":
|
||||
arguments += ["--basepath", prefix + "-base.cow"]
|
||||
else:
|
||||
Logger.error('Unrecognized builder "%s".', self.builder)
|
||||
sys.exit(1)
|
||||
|
||||
if self.logfile:
|
||||
arguments += ['--logfile', self.logfile]
|
||||
arguments += ["--logfile", self.logfile]
|
||||
|
||||
if os.path.exists('/var/cache/archive/'):
|
||||
arguments += ['--bindmounts', '/var/cache/archive/']
|
||||
|
||||
localrepo = '/var/cache/archive/' + self.target_distro
|
||||
if os.path.exists(localrepo):
|
||||
arguments += [
|
||||
'--othermirror',
|
||||
'deb file:///var/cache/archive/ %s/' % self.target_distro,
|
||||
]
|
||||
if os.path.exists("/var/cache/archive/"):
|
||||
arguments += ["--bindmounts", "/var/cache/archive/"]
|
||||
|
||||
config = UDTConfig()
|
||||
if self.target_distro in self._debian_distros:
|
||||
mirror = os.environ.get('MIRRORSITE',
|
||||
config.get_value('DEBIAN_MIRROR'))
|
||||
components = 'main'
|
||||
mirror = os.environ.get("MIRRORSITE", config.get_value("DEBIAN_MIRROR"))
|
||||
components = "main"
|
||||
if self.extra_components:
|
||||
components += ' contrib non-free'
|
||||
components += " contrib non-free non-free-firmware"
|
||||
else:
|
||||
mirror = os.environ.get('MIRRORSITE',
|
||||
config.get_value('UBUNTU_MIRROR'))
|
||||
if self.build_architecture not in ('amd64', 'i386'):
|
||||
mirror = os.environ.get('MIRRORSITE',
|
||||
config.get_value('UBUNTU_PORTS_MIRROR'))
|
||||
components = 'main restricted'
|
||||
mirror = os.environ.get("MIRRORSITE", config.get_value("UBUNTU_MIRROR"))
|
||||
if self.build_architecture not in ("amd64", "i386"):
|
||||
mirror = os.environ.get("MIRRORSITE", config.get_value("UBUNTU_PORTS_MIRROR"))
|
||||
components = "main restricted"
|
||||
if self.extra_components:
|
||||
components += ' universe multiverse'
|
||||
components += " universe multiverse"
|
||||
|
||||
arguments += ['--mirror', mirror]
|
||||
arguments += ["--mirror", mirror]
|
||||
|
||||
othermirrors = []
|
||||
localrepo = f"/var/cache/archive/{self.target_distro}"
|
||||
if os.path.exists(localrepo):
|
||||
repo = f"deb file:///var/cache/archive/ {self.target_distro}/"
|
||||
othermirrors.append(repo)
|
||||
|
||||
if self.target_distro in self._debian_distros:
|
||||
debian_info = DebianDistroInfo()
|
||||
if (debian_info.codename(self.target_distro) or self.target_distro
|
||||
in (debian_info.devel(), 'experimental')):
|
||||
try:
|
||||
codename = self.debian_distro_info.codename(
|
||||
self.target_distro, default=self.target_distro
|
||||
)
|
||||
except DistroDataOutdated as error:
|
||||
Logger.warning(error)
|
||||
if codename in (self.debian_distro_info.devel(), "experimental"):
|
||||
self.enable_security = False
|
||||
self.enable_updates = False
|
||||
self.enable_proposed = False
|
||||
elif codename in (self.debian_distro_info.testing(), "testing"):
|
||||
self.enable_updates = False
|
||||
|
||||
if self.enable_security:
|
||||
othermirrors.append('deb %s %s/updates %s'
|
||||
% (config.get_value('DEBSEC_MIRROR'),
|
||||
self.target_distro, components))
|
||||
pocket = "-security"
|
||||
with suppress(ValueError):
|
||||
# before bullseye (version 11) security suite is /updates
|
||||
if float(self.debian_distro_info.version(codename)) < 11.0:
|
||||
pocket = "/updates"
|
||||
othermirrors.append(
|
||||
f"deb {config.get_value('DEBSEC_MIRROR')}"
|
||||
f" {self.target_distro}{pocket} {components}"
|
||||
)
|
||||
if self.enable_updates:
|
||||
othermirrors.append('deb %s %s-updates %s'
|
||||
% (mirror, self.target_distro, components))
|
||||
othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}")
|
||||
if self.enable_proposed:
|
||||
othermirrors.append('deb %s %s-proposed-updates %s'
|
||||
% (mirror, self.target_distro, components))
|
||||
othermirrors.append(
|
||||
f"deb {mirror} {self.target_distro}-proposed-updates {components}"
|
||||
)
|
||||
if self.enable_backports:
|
||||
othermirrors.append(f"deb {mirror} {self.target_distro}-backports {components}")
|
||||
|
||||
aptcache = os.path.join(self.base, "aptcache", "debian")
|
||||
else:
|
||||
if self.target_distro == UbuntuDistroInfo().devel():
|
||||
try:
|
||||
dev_release = self.target_distro == self.ubuntu_distro_info.devel()
|
||||
except DistroDataOutdated as error:
|
||||
Logger.warning(error)
|
||||
dev_release = True
|
||||
|
||||
if dev_release:
|
||||
self.enable_security = False
|
||||
self.enable_updates = False
|
||||
self.enable_proposed = False
|
||||
|
||||
if self.enable_security:
|
||||
othermirrors.append('deb %s %s-security %s'
|
||||
% (mirror, self.target_distro, components))
|
||||
othermirrors.append(f"deb {mirror} {self.target_distro}-security {components}")
|
||||
if self.enable_updates:
|
||||
othermirrors.append('deb %s %s-updates %s'
|
||||
% (mirror, self.target_distro, components))
|
||||
othermirrors.append(f"deb {mirror} {self.target_distro}-updates {components}")
|
||||
if self.enable_proposed:
|
||||
othermirrors.append('deb %s %s-proposed %s'
|
||||
% (mirror, self.target_distro, components))
|
||||
othermirrors.append(f"deb {mirror} {self.target_distro}-proposed {components}")
|
||||
|
||||
if 'OTHERMIRROR' in os.environ:
|
||||
othermirrors += os.environ['OTHERMIRROR'].split('|')
|
||||
aptcache = os.path.join(self.base, "aptcache", "ubuntu")
|
||||
|
||||
if "OTHERMIRROR" in os.environ:
|
||||
othermirrors += os.environ["OTHERMIRROR"].split("|")
|
||||
|
||||
if othermirrors:
|
||||
arguments += ['--othermirror', '|'.join(othermirrors)]
|
||||
arguments += ["--othermirror", "|".join(othermirrors)]
|
||||
|
||||
# Work around LP:#599695
|
||||
if (ubuntutools.misc.system_distribution() == 'Debian'
|
||||
and self.target_distro not in self._debian_distros):
|
||||
if not os.path.exists(
|
||||
'/usr/share/keyrings/ubuntu-archive-keyring.gpg'):
|
||||
Logger.error('ubuntu-keyring not installed')
|
||||
if (
|
||||
ubuntutools.misc.system_distribution() == "Debian"
|
||||
and self.target_distro not in self._debian_distros
|
||||
):
|
||||
if not os.path.exists("/usr/share/keyrings/ubuntu-archive-keyring.gpg"):
|
||||
Logger.error("ubuntu-keyring not installed")
|
||||
sys.exit(1)
|
||||
arguments += [
|
||||
'--debootstrapopts',
|
||||
'--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg',
|
||||
"--debootstrapopts",
|
||||
"--keyring=/usr/share/keyrings/ubuntu-archive-keyring.gpg",
|
||||
]
|
||||
elif (ubuntutools.misc.system_distribution() == 'Ubuntu'
|
||||
and self.target_distro in self._debian_distros):
|
||||
if not os.path.exists(
|
||||
'/usr/share/keyrings/debian-archive-keyring.gpg'):
|
||||
Logger.error('debian-archive-keyring not installed')
|
||||
elif (
|
||||
ubuntutools.misc.system_distribution() == "Ubuntu"
|
||||
and self.target_distro in self._debian_distros
|
||||
):
|
||||
if not os.path.exists("/usr/share/keyrings/debian-archive-keyring.gpg"):
|
||||
Logger.error("debian-archive-keyring not installed")
|
||||
sys.exit(1)
|
||||
arguments += [
|
||||
'--debootstrapopts',
|
||||
'--keyring=/usr/share/keyrings/debian-archive-keyring.gpg',
|
||||
"--debootstrapopts",
|
||||
"--keyring=/usr/share/keyrings/debian-archive-keyring.gpg",
|
||||
]
|
||||
|
||||
arguments += ['--components', components]
|
||||
arguments += ["--aptcache", aptcache, "--components", components]
|
||||
|
||||
if not os.path.isdir(aptcache):
|
||||
try:
|
||||
os.makedirs(aptcache)
|
||||
except OSError:
|
||||
Logger.error('Cannot create aptcache directory "%s"', aptcache)
|
||||
sys.exit(1)
|
||||
|
||||
if self.build_architecture != self.system_architecture:
|
||||
arguments += ['--debootstrapopts',
|
||||
'--arch=' + self.build_architecture]
|
||||
arguments += ["--debootstrapopts", "--arch=" + self.build_architecture]
|
||||
|
||||
apt_conf_dir = os.path.join(self.base,
|
||||
'etc/%s/apt.conf' % self.target_distro)
|
||||
apt_conf_dir = os.path.join(self.base, f"etc/{self.target_distro}/apt.conf")
|
||||
if os.path.exists(apt_conf_dir):
|
||||
arguments += ['--aptconfdir', apt_conf_dir]
|
||||
arguments += ["--aptconfdir", apt_conf_dir]
|
||||
|
||||
# Append remaining arguments
|
||||
if remaining_arguments:
|
||||
@ -340,25 +398,31 @@ class PbuilderDist:
|
||||
|
||||
# Export the distribution and architecture information to the
|
||||
# environment so that it is accessible to ~/.pbuilderrc (LP: #628933).
|
||||
# With both common variable name schemes (BTS: #659060).
|
||||
return [
|
||||
self.auth,
|
||||
'HOME=' + os.path.expanduser('~'),
|
||||
'ARCH=' + self.build_architecture,
|
||||
'DIST=' + self.target_distro,
|
||||
"HOME=" + os.path.expanduser("~"),
|
||||
"ARCHITECTURE=" + self.build_architecture,
|
||||
"DISTRIBUTION=" + self.target_distro,
|
||||
"ARCH=" + self.build_architecture,
|
||||
"DIST=" + self.target_distro,
|
||||
"DEB_BUILD_OPTIONS=" + os.environ.get("DEB_BUILD_OPTIONS", ""),
|
||||
self.builder,
|
||||
] + arguments
|
||||
|
||||
def show_help(exit_code = 0):
|
||||
""" help() -> None
|
||||
|
||||
def show_help(exit_code=0):
|
||||
"""help() -> None
|
||||
|
||||
Print a help message for pbuilder-dist, and exit with the given code.
|
||||
"""
|
||||
print 'See man pbuilder-dist for more information.'
|
||||
Logger.info("See man pbuilder-dist for more information.")
|
||||
|
||||
sys.exit(exit_code)
|
||||
|
||||
|
||||
def main():
|
||||
""" main() -> None
|
||||
"""main() -> None
|
||||
|
||||
This is pbuilder-dist's main function. It creates a PbuilderDist
|
||||
object, modifies all necessary settings taking data from the
|
||||
@ -366,92 +430,122 @@ def main():
|
||||
the script and runs pbuilder itself or exits with an error message.
|
||||
"""
|
||||
script_name = os.path.basename(sys.argv[0])
|
||||
parts = script_name.split('-')
|
||||
parts = script_name.split("-")
|
||||
|
||||
# Copy arguments into another list for safe manipulation
|
||||
args = sys.argv[1:]
|
||||
|
||||
if ('-' in script_name and parts[0] not in ('pbuilder', 'cowbuilder')
|
||||
or len(parts) > 3):
|
||||
Logger.error('"%s" is not a valid name for a "pbuilder-dist" '
|
||||
'executable.', script_name)
|
||||
if "-" in script_name and parts[0] not in ("pbuilder", "cowbuilder") or len(parts) > 3:
|
||||
Logger.error('"%s" is not a valid name for a "pbuilder-dist" executable.', script_name)
|
||||
sys.exit(1)
|
||||
|
||||
if len(args) < 1:
|
||||
Logger.error('Insufficient number of arguments.')
|
||||
Logger.error("Insufficient number of arguments.")
|
||||
show_help(1)
|
||||
|
||||
if args[0] in ('-h', '--help', 'help'):
|
||||
if args[0] in ("-h", "--help", "help"):
|
||||
show_help(0)
|
||||
|
||||
app = PbuilderDist(parts[0])
|
||||
|
||||
if len(parts) > 1 and parts[1] != 'dist' and '.' not in parts[1]:
|
||||
if len(parts) > 1 and parts[1] != "dist" and "." not in parts[1]:
|
||||
app.set_target_distro(parts[1])
|
||||
else:
|
||||
app.set_target_distro(args.pop(0))
|
||||
|
||||
if len(parts) > 2:
|
||||
requested_arch = parts[2]
|
||||
elif len(args) > 0 and args[0] in (
|
||||
'alpha', 'amd64', 'arm', 'armeb', 'armel', 'i386', 'lpia', 'm68k',
|
||||
'mips', 'mipsel', 'powerpc', 'ppc64', 'sh4', 'sh4eb', 'sparc',
|
||||
'sparc64'):
|
||||
requested_arch = args.pop(0)
|
||||
elif len(args) > 0:
|
||||
if shutil.which("arch-test") is not None:
|
||||
arch_test = subprocess.run(
|
||||
["arch-test", args[0]], check=False, stdout=subprocess.DEVNULL
|
||||
)
|
||||
if arch_test.returncode == 0:
|
||||
requested_arch = args.pop(0)
|
||||
elif os.path.isdir("/usr/lib/arch-test") and args[0] in os.listdir(
|
||||
"/usr/lib/arch-test/"
|
||||
):
|
||||
Logger.error(
|
||||
'Architecture "%s" is not supported on your '
|
||||
"currently running kernel. Consider installing "
|
||||
"the qemu-user-static package to enable the use of "
|
||||
"foreign architectures.",
|
||||
args[0],
|
||||
)
|
||||
sys.exit(1)
|
||||
else:
|
||||
requested_arch = None
|
||||
else:
|
||||
Logger.error(
|
||||
'Cannot determine if "%s" is a valid architecture. '
|
||||
"Please install the arch-test package and retry.",
|
||||
args[0],
|
||||
)
|
||||
sys.exit(1)
|
||||
else:
|
||||
requested_arch = None
|
||||
|
||||
if requested_arch:
|
||||
app.build_architecture = requested_arch
|
||||
# For some foreign architectures we need to use qemu
|
||||
if (requested_arch != app.system_architecture
|
||||
and (app.system_architecture, requested_arch) not in [
|
||||
('amd64', 'i386'), ('amd64', 'lpia'), ('arm', 'armel'),
|
||||
('armel', 'arm'), ('i386', 'lpia'), ('lpia', 'i386'),
|
||||
('powerpc', 'ppc64'), ('ppc64', 'powerpc'),
|
||||
('sparc', 'sparc64'), ('sparc64', 'sparc')]):
|
||||
args += ['--debootstrap', 'qemu-debootstrap']
|
||||
if requested_arch != app.system_architecture and (
|
||||
app.system_architecture,
|
||||
requested_arch,
|
||||
) not in [
|
||||
("amd64", "i386"),
|
||||
("arm64", "arm"),
|
||||
("arm64", "armhf"),
|
||||
("powerpc", "ppc64"),
|
||||
("ppc64", "powerpc"),
|
||||
]:
|
||||
args += ["--debootstrap", "debootstrap"]
|
||||
|
||||
if 'mainonly' in sys.argv or '--main-only' in sys.argv:
|
||||
if "mainonly" in sys.argv or "--main-only" in sys.argv:
|
||||
app.extra_components = False
|
||||
if 'mainonly' in sys.argv:
|
||||
args.remove('mainonly')
|
||||
if "mainonly" in sys.argv:
|
||||
args.remove("mainonly")
|
||||
else:
|
||||
args.remove('--main-only')
|
||||
args.remove("--main-only")
|
||||
|
||||
if '--release-only' in sys.argv:
|
||||
args.remove('--release-only')
|
||||
if "--release-only" in sys.argv:
|
||||
args.remove("--release-only")
|
||||
app.enable_security = False
|
||||
app.enable_updates = False
|
||||
app.enable_proposed = False
|
||||
elif '--security-only' in sys.argv:
|
||||
args.remove('--security-only')
|
||||
elif "--security-only" in sys.argv:
|
||||
args.remove("--security-only")
|
||||
app.enable_updates = False
|
||||
app.enable_proposed = False
|
||||
elif '--updates-only' in sys.argv:
|
||||
args.remove('--updates-only')
|
||||
elif "--updates-only" in sys.argv:
|
||||
args.remove("--updates-only")
|
||||
app.enable_proposed = False
|
||||
elif "--backports" in sys.argv:
|
||||
args.remove("--backports")
|
||||
app.enable_backports = True
|
||||
|
||||
if len(args) < 1:
|
||||
Logger.error('Insufficient number of arguments.')
|
||||
Logger.error("Insufficient number of arguments.")
|
||||
show_help(1)
|
||||
|
||||
# Parse the operation
|
||||
args = app.set_operation(args.pop(0)) + args
|
||||
|
||||
if app.operation == 'build' and '.dsc' not in ' '.join(args):
|
||||
Logger.error('You have to specify a .dsc file if you want to build.')
|
||||
sys.exit(1)
|
||||
if app.operation == "build":
|
||||
if len([a for a in args if a.strip().endswith(".dsc")]) != 1:
|
||||
msg = "You have to specify one .dsc file if you want to build."
|
||||
Logger.error(msg)
|
||||
sys.exit(1)
|
||||
|
||||
# Execute the pbuilder command
|
||||
if not '--debug-echo' in args:
|
||||
if "--debug-echo" not in args:
|
||||
sys.exit(subprocess.call(app.get_command(args)))
|
||||
else:
|
||||
print app.get_command([arg for arg in args if arg != '--debug-echo'])
|
||||
Logger.info(app.get_command([arg for arg in args if arg != "--debug-echo"]))
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
Logger.error('Manually aborted.')
|
||||
Logger.error("Manually aborted.")
|
||||
sys.exit(1)
|
||||
|
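The security-mirror handling in pbuilder-dist above hinges on a Debian archive naming detail: up to buster the security suite is published as "<codename>/updates", while from bullseye (Debian 11) onward it is "<codename>-security". A minimal standalone sketch of that rule, assuming python3-distro-info is installed; the mirror URL and codenames are placeholders, not taken from the tool's configuration:

# Sketch only: mirrors the pocket-selection logic used by pbuilder-dist above.
from distro_info import DebianDistroInfo

def security_line(codename, mirror="http://security.debian.org", components="main"):
    pocket = "-security"  # bullseye (11) and later
    try:
        # DistroInfo.version() returns e.g. "10" for buster and "12" for bookworm
        if float(DebianDistroInfo().version(codename)) < 11.0:
            pocket = "/updates"  # buster and earlier
    except (TypeError, ValueError):
        pass  # unknown or unreleased codename: keep the new-style pocket
    return f"deb {mirror} {codename}{pocket} {components}"

print(security_line("buster"))    # deb http://security.debian.org buster/updates main
print(security_line("bookworm"))  # deb http://security.debian.org bookworm-security main

For the devel and experimental targets pbuilder-dist skips the security and updates mirrors entirely, so the fallback branch above is only a safety net.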
@ -30,29 +30,44 @@ OPERATION=$1
|
||||
DISTRIBUTION=`basename $0 | cut -f2 -d '-'`
|
||||
PROCEED=false
|
||||
BASE_DIR="$HOME/pbuilder"
|
||||
|
||||
usage() {
|
||||
prog=$(basename $0)
|
||||
cat <<EOF
|
||||
Usage: $prog command [pbuilder-options...]
|
||||
|
||||
A simple multi-release pbuilder wrapper
|
||||
|
||||
Valid commands are:
|
||||
create
|
||||
update
|
||||
build
|
||||
clean
|
||||
login
|
||||
execute
|
||||
|
||||
Options:
|
||||
-h, --help show this help message and exit
|
||||
EOF
|
||||
exit $1
|
||||
}
|
||||
|
||||
case $OPERATION in
|
||||
create|update|build|clean|login|execute )
|
||||
PROCEED=true
|
||||
;;
|
||||
create|update|build|clean|login|execute)
|
||||
;;
|
||||
-h|--help)
|
||||
usage 0
|
||||
;;
|
||||
*)
|
||||
usage 1
|
||||
;;
|
||||
esac
|
||||
if [ $PROCEED = true ]; then
|
||||
shift
|
||||
if [ ! -d $BASE_DIR/${DISTRIBUTION}_result ]
|
||||
then mkdir -p $BASE_DIR/${DISTRIBUTION}_result/
|
||||
fi
|
||||
sudo pbuilder $OPERATION \
|
||||
--basetgz $BASE_DIR/$DISTRIBUTION-base.tgz \
|
||||
--distribution $DISTRIBUTION \
|
||||
--buildresult $BASE_DIR/$DISTRIBUTION_result \
|
||||
--othermirror "deb http://archive.ubuntu.com/ubuntu $DISTRIBUTION universe multiverse" $@
|
||||
else
|
||||
echo "Invalid command..."
|
||||
echo "Valid commands are:"
|
||||
echo " create"
|
||||
echo " update"
|
||||
echo " build"
|
||||
echo " clean"
|
||||
echo " login"
|
||||
echo " execute"
|
||||
exit 1
|
||||
shift
|
||||
if [ ! -d $BASE_DIR/${DISTRIBUTION}_result ]; then
|
||||
mkdir -p $BASE_DIR/${DISTRIBUTION}_result/
|
||||
fi
|
||||
sudo pbuilder $OPERATION \
|
||||
--basetgz $BASE_DIR/$DISTRIBUTION-base.tgz \
|
||||
--distribution $DISTRIBUTION \
|
||||
--buildresult $BASE_DIR/$DISTRIBUTION_result \
|
||||
--othermirror "deb http://archive.ubuntu.com/ubuntu $DISTRIBUTION universe multiverse" "$@"
|
||||
|
142
pm-helper
Executable file
@ -0,0 +1,142 @@
|
||||
#!/usr/bin/python3
|
||||
# Find the next thing to work on for proposed-migration
|
||||
# Copyright (C) 2023 Canonical Ltd.
|
||||
# Author: Steve Langasek <steve.langasek@ubuntu.com>
|
||||
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License, version 3.
|
||||
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
|
||||
# General Public License for more details.
|
||||
|
||||
# You should have received a copy of the GNU General Public License
|
||||
# along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||
|
||||
import lzma
|
||||
import sys
|
||||
import webbrowser
|
||||
from argparse import ArgumentParser
|
||||
|
||||
import yaml
|
||||
from launchpadlib.launchpad import Launchpad
|
||||
|
||||
from ubuntutools.utils import get_url
|
||||
|
||||
# proposed-migration is only concerned with the devel series; unlike other
|
||||
# tools, don't make this configurable
|
||||
excuses_url = "https://ubuntu-archive-team.ubuntu.com/proposed-migration/update_excuses.yaml.xz"
|
||||
|
||||
|
||||
def get_proposed_version(excuses, package):
|
||||
for k in excuses["sources"]:
|
||||
if k["source"] == package:
|
||||
return k.get("new-version")
|
||||
return None
|
||||
|
||||
|
||||
def claim_excuses_bug(launchpad, bug, package):
|
||||
print(f"LP: #{bug.id}: {bug.title}")
|
||||
ubuntu = launchpad.distributions["ubuntu"]
|
||||
series = ubuntu.current_series.fullseriesname
|
||||
|
||||
for task in bug.bug_tasks:
|
||||
# targeting to a series doesn't make the default task disappear,
|
||||
# it just makes it useless
|
||||
if task.bug_target_name == f"{package} ({series})":
|
||||
our_task = task
|
||||
break
|
||||
if task.bug_target_name == f"{package} (Ubuntu)":
|
||||
our_task = task
|
||||
|
||||
if our_task.assignee == launchpad.me:
|
||||
print("Bug already assigned to you.")
|
||||
return True
|
||||
if our_task.assignee:
|
||||
print(f"Currently assigned to {our_task.assignee.name}")
|
||||
|
||||
print("""Do you want to claim this bug? [yN] """, end="")
|
||||
sys.stdout.flush()
|
||||
response = sys.stdin.readline()
|
||||
if response.strip().lower().startswith("y"):
|
||||
our_task.assignee = launchpad.me
|
||||
our_task.lp_save()
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
|
||||
def create_excuses_bug(launchpad, package, version):
|
||||
print("Will open a new bug")
|
||||
bug = launchpad.bugs.createBug(
|
||||
title=f"proposed-migration for {package} {version}",
|
||||
tags=("update-excuse"),
|
||||
target=f"https://api.launchpad.net/devel/ubuntu/+source/{package}",
|
||||
description=f"{package} {version} is stuck in -proposed.",
|
||||
)
|
||||
|
||||
task = bug.bug_tasks[0]
|
||||
task.assignee = launchpad.me
|
||||
task.lp_save()
|
||||
|
||||
print(f"Opening {bug.web_link} in browser")
|
||||
webbrowser.open(bug.web_link)
|
||||
return bug
|
||||
|
||||
|
||||
def has_excuses_bugs(launchpad, package):
|
||||
ubuntu = launchpad.distributions["ubuntu"]
|
||||
pkg = ubuntu.getSourcePackage(name=package)
|
||||
if not pkg:
|
||||
raise ValueError(f"No such source package: {package}")
|
||||
|
||||
tasks = pkg.searchTasks(tags=["update-excuse"], order_by=["id"])
|
||||
|
||||
bugs = [task.bug for task in tasks]
|
||||
if not bugs:
|
||||
return False
|
||||
|
||||
if len(bugs) == 1:
|
||||
print(f"There is 1 open update-excuse bug against {package}")
|
||||
else:
|
||||
print(f"There are {len(bugs)} open update-excuse bugs against {package}")
|
||||
|
||||
for bug in bugs:
|
||||
if claim_excuses_bug(launchpad, bug, package):
|
||||
return True
|
||||
|
||||
return True
|
||||
|
||||
|
||||
def main():
|
||||
parser = ArgumentParser()
|
||||
parser.add_argument("-l", "--launchpad", dest="launchpad_instance", default="production")
|
||||
parser.add_argument(
|
||||
"-v", "--verbose", default=False, action="store_true", help="be more verbose"
|
||||
)
|
||||
parser.add_argument("package", nargs="?", help="act on this package only")
|
||||
args = parser.parse_args()
|
||||
|
||||
args.launchpad = Launchpad.login_with("pm-helper", args.launchpad_instance, version="devel")
|
||||
|
||||
f = get_url(excuses_url, False)
|
||||
with lzma.open(f) as lzma_f:
|
||||
excuses = yaml.load(lzma_f, Loader=yaml.CSafeLoader)
|
||||
|
||||
if args.package:
|
||||
try:
|
||||
if not has_excuses_bugs(args.launchpad, args.package):
|
||||
proposed_version = get_proposed_version(excuses, args.package)
|
||||
if not proposed_version:
|
||||
print(f"Package {args.package} not found in -proposed.")
|
||||
sys.exit(1)
|
||||
create_excuses_bug(args.launchpad, args.package, proposed_version)
|
||||
except ValueError as e:
|
||||
sys.stderr.write(f"{e}\n")
|
||||
else:
|
||||
pass # for now
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
sys.exit(main())
|
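get_proposed_version() in pm-helper above relies on only two keys of the parsed excuses data: each entry in the "sources" list names its "source" package and, for a package stuck in -proposed, a "new-version". A minimal sketch with made-up entries; the real update_excuses.yaml carries many more fields per source:

# Sketch only: the package names and versions below are invented for illustration.
excuses = {
    "sources": [
        {"source": "hello", "new-version": "2.10-3ubuntu1"},
        {"source": "coreutils"},  # no "new-version": nothing pending in -proposed
    ]
}

def get_proposed_version(excuses, package):
    for entry in excuses["sources"]:
        if entry["source"] == package:
            return entry.get("new-version")
    return None

print(get_proposed_version(excuses, "hello"))      # 2.10-3ubuntu1
print(get_proposed_version(excuses, "coreutils"))  # None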
14
pull-debian-ddebs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-debian-ddebs -- pull ddeb package files for debian
|
||||
# Basic usage: pull-debian-ddebs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="debian", pull="ddebs")
|
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
# pull-debian-debdiff - find and download a specific version of a Debian
|
||||
# package and its immediate parent to generate a debdiff.
|
||||
#
|
||||
@ -17,97 +17,112 @@
|
||||
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
# PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import optparse
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
import debian.debian_support
|
||||
import debian.changelog
|
||||
|
||||
from devscripts.logger import Logger
|
||||
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.archive import DebianSourcePackage, DownloadError
|
||||
from ubuntutools.config import UDTConfig
|
||||
from ubuntutools.version import Version
|
||||
|
||||
Logger = getLogger()
|
||||
|
||||
|
||||
def previous_version(package, version, distance):
|
||||
"Given an (extracted) package, determine the version distance versions ago"
|
||||
upver = debian.debian_support.Version(version).upstream_version
|
||||
filename = '%s-%s/debian/changelog' % (package, upver)
|
||||
changelog_file = open(filename, 'r')
|
||||
upver = Version(version).upstream_version
|
||||
filename = f"{package}-{upver}/debian/changelog"
|
||||
changelog_file = open(filename, "r", encoding="utf-8")
|
||||
changelog = debian.changelog.Changelog(changelog_file.read())
|
||||
changelog_file.close()
|
||||
seen = 0
|
||||
for entry in changelog:
|
||||
if entry.distributions == 'UNRELEASED':
|
||||
if entry.distributions == "UNRELEASED":
|
||||
continue
|
||||
if seen == distance:
|
||||
return entry.version.full_version
|
||||
seen += 1
|
||||
return False
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser('%prog [options] <package> <version> '
|
||||
'[distance]')
|
||||
parser.add_option('-f', '--fetch',
|
||||
dest='fetch_only', default=False, action='store_true',
|
||||
help="Only fetch the source packages, don't diff.")
|
||||
parser.add_option('-d', '--debian-mirror', metavar='DEBIAN_MIRROR',
|
||||
dest='debian_mirror',
|
||||
help='Preferred Debian mirror '
|
||||
'(default: http://ftp.debian.org/debian)')
|
||||
parser.add_option('-s', '--debsec-mirror', metavar='DEBSEC_MIRROR',
|
||||
dest='debsec_mirror',
|
||||
help='Preferred Debian Security mirror '
|
||||
'(default: http://security.debian.org)')
|
||||
parser.add_option('--no-conf',
|
||||
dest='no_conf', default=False, action='store_true',
|
||||
help="Don't read config files or environment variables")
|
||||
parser = argparse.ArgumentParser(usage="%(prog)s [options] <package> <version> [distance]")
|
||||
parser.add_argument(
|
||||
"-f",
|
||||
"--fetch",
|
||||
dest="fetch_only",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Only fetch the source packages, don't diff.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--debian-mirror",
|
||||
metavar="DEBIAN_MIRROR",
|
||||
dest="debian_mirror",
|
||||
help="Preferred Debian mirror (default: http://deb.debian.org/debian)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-s",
|
||||
"--debsec-mirror",
|
||||
metavar="DEBSEC_MIRROR",
|
||||
dest="debsec_mirror",
|
||||
help="Preferred Debian Security mirror (default: http://security.debian.org)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-conf",
|
||||
dest="no_conf",
|
||||
default=False,
|
||||
action="store_true",
|
||||
help="Don't read config files or environment variables",
|
||||
)
|
||||
parser.add_argument("package", help=argparse.SUPPRESS)
|
||||
parser.add_argument("version", help=argparse.SUPPRESS)
|
||||
parser.add_argument("distance", default=1, type=int, nargs="?", help=argparse.SUPPRESS)
|
||||
args = parser.parse_args()
|
||||
|
||||
opts, args = parser.parse_args()
|
||||
if len(args) < 2:
|
||||
parser.error('Must specify package and version')
|
||||
elif len(args) > 3:
|
||||
parser.error('Too many arguments')
|
||||
package = args[0]
|
||||
version = args[1]
|
||||
distance = int(args[2]) if len(args) > 2 else 1
|
||||
config = UDTConfig(args.no_conf)
|
||||
if args.debian_mirror is None:
|
||||
args.debian_mirror = config.get_value("DEBIAN_MIRROR")
|
||||
if args.debsec_mirror is None:
|
||||
args.debsec_mirror = config.get_value("DEBSEC_MIRROR")
|
||||
mirrors = [args.debsec_mirror, args.debian_mirror]
|
||||
|
||||
config = UDTConfig(opts.no_conf)
|
||||
if opts.debian_mirror is None:
|
||||
opts.debian_mirror = config.get_value('DEBIAN_MIRROR')
|
||||
if opts.debsec_mirror is None:
|
||||
opts.debsec_mirror = config.get_value('DEBSEC_MIRROR')
|
||||
mirrors = [opts.debsec_mirror, opts.debian_mirror]
|
||||
Logger.info("Downloading %s %s", args.package, args.version)
|
||||
|
||||
Logger.normal('Downloading %s %s', package, version)
|
||||
|
||||
newpkg = DebianSourcePackage(package, version, mirrors=mirrors)
|
||||
newpkg = DebianSourcePackage(args.package, args.version, mirrors=mirrors)
|
||||
try:
|
||||
newpkg.pull()
|
||||
except DownloadError, e:
|
||||
Logger.error('Failed to download: %s', str(e))
|
||||
except DownloadError as e:
|
||||
Logger.error("Failed to download: %s", str(e))
|
||||
sys.exit(1)
|
||||
newpkg.unpack()
|
||||
|
||||
if opts.fetch_only:
|
||||
if args.fetch_only:
|
||||
sys.exit(0)
|
||||
|
||||
oldversion = previous_version(package, version, distance)
|
||||
oldversion = previous_version(args.package, args.version, args.distance)
|
||||
if not oldversion:
|
||||
Logger.error('No previous version could be found')
|
||||
Logger.error("No previous version could be found")
|
||||
sys.exit(1)
|
||||
Logger.normal('Downloading %s %s', package, oldversion)
|
||||
Logger.info("Downloading %s %s", args.package, oldversion)
|
||||
|
||||
oldpkg = DebianSourcePackage(package, oldversion, mirrors=mirrors)
|
||||
oldpkg = DebianSourcePackage(args.package, oldversion, mirrors=mirrors)
|
||||
try:
|
||||
oldpkg.pull()
|
||||
except DownloadError, e:
|
||||
Logger.error('Failed to download: %s', str(e))
|
||||
except DownloadError as e:
|
||||
Logger.error("Failed to download: %s", str(e))
|
||||
sys.exit(1)
|
||||
oldpkg.unpack()
|
||||
print 'file://' + oldpkg.debdiff(newpkg, diffstat=True)
|
||||
Logger.info("file://%s", oldpkg.debdiff(newpkg, diffstat=True))
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
Logger.normal('User abort.')
|
||||
Logger.info("User abort.")
|
||||
|
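previous_version() in pull-debian-debdiff above counts only released changelog entries, skipping UNRELEASED ones, to find the version published a given number of uploads earlier. A self-contained sketch, assuming python-debian is installed; the package, versions and dates are invented:

# Sketch only: illustrates the distance semantics of previous_version() above.
import debian.changelog

CHANGELOG_TEXT = """\
hello (2.10-3) unstable; urgency=medium

  * Third upload.

 -- Jane Doe <jane@example.com>  Mon, 01 Jan 2024 00:00:00 +0000

hello (2.10-2) UNRELEASED; urgency=medium

  * Never uploaded.

 -- Jane Doe <jane@example.com>  Sun, 31 Dec 2023 00:00:00 +0000

hello (2.10-1) unstable; urgency=medium

  * Initial upload.

 -- Jane Doe <jane@example.com>  Sat, 30 Dec 2023 00:00:00 +0000
"""

def previous_version(changelog, distance):
    """Return the version released 'distance' entries before the newest one."""
    seen = 0
    for entry in debian.changelog.Changelog(changelog):
        if entry.distributions == "UNRELEASED":
            continue
        if seen == distance:
            return entry.version.full_version
        seen += 1
    return False

print(previous_version(CHANGELOG_TEXT, 1))  # 2.10-1: the UNRELEASED entry is skipped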
14
pull-debian-debs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-debian-debs -- pull deb package files for debian
|
||||
# Basic usage: pull-debian-debs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="debian", pull="debs")
|
@ -1,111 +1,14 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-debian-source -- pull a source package from Launchpad
|
||||
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
# Inspired by a tool of the same name by Nathan Handler.
|
||||
# pull-debian-source -- pull source package files for debian
|
||||
# Basic usage: pull-debian-source <package name> [version|release]
|
||||
#
|
||||
# Permission to use, copy, modify, and/or distribute this software for any
|
||||
# purpose with or without fee is hereby granted, provided that the above
|
||||
# copyright notice and this permission notice appear in all copies.
|
||||
#
|
||||
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH
|
||||
# REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
|
||||
# AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT,
|
||||
# INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM
|
||||
# LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR
|
||||
# OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
|
||||
# PERFORMANCE OF THIS SOFTWARE.
|
||||
# See pull-pkg
|
||||
|
||||
import optparse
|
||||
import sys
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from devscripts.logger import Logger
|
||||
from distro_info import DebianDistroInfo
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
from ubuntutools.archive import DebianSourcePackage, DownloadError, rmadison
|
||||
from ubuntutools.config import UDTConfig
|
||||
|
||||
def is_suite(version):
|
||||
"""If version could be considered to be a Debian suite, return the
|
||||
canonical suite name. Otherwise None
|
||||
"""
|
||||
debian_info = DebianDistroInfo()
|
||||
debian_releases = debian_info.all + ['experimental']
|
||||
|
||||
if '-' in version:
|
||||
release, pocket = version.split('-', 1)
|
||||
release = debian_info.codename(release, default=release)
|
||||
if release in debian_releases:
|
||||
if pocket in ('proposed-updates', 'p-u'):
|
||||
return (release + '-proposed-updates')
|
||||
elif pocket == 'security':
|
||||
return (release + '-security')
|
||||
else:
|
||||
release = debian_info.codename(version, default=version)
|
||||
if release in debian_releases:
|
||||
return release
|
||||
return None
|
||||
|
||||
def main():
|
||||
usage = 'Usage: %prog <package> [release|version]'
|
||||
parser = optparse.OptionParser(usage)
|
||||
parser.add_option('-d', '--download-only',
|
||||
dest='download_only', default=False, action='store_true',
|
||||
help='Do not extract the source package')
|
||||
parser.add_option('-m', '--mirror', metavar='DEBIAN_MIRROR',
|
||||
dest='debian_mirror',
|
||||
help='Preferred Debian mirror (default: %s)'
|
||||
% UDTConfig.defaults['DEBIAN_MIRROR'])
|
||||
parser.add_option('-s', '--security-mirror', metavar='DEBSEC_MIRROR',
|
||||
dest='debsec_mirror',
|
||||
help='Preferred Debian Security mirror (default: %s)'
|
||||
% UDTConfig.defaults['DEBSEC_MIRROR'])
|
||||
parser.add_option('--no-conf',
|
||||
dest='no_conf', default=False, action='store_true',
|
||||
help="Don't read config files or environment variables")
|
||||
(options, args) = parser.parse_args()
|
||||
if not args:
|
||||
parser.error('Must specify package name')
|
||||
elif len(args) > 2:
|
||||
parser.error('Too many arguments. '
|
||||
'Must only specify package and (optionally) release.')
|
||||
|
||||
config = UDTConfig(options.no_conf)
|
||||
if options.debian_mirror is None:
|
||||
options.debian_mirror = config.get_value('DEBIAN_MIRROR')
|
||||
if options.debsec_mirror is None:
|
||||
options.debsec_mirror = config.get_value('DEBSEC_MIRROR')
|
||||
|
||||
package = args[0].lower()
|
||||
|
||||
version = args[1] if len(args) > 1 else 'unstable'
|
||||
component = None
|
||||
|
||||
suite = is_suite(version)
|
||||
if suite is not None:
|
||||
line = list(rmadison('debian', package, suite, 'source'))
|
||||
if not line:
|
||||
Logger.error('Unable to find %s in Debian suite "%s".', package,
|
||||
suite)
|
||||
sys.exit(1)
|
||||
line = line[-1]
|
||||
version = line['version']
|
||||
component = line['component']
|
||||
|
||||
Logger.normal('Downloading %s version %s', package, version)
|
||||
srcpkg = DebianSourcePackage(package, version, component=component,
|
||||
mirrors=[options.debian_mirror,
|
||||
options.debsec_mirror])
|
||||
try:
|
||||
srcpkg.pull()
|
||||
except DownloadError, e:
|
||||
Logger.error('Failed to download: %s', str(e))
|
||||
sys.exit(1)
|
||||
if not options.download_only:
|
||||
srcpkg.unpack()
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
Logger.normal('User abort.')
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="debian", pull="source")
|
||||
|
14
pull-debian-udebs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-debian-udebs -- pull udeb package files for debian
|
||||
# Basic usage: pull-debian-udebs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="debian", pull="udebs")
|
14
pull-lp-ddebs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-lp-ddebs -- pull ddeb package files for ubuntu
|
||||
# Basic usage: pull-lp-ddebs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="ubuntu", pull="ddebs")
|
14
pull-lp-debs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-lp-debs -- pull deb package files for ubuntu
|
||||
# Basic usage: pull-lp-debs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="ubuntu", pull="debs")
|
112
pull-lp-source
@ -1,108 +1,14 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-lp-source -- pull a source package from Launchpad
|
||||
# Basic usage: pull-lp-source <source package> [<release>]
|
||||
# pull-lp-source -- pull source package files for ubuntu
|
||||
# Basic usage: pull-lp-source <package name> [version|release]
|
||||
#
|
||||
# Copyright (C) 2008, Iain Lane <iain@orangesquash.org.uk>,
|
||||
# 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 3
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# See file /usr/share/common-licenses/GPL for more details.
|
||||
#
|
||||
# ##################################################################
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
import os
|
||||
import sys
|
||||
from optparse import OptionParser
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
from devscripts.logger import Logger
|
||||
from distro_info import UbuntuDistroInfo
|
||||
|
||||
from ubuntutools.archive import UbuntuSourcePackage, DownloadError
|
||||
from ubuntutools.config import UDTConfig
|
||||
from ubuntutools.lp.lpapicache import Distribution, Launchpad
|
||||
from ubuntutools.lp.udtexceptions import (SeriesNotFoundException,
|
||||
PackageNotFoundException,
|
||||
PocketDoesNotExistError)
|
||||
from ubuntutools.misc import split_release_pocket
|
||||
|
||||
def main():
|
||||
usage = "Usage: %prog <package> [release|version]"
|
||||
opt_parser = OptionParser(usage)
|
||||
opt_parser.add_option('-d', '--download-only',
|
||||
dest='download_only', default=False,
|
||||
action='store_true',
|
||||
help="Do not extract the source package")
|
||||
opt_parser.add_option('-m', '--mirror', metavar='UBUNTU_MIRROR',
|
||||
dest='ubuntu_mirror',
|
||||
help='Preferred Ubuntu mirror (default: Launchpad)')
|
||||
opt_parser.add_option('--no-conf',
|
||||
dest='no_conf', default=False, action='store_true',
|
||||
help="Don't read config files or environment "
|
||||
"variables")
|
||||
(options, args) = opt_parser.parse_args()
|
||||
if not args:
|
||||
opt_parser.error("Must specify package name")
|
||||
|
||||
config = UDTConfig(options.no_conf)
|
||||
if options.ubuntu_mirror is None:
|
||||
options.ubuntu_mirror = config.get_value('UBUNTU_MIRROR')
|
||||
|
||||
# Login anonymously to LP
|
||||
Launchpad.login_anonymously()
|
||||
|
||||
package = str(args[0]).lower()
|
||||
|
||||
ubuntu_info = UbuntuDistroInfo()
|
||||
if len(args) > 1: # Custom distribution specified.
|
||||
version = str(args[1])
|
||||
else:
|
||||
version = os.getenv('DIST') or ubuntu_info.devel()
|
||||
component = None
|
||||
|
||||
# Release, not package version number:
|
||||
release = None
|
||||
pocket = None
|
||||
try:
|
||||
(release, pocket) = split_release_pocket(version)
|
||||
except PocketDoesNotExistError, e:
|
||||
pass
|
||||
if release in ubuntu_info.all and pocket is not None:
|
||||
try:
|
||||
spph = Distribution('ubuntu').getArchive().getSourcePackage(package,
|
||||
release,
|
||||
pocket)
|
||||
except (SeriesNotFoundException, PackageNotFoundException), e:
|
||||
Logger.error(str(e))
|
||||
sys.exit(1)
|
||||
version = spph.getVersion()
|
||||
component = spph.getComponent()
|
||||
|
||||
Logger.normal('Downloading %s version %s', package, version)
|
||||
srcpkg = UbuntuSourcePackage(package, version, component=component,
|
||||
mirrors=[options.ubuntu_mirror])
|
||||
try:
|
||||
srcpkg.pull()
|
||||
except DownloadError, e:
|
||||
Logger.error('Failed to download: %s', str(e))
|
||||
sys.exit(1)
|
||||
if not options.download_only:
|
||||
srcpkg.unpack()
|
||||
|
||||
if __name__ == '__main__':
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
Logger.normal('User abort.')
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="ubuntu", pull="source")
|
||||
|
14
pull-lp-udebs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-lp-udebs -- pull udeb package files for ubuntu
|
||||
# Basic usage: pull-lp-udebs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="ubuntu", pull="udebs")
|
32
pull-pkg
Executable file
@ -0,0 +1,32 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-pkg -- pull package files for debian/ubuntu/uca/ppa
|
||||
# Basic usage: pull-pkg -D distro -p type <package name> [version|release]
|
||||
#
|
||||
# Copyright (C) 2008, Iain Lane <iain@orangesquash.org.uk>,
|
||||
# 2010-2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
# 2017-2018, Dan Streetman <ddstreet@canonical.com>
|
||||
#
|
||||
# ##################################################################
|
||||
#
|
||||
# This program is free software; you can redistribute it and/or
|
||||
# modify it under the terms of the GNU General Public License
|
||||
# as published by the Free Software Foundation; either version 3
|
||||
# of the License, or (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
#
|
||||
# See file /usr/share/common-licenses/GPL for more details.
|
||||
#
|
||||
# ##################################################################
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main()
|
15
pull-ppa-ddebs
Executable file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-ppa-ddebs -- pull ddeb package files for a Launchpad Personal Package Archive
|
||||
# Basic usage: pull-ppa-ddebs <package name> <ppa:USER/NAME> [version|release]
|
||||
# pull-ppa-ddebs --ppa USER/NAME <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="ppa", pull="ddebs")
|
15
pull-ppa-debs
Executable file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-ppa-debs -- pull deb package files for a Launchpad Personal Package Archive
|
||||
# Basic usage: pull-ppa-debs <package name> <ppa:USER/NAME> [version|release]
|
||||
# pull-ppa-debs --ppa USER/NAME <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="ppa", pull="debs")
|
15
pull-ppa-source
Executable file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-ppa-source -- pull source package files for a Launchpad Personal Package Archive
|
||||
# Basic usage: pull-ppa-source <package name> <ppa:USER/NAME> [version|release]
|
||||
# pull-ppa-source --ppa USER/NAME <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="ppa", pull="source")
|
15
pull-ppa-udebs
Executable file
@ -0,0 +1,15 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-ppa-udebs -- pull udeb package files for a Launchpad Personal Package Archive
|
||||
# Basic usage: pull-ppa-udebs <package name> <ppa:USER/NAME> [version|release]
|
||||
# pull-ppa-udebs --ppa USER/NAME <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="ppa", pull="udebs")
|
@ -1,59 +0,0 @@
|
||||
#!/usr/bin/perl
|
||||
# Script Name: pull-revu-source
|
||||
# Author: Nathan Handler <nhandler@ubuntu.com>
|
||||
# Usage: pull-revu-source <source package>
|
||||
# Copyright (C) 2009 Nathan Handler <nhandler@ubuntu.com>
|
||||
# Based on revupull in kubuntu-dev-tools,
|
||||
# written by Harald Sitter <apachelogger@ubuntu.com>
|
||||
# License: GNU General Public License
|
||||
# This program is free software: you can redistribute it and/or modify
|
||||
# it under the terms of the GNU General Public License as published by
|
||||
# the Free Software Foundation, either version 3 of the License, or
|
||||
# (at your option) any later version.
|
||||
#
|
||||
# This program is distributed in the hope that it will be useful,
|
||||
# but WITHOUT ANY WARRANTY; without even the implied warranty of
|
||||
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
|
||||
# GNU General Public License for more details.
|
||||
# On Debian GNU/Linux systems, the complete text of the GNU General
|
||||
# Public License can be found in the /usr/share/common-licenses/GPL-3 file.
|
||||
|
||||
use warnings;
|
||||
use strict;
|
||||
use File::Basename;
|
||||
use Getopt::Long;
|
||||
|
||||
my $REVU = "revu.ubuntuwire.com";
|
||||
|
||||
my($package) = lc($ARGV[0]) || usage(2);
|
||||
my($help)=0;
|
||||
GetOptions('help' => \$help);
|
||||
usage(0) if($help);
|
||||
|
||||
eval { require LWP::Simple; };
|
||||
if ($@=~ m#^Can\'t locate LWP/Simple#) {
|
||||
print(STDERR "Please install libwww-perl.\n");
|
||||
exit(1);
|
||||
}
|
||||
use LWP::Simple;
|
||||
|
||||
dget(getURL());
|
||||
|
||||
sub getURL {
|
||||
my($url) = "http://" . $REVU . "/dsc.py?url&package=" . $package;
|
||||
my($page)=get($url);
|
||||
die("Could Not Get $url") unless (defined $page);
|
||||
return $page;
|
||||
}
|
||||
|
||||
sub dget {
|
||||
my($dsc) = @_;
|
||||
exec("dget -xu $dsc");
|
||||
}
|
||||
|
||||
sub usage {
|
||||
my($exit) = @_;
|
||||
my($name)=basename($0);
|
||||
print("USAGE: $name [-h] <source package>\n");
|
||||
exit($exit);
|
||||
}
|
14
pull-uca-ddebs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-uca-ddebs -- pull ddeb package files for ubuntu cloud archive
|
||||
# Basic usage: pull-uca-ddebs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="uca", pull="ddebs")
|
14
pull-uca-debs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-uca-debs -- pull deb package files for ubuntu cloud archive
|
||||
# Basic usage: pull-uca-debs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="uca", pull="debs")
|
14
pull-uca-source
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-uca-source -- pull source package files for ubuntu cloud archive
|
||||
# Basic usage: pull-uca-source <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="uca", pull="source")
|
14
pull-uca-udebs
Executable file
@ -0,0 +1,14 @@
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# pull-uca-udebs -- pull udeb package files for ubuntu cloud archive
|
||||
# Basic usage: pull-uca-udebs <package name> [version|release]
|
||||
#
|
||||
# See pull-pkg
|
||||
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
from ubuntutools.pullpkg import PullPkg
|
||||
|
||||
if __name__ == "__main__":
|
||||
PullPkg.main(distro="uca", pull="udebs")
|
6
pyproject.toml
Normal file
@ -0,0 +1,6 @@
|
||||
[tool.black]
|
||||
line-length = 99
|
||||
|
||||
[tool.isort]
|
||||
line_length = 99
|
||||
profile = "black"
|
300
requestbackport
@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
#
|
||||
@ -14,19 +14,21 @@
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
from collections import defaultdict
|
||||
import optparse
|
||||
import argparse
|
||||
import sys
|
||||
from collections import defaultdict
|
||||
|
||||
import apt
|
||||
from devscripts.logger import Logger
|
||||
from distro_info import UbuntuDistroInfo
|
||||
|
||||
from ubuntutools.lp.lpapicache import Launchpad, Distribution
|
||||
from ubuntutools.lp.udtexceptions import PackageNotFoundException
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.config import UDTConfig
|
||||
from ubuntutools.rdepends import query_rdepends, RDependsException
|
||||
from ubuntutools.question import YesNoQuestion, EditBugReport
|
||||
from ubuntutools.lp.lpapicache import Distribution, Launchpad
|
||||
from ubuntutools.lp.udtexceptions import PackageNotFoundException
|
||||
from ubuntutools.question import EditBugReport, YesNoQuestion, confirmation_prompt
|
||||
from ubuntutools.rdepends import RDependsException, query_rdepends
|
||||
|
||||
Logger = getLogger()
|
||||
|
||||
|
||||
class DestinationException(Exception):
|
||||
@ -36,16 +38,14 @@ class DestinationException(Exception):
|
||||
def determine_destinations(source, destination):
|
||||
ubuntu_info = UbuntuDistroInfo()
|
||||
if destination is None:
|
||||
destination = ubuntu_info.stable()
|
||||
destination = ubuntu_info.lts()
|
||||
|
||||
if source not in ubuntu_info.all:
|
||||
raise DestinationException("Source release %s does not exist" % source)
|
||||
raise DestinationException(f"Source release {source} does not exist")
|
||||
if destination not in ubuntu_info.all:
|
||||
raise DestinationException("Destination release %s does not exist"
|
||||
% destination)
|
||||
raise DestinationException(f"Destination release {destination} does not exist")
|
||||
if destination not in ubuntu_info.supported():
|
||||
raise DestinationException("Destination release %s is not supported"
|
||||
% destination)
|
||||
raise DestinationException(f"Destination release {destination} is not supported")
|
||||
|
||||
found = False
|
||||
destinations = []
|
||||
@ -72,14 +72,50 @@ def determine_destinations(source, destination):
|
||||
return destinations
|
||||
|
||||
|
||||
def disclaimer():
|
||||
print(
|
||||
"Ubuntu's backports are not for fixing bugs in stable releases, "
|
||||
"but for bringing new features to older, stable releases.\n"
|
||||
"See https://wiki.ubuntu.com/UbuntuBackports for the Ubuntu "
|
||||
"Backports policy and processes.\n"
|
||||
"See https://wiki.ubuntu.com/StableReleaseUpdates for the process "
|
||||
"for fixing bugs in stable releases."
|
||||
)
|
||||
confirmation_prompt()
|
||||
|
||||
|
||||
def check_existing(package):
|
||||
"""Search for possible existing bug reports"""
|
||||
distro = Distribution("ubuntu")
|
||||
srcpkg = distro.getSourcePackage(name=package.getPackageName())
|
||||
|
||||
bugs = srcpkg.searchTasks(
|
||||
omit_duplicates=True,
|
||||
search_text="[BPO]",
|
||||
status=["Incomplete", "New", "Confirmed", "Triaged", "In Progress", "Fix Committed"],
|
||||
)
|
||||
if not bugs:
|
||||
return
|
||||
|
||||
Logger.info(
|
||||
"There are existing bug reports that look similar to your "
|
||||
"request. Please check before continuing:"
|
||||
)
|
||||
|
||||
for bug in sorted([bug_task.bug for bug_task in bugs], key=lambda bug: bug.id):
|
||||
Logger.info(" * LP: #%-7i: %s %s", bug.id, bug.title, bug.web_link)
|
||||
|
||||
confirmation_prompt()
|
||||
|
||||
|
||||
def find_rdepends(releases, published_binaries):
|
||||
intermediate = defaultdict(lambda: defaultdict(list))
|
||||
|
||||
# We want to display every published binary, even if it has no rdepends
|
||||
for binpkg in published_binaries:
|
||||
intermediate[binpkg]
|
||||
intermediate[binpkg] # pylint: disable=pointless-statement
|
||||
|
||||
for arch in ('any', 'source'):
|
||||
for arch in ("any", "source"):
|
||||
for release in releases:
|
||||
for binpkg in published_binaries:
|
||||
try:
|
||||
@@ -87,22 +123,25 @@ def find_rdepends(releases, published_binaries):
|
||||
except RDependsException:
|
||||
# Not published? TODO: Check
|
||||
continue
|
||||
for relationship, rdeps in raw_rdeps.iteritems():
|
||||
for relationship, rdeps in raw_rdeps.items():
|
||||
for rdep in rdeps:
|
||||
if rdep['Package'] in published_binaries:
|
||||
# Ignore circular deps:
|
||||
if rdep["Package"] in published_binaries:
|
||||
continue
|
||||
intermediate[binpkg][rdep['Package']] \
|
||||
.append((release, relationship))
|
||||
# arch==any queries return Reverse-Build-Deps:
|
||||
if arch == "any" and rdep.get("Architectures", []) == ["source"]:
|
||||
continue
|
||||
intermediate[binpkg][rdep["Package"]].append((release, relationship))
|
||||
|
||||
output = []
|
||||
for binpkg, rdeps in intermediate.iteritems():
|
||||
output += ['', binpkg, '-' * len(binpkg)]
|
||||
for pkg, appearences in rdeps.iteritems():
|
||||
output += ['* %s' % pkg]
|
||||
for binpkg, rdeps in intermediate.items():
|
||||
output += ["", binpkg, "-" * len(binpkg)]
|
||||
for pkg, appearences in rdeps.items():
|
||||
output += [f"* {pkg}"]
|
||||
for release, relationship in appearences:
|
||||
output += [' [ ] %s (%s)' % (release, relationship)]
|
||||
output += [f" [ ] {release} ({relationship})"]
|
||||
|
||||
found_any = sum(len(rdeps) for rdeps in intermediate.itervalues())
|
||||
found_any = sum(len(rdeps) for rdeps in intermediate.values())
|
||||
if found_any:
|
||||
output = [
|
||||
"Reverse dependencies:",
|
||||
@@ -115,8 +154,8 @@ def find_rdepends(releases, published_binaries):
|
||||
"package currently in the release still works with the new "
|
||||
"%(package)s installed. "
|
||||
"Reverse- Recommends, Suggests, and Enhances don't need to be "
|
||||
"tested, and are listed for completeness-sake."
|
||||
] + output
|
||||
"tested, and are listed for completeness-sake.",
|
||||
] + output
|
||||
else:
|
||||
output = ["No reverse dependencies"]
|
||||
|
||||
@@ -124,135 +163,164 @@ def find_rdepends(releases, published_binaries):
|
||||
|
||||
|
||||
def locate_package(package, distribution):
|
||||
archive = Distribution('ubuntu').getArchive()
|
||||
for pass_ in ('source', 'binary'):
|
||||
try:
|
||||
package_spph = archive.getSourcePackage(package, distribution)
|
||||
return package_spph
|
||||
except PackageNotFoundException, e:
|
||||
if pass_ == 'binary':
|
||||
Logger.error(str(e))
|
||||
sys.exit(1)
|
||||
|
||||
archive = Distribution("ubuntu").getArchive()
|
||||
try:
|
||||
package_spph = archive.getSourcePackage(package, distribution)
|
||||
return package_spph
|
||||
except PackageNotFoundException as e:
|
||||
try:
|
||||
apt_pkg = apt.Cache()[package]
|
||||
except KeyError:
|
||||
continue
|
||||
Logger.error(str(e))
|
||||
sys.exit(1)
|
||||
package = apt_pkg.candidate.source_name
|
||||
Logger.normal("Binary package specified, considering its source "
|
||||
"package instead: %s", package)
|
||||
Logger.info(
|
||||
"Binary package specified, considering its source package instead: %s", package
|
||||
)
|
||||
return None
|
||||
|
||||
|
||||
def request_backport(package_spph, source, destinations):
|
||||
|
||||
published_binaries = set()
|
||||
for bpph in package_spph.getBinaries():
|
||||
published_binaries.add(bpph.getPackageName())
|
||||
|
||||
testing = []
|
||||
testing += ["You can test-build the backport in your PPA with "
|
||||
"backportpackage:"]
|
||||
testing += ["$ backportpackage -u ppa:<lp username>/<ppa name> "
|
||||
"-s %s -d %s %s"
|
||||
% (source, dest, package_spph.getPackageName())
|
||||
for dest in destinations]
|
||||
testing += [""]
|
||||
if not published_binaries:
|
||||
Logger.error(
|
||||
"%s (%s) has no published binaries in %s. ",
|
||||
package_spph.getPackageName(),
|
||||
package_spph.getVersion(),
|
||||
source,
|
||||
)
|
||||
Logger.info(
|
||||
"Is it stuck in bin-NEW? It can't be backported until "
|
||||
"the binaries have been accepted."
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
testing = ["[Testing]", ""]
|
||||
for dest in destinations:
|
||||
testing += ['* %s:' % dest]
|
||||
testing += ["[ ] Package builds without modification"]
|
||||
testing += ["[ ] %s installs cleanly and runs" % binary
|
||||
for binary in published_binaries]
|
||||
testing += [f" * {dest.capitalize()}:"]
|
||||
testing += [" [ ] Package builds without modification"]
|
||||
testing += [f" [ ] {binary} installs cleanly and runs" for binary in published_binaries]
|
||||
|
||||
subst = {
|
||||
'package': package_spph.getPackageName(),
|
||||
'version': package_spph.getVersion(),
|
||||
'component': package_spph.getComponent(),
|
||||
'source': source,
|
||||
'destinations': ', '.join(destinations),
|
||||
"package": package_spph.getPackageName(),
|
||||
"version": package_spph.getVersion(),
|
||||
"component": package_spph.getComponent(),
|
||||
"source": package_spph.getSeriesAndPocket(),
|
||||
"destinations": ", ".join(destinations),
|
||||
}
|
||||
subject = ("Please backport %(package)s %(version)s (%(component)s) "
|
||||
"from %(source)s" % subst)
|
||||
body = ('\n'.join(
|
||||
subject = "[BPO] %(package)s %(version)s to %(destinations)s" % subst
|
||||
body = (
|
||||
"\n".join(
|
||||
[
|
||||
"Please backport %(package)s %(version)s (%(component)s) "
|
||||
"from %(source)s to %(destinations)s.",
|
||||
"",
|
||||
"Reason for the backport:",
|
||||
"========================",
|
||||
">>> Enter your reasoning here <<<",
|
||||
"",
|
||||
"Testing:",
|
||||
"========",
|
||||
"Mark off items in the checklist [X] as you test them, "
|
||||
"but please leave the checklist so that backporters can quickly "
|
||||
"evaluate the state of testing.",
|
||||
""
|
||||
"[Impact]",
|
||||
"",
|
||||
" * Justification for backporting the new version to the stable release.",
|
||||
"",
|
||||
"[Scope]",
|
||||
"",
|
||||
" * List the Ubuntu release you will backport from,"
|
||||
" and the specific package version.",
|
||||
"",
|
||||
" * List the Ubuntu release(s) you will backport to.",
|
||||
"",
|
||||
"[Other Info]",
|
||||
"",
|
||||
" * Anything else you think is useful to include",
|
||||
"",
|
||||
]
|
||||
+ testing
|
||||
+ [""]
|
||||
+ find_rdepends(destinations, published_binaries)
|
||||
+ [""]
|
||||
) % subst)
|
||||
)
|
||||
% subst
|
||||
)
|
||||
|
||||
editor = EditBugReport(subject, body)
|
||||
editor.edit()
|
||||
subject, body = editor.get_report()
|
||||
|
||||
Logger.normal('The final report is:\nSummary: %s\nDescription:\n%s\n',
|
||||
subject, body)
|
||||
Logger.info("The final report is:\nSummary: %s\nDescription:\n%s\n", subject, body)
|
||||
if YesNoQuestion().ask("Request this backport", "yes") == "no":
|
||||
sys.exit(1)
|
||||
|
||||
targets = [Launchpad.projects['%s-backports' % destination]
|
||||
for destination in destinations]
|
||||
bug = Launchpad.bugs.createBug(title=subject, description=body,
|
||||
target=targets[0])
|
||||
for target in targets[1:]:
|
||||
bug.addTask(target=target)
|
||||
distro = Distribution("ubuntu")
|
||||
pkgname = package_spph.getPackageName()
|
||||
|
||||
Logger.normal("Backport request filed as %s", bug.web_link)
|
||||
bug = Launchpad.bugs.createBug(
|
||||
title=subject, description=body, target=distro.getSourcePackage(name=pkgname)
|
||||
)
|
||||
|
||||
bug.subscribe(person=Launchpad.people["ubuntu-backporters"])
|
||||
|
||||
for dest in destinations:
|
||||
series = distro.getSeries(dest)
|
||||
try:
|
||||
bug.addTask(target=series.getSourcePackage(name=pkgname))
|
||||
except Exception: # pylint: disable=broad-except
|
||||
break
|
||||
|
||||
Logger.info("Backport request filed as %s", bug.web_link)
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser('%progname [options] package')
|
||||
parser.add_option('-d', '--destination', metavar='DEST',
|
||||
help='Backport to DEST release and necessary '
|
||||
'intermediate releases '
|
||||
'(default: current stable release)')
|
||||
parser.add_option('-s', '--source', metavar='SOURCE',
|
||||
help='Backport from SOURCE release '
|
||||
'(default: current devel release)')
|
||||
parser.add_option('-l', '--lpinstance', metavar='INSTANCE', default=None,
|
||||
help='Launchpad instance to connect to '
|
||||
'(default: production).')
|
||||
parser.add_option('--no-conf', action='store_true',
|
||||
dest='no_conf', default=False,
|
||||
help="Don't read config files or environment variables")
|
||||
options, args = parser.parse_args()
|
||||
parser = argparse.ArgumentParser(usage="%(prog)s [options] package")
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--destination",
|
||||
metavar="DEST",
|
||||
help="Backport to DEST release and necessary "
|
||||
"intermediate releases "
|
||||
"(default: current LTS release)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-s",
|
||||
"--source",
|
||||
metavar="SOURCE",
|
||||
help="Backport from SOURCE release (default: current devel release)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l",
|
||||
"--lpinstance",
|
||||
metavar="INSTANCE",
|
||||
default=None,
|
||||
help="Launchpad instance to connect to (default: production).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-conf",
|
||||
action="store_true",
|
||||
dest="no_conf",
|
||||
default=False,
|
||||
help="Don't read config files or environment variables",
|
||||
)
|
||||
parser.add_argument("package", help=argparse.SUPPRESS)
|
||||
args = parser.parse_args()
|
||||
|
||||
if len(args) != 1:
|
||||
parser.error("One (and only one) package must be specified")
|
||||
package = args[0]
|
||||
config = UDTConfig(args.no_conf)
|
||||
|
||||
config = UDTConfig(options.no_conf)
|
||||
if args.lpinstance is None:
|
||||
args.lpinstance = config.get_value("LPINSTANCE")
|
||||
Launchpad.login(args.lpinstance)
|
||||
|
||||
if options.lpinstance is None:
|
||||
options.lpinstance = config.get_value('LPINSTANCE')
|
||||
Launchpad.login(options.lpinstance)
|
||||
|
||||
if options.source is None:
|
||||
options.source = Distribution('ubuntu').getDevelopmentSeries().name
|
||||
if args.source is None:
|
||||
args.source = Distribution("ubuntu").getDevelopmentSeries().name
|
||||
|
||||
try:
|
||||
destinations = determine_destinations(options.source,
|
||||
options.destination)
|
||||
except DestinationException, e:
|
||||
destinations = determine_destinations(args.source, args.destination)
|
||||
except DestinationException as e:
|
||||
Logger.error(str(e))
|
||||
sys.exit(1)
|
||||
|
||||
package_spph = locate_package(package, options.source)
|
||||
request_backport(package_spph, options.source, destinations)
|
||||
disclaimer()
|
||||
|
||||
package_spph = locate_package(args.package, args.source)
|
||||
|
||||
check_existing(package_spph)
|
||||
request_backport(package_spph, args.source, destinations)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
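Editorial sketch, not part of the changeset: after the optparse-to-argparse conversion the package is a declared positional argument, so a typical invocation looks like this (package and release names are placeholders):
    $ requestbackport -s noble -d jammy somepackage
-s/--source still defaults to the current devel release, while -d/--destination now defaults to the current LTS, matching the determine_destinations() change from stable() to lts() above.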
requestsync (430 lines)
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# (C) 2007 Canonical Ltd., Steve Kowalik
|
||||
@@ -26,285 +26,325 @@
|
||||
#
|
||||
# ##################################################################
|
||||
|
||||
import optparse
|
||||
import argparse
|
||||
import os
|
||||
import sys
|
||||
|
||||
from debian.changelog import Version
|
||||
from distro_info import UbuntuDistroInfo
|
||||
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.config import UDTConfig, ubu_email
|
||||
from ubuntutools.lp import udtexceptions
|
||||
from ubuntutools.misc import require_utf8
|
||||
from ubuntutools.question import confirmation_prompt, EditBugReport
|
||||
from ubuntutools.question import EditBugReport, confirmation_prompt
|
||||
from ubuntutools.version import Version
|
||||
|
||||
Logger = getLogger()
|
||||
|
||||
#
|
||||
# entry point
|
||||
#
|
||||
|
||||
|
||||
def main():
|
||||
ubu_info = UbuntuDistroInfo()
|
||||
DEFAULT_SOURCE = 'unstable'
|
||||
if ubu_info.is_lts(ubu_info.devel()):
|
||||
DEFAULT_SOURCE = 'testing'
|
||||
|
||||
# Our usage options.
|
||||
usage = ('Usage: %prog [options] '
|
||||
'<source package> [<target release> [base version]]')
|
||||
parser = optparse.OptionParser(usage)
|
||||
usage = "%(prog)s [options] <source package> [<target release> [base version]]"
|
||||
parser = argparse.ArgumentParser(usage=usage)
|
||||
|
||||
parser.add_option('-d', type='string',
|
||||
dest='dist', default=DEFAULT_SOURCE,
|
||||
help='Debian distribution to sync from.')
|
||||
parser.add_option('-k', type='string',
|
||||
dest='keyid', default=None,
|
||||
help='GnuPG key ID to use for signing report '
|
||||
'(only used when emailing the sync request).')
|
||||
parser.add_option('-n', action='store_true',
|
||||
dest='newpkg', default=False,
|
||||
help='Whether package to sync is a new package in '
|
||||
'Ubuntu.')
|
||||
parser.add_option('--email', action='store_true', default=False,
|
||||
help='Use a PGP-signed email for filing the sync '
|
||||
'request, rather than the LP API.')
|
||||
parser.add_option('--lp', dest='deprecated_lp_flag',
|
||||
action='store_true', default=False,
|
||||
help=optparse.SUPPRESS_HELP)
|
||||
parser.add_option('-l', '--lpinstance', metavar='INSTANCE',
|
||||
dest='lpinstance', default=None,
|
||||
help='Launchpad instance to connect to '
|
||||
'(default: production).')
|
||||
parser.add_option('-s', action='store_true',
|
||||
dest='sponsorship', default=False,
|
||||
help='Force sponsorship')
|
||||
parser.add_option('-C', action='store_true',
|
||||
dest='missing_changelog_ok', default=False,
|
||||
help='Allow changelog to be manually filled in '
|
||||
'when missing')
|
||||
parser.add_option('-e', action='store_true',
|
||||
dest='ffe', default=False,
|
||||
help='Use this after FeatureFreeze for non-bug fix '
|
||||
'syncs, changes default subscription to the '
|
||||
'appropriate release team.')
|
||||
parser.add_option('--no-conf', action='store_true',
|
||||
dest='no_conf', default=False,
|
||||
help="Don't read config files or environment variables")
|
||||
|
||||
(options, args) = parser.parse_args()
|
||||
|
||||
if not len(args):
|
||||
parser.print_help()
|
||||
sys.exit(1)
|
||||
parser.add_argument(
|
||||
"-d", dest="dist", default="unstable", help="Debian distribution to sync from."
|
||||
)
|
||||
parser.add_argument(
|
||||
"-k",
|
||||
dest="keyid",
|
||||
default=None,
|
||||
help="GnuPG key ID to use for signing report "
|
||||
"(only used when emailing the sync request).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-n",
|
||||
action="store_true",
|
||||
dest="newpkg",
|
||||
default=False,
|
||||
help="Whether package to sync is a new package in Ubuntu.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--email",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help="Use a PGP-signed email for filing the sync request, rather than the LP API.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--lp",
|
||||
dest="deprecated_lp_flag",
|
||||
action="store_true",
|
||||
default=False,
|
||||
help=argparse.SUPPRESS,
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l",
|
||||
"--lpinstance",
|
||||
metavar="INSTANCE",
|
||||
dest="lpinstance",
|
||||
default=None,
|
||||
help="Launchpad instance to connect to (default: production).",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-s", action="store_true", dest="sponsorship", default=False, help="Force sponsorship"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-C",
|
||||
action="store_true",
|
||||
dest="missing_changelog_ok",
|
||||
default=False,
|
||||
help="Allow changelog to be manually filled in when missing",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-e",
|
||||
action="store_true",
|
||||
dest="ffe",
|
||||
default=False,
|
||||
help="Use this after FeatureFreeze for non-bug fix "
|
||||
"syncs, changes default subscription to the "
|
||||
"appropriate release team.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"--no-conf",
|
||||
action="store_true",
|
||||
dest="no_conf",
|
||||
default=False,
|
||||
help="Don't read config files or environment variables",
|
||||
)
|
||||
parser.add_argument("source_package", help=argparse.SUPPRESS)
|
||||
parser.add_argument("release", nargs="?", help=argparse.SUPPRESS)
|
||||
parser.add_argument("base_version", nargs="?", type=Version, help=argparse.SUPPRESS)
|
||||
args = parser.parse_args()
|
||||
|
||||
require_utf8()
|
||||
|
||||
config = UDTConfig(options.no_conf)
|
||||
config = UDTConfig(args.no_conf)
|
||||
|
||||
if options.deprecated_lp_flag:
|
||||
print "The --lp flag is now default, ignored."
|
||||
if options.email:
|
||||
options.lpapi = False
|
||||
if args.deprecated_lp_flag:
|
||||
Logger.info("The --lp flag is now default, ignored.")
|
||||
if args.email:
|
||||
args.lpapi = False
|
||||
else:
|
||||
options.lpapi = config.get_value('USE_LPAPI', default=True,
|
||||
boolean=True)
|
||||
if options.lpinstance is None:
|
||||
options.lpinstance = config.get_value('LPINSTANCE')
|
||||
args.lpapi = config.get_value("USE_LPAPI", default=True, boolean=True)
|
||||
if args.lpinstance is None:
|
||||
args.lpinstance = config.get_value("LPINSTANCE")
|
||||
|
||||
if not options.lpapi:
|
||||
if options.lpinstance == 'production':
|
||||
bug_mail_domain = 'bugs.launchpad.net'
|
||||
elif options.lpinstance == 'staging':
|
||||
bug_mail_domain = 'bugs.staging.launchpad.net'
|
||||
if args.keyid is None:
|
||||
args.keyid = config.get_value("KEYID")
|
||||
|
||||
if not args.lpapi:
|
||||
if args.lpinstance == "production":
|
||||
bug_mail_domain = "bugs.launchpad.net"
|
||||
elif args.lpinstance == "staging":
|
||||
bug_mail_domain = "bugs.staging.launchpad.net"
|
||||
else:
|
||||
print >> sys.stderr, ('Error: Unknown launchpad instance: %s'
|
||||
% options.lpinstance)
|
||||
Logger.error("Error: Unknown launchpad instance: %s", args.lpinstance)
|
||||
sys.exit(1)
|
||||
|
||||
mailserver_host = config.get_value('SMTP_SERVER',
|
||||
default=None,
|
||||
compat_keys=['UBUSMTP', 'DEBSMTP'])
|
||||
if not options.lpapi and not mailserver_host:
|
||||
mailserver_host = config.get_value(
|
||||
"SMTP_SERVER", default=None, compat_keys=["UBUSMTP", "DEBSMTP"]
|
||||
)
|
||||
if not args.lpapi and not mailserver_host:
|
||||
try:
|
||||
import DNS
|
||||
import DNS # pylint: disable=import-outside-toplevel
|
||||
|
||||
DNS.DiscoverNameServers()
|
||||
mxlist = DNS.mxlookup(bug_mail_domain)
|
||||
firstmx = mxlist[0]
|
||||
mailserver_host = firstmx[1]
|
||||
except ImportError:
|
||||
print >> sys.stderr, ('Please install python-dns to support '
|
||||
'Launchpad mail server lookup.')
|
||||
Logger.error("Please install python-dns to support Launchpad mail server lookup.")
|
||||
sys.exit(1)
|
||||
|
||||
mailserver_port = config.get_value('SMTP_PORT', default=25,
|
||||
compat_keys=['UBUSMTP_PORT',
|
||||
'DEBSMTP_PORT'])
|
||||
mailserver_user = config.get_value('SMTP_USER',
|
||||
compat_keys=['UBUSMTP_USER',
|
||||
'DEBSMTP_USER'])
|
||||
mailserver_pass = config.get_value('SMTP_PASS',
|
||||
compat_keys=['UBUSMTP_PASS',
|
||||
'DEBSMTP_PASS'])
|
||||
mailserver_port = config.get_value(
|
||||
"SMTP_PORT", default=25, compat_keys=["UBUSMTP_PORT", "DEBSMTP_PORT"]
|
||||
)
|
||||
mailserver_user = config.get_value("SMTP_USER", compat_keys=["UBUSMTP_USER", "DEBSMTP_USER"])
|
||||
mailserver_pass = config.get_value("SMTP_PASS", compat_keys=["UBUSMTP_PASS", "DEBSMTP_PASS"])
|
||||
|
||||
# import the needed requestsync module
|
||||
if options.lpapi:
|
||||
from ubuntutools.requestsync.lp import (check_existing_reports,
|
||||
get_debian_srcpkg,
|
||||
get_ubuntu_srcpkg,
|
||||
get_ubuntu_delta_changelog,
|
||||
need_sponsorship, post_bug)
|
||||
# pylint: disable=import-outside-toplevel
|
||||
if args.lpapi:
|
||||
from ubuntutools.lp.lpapicache import Distribution, Launchpad
|
||||
from ubuntutools.requestsync.lp import (
|
||||
check_existing_reports,
|
||||
get_debian_srcpkg,
|
||||
get_ubuntu_delta_changelog,
|
||||
get_ubuntu_srcpkg,
|
||||
need_sponsorship,
|
||||
post_bug,
|
||||
)
|
||||
|
||||
# See if we have LP credentials and exit if we don't -
|
||||
# cannot continue in this case
|
||||
|
||||
try:
|
||||
# devel for changelogUrl()
|
||||
Launchpad.login(service=options.lpinstance, api_version='devel')
|
||||
Launchpad.login(service=args.lpinstance, api_version="devel")
|
||||
except IOError:
|
||||
sys.exit(1)
|
||||
else:
|
||||
from ubuntutools.requestsync.mail import (check_existing_reports,
|
||||
get_debian_srcpkg,
|
||||
get_ubuntu_srcpkg,
|
||||
get_ubuntu_delta_changelog,
|
||||
mail_bug, need_sponsorship)
|
||||
if not any(x in os.environ for x in ('UBUMAIL', 'DEBEMAIL', 'EMAIL')):
|
||||
print >> sys.stderr, (
|
||||
'E: The environment variable UBUMAIL, DEBEMAIL or EMAIL needs '
|
||||
'to be set to let this script mail the sync request.')
|
||||
from ubuntutools.requestsync.mail import (
|
||||
check_existing_reports,
|
||||
get_debian_srcpkg,
|
||||
get_ubuntu_delta_changelog,
|
||||
get_ubuntu_srcpkg,
|
||||
mail_bug,
|
||||
need_sponsorship,
|
||||
)
|
||||
|
||||
if not any(x in os.environ for x in ("UBUMAIL", "DEBEMAIL", "EMAIL")):
|
||||
Logger.error(
|
||||
"The environment variable UBUMAIL, DEBEMAIL or EMAIL needs "
|
||||
"to be set to let this script mail the sync request."
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
newsource = options.newpkg
|
||||
sponsorship = options.sponsorship
|
||||
distro = options.dist
|
||||
ffe = options.ffe
|
||||
lpapi = options.lpapi
|
||||
newsource = args.newpkg
|
||||
sponsorship = args.sponsorship
|
||||
distro = args.dist
|
||||
ffe = args.ffe
|
||||
lpapi = args.lpapi
|
||||
need_interaction = False
|
||||
force_base_version = None
|
||||
srcpkg = args[0]
|
||||
srcpkg = args.source_package
|
||||
|
||||
if len(args) == 1:
|
||||
if not args.release:
|
||||
if lpapi:
|
||||
release = Distribution('ubuntu').getDevelopmentSeries().name
|
||||
args.release = Distribution("ubuntu").getDevelopmentSeries().name
|
||||
else:
|
||||
release = ubu_info.devel()
|
||||
print >> sys.stderr, 'W: Target release missing - assuming %s' % release
|
||||
elif len(args) == 2:
|
||||
release = args[1]
|
||||
elif len(args) == 3:
|
||||
release = args[1]
|
||||
force_base_version = Version(args[2])
|
||||
else:
|
||||
print >> sys.stderr, 'E: Too many arguments.'
|
||||
parser.print_help()
|
||||
sys.exit(1)
|
||||
ubu_info = UbuntuDistroInfo()
|
||||
args.release = ubu_info.devel()
|
||||
Logger.warning("Target release missing - assuming %s", args.release)
|
||||
|
||||
# Get the current Ubuntu source package
|
||||
try:
|
||||
ubuntu_srcpkg = get_ubuntu_srcpkg(srcpkg, release)
|
||||
ubuntu_srcpkg = get_ubuntu_srcpkg(srcpkg, args.release, "Proposed")
|
||||
ubuntu_version = Version(ubuntu_srcpkg.getVersion())
|
||||
ubuntu_component = ubuntu_srcpkg.getComponent()
|
||||
newsource = False # override the -n flag
|
||||
newsource = False # override the -n flag
|
||||
except udtexceptions.PackageNotFoundException:
|
||||
ubuntu_srcpkg = None
|
||||
ubuntu_version = Version('~')
|
||||
ubuntu_component = 'universe' # let's assume universe
|
||||
ubuntu_version = Version("~")
|
||||
ubuntu_component = None # Set after getting the Debian info
|
||||
if not newsource:
|
||||
print ("'%s' doesn't exist in 'Ubuntu %s'.\n"
|
||||
"Do you want to sync a new package?"
|
||||
% (srcpkg, release))
|
||||
Logger.info("'%s' doesn't exist in 'Ubuntu %s'.", srcpkg, args.release)
|
||||
Logger.info("Do you want to sync a new package?")
|
||||
confirmation_prompt()
|
||||
newsource = True
|
||||
except udtexceptions.SeriesNotFoundException as error:
|
||||
Logger.error(error)
|
||||
sys.exit(1)
|
||||
|
||||
# Get the requested Debian source package
|
||||
try:
|
||||
debian_srcpkg = get_debian_srcpkg(srcpkg, distro)
|
||||
debian_version = Version(debian_srcpkg.getVersion())
|
||||
debian_component = debian_srcpkg.getComponent()
|
||||
except udtexceptions.PackageNotFoundException, error:
|
||||
print >> sys.stderr, "E: %s" % error
|
||||
except udtexceptions.PackageNotFoundException as error:
|
||||
Logger.error(error)
|
||||
sys.exit(1)
|
||||
except udtexceptions.SeriesNotFoundException as error:
|
||||
Logger.error(error)
|
||||
sys.exit(1)
|
||||
|
||||
if ubuntu_component is None:
|
||||
if debian_component == "main":
|
||||
ubuntu_component = "universe"
|
||||
else:
|
||||
ubuntu_component = "multiverse"
|
||||
|
||||
# Stop if Ubuntu has already the version from Debian or a newer version
|
||||
if (ubuntu_version >= debian_version) and options.lpapi:
|
||||
if (ubuntu_version >= debian_version) and args.lpapi:
|
||||
# try rmadison
|
||||
import ubuntutools.requestsync.mail
|
||||
import ubuntutools.requestsync.mail # pylint: disable=import-outside-toplevel
|
||||
|
||||
try:
|
||||
debian_srcpkg = ubuntutools.requestsync.mail.get_debian_srcpkg(
|
||||
srcpkg, distro)
|
||||
debian_srcpkg = ubuntutools.requestsync.mail.get_debian_srcpkg(srcpkg, distro)
|
||||
debian_version = Version(debian_srcpkg.getVersion())
|
||||
debian_component = debian_srcpkg.getComponent()
|
||||
except udtexceptions.PackageNotFoundException, error:
|
||||
print >> sys.stderr, "E: %s" % error
|
||||
except udtexceptions.PackageNotFoundException as error:
|
||||
Logger.error(error)
|
||||
sys.exit(1)
|
||||
|
||||
if ubuntu_version == debian_version:
|
||||
print >> sys.stderr, ('E: The versions in Debian and Ubuntu are the '
|
||||
'same already (%s). Aborting.'
|
||||
% ubuntu_version)
|
||||
Logger.error(
|
||||
"The versions in Debian and Ubuntu are the same already (%s). Aborting.",
|
||||
ubuntu_version,
|
||||
)
|
||||
sys.exit(1)
|
||||
if ubuntu_version > debian_version:
|
||||
print >> sys.stderr, ('E: The version in Ubuntu (%s) is newer than '
|
||||
'the version in Debian (%s). Aborting.'
|
||||
% (ubuntu_version, debian_version))
|
||||
Logger.error(
|
||||
"The version in Ubuntu (%s) is newer than the version in Debian (%s). Aborting.",
|
||||
ubuntu_version,
|
||||
debian_version,
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
# -s flag not specified - check if we do need sponsorship
|
||||
if not sponsorship:
|
||||
sponsorship = need_sponsorship(srcpkg, ubuntu_component, release)
|
||||
sponsorship = need_sponsorship(srcpkg, ubuntu_component, args.release)
|
||||
|
||||
if not sponsorship and not ffe:
|
||||
print >> sys.stderr, ('Consider using syncpackage(1) for syncs that '
|
||||
'do not require feature freeze exceptions.')
|
||||
Logger.error(
|
||||
"Consider using syncpackage(1) for syncs that "
|
||||
"do not require feature freeze exceptions."
|
||||
)
|
||||
|
||||
# Check for existing package reports
|
||||
if not newsource:
|
||||
check_existing_reports(srcpkg)
|
||||
|
||||
# Generate bug report
|
||||
pkg_to_sync = ('%s %s (%s) from Debian %s (%s)'
|
||||
% (srcpkg, debian_version, ubuntu_component,
|
||||
distro, debian_component))
|
||||
title = "Sync %s" % pkg_to_sync
|
||||
pkg_to_sync = (
|
||||
f"{srcpkg} {debian_version} ({ubuntu_component})"
|
||||
f" from Debian {distro} ({debian_component})"
|
||||
)
|
||||
title = f"Sync {pkg_to_sync}"
|
||||
if ffe:
|
||||
title = "FFe: " + title
|
||||
report = "Please sync %s\n\n" % pkg_to_sync
|
||||
report = f"Please sync {pkg_to_sync}\n\n"
|
||||
|
||||
if 'ubuntu' in str(ubuntu_version):
|
||||
if "ubuntu" in str(ubuntu_version):
|
||||
need_interaction = True
|
||||
|
||||
print ('Changes have been made to the package in Ubuntu.\n'
|
||||
'Please edit the report and give an explanation.\n'
|
||||
'Not saving the report file will abort the request.')
|
||||
report += (u'Explanation of the Ubuntu delta and why it can be '
|
||||
u'dropped:\n%s\n>>> ENTER_EXPLANATION_HERE <<<\n\n'
|
||||
% get_ubuntu_delta_changelog(ubuntu_srcpkg))
|
||||
Logger.info("Changes have been made to the package in Ubuntu.")
|
||||
Logger.info("Please edit the report and give an explanation.")
|
||||
Logger.info("Not saving the report file will abort the request.")
|
||||
report += (
|
||||
f"Explanation of the Ubuntu delta and why it can be dropped:\n"
|
||||
f"{get_ubuntu_delta_changelog(ubuntu_srcpkg)}\n>>> ENTER_EXPLANATION_HERE <<<\n\n"
|
||||
)
|
||||
|
||||
if ffe:
|
||||
need_interaction = True
|
||||
|
||||
print ('To approve FeatureFreeze exception, you need to state\n'
|
||||
'the reason why you feel it is necessary.\n'
|
||||
'Not saving the report file will abort the request.')
|
||||
report += ('Explanation of FeatureFreeze exception:\n'
|
||||
'>>> ENTER_EXPLANATION_HERE <<<\n\n')
|
||||
Logger.info("To approve FeatureFreeze exception, you need to state")
|
||||
Logger.info("the reason why you feel it is necessary.")
|
||||
Logger.info("Not saving the report file will abort the request.")
|
||||
report += "Explanation of FeatureFreeze exception:\n>>> ENTER_EXPLANATION_HERE <<<\n\n"
|
||||
|
||||
if need_interaction:
|
||||
confirmation_prompt()
|
||||
|
||||
base_version = force_base_version or ubuntu_version
|
||||
base_version = args.base_version or ubuntu_version
|
||||
|
||||
if newsource:
|
||||
report += 'All changelog entries:\n\n'
|
||||
report += "All changelog entries:\n\n"
|
||||
else:
|
||||
report += ('Changelog entries since current %s version %s:\n\n'
|
||||
% (release, ubuntu_version))
|
||||
report += f"Changelog entries since current {args.release} version {ubuntu_version}:\n\n"
|
||||
changelog = debian_srcpkg.getChangelog(since_version=base_version)
|
||||
if not changelog:
|
||||
if not options.missing_changelog_ok:
|
||||
print >> sys.stderr, ("E: Did not retrieve any changelog entries. "
|
||||
"Do you need to specify '-C'? "
|
||||
"Was the package recently uploaded? (check "
|
||||
"http://packages.debian.org/changelogs/)")
|
||||
if not args.missing_changelog_ok:
|
||||
Logger.error(
|
||||
"Did not retrieve any changelog entries. "
|
||||
"Do you need to specify '-C'? "
|
||||
"Was the package recently uploaded? (check "
|
||||
"http://packages.debian.org/changelogs/)"
|
||||
)
|
||||
sys.exit(1)
|
||||
else:
|
||||
need_interaction = True
|
||||
@@ -315,37 +355,51 @@ def main():
|
||||
editor.edit(optional=not need_interaction)
|
||||
title, report = editor.get_report()
|
||||
|
||||
if 'XXX FIXME' in report:
|
||||
print >> sys.stderr, ("E: changelog boilerplate found in report, "
|
||||
"please manually add changelog when using '-C'")
|
||||
if "XXX FIXME" in report:
|
||||
Logger.error(
|
||||
"changelog boilerplate found in report, "
|
||||
"please manually add changelog when using '-C'"
|
||||
)
|
||||
sys.exit(1)
|
||||
|
||||
# bug status and bug subscriber
|
||||
status = 'confirmed'
|
||||
subscribe = 'ubuntu-archive'
|
||||
status = "confirmed"
|
||||
subscribe = "ubuntu-archive"
|
||||
if sponsorship:
|
||||
status = 'new'
|
||||
subscribe = 'ubuntu-sponsors'
|
||||
status = "new"
|
||||
subscribe = "ubuntu-sponsors"
|
||||
if ffe:
|
||||
status = 'new'
|
||||
subscribe = 'ubuntu-release'
|
||||
status = "new"
|
||||
subscribe = "ubuntu-release"
|
||||
|
||||
srcpkg = not newsource and srcpkg or None
|
||||
srcpkg = None if newsource else srcpkg
|
||||
if lpapi:
|
||||
# Map status to the values expected by LP API
|
||||
mapping = {'new': 'New', 'confirmed': 'Confirmed'}
|
||||
mapping = {"new": "New", "confirmed": "Confirmed"}
|
||||
# Post sync request using LP API
|
||||
post_bug(srcpkg, subscribe, mapping[status], title, report)
|
||||
else:
|
||||
email_from = ubu_email(export=False)[1]
|
||||
# Mail sync request
|
||||
mail_bug(srcpkg, subscribe, status, title, report, bug_mail_domain,
|
||||
options.keyid, email_from, mailserver_host, mailserver_port,
|
||||
mailserver_user, mailserver_pass)
|
||||
mail_bug(
|
||||
srcpkg,
|
||||
subscribe,
|
||||
status,
|
||||
title,
|
||||
report,
|
||||
bug_mail_domain,
|
||||
args.keyid,
|
||||
email_from,
|
||||
mailserver_host,
|
||||
mailserver_port,
|
||||
mailserver_user,
|
||||
mailserver_pass,
|
||||
)
|
||||
|
||||
if __name__ == '__main__':
|
||||
|
||||
if __name__ == "__main__":
|
||||
try:
|
||||
main()
|
||||
except KeyboardInterrupt:
|
||||
print "\nUser abort."
|
||||
Logger.error("User abort.")
|
||||
sys.exit(2)
|
||||
|
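Editorial sketch, not part of the changeset: the three positionals are now declared argparse arguments (source_package, optional release, optional base_version), so a hedged example with placeholder names is:
    $ requestsync -d unstable somepackage noble
Omitting the target release falls back to the current development series, as the warning path above shows.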
requirements.txt (new file, 8 lines)
@@ -0,0 +1,8 @@
python-debian
python-debianbts
dateutil
distro-info
httplib2
launchpadlib
requests
setuptools
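Editorial note, not part of the changeset: these are the runtime Python dependencies. A hedged installation sketch (the virtualenv path is a placeholder, and it is not verified here that every name resolves on PyPI exactly as written; dateutil, for instance, is more commonly published as python-dateutil):
    $ python3 -m venv ~/udt-venv
    $ ~/udt-venv/bin/pip install -r requirements.txt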
reverse-depends (294 lines)
@@ -1,4 +1,4 @@
|
||||
#!/usr/bin/python
|
||||
#!/usr/bin/python3
|
||||
#
|
||||
# Copyright (C) 2011, Stefano Rivera <stefanor@ubuntu.com>
|
||||
#
|
||||
@@ -14,135 +14,239 @@
|
||||
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
|
||||
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
|
||||
|
||||
import optparse
|
||||
# pylint: disable=invalid-name
|
||||
# pylint: enable=invalid-name
|
||||
|
||||
import argparse
|
||||
import sys
|
||||
|
||||
from devscripts.logger import Logger
|
||||
from distro_info import UbuntuDistroInfo
|
||||
from distro_info import DistroDataOutdated
|
||||
|
||||
from ubuntutools.rdepends import query_rdepends, RDependsException
|
||||
from ubuntutools import getLogger
|
||||
from ubuntutools.misc import codename_to_distribution, system_distribution, vendor_to_distroinfo
|
||||
from ubuntutools.rdepends import RDependsException, query_rdepends
|
||||
|
||||
Logger = getLogger()
|
||||
|
||||
DEFAULT_MAX_DEPTH = 10 # We want to avoid any infinite loop...
|
||||
|
||||
|
||||
def main():
|
||||
parser = optparse.OptionParser('%progname [options] package',
|
||||
description="List reverse-dependencies of package. "
|
||||
"If the package name is prefixed with src: then the "
|
||||
"reverse-dependencies of all the binary packages that "
|
||||
"the specified source package builds will be listed.")
|
||||
parser.add_option('-r', '--release', metavar='RELEASE',
|
||||
default=UbuntuDistroInfo().devel(),
|
||||
help='Query dependencies in RELEASE. Default: devel')
|
||||
parser.add_option('-R', '--without-recommends',
|
||||
action='store_false', dest='recommends', default=True,
|
||||
help='Only consider Depends relationships, '
|
||||
'not Recommends')
|
||||
parser.add_option('-s', '--with-suggests',
|
||||
action='store_true', dest='suggests', default=False,
|
||||
help='Also consider Suggests relationships')
|
||||
parser.add_option('-b', '--build-depends',
|
||||
action='store_const', dest='arch', const='source',
|
||||
help='Query build dependencies (synonym for '
|
||||
'--arch=source)')
|
||||
parser.add_option('-a', '--arch', metavar='ARCH', default='any',
|
||||
help='Query dependencies in ARCH. '
|
||||
'Default: any')
|
||||
parser.add_option('-c', '--component', metavar='COMPONENT',
|
||||
action='append',
|
||||
help='Only consider reverse-dependencies in COMPONENT. '
|
||||
'Can be specified multiple times. Default: all')
|
||||
parser.add_option('-l', '--list',
|
||||
action='store_true', default=False,
|
||||
help='Display a simple, machine-readable list')
|
||||
parser.add_option('-u', '--service-url', metavar='URL',
|
||||
dest='server', default=None,
|
||||
help='Reverse Dependencies webservice URL. '
|
||||
'Default: UbuntuWire')
|
||||
system_distro_info = vendor_to_distroinfo(system_distribution())()
|
||||
try:
|
||||
default_release = system_distro_info.devel()
|
||||
except DistroDataOutdated as e:
|
||||
Logger.warning(e)
|
||||
default_release = "unstable"
|
||||
|
||||
options, args = parser.parse_args()
|
||||
description = (
|
||||
"List reverse-dependencies of package. "
|
||||
"If the package name is prefixed with src: then the "
|
||||
"reverse-dependencies of all the binary packages that "
|
||||
"the specified source package builds will be listed."
|
||||
)
|
||||
|
||||
if len(args) != 1:
|
||||
parser.error("One (and only one) package must be specified")
|
||||
package = args[0]
|
||||
parser = argparse.ArgumentParser(description=description)
|
||||
parser.add_argument(
|
||||
"-r",
|
||||
"--release",
|
||||
default=default_release,
|
||||
help="Query dependencies in RELEASE. Default: %(default)s",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-R",
|
||||
"--without-recommends",
|
||||
action="store_false",
|
||||
dest="recommends",
|
||||
help="Only consider Depends relationships, not Recommends",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-s", "--with-suggests", action="store_true", help="Also consider Suggests relationships"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-b",
|
||||
"--build-depends",
|
||||
action="store_true",
|
||||
help="Query build dependencies (synonym for --arch=source)",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-a", "--arch", default="any", help="Query dependencies in ARCH. Default: any"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-c",
|
||||
"--component",
|
||||
action="append",
|
||||
help="Only consider reverse-dependencies in COMPONENT. "
|
||||
"Can be specified multiple times. Default: all",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-l", "--list", action="store_true", help="Display a simple, machine-readable list"
|
||||
)
|
||||
parser.add_argument(
|
||||
"-u",
|
||||
"--service-url",
|
||||
metavar="URL",
|
||||
dest="server",
|
||||
default=None,
|
||||
help="Reverse Dependencies webservice URL. Default: UbuntuWire",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-x",
|
||||
"--recursive",
|
||||
action="store_true",
|
||||
help="Consider to find reverse dependencies recursively.",
|
||||
)
|
||||
parser.add_argument(
|
||||
"-d",
|
||||
"--recursive-depth",
|
||||
type=int,
|
||||
default=DEFAULT_MAX_DEPTH,
|
||||
help="If recusive, you can specify the depth.",
|
||||
)
|
||||
parser.add_argument("package")
|
||||
|
||||
options = parser.parse_args()
|
||||
|
||||
opts = {}
|
||||
if options.server is not None:
|
||||
opts['server'] = options.server
|
||||
opts["server"] = options.server
|
||||
|
||||
# Convert unstable/testing aliases to codenames:
|
||||
distribution = codename_to_distribution(options.release)
|
||||
if not distribution:
|
||||
parser.error(f"Unknown release codename {options.release}")
|
||||
distro_info = vendor_to_distroinfo(distribution)()
|
||||
try:
|
||||
data = query_rdepends(package, options.release, options.arch, **opts)
|
||||
except RDependsException, e:
|
||||
Logger.error(str(e))
|
||||
sys.exit(1)
|
||||
options.release = distro_info.codename(options.release, default=options.release)
|
||||
except DistroDataOutdated:
|
||||
# We already logged a warning
|
||||
pass
|
||||
|
||||
if options.arch == 'source':
|
||||
fields = ['Reverse-Build-Depends', 'Reverse-Build-Depends-Indep']
|
||||
if options.build_depends:
|
||||
options.arch = "source"
|
||||
|
||||
if options.arch == "source":
|
||||
fields = [
|
||||
"Reverse-Build-Depends",
|
||||
"Reverse-Build-Depends-Indep",
|
||||
"Reverse-Build-Depends-Arch",
|
||||
"Reverse-Testsuite-Triggers",
|
||||
]
|
||||
else:
|
||||
fields = ['Reverse-Depends']
|
||||
fields = ["Reverse-Depends"]
|
||||
if options.recommends:
|
||||
fields.append('Reverse-Recommends')
|
||||
if options.suggests:
|
||||
fields.append('Reverse-Suggests')
|
||||
fields.append("Reverse-Recommends")
|
||||
if options.with_suggests:
|
||||
fields.append("Reverse-Suggests")
|
||||
|
||||
for field in data.keys():
|
||||
if field not in fields:
|
||||
del data[field]
|
||||
def build_results(package, result, fields, component, recursive):
|
||||
try:
|
||||
data = query_rdepends(package, options.release, options.arch, **opts)
|
||||
except RDependsException as e:
|
||||
Logger.error(str(e))
|
||||
sys.exit(1)
|
||||
if not data:
|
||||
return
|
||||
|
||||
if options.component:
|
||||
for field, rdeps in data.items():
|
||||
filtered = [rdep for rdep in rdeps
|
||||
if rdep['Component'] in options.component]
|
||||
if not filtered:
|
||||
del data[field]
|
||||
else:
|
||||
data[field] = filtered
|
||||
if fields:
|
||||
data = {k: v for k, v in data.items() if k in fields}
|
||||
if component:
|
||||
data = {
|
||||
k: [rdep for rdep in v if rdep["Component"] in component] for k, v in data.items()
|
||||
}
|
||||
data = {k: v for k, v in data.items() if v}
|
||||
|
||||
result[package] = data
|
||||
|
||||
if recursive > 0:
|
||||
for rdeps in result[package].values():
|
||||
for rdep in rdeps:
|
||||
build_results(rdep["Package"], result, fields, component, recursive - 1)
|
||||
|
||||
result = {}
|
||||
build_results(
|
||||
options.package,
|
||||
result,
|
||||
fields,
|
||||
options.component,
|
||||
options.recursive and options.recursive_depth or 0,
|
||||
)
|
||||
|
||||
if options.list:
|
||||
display_consise(data)
|
||||
display_consise(result)
|
||||
else:
|
||||
display_verbose(data)
|
||||
display_verbose(options.package, result)
|
||||
|
||||
|
||||
def display_verbose(data):
|
||||
if not data:
|
||||
print "No reverse dependencies found"
|
||||
def display_verbose(package, values):
|
||||
if not values:
|
||||
Logger.info("No reverse dependencies found")
|
||||
return
|
||||
|
||||
def log_package(values, package, arch, dependency, visited, offset=0):
|
||||
line = f"{' ' * offset}* {package}"
|
||||
if all_archs and set(arch) != all_archs:
|
||||
line += f" [{' '.join(sorted(arch))}]"
|
||||
if dependency:
|
||||
if len(line) < 30:
|
||||
line += " " * (30 - len(line))
|
||||
line += f" (for {dependency})"
|
||||
Logger.info(line)
|
||||
if package in visited:
|
||||
return
|
||||
visited = visited.copy().add(package)
|
||||
data = values.get(package)
|
||||
if data:
|
||||
offset = offset + 1
|
||||
for rdeps in data.values():
|
||||
for rdep in rdeps:
|
||||
log_package(
|
||||
values,
|
||||
rdep["Package"],
|
||||
rdep.get("Architectures", all_archs),
|
||||
rdep.get("Dependency"),
|
||||
visited,
|
||||
offset,
|
||||
)
|
||||
|
||||
all_archs = set()
|
||||
# This isn't accurate, but we make up for it by displaying what we found
|
||||
for rdeps in data.itervalues():
|
||||
for rdep in rdeps:
|
||||
if 'Architectures' in rdep:
|
||||
all_archs.update(rdep['Architectures'])
|
||||
for data in values.values():
|
||||
for rdeps in data.values():
|
||||
for rdep in rdeps:
|
||||
if "Architectures" in rdep:
|
||||
all_archs.update(rdep["Architectures"])
|
||||
|
||||
for field, rdeps in data.iteritems():
|
||||
print field
|
||||
print '=' * len(field)
|
||||
rdeps.sort(key=lambda x: x['Package'])
|
||||
for field, rdeps in values[package].items():
|
||||
Logger.info("%s", field)
|
||||
Logger.info("%s", "=" * len(field))
|
||||
rdeps.sort(key=lambda x: x["Package"])
|
||||
for rdep in rdeps:
|
||||
line = '* %s' % rdep['Package']
|
||||
if all_archs and set(rdep['Architectures']) != all_archs:
|
||||
line += ' [%s]' % ' '.join(sorted(rdep['Architectures']))
|
||||
if 'Dependency' in rdep:
|
||||
if len(line) < 30:
|
||||
line += ' ' * (30 - len(line))
|
||||
line += ' (for %s)' % rdep['Dependency']
|
||||
print line
|
||||
print
|
||||
log_package(
|
||||
values,
|
||||
rdep["Package"],
|
||||
rdep.get("Architectures", all_archs),
|
||||
rdep.get("Dependency"),
|
||||
{package},
|
||||
)
|
||||
Logger.info("")
|
||||
|
||||
if all_archs:
|
||||
print ("Packages without architectures listed are "
|
||||
"reverse-dependencies in: %s"
|
||||
% ', '.join(sorted(list(all_archs))))
|
||||
Logger.info(
|
||||
"Packages without architectures listed are reverse-dependencies in: %s",
|
||||
", ".join(sorted(list(all_archs))),
|
||||
)
|
||||
|
||||
|
||||
def display_consise(data):
|
||||
def display_consise(values):
|
||||
result = set()
|
||||
for rdeps in data.itervalues():
|
||||
for rdep in rdeps:
|
||||
result.add(rdep['Package'])
|
||||
for data in values.values():
|
||||
for rdeps in data.values():
|
||||
for rdep in rdeps:
|
||||
result.add(rdep["Package"])
|
||||
|
||||
print u'\n'.join(sorted(list(result)))
|
||||
Logger.info("\n".join(sorted(list(result))))
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
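Editorial sketch, not part of the changeset: the rewritten tool accepts a binary package or a src:-prefixed source package, and the new -x/--recursive and -d/--recursive-depth options walk the reverse-dependency tree. Placeholder examples:
    $ reverse-depends -r noble src:openssl
    $ reverse-depends -b -x -d 2 libfoo1
The second form queries build dependencies (-b is now a store_true synonym for --arch=source) and recurses two levels deep.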
run-linters (new executable file, 19 lines)
@@ -0,0 +1,19 @@
#!/bin/sh
set -eu

# Copyright 2023, Canonical Ltd.
# SPDX-License-Identifier: GPL-3.0

PYTHON_SCRIPTS=$(grep -l -r '^#! */usr/bin/python3$' .)

echo "Running black..."
black --check --diff . $PYTHON_SCRIPTS

echo "Running isort..."
isort --check-only --diff .

echo "Running flake8..."
flake8 --max-line-length=99 --ignore=E203,W503 . $PYTHON_SCRIPTS

echo "Running pylint..."
pylint $(find * -name '*.py') $PYTHON_SCRIPTS
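Editorial note, not part of the changeset: the script finds Python entry points by their python3 shebang and then runs black, isort, flake8 and pylint over them; because of set -eu, the first failing linter aborts the run. A hedged local invocation from the repository root:
    $ ./run-linters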
running-autopkgtests (new executable file, 81 lines)
@@ -0,0 +1,81 @@
#!/usr/bin/python3
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-

# Authors:
# Andy P. Whitcroft
# Christian Ehrhardt
# Chris Peterson <chris.peterson@canonical.com>
#
# Copyright (C) 2024 Canonical Ltd.
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR PURPOSE.
# See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
"""Dumps a list of currently running tests in Autopkgtest"""

__example__ = """
Display first listed test running on amd64 hardware:
$ running-autopkgtests | grep amd64 | head -n1
R 0:01:40 systemd-upstream - focal amd64\
upstream-systemd-ci/systemd-ci - ['CFLAGS=-O0', 'DEB_BUILD_PROFILES=noudeb',\
'TEST_UPSTREAM=1', 'CONFFLAGS_UPSTREAM=--werror -Dslow-tests=true',\
'UPSTREAM_PULL_REQUEST=23153',\
'GITHUB_STATUSES_URL=https://api.github.com/repos/\
systemd/systemd/statuses/cfb0935923dff8050315b5dd22ce8ab06461ff0e']
"""

import sys
from argparse import ArgumentParser, RawDescriptionHelpFormatter

from ubuntutools.running_autopkgtests import get_queued, get_running


def parse_args():
    description = (
        "Dumps a list of currently running and queued tests in Autopkgtest. "
        "Pass --running to only see running tests, or --queued to only see "
        "queued tests. Passing both will print both, which is the default behavior. "
    )

    parser = ArgumentParser(
        prog="running-autopkgtests",
        description=description,
        epilog=f"example: {__example__}",
        formatter_class=RawDescriptionHelpFormatter,
    )
    parser.add_argument(
        "-r", "--running", action="store_true", help="Print running autopkgtests (default: true)"
    )
    parser.add_argument(
        "-q", "--queued", action="store_true", help="Print queued autopkgtests (default: true)"
    )

    options = parser.parse_args()

    # If neither flag was specified, default to both, not neither
    if not options.running and not options.queued:
        options.running = True
        options.queued = True

    return options


def main() -> int:
    args = parse_args()
    if args.running:
        print(get_running())
    if args.queued:
        print(get_queued())

    return 0


if __name__ == "__main__":
    sys.exit(main())
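Editorial sketch, not part of the changeset: besides the piped example embedded in __example__ above, the two flags restrict the output, and both default to on when neither is given. Placeholder usage:
    $ running-autopkgtests --queued
    $ running-autopkgtests -r | wc -l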
Some files were not shown because too many files have changed in this diff.