File b4-0.8.0+2.obscpio of Package b4

07070100000000000081A400000000000000000000000161F953D70000008C000000000000000000000000000000000000001600000000b4-0.8.0+2/.gitignore*.swp
*.pyc
*.pyo
*.json
*.pdf
test.log
build/*
dist/*
MANIFEST
.idea
__pycache__
*.egg-info
*.patch
*.mbx
*.maildir
*.cover
*.thanks
.venv
07070100000001000081A400000000000000000000000161F953D700000062000000000000000000000000000000000000001700000000b4-0.8.0+2/.gitmodules[submodule "patatt"]
	path = patatt
	url = https://git.kernel.org/pub/scm/utils/patatt/patatt.git
07070100000002000041ED00000000000000000000000361F953D700000000000000000000000000000000000000000000001100000000b4-0.8.0+2/.keys07070100000003000041ED00000000000000000000000461F953D700000000000000000000000000000000000000000000001900000000b4-0.8.0+2/.keys/openpgp07070100000004000041ED00000000000000000000000361F953D700000000000000000000000000000000000000000000002D00000000b4-0.8.0+2/.keys/openpgp/linuxfoundation.org07070100000005000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000003800000000b4-0.8.0+2/.keys/openpgp/linuxfoundation.org/konstantin07070100000006000081A400000000000000000000000161F953D700005C66000000000000000000000000000000000000004000000000b4-0.8.0+2/.keys/openpgp/linuxfoundation.org/konstantin/default-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBE64XOsBEAC2CVgfiUwDHSqYPFtWxAEwHMoVDRQL5+Oz5NrvJsGRusoGMi4v
wnToaNgD4ETPaaXHUAJdyy19BY+TCIZxDd+LR1zmMfzNxgePFjIZ6x4XIUMMyH6u
jDnDkKJW/RBv262P0CRM9UXHUqyS6z3ijHowReo1FcYOp/isN9piPrKzTNLNoHM2
re1V5kI8p8rwTuuQD/0xMPs4eqMBlIr7/1E2ePVryHYs5pPGkHIKbC9BN83iV2La
YhDXqn3E9XhA1G5+nPYFNRrTSEcykoRwDhCuEA51wu2+jj0L09OO4MbzBkSZKASe
LndRVyI6t0x8ovYXcb7A4u0jiH7gVjcNcJ5NfwFUqaOQOxSluahhI497SJULbKIP
Pu3cv4/O/3Urn3fQsa689xbbUkSPhfGKG73FYnAuC5vxzBSkOB7iFRBhA37NfN5V
OhCbWfXipdBDxSYunac6FjArBG1tfaF8BflkQmKLiBuiH5zwkgju5kOzrko5iISL
0CM4zUTAUWbg1QnPvRjPzoT6tlsCOBY6jZK921Ft+uVjHg424/CVZ9A+kA33+Dfq
otnzNK4CLNnLT4OEPM6ETxLnA6PyldUjSTUekZ75/Rp+aJHt5v7Q2mqOcB/5ZA6A
+vaBgZAMfCZbU+D1FeXD8NNEQcRDWdqe0S/ZgXdU+IyqyQ3Ie4vqGGYpkQARAQAB
tDVLb25zdGFudGluIFJ5YWJpdHNldiA8a29uc3RhbnRpbkBsaW51eGZvdW5kYXRp
b24ub3JnPokCOwQTAQIAJQIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AFAlON
4fQCGQEACgkQ5j7cqTKd0H50bA//Q80DRvvB/cJjayynTjkX5rbL6MPS1X3+QRL9
AdhXp6NxsFAU8k/yScVNDnA9FpTiEwmz2SVyGA2zd7ldd14S8rSw8mzrWq0J9Ltk
guhUqbWDit+/5uvWpg97pNq3b6bEvUlFijn20NHtwr4Qz6cwSdor8BQInGqRUr/j
/lO1wYGhk2MdPXzmXdGw4FRNsaNNIoF/48kNb1OLKztBtl0feuA04OcVYN3vQn3Q
SS+1qhV4HTZGAoZlZG66bqEPFjxetZbZW2Zwi3/2Ad7fYaoyeI7B3SJ/a8l3rn7P
jRQrdgoykB1qK8lSM7GwOVRZ7LMTaf+Mz2g/48DzBG+hyV4yZDTB45xm5j49vEHk
dW1QvU1s9NjCUWB7OtC1DOyJcKD8VxO+mVxfEuPDiXeumNFi7NevUCVC8ktBO2yO
Kznyx776X8mo2d9SiUVP02rUM0+hWFrmQKuYsY9G+Phac7oPbWw0IlHoCgz8oHrb
8UVNAl2G/vMAYabCcELigcomQNXMQDd0xvPuSII7QthiHeLGmSgE6c285V8PNgJ0
QgxehxJbM8pAFFV+DDG1yaurKuQkuGZ+GhLVe4nuKpK8PbVMIrcc+oH4MeWDEIWz
z3RXWIP8+dZCp9HyzSPbA53IvyaaFvAWl/nL/1/Wq6zT2d2o8lKIe/vEKOenrArw
wHW0/AC0KEtvbnN0YW50aW4gUnlhYml0c2V2IDxtcmljb25Aa2VybmVsLm9yZz6J
AjgEEwECACICGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJTjeHzAAoJEOY+
3KkyndB+3G0P/0LxLEIYD2EG8/ZQEj25FMNbw9n6rk7Fe/PgMKe8MZpNjpcyuuo6
ZW+c1B4Dew79rOu3kKJVgUWGS/6TQR97vQeVRLvBh68FSeaKyVDu3a9jL5ocWgZX
wzgoF9rSjrRhxIQllMPrB/I/GQnbta91DWSnvD24r9vg+m6LmvQhW2ZDY0DbJrOj
zlH8DsEYg+FmxtUQGj6gQfb/n1WWHhYuM3OIOzHJgSnlCCYLxnjf5iK7LgtEzeGt
0VepiUUdZk5IxI/nFYv8ouXHZrt76HM4SaRowq8Sm5YP+4mX0cVUPBZZIQnrsbVq
CfQwr2zaxTExlJ3kEeH74JO8e7nBK9YxuLq0dkwuHfROh03rrOlJXcxHvd+s7U9D
1wrz4SOFMWaUgFGwNhA+ToW3T5Dd7Oomusze4I5HGQUVHXK4zc65u+WuU4ZXDBWG
+5Y8y31IAwqX6qIwgoEHewFd1qLCZUVJCi2MCcR1MiIsVhjPGK+C1SWdNErVlq5b
8B/3IbzcHDFTV/RHENYoq0D4fyMBmyoS+erNy2+UsOy3pDhrGxbg2VWVkbTCssj3
pirNae9gNIQrZA9NdvHEeCrrA7W14zsgKZqWjjcJQLixjCxWPTfYq7PzAydSTa4f
RlGyHb6wTteLgJmQLdjULH2zyGO9xh7sjCVj4AycyNvnpBWRUPaDf7ABtDZLb25z
dGFudGluIFJ5YWJpdHNldiAoRmVkb3JhKSA8aWNvbkBmZWRvcmFwcm9qZWN0Lm9y
Zz6JAjgEEwECACIFAk7NMp8CGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJ
EOY+3KkyndB+EsEP/0DBPiCacPeBY6gWVc67Bt5KXbsJkonSr6ti+plXHhqe4mG7
Q0qBl7udgx4zYpUFVeuSPJ2rcQzOmVFpLWdDkkpPVqSESqwBAA9bgVfYDYtgSNwn
3lRuTzoaJJ742qpn+WNwg3K3WY3Xd5Ggs+xpLStLFI18Mz7cDhOB5kM9HGgxyDxA
8jGsz+5vGlDp8GHlJrG8jB8n/LamzjvQNlOZYyWCF7G+RAX9yoL39dHZz35SqcDU
9PdI4b239ihMPe01xQnoCjKxvhIcAQxwU3LenR1NDuj/BPD7k6g/OPKY1sWrlk+l
MLR8mIYRlWYstMNs+ztIsuIgtjbeewM8H58CF+W124Iib4r07YAyn8umtrL3KijI
lMUymOmuQrXGALiVdlqyH8u7kr4IHrtS0Am9v5ENyHWs/ExHXT7EvgLsRr0T+aKD
JOgVg0EdR7wT+FgSTv0QlQfGL+p2RTTrbFobtlr9mucBwELonPNWijOgDTa/wI9o
mu27NVjjsSP+zLhhjY73SSOFMT7cwHymRgGMo8fxFdkJB4xCfcE3KT7yaV+aafYN
IkxStPYFTvQZbU6BvHBATObg/ZYtTyS1M4fJOkfJGYUqBVwhB+B8Ijo/2iofwGon
XNtwO9Z6Bt9wBLxWiheQY1Ky/UIXJcMsYC/WgIhYx+Dlm8Exaoyc9MPdClLY0cop
yicBEAABAQAAAAAAAAAAAAAAAP/Y/+AAEEpGSUYAAQIAAAEAAQAA/9sAQwAFAwQE
BAMFBAQEBQUFBgcMCAcHBwcPCgsJDBEPEhIRDxEQExYcFxMUGhUQERghGBocHR8f
HxMXIiQiHiQcHh8e/9sAQwEFBQUHBgcOCAgOHhQRFB4eHh4eHh4eHh4eHh4eHh4e
Hh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4e/8AAEQgAZABkAwEiAAIR
AQMRAf/EAB8AAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYHCAkKC//EALUQAAIBAwMC
BAMFBQQEAAABfQECAwAEEQUSITFBBhNRYQcicRQygZGhCCNCscEVUtHwJDNicoIJ
ChYXGBkaJSYnKCkqNDU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3
eHl6g4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS
09TV1tfY2drh4uPk5ebn6Onq8fLz9PX29/j5+v/EAB8BAAMBAQEBAQEBAQEAAAAA
AAABAgMEBQYHCAkKC//EALURAAIBAgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEH
YXETIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEXGBkaJicoKSo1Njc4OTpDREVG
R0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKTlJWWl5iZmqKj
pKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX2
9/j5+v/aAAwDAQACEQMRAD8A+y6KKKACiiory4gtLWW6uZVihhQvI7HAVQMkn8KA
JaxvEPivw34fXdrWt2Nj6LLMAx+i9TXyl8bf2kNb1G7n0jwSz6fYqxT7SBiaUeoP
8I+nNfP13b+IPEN0097qshkckszuWLfnQB+g4+M3w4M4hXxJEzHoRE5H8q6HSvGf
hfVHVLLWbWRm+6CSufzxX53aL4dmtCHlvd7A54Y816r4JvprZESQs6AjkHnHrQB9
tg5GRRXAfCHxBNqNlJpt05kaFBJC5OSUPBB+h/nXf0AFFFFABRRRQAUUUUAFcJ+0
BLND8HvEbwOUk+y4BB9WUEfka5r4nftFfDnwLfzabc30upahCdssNmAwjb0ZyQAf
YZrw74tftV+F/GXgbUfDVno2oWTXgRftDOrgKHDH5RjrjHWgDwwmJrlpT97uc8fh
U6X0SOpjkAI64NctHdaHNcyTXOr3JTf8sIh8vK+7c4P4Vct77QwcW+nRXOAeZL1x
k54P3R+X60AdjYapaGVVMjOOpx2Ndb4av1O4xSlVJ43L09q85025aYDbYeHICVx+
+vApB9e/Htz9a6zR4tZZg1rH4UYEYYJfE/iBjj6cigD6B+FfjCLSdXtpXdXib91L
g8bT1x+hr6TjdZI1kRgysAQR3Br4o8OWPiMeWWstBbJyfKmbOc/7np/kV9H/AAp8
XTtY2+h69CLe4jUJbyrlo3UDoW/hI7ZxmgD0qio2mhVdzSoF9SwxTba6trnd9nni
l2nDbGDY/KgCaiiigAryv9qvxpd+Bfgtq+q6dKYr+4KWVtIOsbSHBYe4UMR716pX
k/7Wfgu+8c/BLV9L0uJptQtSl7bRL1kaM5Kj1JUtj3xQB+ZF5cy3ErSyuzsxySxy
a0NK8M65qmmT6hYafLPbQEh2XGcgZOB1OMjpWTKjRuyOpVlOCCMEGve/gTclfh3d
TWVsJ7u2vPLZcE4VsNuIHOO34UAeTeBvDE3iDWhYsGjA+9kYxXtmmeBrTS9L8iGI
Fc5Zm4ya7nTPBNnF4rbX4BFGJ4RuiQcbu5FbPiDQ3vNOaCFASR8qkkDPbpQB483h
S0vneKKRN6nDANnFOh+F6SsTvRgBnit25+GOut5tz9rtYicFEt/MRvfc2ea67wXo
slvPFZ3EjMBj5gxI/M0Acp4Z+FYa/jZpjFGDyA2M1z3xM8N+PIvila+HvBNzrcou
bSOSOG0uHIzllJ4OAPl5PavXfFHgXxrqPisWul38Wn6QBuEqNJ5ztjIBwMAZ4+lf
Q3wm8KQeGfDEEbRo1/KM3E/3mY56bjzjvj3oA8p+C3wF1yyhh1P4keJdR1K4wGXT
EvHMSf8AXRs/MfYce5r6BsLO1sbdbezt4reFRgJGgUD8BU9FABRRRQAUUUUAeC/G
z9l/wR8Qb+fW9Oml8Oa3MS0s9tGHhnb+88XHPqVIJ6nNeM+Cfgf8SvhH4rmuL3+z
NT8N3SmOe6trjGwgHY2xsMG5I4BHzHmvuCvO/jdqccGl2dhvAeWXzCCew4/qfyoA
8msBGtrEYY/LToFxjGD6VoI6LzkDPSswXcG3bvUBT61C87TujI/yh9tAHUslq9iz
uQABzWRpflSXMUkKHYXG1sYB5rIvtdg0+6Swu/PLOCeIzsA926VFpOn6be6gJrXW
NmwAiMXeAB6AZoA98truwiazhkIV5o/lbsSOCPryK66xAFqmDkdR+deMzLaXN7YW
GmXUk13a7ZM7iwIJwQT06f0r2e0j8q2jjPVVANAEtFFFABRRRQAUUVT1zUrbR9Iu
tTvGCwW0TSOc4yAOn1PSgDz34ofFaHwbrg0iPT1u5jb7y7SldrnO1doBJ457da+d
/GfjTUfFGuZ1nUbh7iTAjs7bgomePlX7o92JrM8W6vr3j/xVf6nZypaR3D7Zb9+R
GgxiOAfxEAAbvb8K2vDnh7TtGh8m0QvKxBlnkO6SU+pagBbPRop4Mytdh/8AbnYn
9DWlaaRdWiutvcvNGwyUkOcHtg+v51ahU+ZlW4HGF5P61p2TKhIVTjPTjmgC7YpF
qtosMiL5ycMpGGBre8J+FZ/t0cyPD5YOTujBYfTIrjfEM1ytqL/RQjX0C72jZgol
j3bWGT0wTnJ/rWnoPjy7s9dh0i/ihtpwMM4mV0zgH7ynB6igD23RtEg/tIXpjUeU
oAwOprpqzfDV5bXmkxSW77sDD+obvWlQAUUUUAFFFFABXyp+1N8Tv7W8RReANFlL
2sJ8y/eM8OQcbc9xk4x659K9q/aE8cJ4A+Fuq62rhbxozBaDPPmMOD+Ayfwr4d+H
JuL69bWNRLTXUw3Mz888nP0GfzPtQB6foduYreJ5lw235I/4Yx6Yrfi5jwrDPris
CxusRjaN36kmteORCi7gwz70AXYz+8xgdM5B6GpluPKkhJ/ilVfwJqohBbOeBkAk
VDqU3ly6ep3fNcoOfr3oAwrLXXbxXb2WcxPJqduw/wBkeScfma5tHLmGUzmSYX21
hwNo2gY+hGKwIta+yfa9ZVeYpdXkjPXc5Nuij/voio9HuS16qKxJ8tLgD1KsVP6f
yoA+tP2Wtfkv9M1XRbuQvdae6rljksnO0/liva6+S/gzrY8N/F3T5HfbaayhsZee
PM6xn8wBX1pQAUUUUAFFFFAHyH/wUOv7rb4c0oSkWjq0rIP4mLqMn8B+tec+CoEg
8OWzxFlZwCTnnrRRQB0lsdybsAHGeB3rQ09y6DPGTjgkds0UUAakIPkwkMRkEmsj
xdK6X+j4OcXg6/7poooA8e1DK/DWzlB+eW9uFc+oNzEx/WNag8H3Er+IdF3H/WQS
o3uMmiigD07U5pI9KsryNis0Sxyow/hdSCDX3HpsrT6fbzPjdJErNj1IBoooAsUU
UUAFFFFAH//ZiQI4BBMBAgAiBQJPIXkoAhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIe
AQIXgAAKCRDmPtypMp3QfkDLD/0bYj1H0kZDY1HprMqAhugR9Pi61ZSEkBof1fOf
qZuz55cKdZxsQCVMRLz3P5WFv9dzqeb6WP2khy9xDm4aMQ5nf2kMSKrkiXKcy9S+
r3m6BdR1dt3i2Y6HB9JLV/IzESsUJDEvO17mNMIW8YZeev5xO8QwV2zWUuUvYjKg
4/3yXmByrsvfWG1ew7sMJwgDMCCI8bXzVUC0TkTzgDmjvE/GHPqcPsGVkKFGqptc
yBWcZmEKuJFzAAgqwmMUCZF6Cmej4wDbt1WeXpsjNigFl8gWqGiCZTFHEuFJtVJe
3Mj0vWBAoIre9MzOoUgHpX5ke1q3KXC/pAfe71gQZvekfMss4yk7NzLygrRS2BKy
b12Hl7JWUpxVZm6YsL/h3DLGA6MGwjDA+99vZPjJbLfnPVjhFlKlu5kiwlFbnImY
0jvqK7KyNO7vnKp3Zct/gbGq1/wSsbRHn2ZkdvVgWH8+S2kq+2ZGL22hdIOx0CkC
DUqWIFTLkgqX+AyPmTFiZ16E/A8aXRf0+1Pu+k7xjwJ+zkAVQ7cVBieaqAZc8vvo
grUaSDjk0XWLD2dngD5g10KXN4OCvIkUlccOWc0vTYJczRayb8I+2AJ2Lf5zG8we
kf01ughgngP/3/iUSy3XI+xwA2HJsuCg7mawHTO2UE0ldQW1l98+k+R+29diERyI
6cMC8bRZS29uc3RhbnRpbiBSeWFiaXRzZXYgKExpbnV4Rm91bmRhdGlvbi5vcmcg
ZW1haWwgYWRkcmVzcykgPGtvbnN0YW50aW5AbGludXhmb3VuZGF0aW9uLm9yZz6J
AicEMAECABEFAlON4esKHQBEdXBsY2F0ZQAKCRDmPtypMp3Qfj9ID/43HgJWx83R
3spmufpl5vqpIUHK4uFeuzGfHDUl2TmheoXnTbYb+qhqowmjAy4WcVzrcGjp8uJ3
TxBr2xZTlMaRn4a/aVNORlV3hgM/nAk9RoA9wti3CaJ3GlRkx3w/qG9toznWSK4u
5JnCzrcfBr/FKKCmw7oeGHBQkPnGfXJxjG+4Iuknn5sdV24k075wpXL4uZRsG3U/
N0cPO8Nf/8YMzeVkiTmM3W6Zy7ubKl4RpizSWnRaYl7zxJqQ5GxSK9PtyTPCHTik
HFXABipRpIWGozS1McrUp1gAM3mQSoeL7qsxfoN0Zxn0WqQFqKCrAzcwsgbWRAMI
uH2ndIeP0DET6fyFRYI/XTOF/Kda8XbqAqKkyDqWiQJ2CUl146Whkdsa2M64BLr7
VBhE7QTx7pjMyEISBc2weMSvrAaH9bNLSEH0GiSPFBTAo+DF4wr8Gy6E0bHZ/k5+
MFpwPU5hgfi2Uflo2IhmwLOpXR1UvQKJ/OPsVQNMePNx6ItJob24NjK+vXks81nL
E36Tgknq4i8yp5Tf1ifWthdXYuAygxb0L4dVhzs4ddDPyJROT099R1Nfp/bKknyS
gegxnDoVMANHtJFGvfMLmz8BGS4JkDDK3k5vl7i4D2abd36IZ+M68WRmI9V64jZf
TTp2VpivHKlaDE1iX+6ESSrbF2PlTYCj47QmS29uc3RhbnRpbiBSeWFiaXRzZXYg
PGljb25AbXJpY29uLmNvbT6JAk4EEwEIADgWIQTeDmbjLx/dCQJma5bmPtypMp3Q
fgUCWunU2gIbAwULCQgHAgYVCgkICwIEFgIDAQIeAQIXgAAKCRDmPtypMp3QfsFw
EACUcFAleVyqsMuCFC61n/mOeapk6TsNCop9sfP64a2bhYM31DRkZHco8xrUB0dZ
6OHozzIzIK/v0SzurS3n7gHKfuktbSTvAbJMPubM8iXJyaKL/+DGHt6qJynD3tHt
SSR4c9aFrlnrn3Gefa3eQrgdNcieQcMCXOdePDHZyWKQ4gfe6zxb63SbMv3Ms25h
cmOf+HA1S8fM9bKrHEvebm23+2WOrQR/d5OPRXnWDz9yz+++eWQfdG+FUfxUz7ul
OG+C8jxzGjrAWgsvrAq48625GUrvuU2u5BJD2P1IWvEpQtFm3XnWvqP0hy5oT2i4
hHvPxumY6XuZsBvEQygGajj94xZS5Gn0kqGV5XV/I1Z4kY00Ig0KHEG+LL1O+eu2
ntfaqS2CZSlwbnfluqdgNNKs6lYsolvpqSCAXVVV27pkWo/To3E2RFvU7v33468K
ijBEHAjWlacmC6Ixs7PRmHiNGWK5Ewn0suzmPBy8lFtKBhT0JUyK12vkfrSFHs48
5TDk3uDQiyYh8lMkSuQIlBN9wfFMyPZTlfInNc7Aumczplkl6I5qz5rfaxz1uWg9
zI7deYAEoOJnaJG74stAXPx+iih2PbOpviXcr/ASL33Xg7A6ZF9Q3mmHPLym4q47
2VOaNj0AjLIUZC76oQdEXJz7Is3A/YSdgEIomBvrCGU3R7QuS29uc3RhbnRpbiBS
eWFiaXRzZXYgPGhiaWNAcGFyYW5vaWRiZWF2ZXJzLmNhPokCTgQTAQgAOBYhBN4O
ZuMvH90JAmZrluY+3KkyndB+BQJa6dUAAhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4B
AheAAAoJEOY+3KkyndB+w1IQAJIXCI5iJSSvX0AP3JuTwU1IOXBXMrwOlaltpWFC
s3Md6slh4gD6bruTYYhbOjRmJuMKDPzxo7WaQ3ru29M0HftQxQKhhi7DAfi/7Kp3
F33t5d2mpoimK8Gc4D5kXGFQmKGuuNjs7hrOol8GUds8RIgQpplZ+4GItNLXzOpt
3O4iYkIQQrVpqdeT3xQv4OGjloDzoEk3skMgTyXWyI6wa2sqsptA6ocLdzCmF5PS
U7Uidm/TYBM+TneJPsvYOBpKxngWDTmgMXxUWIkkU+Wf2nNecTnWIcfq1e2786zg
rSeBCD3yxhfy1AUaWgwJf4v4ogbj8vBQ2EGJT9i+nQnNnW4RVRjY/uouCedrFr2C
49obuW97zi6lOyhfJPOsRDD5ODEn4BM5R9TrN7uKCMcPbb8tbg3ZjaMXv7z6KCrA
d7hLRgUTorO8uEFVIIY9TUc90NXYKrWc6/or+W/NTforIox4A5qAZkVcQBSLC7t+
6v+7wYz4DRP3oLlFPpbT7+gjrU6ub1j+/MAw8Vamonf0+2xnP8P9I8k8qU86Uir3
zAovZ3LRjdxVv0BEL8ydYK/Ye9CUVDmtyd84V7Ii2/yXZlrOYxy3QzoBVH+QjhDQ
huQkbIWRiC9LTjCbhPr7HJbAZNUGnODd4mpn/KrvDOXSvWV5RRpP/lGKV3asFMrH
4sqXuQINBE64XOsBEADWJbYsPaPfoe3VAKPRgUnkQOaXmnbVaBfGnVlpV/1gx+Px
uYKZJscxGeL9102hZtMJXW5CZfH8fCHtoAkUtvpSFAN+cx2Y6Hf/j7CKFnzDYgJJ
vccopHA2b0XRQPPmjz5Q7KUF5OK9UhCTXx7qLumPgdXbEoGwMGJt5Foxn9XD8I8h
7W0SwV30hRnYGcBv1dkRKrFvR6j8ve/wykJ6Tl5gzBIFb/4D1CUJJ4ni3yqX/JUr
QQjEasJYt5YXH+XB/J5FOvAHT9b1WfcBw/UqXw0OHNkcq9CIbbJfxc6qWIsKiISU
HiJDW+RhcRVv/kecin3VBkQHSlKpMXRdu45o6ZASPCyZ6db/PbbcU/RY1kKgJcy/
xncHEa6rwfZVnI0aGGDxgdsLUAuR2O1p7GO4wBJSGMrhln6ZfUCOlCy/Yrw3olCF
XkBW/NozRbfMf01K9T+M0o70SDDBlh6rKfMPxYP/Zm3YnrLcLPD60IgozPp496Bk
nck/5UAoR+GECgnSLPFxlBYhmtKP0CsJL1PTPYQ1ftgV7AnlNvfIW7NuIppZd5Qk
PUqUi66w1jtPM/o0fG+6yncFqdDr7uiT/M132pS3nHfO/u3FU9Cpfb2MfwaCoCpR
FjRNKeVLElGilUKGunzrNb8r3QOExx1KcTk4sfz53M9pNL5iUaFhHIzDFO6rGQAR
AQABiQKDBCgBAgBtBQJPIxK7Zh0BUmVwbGFjZWQgaW4gZmF2b3VyIG9mIGEga2V5
IHN0b3JlZCBvbiBPcGVuUEdQIGNhcmQuCih3aGljaCBkb2Vzbid0IHN1cHBvcnQg
NDA5Ni1iaXQgZW5jcnlwdGlvbiBrZXlzKQAKCRDmPtypMp3QfmuSD/9EpqWU+jXQ
mj5h4rMSwxRppIJ8SxfjlwHik6xaqtR3BaDRPfGvioJJ4MylbICvlW20mymgi0hP
RSSVEV56bq0PRzKQnEd2n/9m9BdOH9r+kshaj1jL87iDjblluM+iVr05Idi7iJFc
GTE94qk7ZBNk4tMGNBs/0fxqO5IUI56YKZcuKLDhHLRtlvq+OZPmNxjeou14StvJ
COi3EC4W9plEIybZolHRI4xa9+mnxk7y70kGeofZlFNU0ZUBkvVFqi3wA4IngrvM
ITllBAgZA831qo04CqZYaR0PfaUh+sVx/XaDi2ZIm48X5p6cttYVygZo5a8+VOby
vvo9LdVaZQI9++KMCti0qU+b2Ynhbs1Zf6JEYQeYH7UGSk3ZYJOF0FmcMQfD8pSZ
2SyJYJmXY3iDKyx9OHl9PYXpGlDjZHWoaZx+PHUqtOUvBF6TpYbm/+UnvMyo2BLO
8G4SEv0crekobWZLkw+rPEqnlzgN+o/BXRfykEjCNHuugBMeB6brf7PKyZDrQs/i
wmUowqFUjrLC/7HbbqOankoaTtZRf89TYtE0IfUNWzf2SOBG2A8HIkzZzD4YIM3O
AtFryen+rHvU4KnAyQRDyZqztlm4zlRbsrePw2PMRYRdWMXk3OlDc/lcLnohM02/
t2fb+hOws7yrdmfpFPatFr2QE/4n0cydg4kCHwQYAQIACQUCTrhc6wIbDAAKCRDm
PtypMp3QflR9D/9Z/Q2Ahoe1fX00xyApCWliJtJWwz85b+KXMe158jKzuGrcMRw1
2N3HdzgbZgzqS24M3ayRcSaXJSyKS0WmKW241uxkIZap00j1aT74DKLelelXjeuj
MX8DTbxKI58zOkbTHhcJmqnoL2zRPRUbX4f2zn+wiEB4UUO1oFaeqVKKoZMBESbm
BJkKPP6Y8Lu3s9VkyZTBxvCuenPiN5rDvEP8epj0mclOv3A3t4Kz5ihHPjKMNXl3
phtCS5RlriE9cV+b/5mgzbkz4roHkZYbeuFVoccUCckUkq1KsvnAHETGaxkSZZiA
rBY47sqbEvypSF/yGGdojPKtRz72Hoi7Sm+YLqAwPjMj7UZ+6lnMFs+5LYtzOxwf
2V1E72vdlp/LKCtWpdqd7z9fA/X7JTswwKR/F1kSfiLONVytL9URNSnYOji+UJKa
/Ex1Dr5A/M03hPVPavJV3iohQLM9p4xddLOuE05hR6GqPyij3B4ZwzNDFjb6tVxx
8i8QjPBEnqGJgwJ4LWwMZJ8g9KYHTPLFlh0YXGQn1K9IM/N/MtGnvGXEen0/6wEC
sxkJNHqVjwUaHxfCC7l6rT/eB8o6jeiWeTGHT1VhxWaOKikiTagyuAg0x+AUo6kZ
yblA0LaYJ2nwyoXRqFmQV3NgHo6vS8Jy8XAJtI0IV0KIC+kM8s2vfeAKQLkBjQRP
IXQ/AQwAmcDqQfXeItD/hQKTYSmup2PfWerCSZAG0Pjfumf2x9WykqpmhuGYftFF
ExhVJthmRixsK7KmBVNfW0icEtlBybD7HHFV1Lk7hwduVnwGFWmzCQMmEnq98M8J
XwpVXueThUrpwzOPBUEjTHy7QkNdX0Uh7p1DzbGF9WreMaoQktsMeb+UWsGV8KfC
x5xAz8IScUZm6yTtawu58/+DRZRa5/kpBjAZY7aWAzFqTtHJ/KsRu3fajL++BuBM
sKbD09+2CNJALWn8Bxr8TXMXbPwfCxoi3wJ7pU+dw/KvbKqNHKTi6OeQZSKc6RG8
IVA0E2n8P8VmU9+veN9L4FxgMUs9ry1/3tQOTrSVvC6HbUVSZw0gXvnDccdOwQEc
agNHyiWX5ga8EDJlS/LWn/HKsn/ook1ztS0pw8nNlRKSSILusVl3GCc+PaBKxEac
+JJtRVQAL2p/8sBvX3x3AQeAyAEOo/jJ4OEHZXJ+zwxChGFLDliHGiJKWvuz2UWE
o6x6wsZHABEBAAGJAjsEKAECACUFAlON4vUeHQFTdXBlcnNlZGVkIGJ5IDIwNDhS
Lzk5RTA1NTU3AAoJEOY+3KkyndB+Qc4P/3+auQq3bSIT4taigjAhiPldoDlFk2B+
7t8tgn+aNroRKKUF1j1dN6bwtRctAA7RcXEZeYn+VktQdu/vo+OGVsKnlqRhLlop
prI9LAzgVCSYIEPkGbxHiwE5ghVa4h3o92oJVuM21Xbfz6iER2GZKFm3moakMaFk
1LKClkPKx/sIbGSzzzgdewHH2ufc+u346I8z9EuI5CqvP0aD7CP0JmK8Pj/sg6c0
NqYupxJRuIK+6F2+7TcY50KRshQKyMrKLs21yt4iaOkFenBiIRJvGVcOpuMSpfho
6XxdMKdhQK2hgIMdqef2eBtBGBW1Dr9vGn7Y2yGNjfuv3goLIHyrrP3W5YdQ5LmS
YaRxAUXiUhTXPn/cAzQCtzYUQvj2Sg55BditRkLPV1BbLHbWwDRFOCzxnXWjTEfJ
DiH5x/vnobSuBj6yT8aH2T7W6dACyTUjVJ2zxlMakl6h/DrzWHk2A8hvgPDZOo4h
VEo/sfOlvnfstN83b1g7981+BUn4F5WSRKz0BPlaRkfZWBo8ezsa/MQUg8XILH4T
hgOWonqFCFk6r/KyXx0dmYhnlMguWM+Z3SGWRUq7N1ByzZZ+2uvImLUofkl+pEf+
H9Zrx0bctWylBGkGvaVnxUidn7bYx25Hc7CeflPL0SiT0OaWGDrzejMbXKgL3bce
tIWj3S5Mzr6miQO+BBgBAgAJBQJPIXQ/AhsCAakJEOY+3KkyndB+wN0gBBkBAgAG
BQJPIXQ/AAoJEI6WedmufEletnIL/0wGtZj/RTGbJmfg0orpA6EiQ/7WWKdPm1rV
Em6XPKayVZHEtiRtd4YXr1ZlrbB31OSxpjt3N1yk2vDim+xrzz+B9By/wbPzCkLL
f5f/SO4d2hNm16IiYiwBr4xPVz3b2F8QAInfiEZu69CJuXkGZNM8eJjZQcu0l7Ps
e8Fs9ShfeLZFVdFgt7C2DXuvcKALF27oINzeywD1M8wGtFgEr3OlbtihwRm8FBxV
W6e/BlMBT/ZISoHETR/TKMlmp2tlIeSJRRBz52ID7QkCCNQdQa3/T+zUAXrl+qOw
62tsAvxPNAtJy+CHU46CS3rlDtvCgJWBRpCYrZdv4VcTAEg1BsVYihkCEZlkMpa5
Mo+ydbagjR9UfOZH8nMMrKVjSF3B7WGb5qNs7BwL49CDhJAvrkD4FaluDhK7tezW
Nn7E++X8jwDoxfQekkdb/zlNTQ5Z7H0cfj4OUTVD0xSIvwOVrlh2UA9iHtHSfEG5
aRKNFlelzUW1gYvDpe405vm2ii8ANvu4D/9tG4gUAMP7E48r9wSPTuTf+Ew+7BJt
UJ3dm/oYdDni8cRp2cvqn92YNerlU0GLlLAs2aM9KNmWR68mlsjzWme7QihK2Trv
yyPrLAsvl/zLfbkNmqNo7JQiA05qv9UnD0NxhmRxxL3aTEYgRoBCfn64N4P9pmAl
rSnh9YwaBuvH9dewprCYWjrJnU+whBeH5UdmH3clqkQDo2JyO7WUKXkLv2UwSe8N
VqAHbZbnROo1yibdwMRgxO2dZu+yPcx3NUAWlIjAQySDtEUnk0LcfugsEyueGDYl
UKPgZ3b52IS1wAnpxkA4eFIFMs4+7dDJhDDo2tjkIc5sTo3UyiH0K1V8rjY0+lcz
1m3NmmfmomLA51jwSWXHJ15x4qj13IQi/HP1I5Kz//8aOb3qBeMmQXBjZzFvZUr5
tteuNYL+Tor7/QMtXp8ShcWao5CVpQlGlIOfIxkjmokwYdsC35KhaJXu7KrYTdCJ
ZNA/RKt6DoZ65Jr3atauV+WohjzGASolt2dXbXns+YKe38YIkhv4l8E9gDj3vmZ4
m6y4XWywMFqGgIXJysottpSFqddztSulCm4QllpAKrJZqbut3WJZwxucJsyAIkUQ
tVxSdf+atAvrlewGBGfyFsHo570lkEHCH93UH001TxU/rbjHSnirekJ9GmMPL6KP
rqEfT/OeHrl/mbkBDQRPIXUfAQgAodOkpJWWOBKZx1jISO2k+zTqpWZi860S0XPC
PZd6xmMGGksUgckagJoNvP6glO8/SwbyRkhL5AfOl7qSM7buOn/UUnbzRHTjuIPS
LSYRVw2KcLQWwOWjKvF7sQ0jiTQHdN8diXXJLK2Pn92W+WbEnv0Bv+9odVS8qxuj
XabVFvLo9u4mV1r5H95UVhnKbIwMUrqYIQtojdAINmHuEAt2nTYvsyb8sSiX9WXs
5/Ku44ItPg7qnsL7+mf74sfUMg2XWoCfM4vJEQMyfONfQ7wZS4RIbFrsVy26zaB0
fnoovAnlahVPflFsjox99WrCLnbIbmqy9U6tHCQyueGWMGpCLwARAQABiQI7BCgB
AgAlBQJTjeNlHh0BU3VwZXJzZWRlZCBieSAyMDQ4Ui9CNEUyNzFDRQAKCRDmPtyp
Mp3Qfi2FD/9Po7TmdEGvGC8j9E6VjqSmqWEiQfShAhM7V4PzsEm4Z19yiVtnFMvC
zKJI1ch8zHlGdEFMfYPEV6fq9LCXWOwps9CbyDMrvQi5JMv8DoAwBSenV2IZyI2s
uD9y5WrBl9Scnqx9uPNWRw9RnOUSsxFpBNa9FcqvXOWSkSSTk8Obv5QQiZ51ChTi
u+9VIU2h1S312RAXu7rGT8MUzc7O2zntuegtCxSJOhCVjPuugA8BobhzHJx4+Tkc
8j9tlX/R/1WYnqmk8EiINy6gQQjPbHf+5dRhjTg2j1sEaUt+lESU7U3v6xK4eS8C
7lmJPyPNI4Af2nT++yolN1DWy7ihP9yHqzsZDnD+IXdmtJqz/Lnbarh9M5zHG2F9
TAPWSZMnS1nm04XZ50EGHC92BhTNkE1gP0Oq0FiBu3dtRuapxksqElijvIYApPk+
IGQzsPT1DRf4gX5vPJQnqghCJ6/pBKgmR6c6R1MuhrufHUMd6ZXepzh7L6YI8Afm
a3f47Wa7AP7gGX4XTIrkU4co4ssuavMnMGtfRruSVI8wIL2Hbdfcz9pjLQdVkZXx
mCYggpVqcjkVa8ycam3iynZ0ZE+rYLtON+rrrl4PeRPsXD9CrPk8p5LIAo+Ver//
hio0k7rSn7zwhJa1NmOJ1ezAyggQCeXnE4h6ppnZKLs7pBgJ3OIzy4kCHwQYAQIA
CQUCTyF1HwIbDAAKCRDmPtypMp3QfploD/989emn/GRN/44xq187bHlEbZnBL9hO
JWptQKXTL/OsQARVUH32M3IL1cO1erTZdCxIX27vszNIdbgb2UadEb4n7TIw7Vm9
X+qD5y3e1mwfX6iNgvIcVYK4U8KMkfg+JJbZlo868H7LRuFM2FPKij0x6UALLITe
ois8CLGc6D9nE72ClngG3MVwn1i2RTtDxuBuAdY49hmsbX1tXS/52vXn6fXHyC2z
QTq2misKc/xXeHzyAzBhftiT5pL6iEwd+PF6udnPxvJNYYwgYhr+UEfoYn3HSnA1
4WtAhG4+VnvQgUsD4V8UoTkae7CvdnoltLeXD5CxMaAFsnTA8l+a+71wnulxafCn
No2wowJJmVELWDNlPbI0cuP16r74VLXzlqpx0erd/bkhDCUCFF8L2bTKfNwg+kJQ
usTuWHVxTWirnUFhO3QZ7s02WjCv2SPQxWcKHlnV/P14YCcjPaLqGn6e4Kn3AveF
bRvEjsS9DKm5z0JogaYFv7ZLjN1F4myl+PzwqNAQ1YwU4APk8AJcE9q0gowpOtZ1
Ro8+I0yiv1qbZbw0TocPnAYBvX3k2NNBN8LlIlDekJRp+VkUOkjJ4DFP87Xk498U
T7scvlWg1fv8m5t0KmdpDtbFf3SwoHjsmEnF5I45tbBj9DexM1WQC3cBV8dxyyAz
l6pjYsZJPOpEcLkBDQRTjd1CAQgAkk1yOTLf8rqq663n3Xflvo/SrVG5kVROhQas
Xo2lphuhD3aqTB7g65JJxvIaeRAyBitJWNAguryLJmxl3xUg9ZIQzP8Op+qyjYwU
WIVDozwiH9KoBkKaH3i3Wo05lpW3FgmW1hmB/iP+5qvK44RPW8ejBUwlgg+smH/3
1puqseIFbilQe5PF7DfDzCnC6NuClODpDV/q5qPTetyeYySFcO8J5/8CFFnf5rR1
NHw9qPnl9D+6WdKm7X2prZLUrSd1sHPPaGxkHE5/sgtiCiE7E8S8IeQmR6IQIZun
9N4MwSTEho8atayThraH+qbV9dV6SD5Fljr3brd9a17gXJs8ZQARAQABiQIfBBgB
AgAJBQJTjd1CAhsgAAoJEOY+3KkyndB+mg0P/1ZKUL6Vx71P69A4qvBdMUKIxr+c
phzLnVt5ZaAx8Ri+q/JiH8zAS65gxbbvic0g26CVqCjpXH/SuJTFmoW0pR6u+qq0
vxWhJhobNEgTarc3cj/soG9+hsWi4/Eavx7NLHVI9jRDesN1aCWzSpczqvbjZfDe
/zIFWahOWEYnhDQNkB3zdFi3DQ3SuGm30QngZbm7L5rG1f4MODTEH59a+LH8pcCI
nk7Zg63pDkpR7C8YhFtz2bHGviWMpYM52Rt2DRn1ia442qG3IMdA2kr4m0/391CB
hVKnvDbS2KR4HUAtt9T4D+KovSrEU82CZuScZ6BJi+V6fJoWAdLeE9jB7KxBoaXl
sQonr0XvnjYHlFW8WtFB58v1XKmonGkaOIGwjs+TPMqqpH4cj5YktOJ35a5T9No8
cyA3xOdyf78Pi69mPTvsyQrrzLKZ6uWDj/f/dsE0ihX4ubgQwzh3z64w8jQDEh9y
HGf6oVTverKgB8K9p2BEEMKj2k9z4iz5D76vrm+myF0b9OmdRs+Qfpz0h2ThgZ0F
xTKFYVCuHPGDiy+lsRQhj62vxP7bLeXMg+bhVWPvyvxAeULbZv4LSb9HCnI0yQBt
YSaisslvr2sPT12j1/H+x4L2C2WNMWXlkY21ImPLTlgyKatBMfpaGoyjGCbZ8Foa
EBa8wVxl9Gp6N/DQiQI+BCgBCAAoFiEE3g5m4y8f3QkCZmuW5j7cqTKd0H4FAlrp
3kkKHQFPYnNvbGV0ZQAKCRDmPtypMp3QfkPCD/kBpnQIAsjLqHU7N2nmtNKNqXNN
2OwHOVlvfj629b336UiuxWHZPt6cjQNwVibMw9WBqmWXctOj6tycZgR9oJKfh9sm
FoBxkRavR7LViGFWT7UYgECo3x8chtHD1goYLlJjKi2AsVIE6CAWKYXHbGh1t8EW
sbkALaFk6LbvudWHbFDha0EjfNCFFS/10TItm4BguCdtIeUP2OylWCW58YppzO9n
imsY1nz3jpJn9RF44S5/A7dY5jteE/5c8a3hO9CH74g+vlqirmSh3SLNUEoNBUKT
j+1BdBEYn2GKWnGryg+83+76dYjs+9GfvNj5f6ytyVpkfc8kZVl126Z4mV/Nvz9g
niicFGb3Ruvvlg58NyWeQClMiUMJj3unpeFEof34lG4C2wi8rPeepxfBuOsj2nm6
I/BAddAE0bNOLeSfWvsHEY3oW2Lq80Ej4Ojs43SqiX7Ld+mVUAQBsetIb3jS2Ol0
qTJ9gY/7B9oKDXOhxJgp4rugHarevVVAG4gaJTk04zlXZUz1a+cEcYDfMEGJ3DW6
W/P5/X9rHd7o3vBjEFYvVvKGdB1f7tyKUnOUvgd0Zknu1gEN73qYp6t6HmMrWT35
4T4D0cmEekmhGJsV48WH+ot3Hq0d3S/1hzoLRwM293k+G+fUpswAdYk0egwamZ56
6F4BVlC3NfMMTiyf+LkCDQRWN5NiARAA2HrOyogaOCI+bjh+6Ua71CuPURvj0dHC
+DEUqgowKPSxw+lrd8q3AIPv055BXXgd8UPZ4qPZDst/AAikJ/n1jmW8jPUZsaCr
S76Uuo/kwShOznnlqTE2ZPMWloiuGchhpAuvAQjMrJ6GVpLskyZp5KhKSpu4+sR6
VMXmK5FjwRqAaoKBBt59FgyW5bJsUpJNJoLUEGx3PBvRbKN+yLWhGs5P9NjQ/0wq
UBYqLnMfnSqeSf361r9dKp5XQS4kyGYjpvFOpByCEJbiTrtbVsIU6f4/1NMbq4z+
dfpfdlZSPCYNWUalgzM8A0XU7xd8uAQRzndYZZZNmyr8jDem0+OKUWfqz03U91ot
BzjZ2JZ6epfBc4IM5WkWGfsWOjWnvI89FSYqT3f7EaAjV8rhvv3Dv6gWJ4E0GbaL
sXrTqBIDcAOdcsot7sUTe6Ajtxo6HwnGJlwzaReicpXAmJ9xZHxt7+8bLqWQY4mt
KTzvdnlWs8b4OGL7UazU/oI0Cfmvts3CuorSu1gJQ493GO5OZmRSXKTLZsCU+bDT
qBISDP2H7bZQ25VgFEuhrhxJokGBcEAGIdtqrhwUvBxOR7AngxSp8nbhvhFfZZD/
Tf56krQOtXfc8Gqxk22/q1PIk2dZqtNJvFpHh6EAez0MuJsVIBxmH3u8M/r0Ul3c
wufPTHyjROUAEQEAAYkCHwQYAQgACQUCVjeTYgIbIAAKCRDmPtypMp3QfrWSEACH
+sAr1ok7zipU9vhWQZ2zn/FCMd/aAV87juGe6MKEN0tgxiG/aRGNzHCr1LnTp4Oa
Oim0faYVAVgSDiEYeQK2ZTiSWWOXLdZ9gGaNONKAhWhjWKawx2OrKFCMcDkl2AHT
ao1nnYnUGs8mx33HFasy32Z8AeBMZZxYIO1J29vMev7BkjE8pP8tJ9P0SJljS/Zm
4oeiMGY21EtvLusZym7BzqT63W0kqQ9KNRcllPkxXslKaZ6On9EZn3y6cxMgrYSe
+bGIwPncgBMfc6CJrAU0sbsMGquI3RII3EZdH7QH5eIjrSGBjMsZoEJmGLtrEjEo
6ms+jBJjHVWMNp6qGnbkjtKp1t4OXAP2Zeu3TjeRqjLzjsd9SFmFGjF5FJ4haR29
7dmlinAMxKtY0OKHbLBj7jiV2f9TPWqva3LCPsX0vYACvOFlsJiAV3dXG1JHuIaZ
Di/wIo0QPeZI1u2fXGXZ5clA7lIcw+/SvJI0klCf7n8F07evS3jyiaNq+EF+MjRb
YLTL9lzRuo/yxOpcjONp3w9zE2n6BjfzAWCGA1SB9mvRVHQtyk87Z2QFHA0l4Qii
OP4UI7aMzZ/iygo7U8f0uKKnhnSkmvpZGVVK1TJVOZmmvlOPTT0rLosHiF9w5+60
5VocorfbUkt2oihoqBg7gnwq0SG9AnNsZWf1uCOIo7gzBFp952gWCSsGAQQB2kcP
AQEHQPVtSuFnhMmRe46yyGKpN35sCZ96RZEMD9PYfgY23NT3iQKtBBgBCAAgFiEE
3g5m4y8f3QkCZmuW5j7cqTKd0H4FAlp952gCGwIAgQkQ5j7cqTKd0H52IAQZFggA
HRYhBHa+XbJSceFIHmeMNbbEHONWZJlsBQJafedoAAoJELbEHONWZJls60wA/2MV
lKqzJFUdje9B9lIPCMS1bVgt2s6N1F4aKYH+zJ3rAP9GC2b7IRlj6yqVqhIr7zy9
5KEHR2J+BANSiVJ7/7V9DcA4EACymPJNqnblefv04GsXXTbwYcTPwZ5FmuooM4l/
Ry8GB5f2S6CslyGUe75rZzdVrkl27VTlaFxkE27alB8NG148xttuhJqKD+O/hE6E
6x13ffoG7iL2nkUolr5hyJitN/JOocbc/1IIZtyJNEVBrVwtAtoy402NR/fYlB6s
ZrTtPiX0GA8eH8HxLwdqsjxH8Cjsm0wJJs/bqQ1VpBheiUHyGw2qIWEfl12wLWNH
iAHtD2RzFWTnRw2NLA1O2AqQ8ONaWLiU26MsSgraH7wVeEP1K2vQNZiN2Shn/+OE
LHeIno2MbD2M/FPdybSek/YshnJindRqrfcIsoJMQzDZQYmB8yj0MMsifoFTd7BX
8fQqWn68ADk40VMXvC+TZPEVQKquveSj67bsuuzJmMvPGKooKPTyOi9HL24X+von
PPEPwkIH5esSWFmoUDsFX4t3HTFlNetqeUz9RhuIZV9yV7HJN2mIseSJ7lhj0Xay
0m1Fka+A3RvGxb9tENnq6MJgg3E2Ubi8ZFI7fKOehuPOQxGhnohNHXMaZqcdedP/
Aku/5lBeOW4FGUWzFwRjnooONa8EblZsaoR9JHNeJKFW5+shaKOjJTIiBjoASt/2
zJxTWW3B7kA1PXqplvvwtCCnmMGkXICwLL7VGSX1Y5V6pA0yr777eXCsNgNUbwu1
KjYnoLg4BFrp358SCisGAQQBl1UBBQEBB0BrjZj+KTDK8qeug5u/uwXZ2DwlHR51
NCDcVYJGkFVbMwMBCAeJAjYEGAEIACAWIQTeDmbjLx/dCQJma5bmPtypMp3QfgUC
WunfnwIbDAAKCRDmPtypMp3QfgjpEACwiXruEVFvV8k9Vq2mx3GKedVwAp675Z35
UO6MaGKZ5XpSQojM2dI86QeZPgbFkY/JS3OWccjW5hAmy5DciRgXHQsAJsBRXubk
A8sfX0ySRUbEmLi6bxIzbm2md75IlP4rC/b3tdtSOTKlfDpa80mFpHFRtm20lS9T
8Eyz1RobpGIOIoSmcWG4UWdv0W4ioeMmVLnl0iR8DI6h+U7nApBFwSAZUu6nituk
CYmwu8AxlnWv3F2UgcdwLLuI9KnL98BB/gkxoxMk1X6SnQMvPPAWksyz+mPXgdCK
ylKkkzwQXo8a7CzDDExxku8hRk9oiGMjCZRnOYxC7RFkP/psUcJbv5t4uFqysyAh
+SSibfw4/cI7WVatzb9t0eBmsAOlmxA7sd9jdnu2xMCYQKHiLo8foMR+mHNM5q0T
E+K33cwTRiXVgqcAkfheI+A4oyzqzddxsxdYwXpoceWEcs+di9Qcwg5h0XmZ/6wI
vwj5SDUg1gQtnly+aFIwHjd4ggIbhOze03dN8KKivEs2EKzaXImTR0foY+lyq9bo
IWu6i3X9bxmmcpp4h8vKrKJcWrFG+q0ENaZoYqEuXiFJ9zxfJ1TdScPSOlZLVkKP
x/uBtR1RU2+//2yV7jJWK6raVXZ9hB4km3EuAQts8+UCsXM9jsD1Jlw1fEuMQEBp
vtlgqCEcWrkBDQRa6d/DAQgA1RDvHPo5wd72mXB1ztBCN9jPCrtlwXGRbwN/Kdbw
ANd99X4Ctr5m9wKMK5078Zbj8C2Yr6e4+1vxzXqBSzKWZohswpPPVC5B96RNmQrL
jJ5V8/TLU7ckI4MtCw+2K03i9l1srwxwXw8c56k4jjmk88PlMVTcr/urjx5unYH1
uHN3Sk3n1gAbEOTRrrPZWaZviyheEHe86nnQKDsBu3yiV9BepIxYkYxZm8sI7qKQ
lzpgwHaudNf+rKPiza9D6d8pgsd/5rlzKTintEcgN9x74AHJqaFj5HAxjyg/wgTr
ndNcWeB3Eu7G8nZGjDfR+upSNjmP8evufT6A8w4d8tzdfwARAQABiQI2BBgBCAAg
FiEE3g5m4y8f3QkCZmuW5j7cqTKd0H4FAlrp38MCGyAACgkQ5j7cqTKd0H4uCRAA
l8ygwpx87vrM9EtSt8Q7cOIj2tJfYCqhdUp32IV9OE3EPAWOV5hoSR325OTLdmv1
cE2aa35oK9eevkayAmBsprhfZD20tHB1P4wBUgcsxShJLxXxZsWLym7AU7xwCXv9
3G/fk5AqgZZjsYtWaulxzaBXo1Yr0sdUhSK0PJtqtMmJE2Q8nmOwpjx6XhO8MZxg
aRV4ktx5HyNchWKr52CcZh3y5xXxh6YUlf86k8kuN/exBzkAM581U66KP8fMFMre
pM2Z5IDm43VvHGVOa4shAmR9jIjqSXOrvgEfg2ys78aKe/fSu3GfR7lMVPD0ZKX4
lqXTCo3+4Xd7N+uPxPcEkOX2jevYdXRoHhcxH/++mSoNgV9pj/dGiBkDKUM/WOhZ
VZ9uvmDMEvprjSOlYFACkD/TNhW/O4Zi09snENWX3wDAU/u2VlySjz732YBF438q
JOycw/36tKCZlDlTorGhzODpxx9bSDJ7w7CsetB19lVoe0zEJY/bEHLxy9QA527g
1TGgzvIvC48l69WJTv1CLIiFcqEs4jgB3ynC/TPL/HpzBldicVVMddn5cZqkJOO8
9qTVgBckOmoBeLDSSKsURwXI9BQtSdfG9PpaRt2GPXUW5p7ipHjsI+4wEXTrOylu
hjAqNyQU6VSX0D6woKyUHVFkapTDnExtGkY+3M7NAYQ=
=chX+
-----END PGP PUBLIC KEY BLOCK-----
07070100000007000041ED00000000000000000000000361F953D700000000000000000000000000000000000000000000002500000000b4-0.8.0+2/.keys/openpgp/pbarker.dev07070100000008000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000002A00000000b4-0.8.0+2/.keys/openpgp/pbarker.dev/paul07070100000009000081A400000000000000000000000161F953D700000DB0000000000000000000000000000000000000003200000000b4-0.8.0+2/.keys/openpgp/pbarker.dev/paul/default-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBGC756sBEADXL6cawsZRrDvICz9Y1SG0/lW1me4xpq36obh7a0IGAzp3ywNR
b/4MODTqP4+DD0cIFuDY41/N17g0sNlp8z+/k/IIDmNPtYQOTVmAkrkdDU4BP8dD
3Cp1PUw6nrbInfujAJNrVM0IVDkwKTbL2Nu1P+xns4MIpF9Kj4XN5celYJ9vEJ2n
0Bo0nO5T5vg46dihIaDl+24iNIHSsHqYyEdMBfY8kY2RulpaAyFOuaaHdIeDkejV
vO5xLSiYLjB5qrRhgH134lJXsuLOsFQ64ybGECuOasnbauevsPBAaroQW0pqVb9F
neGrWHxMCLlQHJRqQJRdVa6bsUdp6NWra8/0msPawSrFwGQdfJBTA3aXJC2CG1Jx
Egj6QQjEQA49DSjgzdhInbiIK8Vbp/zedM4aVue7qJnwPMTFQM9lYx63b7wLN4Tu
8B9YZ0UFdSwMCJuqmYGsYRUYdwM3ArjS0VO6WpU+HBKvzLK5GQfUTSM8KaZ5eA2U
o2ain8SSZb+WptUYKpxF9jbtCPbjpZKzGuX4iHFl9eT75TM9iXJNGAjB5xigkADL
wVfPoJ5E53S+KdNVuOWHugyLMPNAQHOwpw5Rey+0zxyzPd4wphutc93UIU5g/029
ngAc7DuKCq12jl7fhkjqFlFtYPIc1k7nd+RSezmH/qResbMErHSX1MBSZQARAQAB
tB5QYXVsIEJhcmtlciA8cGF1bEBwYmFya2VyLmRldj6JAlQEEwEIAD4CGwEFCwkI
BwIGFQoJCAsCBBYCAwECHgECF4AWIQSYsqrBAKw/grtdVGd0l1yBt+ZrrAUCYLvp
iQUJA8Jo3gAKCRB0l1yBt+ZrrGa5EACyLFyzlPYp+8LIilJl8zLYMnt8V4hSa88m
iXNd37O/ky0zDVds9F6UJmM4r9Wu8G9yLd37PCtXsNmDMEIMT8T1AaHUmBvTrgZ6
BGzwJZeUkjdfozAy3lcMG3OoAG8rT1CHlmzmGMtQdzC1m4ppcSCWfILrS3DqCEcf
ACfyfkz8nl0Rd92j5LhyGf3FH9053SDFqMT4/BD7TXu00dB1e4rY70fv+GWEbKuC
4ZOEaBOQfZ5qOAZh9gsYO0ioyYFj5yWXu5mc5LrIotuFRYjSPoQek1FwLBUnx/1w
hH5NuKjUZWTRi2mtOGbk86w8nz4AOQpqtl+W63IiWo71I4sJ7OaZQm28fhQC601o
arD+j6AUl4EUVFM2+W7P6+XPKPuwNHTboo+W5DCtYyLRvGeXR6iOmfMMjIimbFg3
n54IRHgjqDKcWj0k+PZ0ETUusE84jWRyEBYCq8IpjnxonDDbY+SBg3qYUGJ9yTSE
x19/My7UW6Q8A8q/SQDz+F5ulpFI/d0rep//h1JHse8LFQ8s/O/GJNcCsQhYCBSV
XY3geMHlPFOfLfZUjrfkJLMrSbxKwiJPPSMmfXynsmCb2uRpV6X5AggHQjzWLAT0
Btwn1rhsUAOYsRuRukvxZyZ1mMWsHQGeh96d7O1IehynydIntwv06R5z3W9UEwZ0
rCoLbWsxvrgzBGC76nwWCSsGAQQB2kcPAQEHQBjNSSeObYNd6cLGd0kN5n4oJQ24
ojM8NCulj3qLA6m9iQKtBBgBCAAgFiEEmLKqwQCsP4K7XVRndJdcgbfma6wFAmC7
6nwCGwIAgQkQdJdcgbfma6x2IAQZFggAHRYhBLOM9d/ljUENaFTaIKtjVOyduvRu
BQJgu+p8AAoJEKtjVOyduvRu+w8BAOP1qLWCFJM/qx0EX/66uiPvuapEp2hMiddc
UKdovAtDAQDGmPIZZWcL7nJcJjnduyj6Eu41hYNmQpKz1/LKLE9LBiCvD/9ubYoh
00mX8tWhyU7VNYCRDyUvQoaXHJ3XV2SXgm4mVUCLeaLYAQV+6uCPtEaiLASG29kh
CyZL9bjTBU29HmBDCIsan/L2EG1EYkrNJzVR60XIYByG5i/brkDBx8WHg+k6j8rv
q5/Z1ST6PUQtyHM6DUOni+t4ebM3BsPKHLSS2z46N5exuGDC+Q6u+n0YGY8Szo+0
DsWHNrT3pwgCn1X4Q1l1SVZxj/Z2SZfVOLYXlE4KKRt/e9013cJ2Zs01UwuCCjwB
z9a5eEctvJpLICdnwCUf3Zw2aPQGPWaNkxZaDTEbCSJZ0pxKeeN0VtMkDC3VKU1/
vbbTNWCwP8vZVO/BwU4uZg/iLrBUlBhksYmITLWSSgs2gQis0xOCfw7ng13svw6M
KBnoddGy+CPq4xdODTxt+8oqy1f+gvo6gEtWzIpgri8W8AxCCRWMIkPYhz/AaZRe
SIvVZ8sv3bUHZMDkbK0l9ffLlDIeVmSZjOScai/8KkjsAKpZqv3KB1bsxyA+cYAu
gNN7avOJs5kBl1O3tdRpnaOuIg84Iek5CImygFhGV9T/QYqJFYeh3oNqmdq7dI/Q
fw6BhKOm8neWqANfgRL2wb7MBF/WAyxQPDb277X0ctSfYox2jTBMYDT1PJFxbGXY
6Tf8OWBeTRzkrSCT1cjbwML6CUwmNfbWMtgZUrg4BGC76m4SCisGAQQBl1UBBQEB
B0Dqbss9B2gohGl82RvU8TlQdeQ44hqyREclQEGuyS/IbgMBCAeJAjYEGAEIACAW
IQSYsqrBAKw/grtdVGd0l1yBt+ZrrAUCYLvqbgIbDAAKCRB0l1yBt+ZrrPjxD/wI
bniW8d/6MLgxjr8du9FPGgSyOKBwqpezLcD+WbAGiJOyv9EnRcbsc4FoHujTcZpl
BCuX6oI44l1QwbKD/ZNoYaHNb1Jpo1dhhzXp10IKbye3wPyBx828SZ/l39RSR/og
L2eSuVuNygidyYvQ8EGXNCE/RkRDV98d9lcq2daeaNjc6xdzcCbiZtQRvSIEB1lA
ojXv+OmZ6k9nr21MxLbnnjA02ro58yYCSBhtst/nETQfbco8ktCP7q3mzxvtjj02
bZM9mg7SuBFmYaV4sQkWjnCvIbFIc0/DmDZ/wammSfigXvEGsUpX7kUHChDraPsz
+IwNUDVqphtELb3HGn0CY6dOTAPSTmfbOaP3qcYBVt4v9YK4Tp2/KrLYYZSCx3vF
uRhpXYDMTFRYQpsBR1Y5YbUGRqkwhLFQ9k4cwzz0Y5XzPFB4yWDNbNaPW5s1vkZs
NYqSUlF5FtghQ6I75JMe30VrLB9IEpFYgZZ8eY+5mx4+fdlrPcqEOooYW3q2SCQe
zbkpYWO11Huxs3jA7glrzOdO8FBpjo4jD8z5AQQddiVTzPGTzQIbVK9HlQ1PE1pY
umErnUzTST7eMXSqwbYFC3CDi/dtYb5Q/UdQNlQICLL4S3eteCxlvel2LlyhoNrc
JFW9FPw1xdPVMr4GgrOqcYY+JVnUSomnPpyoiDgRRg==
=907n
-----END PGP PUBLIC KEY BLOCK-----
0707010000000A000081A400000000000000000000000161F953D7000046AC000000000000000000000000000000000000001300000000b4-0.8.0+2/COPYING                    GNU GENERAL PUBLIC LICENSE
                       Version 2, June 1991

 Copyright (C) 1989, 1991 Free Software Foundation, Inc.,
 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
 Everyone is permitted to copy and distribute verbatim copies
 of this license document, but changing it is not allowed.

                            Preamble

  The licenses for most software are designed to take away your
freedom to share and change it.  By contrast, the GNU General Public
License is intended to guarantee your freedom to share and change free
software--to make sure the software is free for all its users.  This
General Public License applies to most of the Free Software
Foundation's software and to any other program whose authors commit to
using it.  (Some other Free Software Foundation software is covered by
the GNU Lesser General Public License instead.)  You can apply it to
your programs, too.

  When we speak of free software, we are referring to freedom, not
price.  Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
this service if you wish), that you receive source code or can get it
if you want it, that you can change the software or use pieces of it
in new free programs; and that you know you can do these things.

  To protect your rights, we need to make restrictions that forbid
anyone to deny you these rights or to ask you to surrender the rights.
These restrictions translate to certain responsibilities for you if you
distribute copies of the software, or if you modify it.

  For example, if you distribute copies of such a program, whether
gratis or for a fee, you must give the recipients all the rights that
you have.  You must make sure that they, too, receive or can get the
source code.  And you must show them these terms so they know their
rights.

  We protect your rights with two steps: (1) copyright the software, and
(2) offer you this license which gives you legal permission to copy,
distribute and/or modify the software.

  Also, for each author's protection and ours, we want to make certain
that everyone understands that there is no warranty for this free
software.  If the software is modified by someone else and passed on, we
want its recipients to know that what they have is not the original, so
that any problems introduced by others will not reflect on the original
authors' reputations.

  Finally, any free program is threatened constantly by software
patents.  We wish to avoid the danger that redistributors of a free
program will individually obtain patent licenses, in effect making the
program proprietary.  To prevent this, we have made it clear that any
patent must be licensed for everyone's free use or not licensed at all.

  The precise terms and conditions for copying, distribution and
modification follow.

                    GNU GENERAL PUBLIC LICENSE
   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION

  0. This License applies to any program or other work which contains
a notice placed by the copyright holder saying it may be distributed
under the terms of this General Public License.  The "Program", below,
refers to any such program or work, and a "work based on the Program"
means either the Program or any derivative work under copyright law:
that is to say, a work containing the Program or a portion of it,
either verbatim or with modifications and/or translated into another
language.  (Hereinafter, translation is included without limitation in
the term "modification".)  Each licensee is addressed as "you".

Activities other than copying, distribution and modification are not
covered by this License; they are outside its scope.  The act of
running the Program is not restricted, and the output from the Program
is covered only if its contents constitute a work based on the
Program (independent of having been made by running the Program).
Whether that is true depends on what the Program does.

  1. You may copy and distribute verbatim copies of the Program's
source code as you receive it, in any medium, provided that you
conspicuously and appropriately publish on each copy an appropriate
copyright notice and disclaimer of warranty; keep intact all the
notices that refer to this License and to the absence of any warranty;
and give any other recipients of the Program a copy of this License
along with the Program.

You may charge a fee for the physical act of transferring a copy, and
you may at your option offer warranty protection in exchange for a fee.

  2. You may modify your copy or copies of the Program or any portion
of it, thus forming a work based on the Program, and copy and
distribute such modifications or work under the terms of Section 1
above, provided that you also meet all of these conditions:

    a) You must cause the modified files to carry prominent notices
    stating that you changed the files and the date of any change.

    b) You must cause any work that you distribute or publish, that in
    whole or in part contains or is derived from the Program or any
    part thereof, to be licensed as a whole at no charge to all third
    parties under the terms of this License.

    c) If the modified program normally reads commands interactively
    when run, you must cause it, when started running for such
    interactive use in the most ordinary way, to print or display an
    announcement including an appropriate copyright notice and a
    notice that there is no warranty (or else, saying that you provide
    a warranty) and that users may redistribute the program under
    these conditions, and telling the user how to view a copy of this
    License.  (Exception: if the Program itself is interactive but
    does not normally print such an announcement, your work based on
    the Program is not required to print an announcement.)

These requirements apply to the modified work as a whole.  If
identifiable sections of that work are not derived from the Program,
and can be reasonably considered independent and separate works in
themselves, then this License, and its terms, do not apply to those
sections when you distribute them as separate works.  But when you
distribute the same sections as part of a whole which is a work based
on the Program, the distribution of the whole must be on the terms of
this License, whose permissions for other licensees extend to the
entire whole, and thus to each and every part regardless of who wrote it.

Thus, it is not the intent of this section to claim rights or contest
your rights to work written entirely by you; rather, the intent is to
exercise the right to control the distribution of derivative or
collective works based on the Program.

In addition, mere aggregation of another work not based on the Program
with the Program (or with a work based on the Program) on a volume of
a storage or distribution medium does not bring the other work under
the scope of this License.

  3. You may copy and distribute the Program (or a work based on it,
under Section 2) in object code or executable form under the terms of
Sections 1 and 2 above provided that you also do one of the following:

    a) Accompany it with the complete corresponding machine-readable
    source code, which must be distributed under the terms of Sections
    1 and 2 above on a medium customarily used for software interchange; or,

    b) Accompany it with a written offer, valid for at least three
    years, to give any third party, for a charge no more than your
    cost of physically performing source distribution, a complete
    machine-readable copy of the corresponding source code, to be
    distributed under the terms of Sections 1 and 2 above on a medium
    customarily used for software interchange; or,

    c) Accompany it with the information you received as to the offer
    to distribute corresponding source code.  (This alternative is
    allowed only for noncommercial distribution and only if you
    received the program in object code or executable form with such
    an offer, in accord with Subsection b above.)

The source code for a work means the preferred form of the work for
making modifications to it.  For an executable work, complete source
code means all the source code for all modules it contains, plus any
associated interface definition files, plus the scripts used to
control compilation and installation of the executable.  However, as a
special exception, the source code distributed need not include
anything that is normally distributed (in either source or binary
form) with the major components (compiler, kernel, and so on) of the
operating system on which the executable runs, unless that component
itself accompanies the executable.

If distribution of executable or object code is made by offering
access to copy from a designated place, then offering equivalent
access to copy the source code from the same place counts as
distribution of the source code, even though third parties are not
compelled to copy the source along with the object code.

  4. You may not copy, modify, sublicense, or distribute the Program
except as expressly provided under this License.  Any attempt
otherwise to copy, modify, sublicense or distribute the Program is
void, and will automatically terminate your rights under this License.
However, parties who have received copies, or rights, from you under
this License will not have their licenses terminated so long as such
parties remain in full compliance.

  5. You are not required to accept this License, since you have not
signed it.  However, nothing else grants you permission to modify or
distribute the Program or its derivative works.  These actions are
prohibited by law if you do not accept this License.  Therefore, by
modifying or distributing the Program (or any work based on the
Program), you indicate your acceptance of this License to do so, and
all its terms and conditions for copying, distributing or modifying
the Program or works based on it.

  6. Each time you redistribute the Program (or any work based on the
Program), the recipient automatically receives a license from the
original licensor to copy, distribute or modify the Program subject to
these terms and conditions.  You may not impose any further
restrictions on the recipients' exercise of the rights granted herein.
You are not responsible for enforcing compliance by third parties to
this License.

  7. If, as a consequence of a court judgment or allegation of patent
infringement or for any other reason (not limited to patent issues),
conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License.  If you cannot
distribute so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you
may not distribute the Program at all.  For example, if a patent
license would not permit royalty-free redistribution of the Program by
all those who receive copies directly or indirectly through you, then
the only way you could satisfy both it and this License would be to
refrain entirely from distribution of the Program.

If any portion of this section is held invalid or unenforceable under
any particular circumstance, the balance of the section is intended to
apply and the section as a whole is intended to apply in other
circumstances.

It is not the purpose of this section to induce you to infringe any
patents or other property right claims or to contest validity of any
such claims; this section has the sole purpose of protecting the
integrity of the free software distribution system, which is
implemented by public license practices.  Many people have made
generous contributions to the wide range of software distributed
through that system in reliance on consistent application of that
system; it is up to the author/donor to decide if he or she is willing
to distribute software through any other system and a licensee cannot
impose that choice.

This section is intended to make thoroughly clear what is believed to
be a consequence of the rest of this License.

  8. If the distribution and/or use of the Program is restricted in
certain countries either by patents or by copyrighted interfaces, the
original copyright holder who places the Program under this License
may add an explicit geographical distribution limitation excluding
those countries, so that distribution is permitted only in or among
countries not thus excluded.  In such case, this License incorporates
the limitation as if written in the body of this License.

  9. The Free Software Foundation may publish revised and/or new versions
of the General Public License from time to time.  Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.

Each version is given a distinguishing version number.  If the Program
specifies a version number of this License which applies to it and "any
later version", you have the option of following the terms and conditions
either of that version or of any later version published by the Free
Software Foundation.  If the Program does not specify a version number of
this License, you may choose any version ever published by the Free Software
Foundation.

  10. If you wish to incorporate parts of the Program into other free
programs whose distribution conditions are different, write to the author
to ask for permission.  For software which is copyrighted by the Free
Software Foundation, write to the Free Software Foundation; we sometimes
make exceptions for this.  Our decision will be guided by the two goals
of preserving the free status of all derivatives of our free software and
of promoting the sharing and reuse of software generally.

                            NO WARRANTY

  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
REPAIR OR CORRECTION.

  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
POSSIBILITY OF SUCH DAMAGES.

                     END OF TERMS AND CONDITIONS

            How to Apply These Terms to Your New Programs

  If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.

  To do so, attach the following notices to the program.  It is safest
to attach them to the start of each source file to most effectively
convey the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.

    <one line to give the program's name and a brief idea of what it does.>
    Copyright (C) <year>  <name of author>

    This program is free software; you can redistribute it and/or modify
    it under the terms of the GNU General Public License as published by
    the Free Software Foundation; either version 2 of the License, or
    (at your option) any later version.

    This program is distributed in the hope that it will be useful,
    but WITHOUT ANY WARRANTY; without even the implied warranty of
    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
    GNU General Public License for more details.

    You should have received a copy of the GNU General Public License along
    with this program; if not, write to the Free Software Foundation, Inc.,
    51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

Also add information on how to contact you by electronic and paper mail.

If the program is interactive, make it output a short notice like this
when it starts in an interactive mode:

    Gnomovision version 69, Copyright (C) year name of author
    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it
    under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License.  Of course, the commands you use may
be called something other than `show w' and `show c'; they could even be
mouse-clicks or menu items--whatever suits your program.

You should also get your employer (if you work as a programmer) or your
school, if any, to sign a "copyright disclaimer" for the program, if
necessary.  Here is a sample; alter the names:

  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
  `Gnomovision' (which makes passes at compilers) written by James Hacker.

  <signature of Ty Coon>, 1 April 1989
  Ty Coon, President of Vice

This General Public License does not permit incorporating your program into
proprietary programs.  If your program is a subroutine library, you may
consider it more useful to permit linking proprietary applications with the
library.  If this is what you want to do, use the GNU Lesser General
Public License instead of this License.
0707010000000B000081A400000000000000000000000161F953D700000034000000000000000000000000000000000000001700000000b4-0.8.0+2/MANIFEST.ininclude COPYING
include man/*.rst
include *.example
0707010000000C000081A400000000000000000000000161F953D700000A18000000000000000000000000000000000000001600000000b4-0.8.0+2/README.rstB4 tools
========
This is a helper utility to work with patches made available via a
public-inbox archive like lore.kernel.org. It is written to make it
easier to participate in patch-based workflows, like those used in
the Linux kernel development.

The name "b4" was chosen for ease of typing and because B-4 was the
precursor to Lore and Data in the Star Trek universe.

See man/b4.5.rst for more information.

Installing
----------
To install from pypi::

    python3 -m pip install --user b4

Upgrading
---------
If you previously installed from pypi::

    python3 -m pip install --user --upgrade b4

Running from the checkout dir
-----------------------------
If you want to run from the checkout dir without installing the python
package, you can use the included ``b4.sh`` wrapper. You can set it as
an alias in your .bash_profile::

    alias b4="$HOME/path/to/b4/b4.sh"

Setting up a symlink should also be possible. Remember to run the
following command after the initial clone in order to pull in the
dependencies that are tracked via submodules::

    git submodule update --init

Patch attestation (EXPERIMENTAL)
--------------------------------
B4 implements two attestation verification mechanisms:

- DKIM attestation using the dkimpy library
- X-Developer-Signature attestation using the patatt library

If you installed from pip, you should have pulled both of these
dependencies in automatically. Alternatively, you can install dkimpy
from your OS packaging and then run "git submodule update --init" to
clone patatt as a submodule of b4.

For attesting your outgoing patches, see patatt documentation.
https://git.kernel.org/pub/scm/utils/patatt/patatt.git/about/

Display attestation results on received patches
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
There are four attestation verification modes in b4:

- off
- check
- softfail (default)
- hardfail

The "check" policy is to look for any available attestation and try to
verify it. If verification fails, b4 will not output any errors, but
will not show verification checkmarks either.

In the "softfail" mode, any verification errors will be prominently
displayed, but b4 will still produce the resulting file with patches.

The "hardfail" mode will show verification errors and exit without
generating the .mbx file with patches.

You can set the preferred policy via the git configuration file::

    [b4]
      attestation-policy = hardfail

Support
-------
For support or with any other questions, please email
tools@linux.kernel.org, or browse the list archive at
https://lore.kernel.org/tools.
0707010000000D000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000000E00000000b4-0.8.0+2/b40707010000000E000081ED00000000000000000000000161F953D700000101000000000000000000000000000000000000001100000000b4-0.8.0+2/b4.sh#!/usr/bin/env bash
#
# Run b4 from a git checkout.
#

# Resolve the real location of this script (following symlinks) so the
# checkout dir can be put on PYTHONPATH.  Expansions are quoted so paths
# containing whitespace survive word-splitting.
REAL_SCRIPT=$(realpath -e "${BASH_SOURCE[0]}")
SCRIPT_TOP="${SCRIPT_TOP:-$(dirname "${REAL_SCRIPT}")}"

# Run b4 straight from the checkout, with the bundled patatt submodule
# also on the import path.
exec env PYTHONPATH="${SCRIPT_TOP}:${SCRIPT_TOP}/patatt" python3 "${SCRIPT_TOP}/b4/command.py" "${@}"
0707010000000F000081A400000000000000000000000161F953D7000170EE000000000000000000000000000000000000001A00000000b4-0.8.0+2/b4/__init__.py# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2020 by the Linux Foundation
import subprocess
import logging
import hashlib
import re
import sys
import gzip
import os
import fnmatch
import email.utils
import email.policy
import email.header
import email.generator
import tempfile
import pathlib

import requests
import urllib.parse
import datetime
import time
import copy
import shutil
import mailbox
# noinspection PyCompatibility
import pwd

from contextlib import contextmanager
from typing import Optional, Tuple, Set, List, TextIO

from email import charset
charset.add_charset('utf-8', None)
emlpolicy = email.policy.EmailPolicy(utf8=True, cte_type='8bit', max_line_length=None)

try:
    import dkim
    can_dkim = True
except ModuleNotFoundError:
    can_dkim = False

try:
    import patatt
    can_patatt = True
except ModuleNotFoundError:
    can_patatt = False

__VERSION__ = '0.8.0'


def _dkim_log_filter(record):
    # Hide all dkim logging output in normal operation by setting the level to
    # DEBUG. If debugging output has been enabled then prefix dkim logging
    # output to make its origin clear.
    record.levelno = logging.DEBUG
    record.levelname = 'DEBUG'
    record.msg = 'DKIM: ' + record.msg
    return True


# 'b4' is the package-wide logger; dkim library output is routed through the
# 'dkim' child logger and demoted/prefixed by _dkim_log_filter above.
logger = logging.getLogger('b4')
dkimlogger = logger.getChild('dkim')
dkimlogger.addFilter(_dkim_log_filter)

# Unified-diff hunk header ('@@ -N,M +N,M @@') and '---'/'+++' filename lines.
HUNK_RE = re.compile(r'^@@ -\d+(?:,(\d+))? \+\d+(?:,(\d+))? @@')
FILENAME_RE = re.compile(r'^(---|\+\+\+) (\S+)')

# Glyphs used when displaying attestation pass/fail results: plain ASCII
# ('simple') or ANSI-colored unicode checkmarks ('fancy') -- selected via
# the 'attestation-checkmarks' config option below.
ATT_PASS_SIMPLE = 'v'
ATT_FAIL_SIMPLE = 'x'
ATT_PASS_FANCY = '\033[32m\u2713\033[0m'
ATT_FAIL_FANCY = '\033[31m\u2717\033[0m'

# Header name used by patatt for end-to-end patch attestation.
DEVSIG_HDR = 'X-Developer-Signature'

# Headers to include into am-ready messages
# From: and Subject: are always included
AMHDRS = [
    'Date',
    'Message-Id',
    'To',
    'Cc',
    'Reply-To',
    'In-Reply-To',
    'References',
    'List-Id',
]

# You can use bash-style globbing here
# end with '*' to include any other trailers
# You can change the default in your ~/.gitconfig, e.g.:
# [b4]
#   # remember to end with ,*
#   trailer-order=link*,fixes*,cc*,reported*,suggested*,original*,co-*,tested*,reviewed*,acked*,signed-off*,*
#   (another common)
#   trailer-order=fixes*,reported*,suggested*,original*,co-*,signed-off*,tested*,reviewed*,acked*,cc*,link*,*
#
# Or use _preserve_ (alias to *) to keep the order unchanged

DEFAULT_TRAILER_ORDER = '*'

LOREADDR = 'https://lore.kernel.org'

# Fallback values for every recognized [b4] git-config option.
DEFAULT_CONFIG = {
    'midmask': LOREADDR + '/r/%s',
    'linkmask': LOREADDR + '/r/%s',
    'trailer-order': DEFAULT_TRAILER_ORDER,
    'listid-preference': '*.feeds.kernel.org,*.linux.dev,*.kernel.org,*',
    'save-maildirs': 'no',
    # off: do not bother checking attestation
    # check: print an attaboy when attestation is found
    # softfail: print a warning when no attestation found
    # hardfail: exit with an error when no attestation found
    'attestation-policy': 'softfail',
    # How many days before we consider attestation too old?
    'attestation-staleness-days': '30',
    # Should we check DKIM signatures if we don't find any other attestation?
    'attestation-check-dkim': 'yes',
    # We'll use the default gnupg homedir, unless you set it here
    'attestation-gnupghome': None,
    # Do you like simple or fancy checkmarks?
    'attestation-checkmarks': 'fancy',
    # How long to keep things in cache before expiring (minutes)?
    'cache-expire': '10',
    # Used when creating summaries for b4 ty
    'thanks-commit-url-mask': None,
    # See thanks-pr-template.example
    'thanks-pr-template': None,
    # See thanks-am-template.example
    'thanks-am-template': None,
    # If this is not set, we'll use what we find in
    # git-config for gpg.program, and if that's not set,
    # we'll use "gpg" and hope for the better
    'gpgbin': None,
}

# This is where we store actual config
MAIN_CONFIG = None
# This is git-config user.*
USER_CONFIG = None

# Used for storing our requests session
REQSESSION = None
# Indicates that we've cleaned cache already
_CACHE_CLEANED = False


class LoreMailbox:
    msgid_map: dict
    series: dict
    covers: dict
    followups: list
    unknowns: list

    def __init__(self):
        self.msgid_map = dict()
        self.series = dict()
        self.covers = dict()
        self.trailer_map = dict()
        self.followups = list()
        self.unknowns = list()

    def __repr__(self):
        out = list()
        for key, lser in self.series.items():
            out.append(str(lser))
        out.append('--- Followups ---')
        for lmsg in self.followups:
            out.append('  %s' % lmsg.full_subject)
        out.append('--- Unknowns ---')
        for lmsg in self.unknowns:
            out.append('  %s' % lmsg.full_subject)

        return '\n'.join(out)

    def get_by_msgid(self, msgid):
        if msgid in self.msgid_map:
            return self.msgid_map[msgid]
        return None

    def partial_reroll(self, revision, sloppytrailers):
        """Reconstitute an incomplete series from the previous revision.

        When a contributor sends only the changed patches of a new revision
        as direct replies to the matching X/N patches of revision-1, this
        fills the holes in self.series[revision] with deep copies of the
        corresponding patches from the previous revision.  Modifies the
        series in place; returns None.
        """
        # Is it a partial reroll?
        # To qualify for a partial reroll:
        # 1. Needs to be version > 1
        # 2. Replies need to be to the exact X/N of the previous revision
        if revision <= 1 or revision - 1 not in self.series:
            return
        # Are existing patches replies to previous revisions with the same counter?
        pser = self.get_series(revision-1, sloppytrailers=sloppytrailers)
        lser = self.series[revision]
        sane = True
        for patch in lser.patches:
            if patch is None:
                continue
            if patch.in_reply_to is None or patch.in_reply_to not in self.msgid_map:
                logger.debug('Patch not sent as a reply-to')
                sane = False
                break
            ppatch = self.msgid_map[patch.in_reply_to]
            found = False
            # Walk up the reply chain looking for a patch with the same
            # counter/expected (X/N) position.
            while True:
                if patch.counter == ppatch.counter and patch.expected == ppatch.expected:
                    logger.debug('Found a previous matching patch in v%s', ppatch.revision)
                    found = True
                    break
                # Do we have another level up?
                if ppatch.in_reply_to is None or ppatch.in_reply_to not in self.msgid_map:
                    break
                ppatch = self.msgid_map[ppatch.in_reply_to]

            if not found:
                sane = False
                logger.debug('Patch not a reply to a patch with the same counter/expected (%s/%s != %s/%s)',
                             patch.counter, patch.expected, ppatch.counter, ppatch.expected)
                break

        if not sane:
            logger.debug('Not a sane partial reroll')
            return
        logger.info('Partial reroll detected, reconstituting from v%s', pser.revision)
        logger.debug('Reconstituting a partial reroll')
        at = 0
        # Fill holes in this revision with deep copies from the previous one,
        # tagging carried-over patches with the revision they came from.
        for patch in lser.patches:
            if pser.patches[at] is None:
                at += 1
                continue
            if patch is None:
                ppatch = copy.deepcopy(pser.patches[at])
                ppatch.revision = lser.revision
                ppatch.reroll_from_revision = pser.revision
                lser.patches[at] = ppatch
            else:
                # NOTE(review): patches natively present in this reroll get
                # reroll_from_revision set to the *current* revision (not
                # pser.revision) -- presumably intentional; verify.
                patch.reroll_from_revision = lser.revision
            at += 1
        # Index 0 is the cover-letter slot; a series is complete when every
        # actual patch slot is filled.
        if None not in lser.patches[1:]:
            lser.complete = True
            lser.partial_reroll = True
            if lser.patches[0] is not None:
                lser.has_cover = True
            lser.subject = pser.subject
            logger.debug('Reconstituted successfully')

    def get_series(self, revision=None, sloppytrailers=False, reroll=True):
        """Assemble and return the series object for a revision.

        :param revision: series revision to return; defaults to the highest
            revision seen in this mailbox
        :param sloppytrailers: passed to trailer matching; mismatched
            trailers are collected into lser.trailer_mismatches
        :param reroll: when True, attempt partial-reroll reconstitution for
            an incomplete series
        :return: the series with cover letter attached and follow-up
            trailers tallied, or None if the revision is unknown or all of
            its patches are missing
        """
        if revision is None:
            if not len(self.series):
                return None
            # Use the highest revision
            revision = max(self.series.keys())
        elif revision not in self.series.keys():
            return None

        lser = self.series[revision]

        # Is it empty?
        empty = True
        for lmsg in lser.patches:
            if lmsg is not None:
                empty = False
                break
        if empty:
            logger.critical('All patches in series v%s are missing.', lser.revision)
            return None

        if not lser.complete and reroll:
            self.partial_reroll(revision, sloppytrailers)

        # Grab our cover letter if we have one
        if revision in self.covers:
            lser.add_patch(self.covers[revision])
            lser.has_cover = True
        else:
            # Let's find the first patch with an in-reply-to and see if that
            # is our cover letter
            for member in lser.patches:
                if member is not None and member.in_reply_to is not None:
                    potential = self.get_by_msgid(member.in_reply_to)
                    if potential is not None and potential.has_diffstat and not potential.has_diff:
                        # This is *probably* the cover letter
                        lser.patches[0] = potential
                        lser.has_cover = True
                        break

        # Do we have any follow-ups?
        for fmsg in self.followups:
            logger.debug('Analyzing follow-up: %s (%s)', fmsg.full_subject, fmsg.fromemail)
            # If there are no trailers in this one, ignore it
            if not len(fmsg.trailers):
                logger.debug('  no trailers found, skipping')
                continue
            # Go up through the follow-ups and tally up trailers until
            # we either run out of in-reply-tos, or we find a patch in
            # one of our series
            if fmsg.in_reply_to is None:
                # Check if there's something matching in References
                refs = fmsg.msg.get('References', '')
                pmsg = None
                for ref in refs.split():
                    refid = ref.strip('<>')
                    if refid in self.msgid_map and refid != fmsg.msgid:
                        pmsg = self.msgid_map[refid]
                        break
                if pmsg is None:
                    # Can't find the message we're replying to here
                    continue
            elif fmsg.in_reply_to in self.msgid_map:
                pmsg = self.msgid_map[fmsg.in_reply_to]
            else:
                logger.debug('  missing message, skipping: %s', fmsg.in_reply_to)
                continue

            trailers, mismatches = fmsg.get_trailers(sloppy=sloppytrailers)
            for trailer in mismatches:
                lser.trailer_mismatches.add((trailer[0], trailer[1], fmsg.fromname, fmsg.fromemail))
            # Walk up the reply chain, accumulating trailers, until we hit an
            # actual patch (or a non-reply such as a cover letter).
            lvl = 1
            while True:
                logger.debug('%sParent: %s', ' ' * lvl, pmsg.full_subject)
                logger.debug('%sTrailers:', ' ' * lvl)
                for trailer in trailers:
                    logger.debug('%s%s: %s', ' ' * (lvl+1), trailer[0], trailer[1])
                if pmsg.has_diff and not pmsg.reply:
                    # We found the patch for these trailers
                    if pmsg.revision != revision:
                        # add this into our trailer map to carry over trailers from
                        # previous revisions to current revision if patch id did
                        # not change
                        if pmsg.pwhash:
                            if pmsg.pwhash not in self.trailer_map:
                                self.trailer_map[pmsg.pwhash] = list()
                            self.trailer_map[pmsg.pwhash] += trailers
                    pmsg.followup_trailers += trailers
                    break
                if not pmsg.reply:
                    # Could be a cover letter
                    pmsg.followup_trailers += trailers
                    break
                if pmsg.in_reply_to and pmsg.in_reply_to in self.msgid_map:
                    lvl += 1
                    for ptrailer in pmsg.trailers:
                        trailers.append(tuple(ptrailer + [pmsg]))
                    pmsg = self.msgid_map[pmsg.in_reply_to]
                    continue
                break

        # Carry over trailers from previous series if patch/metadata did not change
        for lmsg in lser.patches:
            if lmsg is None or lmsg.pwhash is None:
                continue
            if lmsg.pwhash in self.trailer_map:
                lmsg.followup_trailers += self.trailer_map[lmsg.pwhash]

        return lser

    def add_message(self, msg):
        """Sort an incoming email message into this mailbox's buckets.

        Wraps ``msg`` in a LoreMessage, records it in ``msgid_map``, then
        files it as exactly one of: a follow-up (reply), a cover letter
        (counter 0 with explicit counters or a diffstat), a patch (has a
        diff), or an unknown.  Messages whose msgid was already seen are
        dropped.  For patches with an inferred revision, attempts to fix
        the revision by looking at the upthread cover letter or patch 1.

        :param msg: an email.message.Message (or compatible) object
        """
        msgid = LoreMessage.get_clean_msgid(msg)
        if msgid in self.msgid_map:
            logger.debug('Already have a message with this msgid, skipping %s', msgid)
            return

        lmsg = LoreMessage(msg)
        logger.debug('Looking at: %s', lmsg.full_subject)
        self.msgid_map[lmsg.msgid] = lmsg

        if lmsg.reply:
            # We'll figure out where this belongs later
            logger.debug('  adding to followups')
            self.followups.append(lmsg)
            return

        if lmsg.counter == 0 and (not lmsg.counters_inferred or lmsg.has_diffstat):
            # Cover letter
            # Add it to covers -- we'll deal with them later
            logger.debug('  adding as v%s cover letter', lmsg.revision)
            self.covers[lmsg.revision] = lmsg
            return

        if lmsg.has_diff:
            if lmsg.revision not in self.series:
                if lmsg.revision_inferred and lmsg.in_reply_to:
                    # We have an inferred revision here.
                    # Do we have an upthread cover letter that specifies a revision?
                    irt = self.get_by_msgid(lmsg.in_reply_to)
                    if irt is not None and irt.has_diffstat and not irt.has_diff:
                        # Yes, this is very likely our cover letter
                        logger.debug('  fixed revision to v%s', irt.revision)
                        lmsg.revision = irt.revision
                    # alternatively, see if upthread is patch 1
                    elif lmsg.counter > 0 and irt is not None and irt.has_diff and irt.counter == 1:
                        logger.debug('  fixed revision to v%s', irt.revision)
                        lmsg.revision = irt.revision

            # Run our check again
            if lmsg.revision not in self.series:
                self.series[lmsg.revision] = LoreSeries(lmsg.revision, lmsg.expected)
                if len(self.series) > 1:
                    logger.debug('Found new series v%s', lmsg.revision)

            # Attempt to auto-number series from the same author who did not bother
            # to set v2, v3, etc in the patch revision
            if (lmsg.counter == 1 and lmsg.counters_inferred
                    and not lmsg.reply and lmsg.lsubject.patch and not lmsg.lsubject.resend):
                # If we already have a patch 1 from the same author in this slot,
                # and it predates this one, assume this is a new, unnumbered reroll
                omsg = self.series[lmsg.revision].patches[lmsg.counter]
                if (omsg is not None and omsg.counters_inferred and lmsg.fromemail == omsg.fromemail
                        and omsg.date < lmsg.date):
                    lmsg.revision = len(self.series) + 1
                    self.series[lmsg.revision] = LoreSeries(lmsg.revision, lmsg.expected)
                    logger.info('Assuming new revision: v%s (%s)', lmsg.revision, lmsg.full_subject)
            logger.debug('  adding as patch')
            self.series[lmsg.revision].add_patch(lmsg)
            return

        logger.debug('  adding to unknowns')
        self.unknowns.append(lmsg)


class LoreSeries:
    """A single revision of a patch series collected from a mailbox.

    Patches live in a 1-based list: slot 0 is reserved for the cover
    letter, slots 1..expected hold the patches themselves.
    """

    def __init__(self, revision, expected):
        """
        :param revision: series revision (the N in [PATCH vN])
        :param expected: number of patches expected in this series
        """
        self.revision = revision
        self.expected = expected
        # Slot 0 is reserved for the cover letter
        self.patches = [None] * (expected+1)
        self.followups = list()
        self.trailer_mismatches = set()
        self.complete = False
        self.has_cover = False
        self.partial_reroll = False
        self.subject = '(untitled)'
        # Used for base matching; populated lazily by check_applies_clean
        self._indexes = None

    def __repr__(self):
        out = list()
        out.append('- Series: [v%s] %s' % (self.revision, self.subject))
        out.append('  revision: %s' % self.revision)
        out.append('  expected: %s' % self.expected)
        out.append('  complete: %s' % self.complete)
        out.append('  has_cover: %s' % self.has_cover)
        out.append('  partial_reroll: %s' % self.partial_reroll)
        out.append('  patches:')
        at = 0
        for member in self.patches:
            if member is not None:
                out.append('    [%s/%s] %s' % (at, self.expected, member.subject))
            else:
                out.append('    [%s/%s] MISSING' % (at, self.expected))
            at += 1

        return '\n'.join(out)

    def add_patch(self, lmsg):
        """Insert a patch message into its (counter-indexed) series slot.

        Grows the patch list when the message declares a higher expected
        count; on slot collisions prefers real patches over replies and
        explicit counters over inferred ones; refreshes the completeness
        flag and the series subject (cover letter if present, else patch 1).

        :param lmsg: the LoreMessage to file into this series
        """
        while len(self.patches) < lmsg.expected + 1:
            self.patches.append(None)
        self.expected = lmsg.expected
        if self.patches[lmsg.counter] is not None:
            # Okay, weird, is the one in there a reply?
            omsg = self.patches[lmsg.counter]
            if omsg.reply or (omsg.counters_inferred and not lmsg.counters_inferred):
                # Replace that one with this one
                logger.debug('  replacing existing: %s', omsg.subject)
                self.patches[lmsg.counter] = lmsg
        else:
            self.patches[lmsg.counter] = lmsg
        self.complete = not (None in self.patches[1:])
        if self.patches[0] is not None:
            # noinspection PyUnresolvedReferences
            self.subject = self.patches[0].subject
        elif self.patches[1] is not None:
            # noinspection PyUnresolvedReferences
            self.subject = self.patches[1].subject

    def get_slug(self, extended=False):
        """Build a date+author (optionally subject) slug for this series.

        :param extended: also include the author's localpart and the subject
        :return: slug string truncated to 100 chars, prefixed with vN_
            for any revision other than 1
        """
        # Find the first non-None entry
        lmsg = None
        for lmsg in self.patches:
            if lmsg is not None:
                break

        if lmsg is None:
            return 'undefined'

        prefix = lmsg.date.strftime('%Y%m%d')
        authorline = email.utils.getaddresses([str(x) for x in lmsg.msg.get_all('from', [])])[0]
        if extended:
            local = authorline[1].split('@')[0]
            unsafe = '%s_%s_%s' % (prefix, local, lmsg.subject)
            slug = re.sub(r'\W+', '_', unsafe).strip('_').lower()
        else:
            author = re.sub(r'\W+', '_', authorline[1]).strip('_').lower()
            slug = '%s_%s' % (prefix, author)

        if self.revision != 1:
            slug = 'v%s_%s' % (self.revision, slug)

        return slug[:100]

    def get_am_ready(self, noaddtrailers=False, covertrailers=False, trailer_order=None, addmysob=False,
                     addlink=False, linkmask=None, cherrypick=None, copyccs=False) -> list:
        """Prepare the series for git-am: apply trailers, report attestation.

        :param noaddtrailers: don't add any follow-up trailers to the patches
        :param covertrailers: also apply trailers sent to the cover letter
        :param trailer_order: ordering hint passed to get_am_message
        :param addmysob: append the configured user's Signed-off-by
        :param addlink: append a Link: trailer built from linkmask
        :param linkmask: printf-style mask receiving the patch msgid
        :param cherrypick: collection of 1-based patch numbers to include;
            None means include all
        :param copyccs: copy Cc'd addresses into the patch trailers
        :return: list of am-ready email messages
        :raises KeyError: when a cherrypicked patch is missing from the series
        """
        usercfg = get_user_config()
        config = get_main_config()

        if addmysob:
            if 'name' not in usercfg or 'email' not in usercfg:
                logger.critical('WARNING: Unable to add your Signed-off-by: git returned no user.name or user.email')
                addmysob = False

        attpolicy = config['attestation-policy']
        try:
            maxdays = int(config['attestation-staleness-days'])
        except ValueError:
            logger.info('WARNING: attestation-staleness-days must be an int')
            maxdays = 0

        # Loop through all patches and see if attestation is the same for all of them,
        # since it usually is
        attref = None
        attsame = True
        attmark = None
        attcrit = False
        if attpolicy != 'off':
            logger.info('Checking attestation on all messages, may take a moment...')
            for lmsg in self.patches[1:]:
                if lmsg is None:
                    attsame = False
                    break

                checkmark, trailers, attcrit = lmsg.get_attestation_trailers(attpolicy, maxdays)
                if attref is None:
                    attref = trailers
                    attmark = checkmark
                    continue
                if set(trailers) == set(attref):
                    continue
                attsame = False
                logger.debug('Attestation info is not the same')
                break

        at = 1
        msgs = list()
        logger.info('---')
        for lmsg in self.patches[1:]:
            if cherrypick is not None:
                if at not in cherrypick:
                    # Log before bumping the counter, so the message refers
                    # to the number of the patch actually being skipped
                    logger.debug('  skipped: [%s/%s] (not in cherrypick)', at, self.expected)
                    at += 1
                    continue
                if lmsg is None:
                    logger.critical('CRITICAL: [%s/%s] is missing, cannot cherrypick', at, self.expected)
                    raise KeyError('Cherrypick not in series')

            if lmsg is not None:
                if self.has_cover and covertrailers and self.patches[0].followup_trailers:  # noqa
                    lmsg.followup_trailers += self.patches[0].followup_trailers  # noqa
                if addmysob:
                    lmsg.followup_trailers.append(('Signed-off-by',
                                                   '%s <%s>' % (usercfg['name'], usercfg['email']), None, None))
                if addlink:
                    lmsg.followup_trailers.append(('Link', linkmask % lmsg.msgid, None, None))

                if attsame and not attcrit:
                    if attmark:
                        logger.info('  %s %s', attmark, lmsg.get_am_subject())
                    else:
                        logger.info('  %s', lmsg.get_am_subject())

                else:
                    checkmark, trailers, critical = lmsg.get_attestation_trailers(attpolicy, maxdays)
                    if checkmark:
                        logger.info('  %s %s', checkmark, lmsg.get_am_subject())
                    else:
                        logger.info('  %s', lmsg.get_am_subject())

                    for trailer in trailers:
                        logger.info('    %s', trailer)

                    if critical:
                        import sys
                        logger.critical('---')
                        logger.critical('Exiting due to attestation-policy: hardfail')
                        sys.exit(128)

                add_trailers = True
                if noaddtrailers:
                    add_trailers = False
                msg = lmsg.get_am_message(add_trailers=add_trailers, trailer_order=trailer_order, copyccs=copyccs)
                msgs.append(msg)
            else:
                logger.error('  ERROR: missing [%s/%s]!', at, self.expected)
            at += 1

        if attpolicy == 'off':
            return msgs

        if attsame and attref:
            logger.info('  ---')
            for trailer in attref:
                logger.info('  %s', trailer)

        if not (can_dkim and can_patatt):
            logger.info('  ---')
            if not can_dkim:
                logger.info('  NOTE: install dkimpy for DKIM signature verification')
            if not can_patatt:
                logger.info('  NOTE: install patatt for end-to-end signature verification')

        return msgs

    def check_applies_clean(self, gitdir: str, at: Optional[str] = None) -> Tuple[int, list]:
        """Count how many of the series' pre-image blobs match a tree.

        :param gitdir: path to the git repository
        :param at: tree-ish to check against (defaults to HEAD)
        :return: tuple of (number of blob indexes checked,
            list of (filename, blobhash) mismatches)
        """
        if self._indexes is None:
            self._indexes = list()
            seenfiles = set()
            for lmsg in self.patches[1:]:
                if lmsg is None or lmsg.blob_indexes is None:
                    continue
                for fn, bh in lmsg.blob_indexes:
                    if fn in seenfiles:
                        # if we have seen this file once already, then it's a repeat patch
                        # and it's no longer going to match current hash
                        continue
                    seenfiles.add(fn)
                    if set(bh) == {'0'}:
                        # New file, will for sure apply clean
                        continue
                    self._indexes.append((fn, bh))

        mismatches = list()
        if at is None:
            at = 'HEAD'
        for fn, bh in self._indexes:
            ecode, out = git_run_command(gitdir, ['ls-tree', at, fn])
            if ecode == 0 and len(out):
                chunks = out.split()
                if chunks[2].startswith(bh):
                    logger.debug('%s hash: matched', fn)
                    continue
                else:
                    logger.debug('%s hash: %s (expected: %s)', fn, chunks[2], bh)
            else:
                # Couldn't get this file, continue
                logger.debug('Could not look up %s:%s', at, fn)
            mismatches.append((fn, bh))

        return len(self._indexes), mismatches

    def find_base(self, gitdir: str, branches: Optional[str] = None, maxdays: int = 30) -> Tuple[str, int, int]:
        """Find the best-matching base ref for this series in a repository.

        Uses git log --find-object on the pre-image blob hashes to locate
        the commit with the fewest mismatching files.

        :param gitdir: path to the git repository
        :param branches: limit the search to matching branches (--branches);
            search all refs when None
        :param maxdays: how many days back from the first patch's date to look
        :return: tuple of (described ref, number of blob indexes checked,
            number of mismatches at the best commit found)
        :raises IndexError: when no plausible base could be found
        """
        # Find the date of the first patch we have
        pdate = datetime.datetime.now()
        for lmsg in self.patches:
            if lmsg is None:
                continue
            pdate = lmsg.date
            break

        # Find latest commit on that date
        guntil = pdate.strftime('%Y-%m-%d')
        if branches:
            where = ['--branches', branches]
        else:
            where = ['--all']

        gitargs = ['log', '--pretty=oneline', '--until', guntil, '--max-count=1'] + where
        lines = git_get_command_lines(gitdir, gitargs)
        if not lines:
            raise IndexError
        commit = lines[0].split()[0]
        checked, mismatches = self.check_applies_clean(gitdir, commit)
        fewest = len(mismatches)
        if fewest > 0:
            since = pdate - datetime.timedelta(days=maxdays)
            gsince = since.strftime('%Y-%m-%d')
            logger.debug('Starting --find-object from %s to %s', gsince, guntil)
            best = commit
            for fn, bi in mismatches:
                logger.debug('Finding tree matching %s=%s in %s', fn, bi, where)
                gitargs = ['log', '--pretty=oneline', '--since', gsince, '--until', guntil,
                           '--find-object', bi] + where
                lines = git_get_command_lines(gitdir, gitargs)
                if not lines:
                    logger.debug('Could not find object %s in the tree', bi)
                    continue
                for line in lines:
                    commit = line.split()[0]
                    logger.debug('commit=%s', commit)
                    # We try both that commit and the one preceding it, in case it was a delete
                    # Keep track of the fewest mismatches
                    for tc in [commit, f'{commit}~1']:
                        sc, sm = self.check_applies_clean(gitdir, tc)
                        if len(sm) < fewest and len(sm) != sc:
                            fewest = len(sm)
                            best = tc
                            logger.debug('fewest=%s, best=%s', fewest, best)
                            if fewest == 0:
                                break
                        if fewest == 0:
                            break
                    if fewest == 0:
                        break
                if fewest == 0:
                    break
        else:
            best = commit
        if fewest == len(self._indexes):
            # None of the blobs matched
            raise IndexError

        lines = git_get_command_lines(gitdir, ['describe', '--all', best])
        if len(lines):
            return lines[0], len(self._indexes), fewest

        raise IndexError

    def make_fake_am_range(self, gitdir):
        """Create a commit range that this series should apply to cleanly.

        Builds a synthetic tree in a temporary worktree out of the blob
        pre-images referenced by the patches, commits it, then runs git am
        on top of it.  The resulting range is cached by the first patch's
        msgid (skipped for partial rerolls) and validated before reuse.

        :param gitdir: path to the git repository
        :return: (start_commit, end_commit) tuple, or (None, None) on failure
        """
        start_commit = end_commit = None
        # Use the msgid of the first non-None patch in the series
        msgid = None
        for lmsg in self.patches:
            if lmsg is not None:
                msgid = lmsg.msgid
                break
        if msgid is None:
            logger.critical('Cannot operate on an empty series')
            return None, None
        cachedata = get_cache(msgid, suffix='fakeam')
        if cachedata and not self.partial_reroll:
            stalecache = False
            chunks = cachedata.strip().split()
            if len(chunks) == 2:
                start_commit, end_commit = chunks
            else:
                stalecache = True
            if start_commit is not None and end_commit is not None:
                # Make sure they are still there
                ecode, out = git_run_command(gitdir, ['cat-file', '-e', start_commit])
                if ecode > 0:
                    stalecache = True
                else:
                    ecode, out = git_run_command(gitdir, ['cat-file', '-e', end_commit])
                    if ecode > 0:
                        stalecache = True
                    else:
                        logger.debug('Using previously generated range')
                        return start_commit, end_commit

            if stalecache:
                logger.debug('Stale cache for [v%s] %s', self.revision, self.subject)
                save_cache(None, msgid, suffix='fakeam')

        logger.info('Preparing fake-am for v%s: %s', self.revision, self.subject)
        with git_temp_worktree(gitdir):
            # We are in a temporary chdir at this time, so writing to a known file should be safe
            mbxf = '.__git-am__'
            mbx = mailbox.mbox(mbxf)
            # Logic largely borrowed from gj_tools
            seenfiles = set()
            for lmsg in self.patches[1:]:
                if lmsg is None:
                    logger.critical('ERROR: v%s series incomplete; unable to create a fake-am range', self.revision)
                    return None, None
                logger.debug('Looking at %s', lmsg.full_subject)
                if not lmsg.blob_indexes:
                    logger.critical('ERROR: some patches do not have indexes')
                    logger.critical('       unable to create a fake-am range')
                    return None, None
                for fn, fi in lmsg.blob_indexes:
                    if fn in seenfiles:
                        # We already processed this file, so this blob won't match
                        continue
                    seenfiles.add(fn)
                    if set(fi) == {'0'}:
                        # New file creation, nothing to do here
                        logger.debug('  New file: %s', fn)
                        continue
                    # Try to grab full ref_id of this hash
                    ecode, out = git_run_command(gitdir, ['rev-parse', fi])
                    if ecode > 0:
                        logger.critical('  ERROR: Could not find matching blob for %s (%s)', fn, fi)
                        logger.critical('         If you know on which tree this patchset is based,')
                        logger.critical('         add it as a remote and perform "git remote update"')
                        logger.critical('         in order to fetch the missing objects.')
                        return None, None
                    logger.debug('  Found matching blob for: %s', fn)
                    fullref = out.strip()
                    gitargs = ['update-index', '--add', '--cacheinfo', f'0644,{fullref},{fn}']
                    ecode, out = git_run_command(None, gitargs)
                    if ecode > 0:
                        logger.critical('  ERROR: Could not run update-index for %s (%s)', fn, fullref)
                        return None, None
                mbx.add(lmsg.msg.as_string(policy=emlpolicy).encode('utf-8'))

            mbx.close()
            ecode, out = git_run_command(None, ['write-tree'])
            if ecode > 0:
                logger.critical('ERROR: Could not write fake-am tree')
                return None, None
            treeid = out.strip()
            # At this point we have a worktree with files that should cleanly receive a git am
            gitargs = ['commit-tree', treeid + '^{tree}', '-F', '-']
            ecode, out = git_run_command(None, gitargs, stdin='Initial fake commit'.encode('utf-8'))
            if ecode > 0:
                logger.critical('ERROR: Could not commit-tree')
                return None, None
            start_commit = out.strip()
            git_run_command(None, ['reset', '--hard', start_commit])
            ecode, out = git_run_command(None, ['am', mbxf])
            if ecode > 0:
                logger.critical('ERROR: Could not fake-am version %s', self.revision)
                return None, None
            ecode, out = git_run_command(None, ['rev-parse', 'HEAD'])
            end_commit = out.strip()
            logger.info('  range: %.12s..%.12s', start_commit, end_commit)

        logger.debug('Saving into cache:')
        logger.debug('    %s..%s', start_commit, end_commit)
        save_cache(f'{start_commit} {end_commit}\n', msgid, suffix='fakeam')

        return start_commit, end_commit

    def save_cover(self, outfile):
        """Write the series cover letter (slot 0) to a file, sans trailers.

        :param outfile: path of the file to write the cover letter to
        """
        # noinspection PyUnresolvedReferences
        cover_msg = self.patches[0].get_am_message(add_trailers=False, trailer_order=None)
        with open(outfile, 'w') as fh:
            fh.write(cover_msg.as_string(policy=emlpolicy))
        logger.critical('Cover: %s', outfile)


class LoreMessage:
    def __init__(self, msg):
        """Wrap an email message and extract lore-relevant metadata.

        Parses the subject (revision/counter/expected), sender and date
        headers, picks the first text/plain or text/x-patch part as the
        body (preferring a later part that contains a diff), and derives
        diff/diffstat presence, the patchwork hash, blob indexes, and —
        for replies only — follow-up trailers.

        :param msg: an email.message.Message (or compatible) object
        """
        self.msg = msg
        self.msgid = None

        # Subject-based info
        self.lsubject = None
        self.full_subject = None
        self.subject = None
        self.reply = False
        self.revision = 1
        self.reroll_from_revision = None
        self.counter = 1
        self.expected = 1
        self.revision_inferred = True
        self.counters_inferred = True

        # Header-based info
        self.in_reply_to = None
        self.fromname = None
        self.fromemail = None
        self.date = None

        # Body and body-based info
        self.body = None
        self.charset = 'utf-8'
        self.has_diff = False
        self.has_diffstat = False
        self.trailers = list()
        self.followup_trailers = list()

        # These are populated by pr
        self.pr_base_commit = None
        self.pr_repo = None
        self.pr_ref = None
        self.pr_tip_commit = None
        self.pr_remote_tip_commit = None

        # Patchwork hash
        self.pwhash = None
        # Blob indexes
        self.blob_indexes = None

        self.msgid = LoreMessage.get_clean_msgid(self.msg)
        self.lsubject = LoreSubject(msg['Subject'])
        # Copy them into this object for convenience
        self.full_subject = self.lsubject.full_subject
        self.subject = self.lsubject.subject
        self.reply = self.lsubject.reply
        self.revision = self.lsubject.revision
        self.counter = self.lsubject.counter
        self.expected = self.lsubject.expected
        self.revision_inferred = self.lsubject.revision_inferred
        self.counters_inferred = self.lsubject.counters_inferred

        # Loaded when attestors property is called
        self._attestors = None

        # Handle [PATCH 6/5]
        if self.counter > self.expected:
            self.expected = self.counter

        self.in_reply_to = LoreMessage.get_clean_msgid(self.msg, header='In-Reply-To')

        try:
            fromdata = email.utils.getaddresses([LoreMessage.clean_header(str(x))
                                                 for x in self.msg.get_all('from', [])])[0]
            self.fromname = fromdata[0]
            self.fromemail = fromdata[1]
            # Fall back to the address when there is no display name
            if not len(self.fromname.strip()):
                self.fromname = self.fromemail
        except IndexError:
            pass

        msgdate = self.msg.get('Date')
        if msgdate:
            self.date = email.utils.parsedate_to_datetime(str(msgdate))
        else:
            # An email without a Date: field?
            self.date = datetime.datetime.now()
        # Force it to UTC if it's naive
        if self.date.tzinfo is None:
            self.date = self.date.replace(tzinfo=datetime.timezone.utc)

        # Detect unified diff headers, GIT binary patches, or diff --git lines
        diffre = re.compile(r'^(---.*\n\+\+\+|GIT binary patch|diff --git \w/\S+ \w/\S+)', re.M | re.I)
        # Detect a diffstat summary line ("N files changed, N insertions...")
        diffstatre = re.compile(r'^\s*\d+ file.*\d+ (insertion|deletion)', re.M | re.I)

        # walk until we find the first text/plain part
        mcharset = self.msg.get_content_charset()
        if not mcharset:
            mcharset = 'utf-8'
        self.charset = mcharset

        for part in msg.walk():
            cte = part.get_content_type()
            if cte.find('/plain') < 0 and cte.find('/x-patch') < 0:
                continue
            payload = part.get_payload(decode=True)
            if payload is None:
                continue
            pcharset = part.get_content_charset()
            if not pcharset:
                pcharset = mcharset
            try:
                payload = payload.decode(pcharset, errors='replace')
                self.charset = pcharset
            except LookupError:
                # what kind of encoding is that?
                # Whatever, we'll use utf-8 and hope for the best
                payload = payload.decode('utf-8', errors='replace')
                part.set_param('charset', 'utf-8')
                self.charset = 'utf-8'
            if self.body is None:
                self.body = payload
                continue
            # If we already found a body, but we now find something that contains a diff,
            # then we prefer this part
            if diffre.search(payload):
                self.body = payload

        if self.body is None:
            # Woah, we didn't find any usable parts
            logger.debug('  No plain or patch parts found in message')
            logger.info('  Not plaintext: %s', self.full_subject)
            return

        if diffstatre.search(self.body):
            self.has_diffstat = True
        if diffre.search(self.body):
            self.has_diff = True
            self.pwhash = LoreMessage.get_patchwork_hash(self.body)
            self.blob_indexes = LoreMessage.get_indexes(self.body)

        # We only pay attention to trailers that are sent in reply
        if self.reply:
            trailers, others = LoreMessage.find_trailers(self.body, followup=True)
            for trailer in trailers:
                # These are commonly part of patch/commit metadata
                badtrailers = ('from', 'author', 'cc', 'to')
                if trailer[0].lower() not in badtrailers:
                    self.trailers.append(trailer)

    def get_trailers(self, sloppy=False):
        trailers = list()
        mismatches = set()

        for tname, tvalue, extdata in self.trailers:
            if sloppy or tname.lower() in ('fixes', 'obsoleted-by'):
                trailers.append((tname, tvalue, extdata, self))
                continue

            tmatch = False
            namedata = email.utils.getaddresses([tvalue])[0]
            tfrom = re.sub(r'\+[^@]+@', '@', namedata[1].lower())
            hfrom = re.sub(r'\+[^@]+@', '@', self.fromemail.lower())
            tlname = namedata[0].lower()
            hlname = self.fromname.lower()
            tchunks = tfrom.split('@')
            hchunks = hfrom.split('@')
            if tfrom == hfrom:
                logger.debug('  trailer exact email match')
                tmatch = True
            # See if domain part of one of the addresses is a subset of the other one,
            # which should match cases like @linux.intel.com and @intel.com
            elif (len(tchunks) == 2 and len(hchunks) == 2
                  and tchunks[0] == hchunks[0]
                  and (tchunks[1].find(hchunks[1]) >= 0 or hchunks[1].find(tchunks[1]) >= 0)):
                logger.debug('  trailer fuzzy email match')
                tmatch = True
            # Does the name match, at least?
            elif tlname == hlname:
                logger.debug('  trailer exact name match')
                tmatch = True
            # Finally, see if the header From has a comma in it and try to find all
            # parts in the trailer name
            elif hlname.find(',') > 0:
                nmatch = True
                for nchunk in hlname.split(','):
                    if hlname.find(nchunk.strip()) < 0:
                        nmatch = False
                        break
                if nmatch:
                    logger.debug('  trailer fuzzy name match')
                    tmatch = True
            if tmatch:
                trailers.append((tname, tvalue, extdata, self))
            else:
                mismatches.add((tname, tvalue, extdata, self))

        return trailers, mismatches

    @property
    def attestors(self):
        if self._attestors is not None:
            return self._attestors

        self._attestors = list()

        config = get_main_config()
        if config['attestation-policy'] == 'off':
            return self._attestors

        logger.debug('Loading attestation: %s', self.full_subject)
        if self.msg.get(DEVSIG_HDR):
            self._load_patatt_attestors()
        if self.msg.get('dkim-signature') and config['attestation-check-dkim'] == 'yes':
            self._load_dkim_attestors()

        logger.debug('Attestors: %s', len(self._attestors))
        return self._attestors

    def _load_dkim_attestors(self) -> None:
        """Verify DKIM-Signature headers and populate self._attestors.

        All DKIM-Signature headers are pulled off the message and tried
        one at a time, newest-first: each is re-appended, verified, and
        popped again on failure.  The first signature whose identity
        matches the From: address becomes the sole recorded attestor;
        if none match, every attestor tried is recorded instead.  No-op
        when dkimpy is unavailable (can_dkim is False).
        """
        if not can_dkim:
            logger.debug('Message has DKIM signatures, but can_dkim is off')
            return

        # Yank out all DKIM-Signature headers and try them in reverse order
        # until we come to a passing one
        dkhdrs = list()
        for header in list(self.msg._headers):  # noqa
            if header[0].lower() == 'dkim-signature':
                dkhdrs.append(header)
                self.msg._headers.remove(header) # noqa
        dkhdrs.reverse()

        seenatts = list()
        for hn, hval in dkhdrs:
            # Handle MIME encoded-word syntax or other types of header encoding if
            # present.
            if '?q?' in hval:
                hval = str(email.header.make_header(email.header.decode_header(hval)))
            errors = list()
            hdata = LoreMessage.get_parts_from_header(hval)
            logger.debug('Loading DKIM attestation for d=%s, s=%s', hdata['d'], hdata['s'])

            # i= (identity) is optional; fall back to the d= domain tag
            identity = hdata.get('i')
            if not identity:
                identity = hdata.get('d')
            # Prefer the t= timestamp as the signing time when present
            ts = hdata.get('t')
            signtime = None
            if ts:
                signtime = LoreAttestor.parse_ts(ts)
            else:
                # See if date is included in the h: field
                sh = hdata.get('h')
                if 'date' in sh.lower().split(':'):
                    signtime = self.date
            
            # Put just this signature back on the message before verifying
            self.msg._headers.append((hn, hval))  # noqa
            res = dkim.verify(self.msg.as_bytes(), logger=dkimlogger)

            attestor = LoreAttestorDKIM(res, identity, signtime, errors)
            logger.debug('DKIM verify results: %s=%s', identity, res)
            if attestor.check_identity(self.fromemail):
                # use this one, regardless of any other DKIM signatures
                self._attestors.append(attestor)
                return

            # Identity didn't match: take the header back off and keep trying
            self.msg._headers.pop(-1)  # noqa
            seenatts.append(attestor)

        # No exact domain matches, so return everything we have
        self._attestors += seenatts

    def _trim_body(self) -> None:
        """Trim self.body to the l= length declared in X-Developer-Signature.

        Content appended after the signed byte count is discarded, so a
        signature that only covers part of the body cannot be abused to
        sneak in extra content. No-op if there is no l= limit.
        """
        # Get the length specified in the X-Developer-Signature header
        xdsh = self.msg.get('X-Developer-Signature')
        if not xdsh:
            return
        matches = re.search(r'\s+l=(\d+)', xdsh)
        if not matches:
            return
        bl = int(matches.groups()[0])
        # i/m/p: presumably info dict, message body, patch from git-mailinfo
        # (helper defined elsewhere in this file) -- confirm against get_mailinfo
        i, m, p = get_mailinfo(self.msg.as_bytes(), scissors=False)
        bb = b''
        # Rebuild body+patch with CRLF endings, since l= counts CRLF bytes
        for line in re.sub(rb'[\r\n]*$', b'', m + p).split(b'\n'):
            bb += re.sub(rb'[\r\n]*$', b'', line) + b'\r\n'
        if len(bb) > bl:
            self.body = bb[:bl].decode()
            # This may have potentially resulted in in-body From/Subject being removed,
            # so make sure we account for this in the message headers
            self.lsubject.subject = self.subject = i.get('Subject')
            self.fromname = i.get('Author')
            self.fromemail = i.get('Email')

    def _load_patatt_attestors(self) -> None:
        """Validate patatt (X-Developer-Signature) signatures on the message
        and append a LoreAttestorPatatt for each result to self._attestors."""
        if not can_patatt:
            logger.debug('Message has %s headers, but can_patatt is off', DEVSIG_HDR)
            return

        # load our key sources if necessary
        ddir = get_data_dir()
        pdir = os.path.join(ddir, 'keyring')
        config = get_main_config()
        sources = config.get('keyringsrc')
        if not sources:
            # fallback to patatt's keyring if none is specified for b4
            patatt_config = patatt.get_config_from_git(r'patatt\..*', multivals=['keyringsrc'])
            sources = patatt_config.get('keyringsrc')
            if not sources:
                sources = ['ref:::.keys', 'ref:::.local-keys', 'ref::refs/meta/keyring:']
        if pdir not in sources:
            sources.append(pdir)

        # Push our logger and GPGBIN into patatt
        patatt.logger = logger
        patatt.GPGBIN = config['gpgbin']

        logger.debug('Loading patatt attestations with sources=%s', str(sources))

        success = False
        trim_body = False
        # First pass validates the message as-is; on failure a second (final)
        # pass retries with the body trimmed to the signed l= length
        while True:
            attestations = patatt.validate_message(self.msg.as_bytes(), sources, trim_body=trim_body)
            # Do we have any successes?
            for attestation in attestations:
                if attestation[0] == patatt.RES_VALID:
                    success = True
                    break
            if success:
                if trim_body:
                    # If we only succeeded after trimming the body, then we MUST set the body
                    # to that value, otherwise someone can append arbitrary content after the l= value
                    # limit message.
                    self._trim_body()
                break
            if not success and trim_body:
                break
            trim_body = True

        for result, identity, signtime, keysrc, keyalgo, errors in attestations:
            if keysrc and keysrc.startswith('(default keyring)/'):
                # Key came from the default GnuPG keyring; make sure the
                # signing identity actually appears among the key's UIDs
                fpr = keysrc.split('/', 1)[1]
                uids = get_gpg_uids(fpr)
                idmatch = False
                for uid in uids:
                    if uid.find(identity) >= 0:
                        idmatch = True
                        break
                if not idmatch:
                    # Take the first identity in the list and use that instead
                    parts = email.utils.parseaddr(uids[0])
                    identity = parts[1]

            if signtime:
                signdt = LoreAttestor.parse_ts(signtime)
            else:
                signdt = None
            attestor = LoreAttestorPatatt(result, identity, signdt, keysrc, keyalgo, errors)
            self._attestors.append(attestor)

    def get_attestation_trailers(self, attpolicy: str, maxdays: int = 0) -> Tuple[str, list, bool]:
        """Summarize attestation results as displayable trailer lines.

        :param attpolicy: attestation policy ('off', 'softfail', 'hardfail', ...)
        :param maxdays: if non-zero, demote passing attestors whose signature
            time drifts more than this many days from the message date
        :return: (checkmark, trailer lines, critical-failure flag)
        """
        atrailers = list()
        mark = None
        critical = False
        for att in self.attestors:
            # A passing attestation with too much time drift becomes a failure
            if att.passing and maxdays and not att.check_time_drift(self.date, maxdays):
                logger.debug('The time drift is too much, marking as non-passing')
                att.passing = False

            if att.passing:
                if not mark:
                    mark = att.checkmark
                if att.check_identity(self.fromemail):
                    atrailers.append('%s Signed: %s' % (att.checkmark, att.trailer))
                else:
                    atrailers.append('%s Signed: %s (From: %s)' % (att.checkmark, att.trailer,
                                                                   self.fromemail))
                continue

            # Failing attestor from here on
            if att.level == 'person':
                # Is it a person-trailer for which we have a key?
                if att.have_key:
                    # This was signed, and we have a key, but it's failing
                    atrailers.append('%s BADSIG: %s' % (att.checkmark, att.trailer))
                    mark = att.checkmark
                elif attpolicy in ('softfail', 'hardfail'):
                    atrailers.append('%s No key: %s' % (att.checkmark, att.trailer))
                    # A missing key is not critical even in hardfail
                    continue
            elif attpolicy in ('softfail', 'hardfail'):
                if not mark:
                    mark = att.checkmark
                atrailers.append('%s BADSIG: %s' % (att.checkmark, att.trailer))

            if attpolicy == 'hardfail':
                critical = True

        return mark, atrailers, critical

    def __repr__(self):
        """Multi-line debugging dump of this message's parsed state."""
        lines = ['msgid: %s' % self.msgid, str(self.lsubject)]
        lines += [
            '  fromname: %s' % self.fromname,
            '  fromemail: %s' % self.fromemail,
            '  date: %s' % str(self.date),
            '  in_reply_to: %s' % self.in_reply_to,
        ]
        # Header-based info
        lines.append('  --- begin body ---')
        lines.extend('  |%s' % bline for bline in self.body.split('\n'))
        lines.append('  --- end body ---')
        # Body and body-based info
        lines.append('  has_diff: %s' % self.has_diff)
        lines.append('  has_diffstat: %s' % self.has_diffstat)
        lines.append('  --- begin my trailers ---')
        lines.extend('  |%s' % str(trailer) for trailer in self.trailers)
        lines.append('  --- begin followup trailers ---')
        lines.extend('  |%s' % str(trailer) for trailer in self.followup_trailers)
        lines.append('  --- end trailers ---')
        lines.append('  --- begin attestors ---')
        lines.extend('  |%s' % str(attestor) for attestor in self.attestors)
        lines.append('  --- end attestors ---')
        return '\n'.join(lines)

    @staticmethod
    def clean_header(hdrval):
        if hdrval is None:
            return ''

        decoded = ''
        for hstr, hcs in email.header.decode_header(hdrval):
            if hcs is None:
                hcs = 'utf-8'
            try:
                decoded += hstr.decode(hcs, errors='replace')
            except LookupError:
                # Try as utf-u
                decoded += hstr.decode('utf-8', errors='replace')
            except (UnicodeDecodeError, AttributeError):
                decoded += hstr
        new_hdrval = re.sub(r'\n?\s+', ' ', decoded)
        return new_hdrval.strip()

    @staticmethod
    def get_parts_from_header(hstr: str) -> dict:
        hstr = re.sub(r'\s*', '', hstr)
        hdata = dict()
        for chunk in hstr.split(';'):
            parts = chunk.split('=', 1)
            if len(parts) < 2:
                continue
            hdata[parts[0]] = parts[1]
        return hdata

    @staticmethod
    def get_clean_msgid(msg, header='Message-Id'):
        """Return the message-id value from the given header without the
        surrounding angle brackets, or None if absent/malformed."""
        raw = msg.get(header)
        if not raw:
            return None
        matches = re.search(r'<([^>]+)>', LoreMessage.clean_header(raw))
        if not matches:
            return None
        return matches.groups()[0]

    @staticmethod
    def get_preferred_duplicate(msg1, msg2):
        """Pick between two copies of the same message based on which List-Id
        matches earliest in the configured listid-preference globs."""
        config = get_main_config()

        def _pref_index(msg):
            # Position of the first matching glob; messages without a
            # List-Id rank at the '*' catch-all position
            listid = LoreMessage.get_clean_msgid(msg, 'list-id')
            if not listid:
                return config['listid-preference'].index('*'), listid
            idx = 0
            for listglob in config['listid-preference']:
                if fnmatch.fnmatch(listid, listglob):
                    break
                idx += 1
            return idx, listid

        prefidx1, listid1 = _pref_index(msg1)
        prefidx2, listid2 = _pref_index(msg2)

        if prefidx1 <= prefidx2:
            logger.debug('Picked duplicate from preferred source: %s', listid1)
            return msg1
        logger.debug('Picked duplicate from preferred source: %s', listid2)
        return msg2

    @staticmethod
    def get_patchwork_hash(diff: str) -> str:
        """Generate a hash from a diff. Lifted verbatim from patchwork.

        Hunk offsets and -p1 top-level directory names are normalized before
        hashing, so the same change rebased to different line numbers or
        produced with different prefix directories hashes identically.
        """

        # Diff content lines start with one of these
        prefixes = ['-', '+', ' ']
        hashed = hashlib.sha1()

        for line in diff.split('\n'):
            if len(line) <= 0:
                continue

            hunk_match = HUNK_RE.match(line)
            filename_match = FILENAME_RE.match(line)

            if filename_match:
                # normalise -p1 top-directories
                if filename_match.group(1) == '---':
                    filename = 'a/'
                else:
                    filename = 'b/'
                filename += '/'.join(filename_match.group(2).split('/')[1:])

                line = filename_match.group(1) + ' ' + filename
            elif hunk_match:
                # remove line numbers, but leave line counts
                def fn(x):
                    # Missing count defaults to 1, matching unified diff rules
                    if not x:
                        return 1
                    return int(x)

                line_nos = list(map(fn, hunk_match.groups()))
                line = '@@ -%d +%d @@' % tuple(line_nos)
            elif line[0] in prefixes:
                # if we have a +, - or context line, leave as-is
                pass
            else:
                # other lines are ignored
                continue

            hashed.update((line + '\n').encode('utf-8'))

        return hashed.hexdigest()

    @staticmethod
    def get_indexes(diff: str) -> Set[tuple]:
        indexes = set()
        curfile = None
        for line in diff.split('\n'):
            if line.find('diff ') != 0 and line.find('index ') != 0:
                continue
            matches = re.search(r'^diff\s+--git\s+\w/(.*)\s+\w/(.*)$', line)
            if matches and matches.groups()[0] == matches.groups()[1]:
                curfile = matches.groups()[0]
                continue
            matches = re.search(r'^index\s+([0-9a-f]+)\.\.[0-9a-f]+.*$', line)
            if matches and curfile is not None:
                indexes.add((curfile, matches.groups()[0]))
        return indexes

    @staticmethod
    def find_trailers(body, followup=False):
        """Split a message body into trailer entries and all other lines.

        :param body: the message body text
        :param followup: when True, only accept person-type trailers (value
            contains an email address) plus a few known non-person trailers
        :return: (trailers, others) where trailers is a list of
            [name, value, extinfo] entries (extinfo is the optional bracketed
            continuation line or None) and others is a list of remaining lines
        """
        # Trailer-looking names we should never treat as trailers
        ignores = {'phone', 'email'}
        # Pseudo-headers only valid before any other content
        headers = {'subject', 'date', 'from'}
        # Trailers whose value is not expected to contain an email address
        nonperson = {'fixes', 'subject', 'date', 'link', 'buglink', 'obsoleted-by'}
        # Ignore everything below standard email signature marker
        body = body.split('\n-- \n', 1)[0].strip() + '\n'
        # Fix some more common copypasta trailer wrapping
        # Fixes: abcd0123 (foo bar
        # baz quux)
        body = re.sub(r'^(\S+:\s+[0-9a-f]+\s+\([^)]+)\n([^\n]+\))', r'\1 \2', body, flags=re.M)
        # Signed-off-by: Long Name
        # <email.here@example.com>
        body = re.sub(r'^(\S+:\s+[^<]+)\n(<[^>]+>)$', r'\1 \2', body, flags=re.M)
        # Signed-off-by: Foo foo <foo@foo.com>
        # [for the thing that the thing is too long the thing that is
        # thing but thing]
        # (too false-positivey, commented out)
        # body = re.sub(r'^(\[[^]]+)\n([^]]+]$)', r'\1 \2', body, flags=re.M)
        trailers = list()
        others = list()
        was_trailer = False
        for line in body.split('\n'):
            line = line.strip('\r')
            matches = re.search(r'^\s*(\w\S+):\s+(\S.*)', line, flags=re.I)
            if matches:
                groups = list(matches.groups())
                # We only accept headers if we haven't seen any non-trailer lines
                tname = groups[0].lower()
                if tname in ignores:
                    logger.debug('Ignoring known non-trailer: %s', line)
                    continue
                if len(others) and tname in headers:
                    logger.debug('Ignoring %s (header after other content)', line)
                    continue
                if followup:
                    mperson = re.search(r'\S+@\S+\.\S+', groups[1])
                    if not mperson and tname not in nonperson:
                        logger.debug('Ignoring %s (not a recognized non-person trailer)', line)
                        continue
                was_trailer = True
                # Third slot is reserved for an optional [extended info] line
                groups.append(None)
                trailers.append(groups)
                continue
            # Is it an extended info line, e.g.:
            # Signed-off-by: Foo Foo <foo@foo.com>
            # [for the foo bits]
            if len(line) > 2 and line[0] == '[' and line[-1] == ']' and was_trailer:
                trailers[-1][2] = line
                was_trailer = False
                continue
            was_trailer = False
            others.append(line)

        return trailers, others

    @staticmethod
    def get_body_parts(body):
        """Split a patch email body into its constituent parts.

        :return: (githeaders, message, trailers, basement, signature):
            githeaders -- in-body git headers (From:, Subject:, ...) found in
                the first paragraph, as trailer-style lists
            message -- the commit message text
            trailers -- trailer entries found in the last paragraph
            basement -- everything below the '---' separator (or first diff)
            signature -- conformant email signature below the '-- ' marker
        """
        # remove any starting/trailing blank lines
        body = body.replace('\r', '')
        body = body.strip('\n')
        # Extra git-relevant headers, like From:, Subject:, Date:, etc
        githeaders = list()
        # commit message
        message = ''
        # everything below the ---
        basement = ''
        # conformant signature --\s\n
        signature = ''
        sparts = body.rsplit('\n-- \n', 1)
        if len(sparts) > 1:
            signature = sparts[1]
            body = sparts[0].rstrip('\n')

        parts = re.split('^---\n', body, maxsplit=1, flags=re.M)
        if len(parts) == 2:
            basement = parts[1].rstrip('\n')
        elif body.find('\ndiff ') >= 0:
            # No --- separator, but there is an inline diff; treat it as
            # the basement
            parts = body.split('\ndiff ', 1)
            if len(parts) == 2:
                parts[1] = 'diff ' + parts[1]
            basement = parts[1].rstrip('\n')

        mbody = parts[0].strip('\n')

        # Split into paragraphs
        bpara = mbody.split('\n\n')

        # Is every line of the first part in a header format?
        mparts = list()
        h, o = LoreMessage.find_trailers(bpara[0])
        if len(o):
            # Not everything was a header, so we don't treat it as headers
            mparts.append(bpara[0])
        else:
            githeaders = h

        # Any lines of the last part match the header format?
        trailers, nlines = LoreMessage.find_trailers(bpara[-1])

        if len(bpara) == 1:
            if githeaders == trailers:
                # This is a message that consists of just trailers?
                githeaders = list()
            if nlines:
                message = '\n'.join(nlines)
            return githeaders, message, trailers, basement, signature

        # Add all parts between first and last to mparts
        if len(bpara) > 2:
            mparts += bpara[1:-1]

        if len(nlines):
            # Add them as the last part
            mparts.append('\n'.join(nlines))

        message = '\n\n'.join(mparts)

        return githeaders, message, trailers, basement, signature

    def fix_trailers(self, trailer_order=None, copyccs=False):
        """Rebuild self.body with followup trailers folded in.

        :param trailer_order: list of globs controlling trailer ordering;
            None selects DEFAULT_TRAILER_ORDER, 'preserve'/'_preserve_' keeps
            the order in which trailers were collected
        :param copyccs: when True, add Cc: trailers for To/Cc recipients not
            already mentioned in an existing trailer
        """
        config = get_main_config()
        attpolicy = config['attestation-policy']

        bheaders, message, btrailers, basement, signature = LoreMessage.get_body_parts(self.body)
        # Now we add mix-in trailers
        trailers = btrailers + self.followup_trailers

        if copyccs:
            alldests = email.utils.getaddresses([str(x) for x in self.msg.get_all('to', [])])
            alldests += email.utils.getaddresses([str(x) for x in self.msg.get_all('cc', [])])
            # Sort by domain name, then local
            alldests.sort(key=lambda x: x[1].find('@') > 0 and x[1].split('@')[1] + x[1].split('@')[0] or x[1])
            for pair in alldests:
                found = False
                for ftr in trailers:
                    if ftr[1].lower().find(pair[1].lower()) >= 0:
                        # already present
                        found = True
                        break

                if not found:
                    if len(pair[0]):
                        trailers.append(('Cc', f'{pair[0]} <{pair[1]}>', None, None))  # noqa
                    else:
                        trailers.append(('Cc', pair[1], None, None))  # noqa

        fixtrailers = list()
        if trailer_order is None:
            trailer_order = DEFAULT_TRAILER_ORDER
        elif trailer_order in ('preserve', '_preserve_'):
            # A single '*' glob matches everything in arrival order
            trailer_order = '*'

        # For each ordering glob, pull in all matching trailers that have
        # not already been added (order of globs determines final order)
        for trailermatch in trailer_order:
            for trailer in trailers:
                if list(trailer[:3]) in fixtrailers:
                    # Dupe
                    continue
                if fnmatch.fnmatch(trailer[0].lower(), trailermatch.strip()):
                    fixtrailers.append(list(trailer[:3]))
                    if trailer[:3] not in btrailers:
                        # Newly added trailer; if it came from a followup
                        # message, show that message's attestation marks
                        extra = ''
                        if trailer[3] is not None:
                            fmsg = trailer[3]
                            for attestor in fmsg.attestors:  # noqa
                                if attestor.passing:
                                    extra = ' (%s %s)' % (attestor.checkmark, attestor.trailer)
                                elif attpolicy in ('hardfail', 'softfail'):
                                    extra = ' (%s %s)' % (attestor.checkmark, attestor.trailer)
                                    if attpolicy == 'hardfail':
                                        import sys
                                        logger.critical('---')
                                        logger.critical('Exiting due to attestation-policy: hardfail')
                                        sys.exit(1)

                        logger.info('    + %s: %s%s', trailer[0], trailer[1], extra)
                    else:
                        logger.debug('    . %s: %s', trailer[0], trailer[1])

        # Reconstitute the message
        self.body = ''
        if bheaders:
            for bheader in bheaders:
                # There is no [extdata] in git headers, so we ignore bheader[2]
                self.body += '%s: %s\n' % (bheader[0], bheader[1])
            self.body += '\n'

        if len(message):
            self.body += message.rstrip('\r\n') + '\n'
            if len(fixtrailers):
                self.body += '\n'

        if len(fixtrailers):
            for trailer in fixtrailers:
                self.body += '%s: %s\n' % (trailer[0], trailer[1])
                if trailer[2]:
                    self.body += '%s\n' % trailer[2]
        if len(basement):
            self.body += '---\n'
            self.body += basement.rstrip('\r\n') + '\n\n'
        if len(signature):
            self.body += '-- \n'
            self.body += signature.rstrip('\r\n') + '\n\n'

    def get_am_subject(self, indicate_reroll=True):
        """Return the subject reformatted as a clean '[PATCH ...] subject'
        line, optionally showing a vN->vM reroll indicator."""
        prefixes = ['PATCH']
        if self.lsubject.rfc:
            prefixes.append('RFC')
        if self.reroll_from_revision:
            if not indicate_reroll:
                prefixes.append('v%d' % self.revision)
            elif self.reroll_from_revision != self.revision:
                prefixes.append('v%d->v%d' % (self.reroll_from_revision, self.revision))
            else:
                # Same-revision reroll: pad so columns line up with the
                # 'vN->vM' form used by other patches in the series
                prefixes.append(' %s  v%d' % (' ' * len(str(self.reroll_from_revision)), self.revision))
        elif not self.revision_inferred:
            prefixes.append('v%d' % self.revision)
        if not self.lsubject.counters_inferred:
            prefixes.append('%d/%d' % (self.lsubject.counter, self.lsubject.expected))

        return '[%s] %s' % (' '.join(prefixes), self.lsubject.subject)

    def get_am_message(self, add_trailers=True, trailer_order=None, copyccs=False):
        """Return an EmailMessage suitable for feeding to git-am.

        :param add_trailers: fold followup trailers into the body first
        :param trailer_order: passed through to fix_trailers()
        :param copyccs: passed through to fix_trailers()
        """
        if add_trailers:
            self.fix_trailers(trailer_order=trailer_order, copyccs=copyccs)
        am_msg = email.message.EmailMessage()
        am_msg.set_payload(self.body.encode())
        am_msg.add_header('Subject', self.get_am_subject(indicate_reroll=False))
        if self.fromname:
            am_msg.add_header('From', f'{self.fromname} <{self.fromemail}>')
        else:
            am_msg.add_header('From', self.fromemail)

        # Add select headers from the original message
        for hname in AMHDRS:
            hval = self.msg.get(hname)
            if not hval:
                continue
            hval = LoreMessage.clean_header(hval)
            try:
                am_msg.add_header(hname, hval)
            except Exception:
                # A broad except to handle any potential weird header
                # conditions; was a bare "except:", which would also have
                # swallowed SystemExit and KeyboardInterrupt
                pass
        am_msg.set_charset('utf-8')
        return am_msg


class LoreSubject:
    """Parsed representation of a patch email subject line.

    Extracts bracketed prefixes ([PATCH], [RFC], vN, n/m counters) and
    exposes them as attributes alongside the cleaned-up subject text.
    """

    def __init__(self, subject):
        # Subject-based info
        self.full_subject = None       # cleaned-up original subject
        self.subject = None            # subject with bracketed prefixes removed
        self.reply = False             # looks like a Re:/Aw:/Fwd: reply
        self.resend = False            # had a RESEND prefix
        self.patch = False             # had a PATCH prefix
        self.rfc = False               # had an RFC prefix
        self.revision = 1              # series revision (vN); defaults to 1
        self.counter = 1               # patch number within the series
        self.expected = 1              # expected number of patches in series
        self.revision_inferred = True  # True when no explicit vN was present
        self.counters_inferred = True  # True when no explicit n/m was present
        self.prefixes = list()         # all bracketed prefix chunks, in order

        subject = re.sub(r'\s+', ' ', LoreMessage.clean_header(subject)).strip()
        self.full_subject = subject

        # Is it a reply?
        if re.search(r'^(Re|Aw|Fwd):', subject, re.I) or re.search(r'^\w{2,3}:\s*\[', subject):
            self.reply = True
            self.subject = subject
            # We don't care to parse the rest
            return

        # Remove any brackets inside brackets
        while True:
            oldsubj = subject
            subject = re.sub(r'\[([^]]*)\[([^\[\]]*)]', r'[\1\2]', subject)
            subject = re.sub(r'\[([^]]*)]([^\[\]]*)]', r'[\1\2]', subject)
            if oldsubj == subject:
                break

        # Find all [foo] in the title
        while subject.find('[') == 0:
            matches = re.search(r'^\[([^]]*)]', subject)
            if not matches:
                break

            bracketed = matches.groups()[0].strip()
            # Fix [PATCHv3] to be properly [PATCH v3]
            bracketed = re.sub(r'(patch)(v\d+)', r'\1 \2', bracketed, flags=re.I)

            for chunk in bracketed.split():
                # Remove any trailing commas or semicolons
                chunk = chunk.strip(',;')
                if re.search(r'^\d{1,3}/\d{1,3}$', chunk):
                    counters = chunk.split('/')
                    self.counter = int(counters[0])
                    self.expected = int(counters[1])
                    self.counters_inferred = False
                elif re.search(r'^v\d+$', chunk, re.IGNORECASE):
                    self.revision = int(chunk[1:])
                    self.revision_inferred = False
                elif chunk.lower().find('rfc') == 0:
                    self.rfc = True
                elif chunk.lower().find('resend') == 0:
                    self.resend = True
                elif chunk.lower().find('patch') == 0:
                    self.patch = True
                self.prefixes.append(chunk)
            subject = re.sub(r'^\s*\[[^]]*]\s*', '', subject)
        self.subject = subject

    def get_slug(self):
        """Return a filesystem-safe slug like '0002_fix_the_thing'."""
        unsafe = '%04d_%s' % (self.counter, self.subject)
        return re.sub(r'\W+', '_', unsafe).strip('_').lower()

    def __repr__(self):
        """Multi-line debugging dump of the parsed subject state."""
        out = list()
        out.append('  full_subject: %s' % self.full_subject)
        out.append('  subject: %s' % self.subject)
        out.append('  reply: %s' % self.reply)
        out.append('  resend: %s' % self.resend)
        out.append('  patch: %s' % self.patch)
        out.append('  rfc: %s' % self.rfc)
        out.append('  revision: %s' % self.revision)
        out.append('  revision_inferred: %s' % self.revision_inferred)
        out.append('  counter: %s' % self.counter)
        out.append('  expected: %s' % self.expected)
        out.append('  counters_inferred: %s' % self.counters_inferred)
        out.append('  prefixes: %s' % ', '.join(self.prefixes))

        return '\n'.join(out)


class LoreAttestor:
    """Base class for attestation results attached to a message.

    Subclasses (DKIM, patatt) fill in mode/level/identity/etc from their
    specific attestation mechanism.
    """
    mode: Optional[str]
    level: Optional[str]
    identity: Optional[str]
    signtime: Optional[any]
    keysrc: Optional[str]
    keyalgo: Optional[str]
    passing: bool
    have_key: bool
    errors: list

    def __init__(self) -> None:
        self.mode = None
        self.level = None
        self.identity = None
        self.signtime = None
        self.keysrc = None
        self.keyalgo = None
        self.passing = False
        self.have_key = False
        self.errors = list()

    @property
    def checkmark(self) -> str:
        """Pass/fail indicator, fancy or simple per configuration."""
        config = get_main_config()
        if config['attestation-checkmarks'] == 'fancy':
            if self.passing:
                return ATT_PASS_FANCY
            return ATT_FAIL_FANCY
        if self.passing:
            return ATT_PASS_SIMPLE
        return ATT_FAIL_SIMPLE

    @property
    def trailer(self):
        """Short 'algo/identity' descriptor used in attestation trailers."""
        if self.keyalgo:
            mode = self.keyalgo
        else:
            mode = self.mode

        return '%s/%s' % (mode, self.identity)

    def check_time_drift(self, emldate, maxdays: int = 30) -> bool:
        """Return True if the signing time is within maxdays of the message
        Date; records an error and returns False otherwise."""
        if not self.passing or self.signtime is None:
            return False

        maxdrift = datetime.timedelta(days=maxdays)

        sdrift = self.signtime - emldate
        if sdrift > maxdrift:
            self.errors.append('Time drift between Date and t too great (%s)' % sdrift)
            return False

        logger.debug('PASS : time drift between Date and t (%s)', sdrift)
        return True

    def check_identity(self, emlfrom: str) -> bool:
        """Return True if the signing identity matches the From: address.

        Domain-level attestors match on the domain part only; others require
        an exact match. Mismatches are recorded in self.errors.
        """
        if not self.passing or not emlfrom:
            return False

        if self.level == 'domain':
            if emlfrom.endswith('@' + self.identity):
                logger.debug('PASS : sig domain %s matches from identity %s', self.identity, emlfrom)
                return True
            self.errors.append('signing domain %s does not match From: %s' % (self.identity, emlfrom))
            return False

        if emlfrom == self.identity:
            logger.debug('PASS : sig identity %s matches from identity %s', self.identity, emlfrom)
            return True
        self.errors.append('signing identity %s does not match From: %s' % (self.identity, emlfrom))
        return False

    @staticmethod
    def parse_ts(ts: Optional[str]):
        """Parse a unix timestamp string into an aware UTC datetime, or None."""
        try:
            # datetime.utcfromtimestamp() is deprecated as of python-3.12;
            # fromtimestamp() with an explicit UTC timezone is equivalent
            return datetime.datetime.fromtimestamp(int(ts), tz=datetime.timezone.utc)
        except:  # noqa
            logger.debug('Failed parsing t=%s', ts)
        return None

    def __repr__(self):
        out = list()
        out.append('    mode: %s' % self.mode)
        out.append('   level: %s' % self.level)
        out.append('identity: %s' % self.identity)
        out.append('signtime: %s' % self.signtime)
        out.append('  keysrc: %s' % self.keysrc)
        out.append(' keyalgo: %s' % self.keyalgo)
        out.append(' passing: %s' % self.passing)
        out.append('have_key: %s' % self.have_key)
        out.append('  errors: %s' % ','.join(self.errors))
        return '\n'.join(out)


class LoreAttestorDKIM(LoreAttestor):
    """Domain-level attestation result derived from a DKIM signature."""

    def __init__(self, passing: bool, identity: str, signtime: Optional[any], errors: list) -> None:
        super().__init__()
        self.mode = 'DKIM'
        self.level = 'domain'
        self.keysrc = 'DNS'
        self.passing = passing
        self.errors = errors
        self.signtime = signtime
        # The i=/d= identity may carry a leading @; strip it
        self.identity = identity.lstrip('@')


class LoreAttestorPatatt(LoreAttestor):
    """Person-level attestation result derived from a patatt signature."""

    def __init__(self, result: bool, identity: str, signtime: Optional[any], keysrc: str, keyalgo: str,
                 errors: list) -> None:
        super().__init__()
        self.mode = 'patatt'
        self.level = 'person'
        self.identity = identity
        self.signtime = signtime
        self.keysrc = keysrc
        self.keyalgo = keyalgo
        self.errors = errors
        # RES_VALID means a good signature with a known key; anything at or
        # above RES_BADSIG means we at least had the key
        if result == patatt.RES_VALID:
            self.passing = True
        if result == patatt.RES_VALID or result >= patatt.RES_BADSIG:
            self.have_key = True


def _run_command(cmdargs: list, stdin: Optional[bytes] = None) -> Tuple[int, bytes, bytes]:
    """Run an external command and return (returncode, stdout, stderr).

    :param cmdargs: argv list for the command
    :param stdin: optional bytes fed to the process on stdin
    """
    # Use lazy logger args instead of eager %-formatting, consistent with
    # every other logger call in this file (join only runs if debug is on)
    logger.debug('Running %s', ' '.join(cmdargs))
    sp = subprocess.Popen(cmdargs, stdout=subprocess.PIPE, stdin=subprocess.PIPE, stderr=subprocess.PIPE)
    (output, error) = sp.communicate(input=stdin)

    return sp.returncode, output, error


def gpg_run_command(args: List[str], stdin: Optional[bytes] = None) -> Tuple[int, bytes, bytes]:
    """Invoke the configured gpg binary with standard batch flags plus args,
    returning (returncode, stdout, stderr)."""
    config = get_main_config()
    cmdargs = [config['gpgbin'], '--batch', '--no-auto-key-retrieve', '--no-auto-check-trustdb']
    homedir = config['attestation-gnupghome']
    if homedir is not None:
        cmdargs += ['--homedir', homedir]

    return _run_command(cmdargs + args, stdin=stdin)


def git_run_command(gitdir: Optional[str], args: List[str], stdin: Optional[bytes] = None,
                    logstderr: bool = False) -> Tuple[int, str]:
    """Run git (optionally against a specific repository) and return
    (returncode, decoded output); stderr is appended to the output when
    logstderr is True."""
    cmdargs = ['git', '--no-pager']
    if gitdir:
        # Accept a worktree path and point git at its .git directory
        dotgit = os.path.join(gitdir, '.git')
        if os.path.exists(dotgit):
            gitdir = dotgit
        cmdargs += ['--git-dir', gitdir]

    ecode, out, err = _run_command(cmdargs + args, stdin=stdin)
    output = out.decode(errors='replace')

    if logstderr and len(err.strip()):
        errstr = err.decode(errors='replace')
        logger.debug('Stderr: %s', errstr)
        output += errstr

    return ecode, output


def git_get_command_lines(gitdir: Optional[str], args: list) -> List[str]:
    """Run a git command and return its output as a list of non-empty lines."""
    ecode, out = git_run_command(gitdir, args)
    if not out:
        return list()
    return [line for line in out.split('\n') if line != '']


@contextmanager
def git_temp_worktree(gitdir=None, commitish=None):
    """Context manager that creates a temporary work tree and chdirs into it. The
    worktree is deleted when the context manager is closed. Taken from gj_tools."""
    dfn = None
    try:
        with tempfile.TemporaryDirectory() as dfn:
            # --no-checkout so the caller decides what (if anything) to check out
            gitargs = ['worktree', 'add', '--detach', '--no-checkout', dfn]
            if commitish:
                gitargs.append(commitish)
            git_run_command(gitdir, gitargs)
            with in_directory(dfn):
                yield dfn
    finally:
        if dfn is not None:
            # TemporaryDirectory has already cleaned up the files by this
            # point; this removes the worktree registration from git
            git_run_command(gitdir, ['worktree', 'remove', dfn])


@contextmanager
def git_temp_clone(gitdir=None):
    """Context manager that creates a temporary shared clone.

    Yields the path of the temporary mirror clone, or None if no git
    directory could be determined (in which case nothing is cloned and the
    caller should check for None).
    """
    if gitdir is None:
        topdir = git_get_toplevel()
        if topdir and os.path.isdir(os.path.join(topdir, '.git')):
            gitdir = os.path.join(topdir, '.git')

    if not gitdir:
        logger.critical('Current directory is not a git checkout. Try using -g.')
        # A bare "return" before yielding would make @contextmanager raise
        # RuntimeError("generator didn't yield") at the caller's "with"
        # statement; yield None so the caller can handle the failure
        yield None
        return

    with tempfile.TemporaryDirectory() as dfn:
        gitargs = ['clone', '--mirror', '--shared', gitdir, dfn]
        git_run_command(None, gitargs)
        yield dfn


@contextmanager
def in_directory(dirname):
    """Context manager that chdirs into a directory and restores the original
    directory when closed. Taken from gj_tools."""
    prev = os.getcwd()
    os.chdir(dirname)
    try:
        yield True
    finally:
        # Always return to where we started, even on exceptions
        os.chdir(prev)


def get_config_from_git(regexp: str, defaults: Optional[dict] = None, multivals: Optional[list] = None) -> dict:
    """Collect git-config values whose keys match regexp.

    :param regexp: regex passed to ``git config --get-regexp``
    :param defaults: starting values; a copy is made, the caller's dict is
        not mutated
    :param multivals: config names (last dotted component) collected into
        lists instead of last-value-wins
    :return: dict mapping the lowercased last component of each key to value
    """
    if multivals is None:
        multivals = list()
    args = ['config', '-z', '--get-regexp', regexp]
    ecode, out = git_run_command(None, args)
    # Work on a copy so the caller's defaults dict (e.g. DEFAULT_CONFIG)
    # is not mutated as a side effect of this call
    gitconfig = dict(defaults) if defaults else dict()
    if not out:
        return gitconfig

    for line in out.split('\x00'):
        if not line:
            continue
        try:
            # -z records are "key\nvalue"; a record without a newline is
            # malformed, and the unpack below raises ValueError
            # (BUGFIX: this unpack used to sit outside the try block, so
            # the except clause guarding against malformed entries was
            # unreachable for the very error it was meant to catch)
            key, value = line.split('\n', 1)
            chunks = key.split('.')
            cfgkey = chunks[-1].lower()
        except ValueError:
            logger.debug('Ignoring git config entry %s', line)
            continue
        if cfgkey in multivals:
            if cfgkey not in gitconfig:
                gitconfig[cfgkey] = list()
            else:
                # copy before appending so a list inherited from defaults
                # is never modified in place
                gitconfig[cfgkey] = list(gitconfig[cfgkey])
            gitconfig[cfgkey].append(value)
        else:
            gitconfig[cfgkey] = value

    return gitconfig


def get_main_config() -> dict:
    """Return the merged b4 configuration (cached in MAIN_CONFIG).

    Merges DEFAULT_CONFIG with git-config values from the b4.* namespace
    (and the legacy get-lore-mbox.* namespace), then normalizes the
    trailer-order and listid-preference lists so the '*' catch-all entry
    sorts last.
    """
    global MAIN_CONFIG
    if MAIN_CONFIG is None:
        config = get_config_from_git(r'b4\..*', defaults=DEFAULT_CONFIG, multivals=['keyringsrc'])
        # Legacy name was get-lore-mbox, so load those as well
        config = get_config_from_git(r'get-lore-mbox\..*', defaults=config)
        # Split the comma-separated preference strings and move '*' to the
        # end of each list.
        # NOTE(review): .remove('*') raises ValueError if a user-supplied
        # value omits '*' -- assumes the defaults always include it; confirm.
        config['trailer-order'] = config['trailer-order'].split(',')
        config['trailer-order'].remove('*')
        config['trailer-order'].append('*')
        config['listid-preference'] = config['listid-preference'].split(',')
        config['listid-preference'].remove('*')
        config['listid-preference'].append('*')
        if config['gpgbin'] is None:
            # Fall back to git's own gpg.program setting, defaulting to 'gpg'
            gpgcfg = get_config_from_git(r'gpg\..*', {'program': 'gpg'})
            config['gpgbin'] = gpgcfg['program']
        MAIN_CONFIG = config
    return MAIN_CONFIG


def get_data_dir(appname: str = 'b4') -> str:
    """Return (creating it if needed) the XDG data directory for appname."""
    datahome = os.environ.get(
        'XDG_DATA_HOME',
        os.path.join(str(pathlib.Path.home()), '.local', 'share'))
    datadir = os.path.join(datahome, appname)
    os.makedirs(datadir, exist_ok=True)
    return datadir


def get_cache_dir(appname: str = 'b4') -> str:
    """Return (creating it if needed) the XDG cache directory for appname.

    On the first call per process, also expires old cache entries; the age
    limit comes from the cache-expire config value, in minutes.
    """
    global _CACHE_CLEANED
    if 'XDG_CACHE_HOME' in os.environ:
        cachehome = os.environ['XDG_CACHE_HOME']
    else:
        cachehome = os.path.join(str(pathlib.Path.home()), '.cache')
    cachedir = os.path.join(cachehome, appname)
    pathlib.Path(cachedir).mkdir(parents=True, exist_ok=True)
    # Only run the expiry pass once per process
    if _CACHE_CLEANED:
        return cachedir

    # Delete all .mbx and .lookup files older than cache-expire
    config = get_main_config()
    try:
        expmin = int(config['cache-expire']) * 60
    except ValueError:
        logger.critical('ERROR: cache-expire must be an integer (minutes): %s', config['cache-expire'])
        # fall back to 10 hours worth of seconds
        expmin = 600
    expage = time.time() - expmin
    for entry in os.listdir(cachedir):
        # NOTE(review): find() <= 0 also skips names where the suffix sits at
        # position 0 (a file literally named '.mbx...'); an endswith() check
        # was presumably intended -- confirm before changing.
        if entry.find('.mbx') <= 0 and entry.find('.lookup') <= 0 and entry.find('.msgs') <= 0:
            continue
        fullpath = os.path.join(cachedir, entry)
        st = os.stat(fullpath)
        if st.st_mtime < expage:
            logger.debug('Cleaning up cache: %s', entry)
            # .msgs entries are directories of split-out messages
            if os.path.isdir(fullpath):
                shutil.rmtree(fullpath)
            else:
                os.unlink(os.path.join(cachedir, entry))
    _CACHE_CLEANED = True
    return cachedir


def get_cache_file(identifier, suffix=None):
    """Return the full path of the cache file for identifier (sha1-hashed),
    with '.suffix' appended when a suffix is given."""
    name = hashlib.sha1(identifier.encode()).hexdigest()
    if suffix:
        name = '%s.%s' % (name, suffix)
    return os.path.join(get_cache_dir(), name)


def get_cache(identifier, suffix=None):
    """Return the cached contents for identifier, or None on a cache miss."""
    cachefile = get_cache_file(identifier, suffix=suffix)
    try:
        fh = open(cachefile)
    except FileNotFoundError:
        logger.debug('Cache miss for %s', identifier)
        return None
    with fh:
        logger.debug('Using cache %s for %s', cachefile, identifier)
        return fh.read()


def save_cache(contents, identifier, suffix=None, mode='w'):
    """Save contents into the cache file for identifier.

    Empty/None contents means "invalidate": any existing cache entry is
    removed instead of written.

    :param mode: open() mode; use 'wb' for binary contents
    """
    fullpath = get_cache_file(identifier, suffix=suffix)
    if not contents:
        try:
            os.unlink(fullpath)
            logger.debug('Removed cache %s for %s', fullpath, identifier)
        except OSError:
            # Best-effort removal: the entry may simply not exist
            pass
        # BUGFIX: previously fell through to the write below, re-creating an
        # empty cache file (or raising TypeError on fh.write(None))
        return
    try:
        with open(fullpath, mode) as fh:
            fh.write(contents)
            logger.debug('Saved cache %s for %s', fullpath, identifier)
    except FileNotFoundError:
        logger.debug('Could not write cache %s for %s', fullpath, identifier)


def get_user_config():
    """Return the user.* git configuration (cached in USER_CONFIG), falling
    back to the passwd gecos field for the name when git has none."""
    global USER_CONFIG
    if USER_CONFIG is None:
        USER_CONFIG = get_config_from_git(r'user\..*')
        if 'name' not in USER_CONFIG:
            USER_CONFIG['name'] = pwd.getpwuid(os.getuid()).pw_gecos
    return USER_CONFIG


def get_requests_session():
    """Return the process-wide requests session with a b4 User-Agent."""
    global REQSESSION
    if REQSESSION is None:
        session = requests.session()
        session.headers.update({'User-Agent': 'b4/%s' % __VERSION__})
        REQSESSION = session
    return REQSESSION


def get_msgid_from_stdin():
    """If stdin is not a tty, parse it as an email (headers only) and return
    its Message-ID header, or None."""
    if sys.stdin.isatty():
        return None
    from email.parser import BytesParser
    message = BytesParser().parsebytes(sys.stdin.buffer.read(), headersonly=True)
    return message.get('Message-ID', None)


def get_msgid(cmdargs) -> Optional[str]:
    """Return the normalized message-id from cmdargs (or stdin).

    Strips angle brackets, unwraps pasted public-inbox URLs (inferring the
    project into cmdargs.useproject), and removes id:/rfc822msgid: prefixes.
    Returns None when no message-id could be determined.
    """
    if cmdargs.msgid:
        msgid = cmdargs.msgid
    else:
        logger.debug('Getting Message-ID from stdin')
        msgid = get_msgid_from_stdin()

    if msgid is None:
        return None

    msgid = msgid.strip('<>')
    # Handle the case when someone pastes a full URL to the message
    matches = re.search(r'^https?://[^/]+/([^/]+)/([^/]+@[^/]+)', msgid, re.IGNORECASE)
    if matches:
        project, rawid = matches.groups()
        msgid = urllib.parse.unquote(rawid)
        # Infer the project name from the URL, if possible ('r' is the
        # generic redirector path, not a project)
        if project != 'r':
            cmdargs.useproject = project
    # Handle special case when msgid is prepended by id: or rfc822msgid:
    if msgid.find('id:') >= 0:
        msgid = re.sub(r'^\w*id:', '', msgid)

    return msgid


def get_strict_thread(msgs, msgid):
    """Reduce msgs to only the messages belonging to the thread with msgid.

    Repeatedly walks In-Reply-To/References links until the wanted set stops
    growing. Returns the matching messages, or None when nothing matched.
    """
    # msgids we still need to find (seeded with the requested root)
    want = {msgid}
    # msgids already added to the strict result
    got = set()
    # every msgid encountered in msgs (used to drop unresolvable wants)
    seen = set()
    # ref-msgid -> msgids that referenced it; promoted into want if the
    # referenced message later turns out to be part of the thread
    maybe = dict()
    strict = list()
    while True:
        for msg in msgs:
            c_msgid = LoreMessage.get_clean_msgid(msg)
            seen.add(c_msgid)
            if c_msgid in got:
                continue
            logger.debug('Looking at: %s', c_msgid)

            refs = set()
            msgrefs = list()
            if msg.get('In-Reply-To', None):
                msgrefs += email.utils.getaddresses([str(x) for x in msg.get_all('in-reply-to', [])])
            if msg.get('References', None):
                msgrefs += email.utils.getaddresses([str(x) for x in msg.get_all('references', [])])
            # getaddresses() yields (name, addr) tuples; the msgid is in [1]
            for ref in set([x[1] for x in msgrefs]):
                if ref in got or ref in want:
                    # This message links to the thread, so we want it too
                    want.add(c_msgid)
                elif len(ref):
                    refs.add(ref)
                    if c_msgid not in want:
                        if ref not in maybe:
                            maybe[ref] = set()
                        logger.debug('Going into maybe: %s->%s', ref, c_msgid)
                        maybe[ref].add(c_msgid)

            if c_msgid in want:
                strict.append(msg)
                got.add(c_msgid)
                # Everything this message references is now wanted as well
                want.update(refs)
                want.discard(c_msgid)
                logger.debug('Kept in thread: %s', c_msgid)
                if c_msgid in maybe:
                    # Add all these to want
                    want.update(maybe[c_msgid])
                    maybe.pop(c_msgid)
                # Add all maybes that have the same ref into want
                for ref in refs:
                    if ref in maybe:
                        want.update(maybe[ref])
                        maybe.pop(ref)

        # Remove any entries not in "seen" (missing messages)
        for c_msgid in set(want):
            if c_msgid not in seen or c_msgid in got:
                want.remove(c_msgid)
        if not len(want):
            break

    if not len(strict):
        return None

    if len(msgs) > len(strict):
        logger.debug('Reduced mbox to strict matches only (%s->%s)', len(msgs), len(strict))

    return strict


def mailsplit_bytes(bmbox: bytes, outdir: str) -> list:
    """Split raw mbox bytes into individual messages with git-mailsplit and
    return them as email message objects (empty list on failure)."""
    logger.debug('Mailsplitting the mbox into %s', outdir)
    ecode, out = git_run_command(None, ['mailsplit', '--mboxrd', '-o%s' % outdir], stdin=bmbox)
    if ecode > 0:
        logger.critical('Unable to parse mbox received from the server')
        return list()
    # Read back the files git-mailsplit wrote out
    msgs = list()
    for fname in os.listdir(outdir):
        with open(os.path.join(outdir, fname), 'rb') as fh:
            msgs.append(email.message_from_binary_file(fh))
    return msgs


def get_pi_thread_by_url(t_mbx_url, nocache=False):
    """Fetch a public-inbox t.mbox.gz thread by URL and return its messages.

    Results are cached as a directory of split-out message files; pass
    nocache=True to force a re-download. Returns a deduplicated list of
    messages, or None on server error or empty result.
    """
    msgs = list()
    cachedir = get_cache_file(t_mbx_url, 'pi.msgs')
    if os.path.exists(cachedir) and not nocache:
        logger.debug('Using cached copy: %s', cachedir)
        for msg in os.listdir(cachedir):
            with open(os.path.join(cachedir, msg), 'rb') as fh:
                msgs.append(email.message_from_binary_file(fh))
    else:
        logger.critical('Grabbing thread from %s', t_mbx_url.split('://')[1])
        session = get_requests_session()
        resp = session.get(t_mbx_url)
        if resp.status_code != 200:
            logger.critical('Server returned an error: %s', resp.status_code)
            return None
        t_mbox = gzip.decompress(resp.content)
        resp.close()
        if not len(t_mbox):
            logger.critical('No messages found for that query')
            return None
        # Convert into individual files using git-mailsplit
        with tempfile.TemporaryDirectory(suffix='-mailsplit') as tfd:
            msgs = mailsplit_bytes(t_mbox, tfd)
            # Replace any previous cache for this URL with the fresh copy
            if os.path.exists(cachedir):
                shutil.rmtree(cachedir)
            shutil.copytree(tfd, cachedir)

    # Collapse duplicate message-ids, keeping the preferred copy of each
    deduped = dict()
    for msg in msgs:
        msgid = LoreMessage.get_clean_msgid(msg)
        if msgid in deduped:
            deduped[msgid] = LoreMessage.get_preferred_duplicate(deduped[msgid], msg)
            continue
        deduped[msgid] = msg
    return list(deduped.values())


def get_pi_thread_by_msgid(msgid, useproject=None, nocache=False, onlymsgids: Optional[set] = None):
    """Retrieve the full thread for a message-id from public-inbox.

    :param msgid: message-id to look up (without angle brackets)
    :param useproject: public-inbox project name; when None, the project is
        discovered by following the midmask redirect
    :param nocache: skip the local cache
    :param onlymsgids: when set, restrict the result to these message-ids
        plus any messages referencing them
    :return: list of messages, or None when the lookup failed
    """
    qmsgid = urllib.parse.quote_plus(msgid)
    config = get_main_config()
    # Grab the head from lore, to see where we are redirected
    midmask = config['midmask'] % qmsgid
    loc = urllib.parse.urlparse(midmask)
    if useproject:
        projurl = '%s://%s/%s' % (loc.scheme, loc.netloc, useproject)
    else:
        logger.info('Looking up %s', midmask)
        session = get_requests_session()
        resp = session.head(midmask)
        if resp.status_code < 300 or resp.status_code > 400:
            logger.critical('That message-id is not known.')
            return None
        # Pop msgid from the end of the redirect
        chunks = resp.headers['Location'].rstrip('/').split('/')
        projurl = '/'.join(chunks[:-1])
        resp.close()
    t_mbx_url = '%s/%s/t.mbox.gz' % (projurl, qmsgid)
    logger.debug('t_mbx_url=%s', t_mbx_url)

    msgs = get_pi_thread_by_url(t_mbx_url, nocache=nocache)
    if not msgs:
        return None

    if onlymsgids:
        strict = list()
        for msg in msgs:
            if LoreMessage.get_clean_msgid(msg) in onlymsgids:
                strict.append(msg)
                continue
            # also grab any messages where this msgid is in the references header
            # (BUGFIX: each message is now appended at most once; previously a
            # message matching several conditions was added multiple times)
            if any(msg.get('references', '').find(onlymsgid) >= 0 for onlymsgid in onlymsgids):
                strict.append(msg)
    else:
        strict = get_strict_thread(msgs, msgid)

    return strict


@contextmanager
def git_format_patches(gitdir, start, end, prefixes=None, extraopts=None):
    """Context manager: run git-format-patch for start..end into a temporary
    directory and yield that directory (or None on failure).

    :param prefixes: extra subject-prefix words to add
    :param extraopts: additional raw git-format-patch arguments
    """
    with tempfile.TemporaryDirectory() as tmpd:
        gitargs = ['format-patch', '--cover-letter', '-o', tmpd, '--signature', f'b4 {__VERSION__}']
        if prefixes is not None and len(prefixes):
            gitargs += ['--subject-prefix', ' '.join(prefixes)]
        if extraopts:
            gitargs += extraopts
        gitargs += ['%s..%s' % (start, end)]
        ecode, out = git_run_command(gitdir, gitargs)
        if ecode > 0:
            logger.critical('ERROR: Could not convert pull request into patches')
            logger.critical(out)
            # BUGFIX: must not fall through to a second yield -- a
            # @contextmanager generator may only yield once, otherwise
            # contextlib raises "generator didn't stop" when the caller exits
            # the with-block
            yield None
        else:
            yield tmpd


def git_commit_exists(gitdir, commit_id):
    """Return True if commit_id resolves to an object in the repository."""
    ecode, _ = git_run_command(gitdir, ['cat-file', '-e', commit_id])
    return ecode == 0


def git_branch_contains(gitdir, commit_id):
    """Return the short names of all branches containing commit_id."""
    return git_get_command_lines(
        gitdir, ['branch', '--format=%(refname:short)', '--contains', commit_id])


def git_get_toplevel(path=None):
    """Return the toplevel directory of the git tree at path (or the current
    directory), or None when not inside a git checkout."""
    lines = git_get_command_lines(path, ['rev-parse', '--show-toplevel'])
    if len(lines) == 1:
        return lines[0]
    return None


def format_addrs(pairs, clean=True):
    """Format (name, email) pairs into a single address header value.

    Duplicate entries are dropped; output order follows input order
    (BUGFIX: previously a set was used, which made the generated header
    ordering nondeterministic between runs).

    :param pairs: iterable of (name, address) tuples
    :param clean: strip quoted-printable header junk from the name part
    """
    addrs = list()
    seen = set()
    for pair in pairs:
        pair = list(pair)
        if pair[0] == pair[1]:
            # A name identical to the address adds no information
            pair[0] = ''
        if clean:
            # Remove any quoted-printable header junk from the name
            pair[0] = LoreMessage.clean_header(pair[0])
        addr = email.utils.formataddr(pair)  # noqa
        if addr not in seen:
            seen.add(addr)
            addrs.append(addr)
    return ', '.join(addrs)


def make_quote(body, maxlines=5):
    """Return the start of the message quoted with '> ' prefixes.

    Only the first paragraph is quoted; once more than maxlines lines have
    been emitted, quoting stops at the next blank line with a '[...]' marker.
    """
    headers, message, trailers, basement, signature = LoreMessage.get_body_parts(body)
    if not len(message):
        # Sometimes there is no message, just trailers
        return '> \n'
    # Remove common greetings
    message = re.sub(r'^(hi|hello|greetings|dear)\W.*\n+', '', message, flags=re.I)
    quoted = list()
    emitted = 0
    for line in message.split('\n'):
        # Snip at the first paragraph break once we are past maxlines
        if emitted > maxlines and not len(line.strip()):
            quoted.extend(('> ', '> [...]'))
            break
        quoted.append('> %s' % line.rstrip())
        emitted += 1
    return '\n'.join(quoted)


def parse_int_range(intrange, upper=None):
    """Yield integers from a human-friendly range spec like '1,3-5,<3,7-'.

    Supported elements: 'N' (single value), '<N' (1..N-1), 'N-M' (inclusive
    range) and 'N-' (N..upper, only when upper is given). Unrecognized
    elements are reported via logger.critical and skipped.
    """
    # Strip all whitespace before parsing
    intrange = re.sub(r'\s', '', intrange)
    for chunk in intrange.split(','):
        if chunk.isdigit():
            yield int(chunk)
        elif chunk.startswith('<') and len(chunk) > 1 and chunk[1:].isdigit():
            yield from range(1, int(chunk[1:]))
        elif chunk.find('-') > 0:
            bounds = chunk.split('-')
            if bounds[0].isdigit() and bounds[1].isdigit():
                yield from range(int(bounds[0]), int(bounds[1]) + 1)
            elif not len(bounds[1]) and bounds[0].isdigit() and upper:
                yield from range(int(bounds[0]), upper + 1)
        else:
            logger.critical('Unknown range value specified: %s', chunk)


def check_gpg_status(status: str) -> Tuple[bool, bool, bool, Optional[str], Optional[str]]:
    """Parse gpg --status-fd output into a verification summary.

    :return: (good, valid, trusted, keyid, signtime): good/valid/trusted
        reflect GOODSIG/VALIDSIG/TRUST_FULLY|ULTIMATE status lines, keyid
        comes from BADSIG or GOODSIG, signtime from VALIDSIG.
    """
    # A BADSIG short-circuits all other checks
    bs_matches = re.search(r'^\[GNUPG:] BADSIG ([0-9A-F]+)\s+(.*)$', status, flags=re.M)
    if bs_matches:
        return False, False, False, bs_matches.groups()[0], None

    good = valid = trusted = False
    keyid = signtime = None

    gs_matches = re.search(r'^\[GNUPG:] GOODSIG ([0-9A-F]+)\s+(.*)$', status, flags=re.M)
    if gs_matches:
        good = True
        keyid = gs_matches.groups()[0]
    vs_matches = re.search(r'^\[GNUPG:] VALIDSIG ([0-9A-F]+) (\d{4}-\d{2}-\d{2}) (\d+)', status, flags=re.M)
    if vs_matches:
        valid = True
        signtime = vs_matches.groups()[2]
    if re.search(r'^\[GNUPG:] TRUST_(FULLY|ULTIMATE)', status, flags=re.M):
        trusted = True

    return good, valid, trusted, keyid, signtime


def get_gpg_uids(keyid: str) -> list:
    """Return the non-revoked UID strings for the given key.

    :raises KeyError: when gpg cannot list keys matching keyid
    """
    ecode, out, err = gpg_run_command(['--with-colons', '--list-keys', keyid])
    if ecode > 0:
        raise KeyError('Unable to get UIDs list matching key %s' % keyid)

    uids = list()
    for line in out.decode().split('\n'):
        if not line.startswith('uid:'):
            continue
        fields = line.split(':')
        # Field 1 is the validity flag; 'r' means the UID is revoked
        if fields[1] in ('r',):
            continue
        uids.append(fields[9])

    return uids


def save_git_am_mbox(msgs: list, dest: TextIO):
    """Write messages to dest in the mbox flavour git-am expects.

    Git-am's idea of "mbox" differs from Python's mboxo implementation: it
    never escapes ">From " lines found in bodies unless invoked with
    --patch-format=mboxrd (this is wrong, because ">From " escapes are also
    required in the original mbox "mboxo" format). So we flatten with a fixed
    fake "From " separator and no body escaping.
    """
    gen = email.generator.Generator(dest, policy=emlpolicy)
    for message in msgs:
        message.set_unixfrom('From git@z Thu Jan  1 00:00:00 1970')
        gen.flatten(message, unixfrom=True)
        gen.write('\n')


def save_maildir(msgs: list, dest):
    """Write messages into a new maildir at dest (creating new/cur/tmp)."""
    subdirs = dict()
    for sub in ('new', 'cur', 'tmp'):
        subdirs[sub] = os.path.join(dest, sub)
        pathlib.Path(subdirs[sub]).mkdir(parents=True)
    for msg in msgs:
        # Name each file after the patch counter plus a slug of the subject
        lsubj = LoreSubject(msg.get('subject', ''))
        slug = '%04d_%s' % (lsubj.counter, re.sub(r'\W+', '_', lsubj.subject).strip('_').lower())
        tmpfile = os.path.join(subdirs['tmp'], f'{slug}.eml')
        with open(tmpfile, 'wb') as mfh:
            mfh.write(msg.as_string(policy=emlpolicy).encode())
        # Maildir-style delivery: write into tmp, then rename into new
        os.rename(tmpfile, os.path.join(subdirs['new'], f'{slug}.eml'))


def get_mailinfo(bmsg: bytes, scissors: bool = False) -> Tuple[dict, bytes, bytes]:
    """Run git-mailinfo on a raw message.

    :param bmsg: raw message bytes
    :param scissors: honor scissors ("-- >8 --") lines when splitting
    :return: (metadata dict parsed from mailinfo stdout, message body bytes,
        patch bytes)
    :raises ValueError: if git-mailinfo produced no metadata
    """
    with tempfile.TemporaryDirectory() as tfd:
        m_out = os.path.join(tfd, 'm')
        p_out = os.path.join(tfd, 'p')
        if scissors:
            cmdargs = ['mailinfo', '--encoding=UTF-8', '--scissors', m_out, p_out]
        else:
            cmdargs = ['mailinfo', '--encoding=UTF-8', '--no-scissors', m_out, p_out]

        ecode, info = git_run_command(None, cmdargs, bmsg)
        if not len(info.strip()):
            raise ValueError('Could not get mailinfo')

        # Parse "Key: value" lines from mailinfo stdout
        i = dict()
        for line in info.split('\n'):
            line = line.strip()
            if not line:
                continue
            chunks = line.split(':', 1)
            i[chunks[0]] = chunks[1].strip()

        # BUGFIX/perf: read the output files once after parsing; previously
        # these reads sat inside the loop above and re-read both files for
        # every line of mailinfo output
        with open(m_out, 'rb') as mfh:
            m = mfh.read()
        with open(p_out, 'rb') as pfh:
            p = pfh.read()
    return i, m, p
07070100000010000081A400000000000000000000000161F953D700000383000000000000000000000000000000000000001800000000b4-0.8.0+2/b4/attest.py#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2020 by the Linux Foundation
#

import sys
import b4
import argparse
try:
    import patatt
    can_patatt = True
except ModuleNotFoundError:
    can_patatt = False

from collections import namedtuple

logger = b4.logger


def attest_patches(cmdargs: argparse.Namespace) -> None:
    """Sign the given patch files via patatt (b4's attestation backend).

    Exits the process with an error when patatt is not importable.
    """
    if not can_patatt:
        logger.critical('ERROR: b4 now uses patatt for patch attestation. See:')
        logger.critical('       https://git.kernel.org/pub/scm/utils/patatt/patatt.git/about/')
        sys.exit(1)

    # directly invoke cmd_sign in patatt
    config = patatt.get_config_from_git(r'patatt\..*', multivals=['keyringsrc'])
    # NOTE(review): attributes are assigned on the namedtuple *class*, not an
    # instance -- it serves as a simple attribute bag here, but constructing
    # an instance would be more conventional.
    fakeargs = namedtuple('Struct', ['hookmode', 'msgfile'])
    fakeargs.hookmode = True
    fakeargs.msgfile = cmdargs.patchfile
    patatt.cmd_sign(fakeargs, config)
07070100000011000081A400000000000000000000000161F953D700003495000000000000000000000000000000000000001900000000b4-0.8.0+2/b4/command.py#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2020 by the Linux Foundation
#
__author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'

import argparse
import logging
import b4
import sys

logger = b4.logger


def cmd_retrieval_common_opts(sp):
    """Add the options shared by all message-retrieval subcommands."""
    sp.add_argument('msgid', nargs='?',
                    help='Message ID to process, or pipe a raw message')
    for flags, kwargs in (
            (('-p', '--use-project'),
             dict(dest='useproject', default=None,
                  help='Use a specific project instead of guessing (linux-mm, linux-hardening, etc)')),
            (('-m', '--use-local-mbox'),
             dict(dest='localmbox', default=None,
                  help='Instead of grabbing a thread from lore, process this mbox file (or - for stdin)')),
            (('-C', '--no-cache'),
             dict(dest='nocache', action='store_true', default=False,
                  help='Do not use local cache')),
    ):
        sp.add_argument(*flags, **kwargs)


def cmd_mbox_common_opts(sp):
    """Add the retrieval options plus the mbox-output options."""
    cmd_retrieval_common_opts(sp)
    for flags, kwargs in (
            (('-o', '--outdir'),
             dict(default='.',
                  help='Output into this directory (or use - to output mailbox contents to stdout)')),
            (('-c', '--check-newer-revisions'),
             dict(dest='checknewer', action='store_true', default=False,
                  help='Check if newer patch revisions exist')),
            (('-n', '--mbox-name'),
             dict(dest='wantname', default=None,
                  help='Filename to name the mbox destination')),
            (('-M', '--save-as-maildir'),
             dict(dest='maildir', action='store_true', default=False,
                  help='Save as maildir (avoids mbox format ambiguities)')),
    ):
        sp.add_argument(*flags, **kwargs)


def cmd_mbox(cmdargs):
    """Entry point for 'b4 mbox' (implementation lazily imported)."""
    from b4 import mbox
    mbox.main(cmdargs)


def cmd_kr(cmdargs):
    """Entry point for 'b4 kr' (implementation lazily imported)."""
    from b4 import kr
    kr.main(cmdargs)


def cmd_am(cmdargs):
    """Entry point for 'b4 am' (shares the mbox implementation)."""
    from b4 import mbox
    mbox.main(cmdargs)


def cmd_attest(cmdargs):
    """Entry point for 'b4 attest'; requires at least one patch file."""
    import b4.attest
    if not len(cmdargs.patchfile):
        logger.critical('ERROR: missing patches to attest')
        sys.exit(1)
    b4.attest.attest_patches(cmdargs)


def cmd_pr(cmdargs):
    """Entry point for 'b4 pr' (implementation lazily imported)."""
    from b4 import pr
    pr.main(cmdargs)


def cmd_ty(cmdargs):
    """Entry point for 'b4 ty' (implementation lazily imported)."""
    from b4 import ty
    ty.main(cmdargs)


def cmd_diff(cmdargs):
    """Entry point for 'b4 diff' (implementation lazily imported)."""
    from b4 import diff
    diff.main(cmdargs)


def cmd():
    """Main b4 entry point: build the argument parser, configure logging,
    and dispatch to the selected subcommand's handler."""
    # noinspection PyTypeChecker
    parser = argparse.ArgumentParser(
        prog='b4',
        description='A tool to work with public-inbox patches',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
    parser.add_argument('--version', action='version', version=b4.__VERSION__)
    parser.add_argument('-d', '--debug', action='store_true', default=False,
                        help='Add more debugging info to the output')
    parser.add_argument('-q', '--quiet', action='store_true', default=False,
                        help='Output critical information only')

    subparsers = parser.add_subparsers(help='sub-command help', dest='subcmd')

    # b4 mbox
    sp_mbox = subparsers.add_parser('mbox', help='Download a thread as an mbox file')
    cmd_mbox_common_opts(sp_mbox)
    sp_mbox.add_argument('-f', '--filter-dupes', dest='filterdupes', action='store_true', default=False,
                         help='When adding messages to existing maildir, filter out duplicates')
    sp_mbox.set_defaults(func=cmd_mbox)

    # b4 am
    sp_am = subparsers.add_parser('am', help='Create an mbox file that is ready to git-am')
    cmd_mbox_common_opts(sp_am)
    sp_am.add_argument('-v', '--use-version', dest='wantver', type=int, default=None,
                       help='Get a specific version of the patch/series')
    sp_am.add_argument('-t', '--apply-cover-trailers', dest='covertrailers', action='store_true', default=False,
                       help='Apply trailers sent to the cover letter to all patches')
    sp_am.add_argument('-S', '--sloppy-trailers', dest='sloppytrailers', action='store_true', default=False,
                       help='Apply trailers without email address match checking')
    sp_am.add_argument('-T', '--no-add-trailers', dest='noaddtrailers', action='store_true', default=False,
                       help='Do not add or sort any trailers')
    sp_am.add_argument('-s', '--add-my-sob', dest='addmysob', action='store_true', default=False,
                       help='Add your own signed-off-by to every patch')
    sp_am.add_argument('-l', '--add-link', dest='addlink', action='store_true', default=False,
                       help='Add a lore.kernel.org/r/ link to every patch')
    sp_am.add_argument('-Q', '--quilt-ready', dest='quiltready', action='store_true', default=False,
                       help='Save patches in a quilt-ready folder')
    sp_am.add_argument('-P', '--cherry-pick', dest='cherrypick', default=None,
                       help='Cherry-pick a subset of patches (e.g. "-P 1-2,4,6-", '
                            '"-P _" to use just the msgid specified, or '
                            '"-P *globbing*" to match on commit subject)')
    sp_am.add_argument('-g', '--guess-base', dest='guessbase', action='store_true', default=False,
                       help='Try to guess the base of the series (if not specified)')
    sp_am.add_argument('-b', '--guess-branch', dest='guessbranch', default=None,
                       help='When guessing base, restrict to this branch (use with -g)')
    sp_am.add_argument('--guess-lookback', dest='guessdays', type=int, default=14,
                       help='When guessing base, go back this many days from the date of the patch')
    sp_am.add_argument('-3', '--prep-3way', dest='threeway', action='store_true', default=False,
                       help='Prepare for a 3-way merge '
                            '(tries to ensure that all index blobs exist by making a fake commit range)')
    sp_am.add_argument('--cc-trailers', dest='copyccs', action='store_true', default=False,
                       help='Copy all Cc\'d addresses into Cc: trailers')
    sp_am.add_argument('--no-cover', dest='nocover', action='store_true', default=False,
                       help='Do not save the cover letter (on by default when using -o -)')
    sp_am.add_argument('--no-partial-reroll', dest='nopartialreroll', action='store_true', default=False,
                       help='Do not reroll partial series when detected')
    sp_am.set_defaults(func=cmd_am)

    # b4 attest
    sp_att = subparsers.add_parser('attest', help='Create cryptographic attestation for a set of patches')
    sp_att.add_argument('-f', '--from', dest='sender', default=None,
                        help='OBSOLETE: this option does nothing and will be removed')
    sp_att.add_argument('-n', '--no-submit', dest='nosubmit', action='store_true', default=False,
                        help='OBSOLETE: this option does nothing and will be removed')
    sp_att.add_argument('-o', '--output', default=None,
                        help='OBSOLETE: this option does nothing and will be removed')
    sp_att.add_argument('-m', '--mutt-filter', default=None,
                        help='OBSOLETE: this option does nothing and will be removed')
    sp_att.add_argument('patchfile', nargs='*', help='Patches to attest')
    sp_att.set_defaults(func=cmd_attest)

    # b4 pr
    sp_pr = subparsers.add_parser('pr', help='Fetch a pull request found in a message ID')
    sp_pr.add_argument('-g', '--gitdir', default=None,
                       help='Operate on this git tree instead of current dir')
    sp_pr.add_argument('-b', '--branch', default=None,
                       help='Check out FETCH_HEAD into this branch after fetching')
    sp_pr.add_argument('-c', '--check', action='store_true', default=False,
                       help='Check if pull request has already been applied')
    sp_pr.add_argument('-e', '--explode', action='store_true', default=False,
                       help='Convert a pull request into an mbox full of patches')
    sp_pr.add_argument('-o', '--output-mbox', dest='outmbox', default=None,
                       help='Save exploded messages into this mailbox (default: msgid.mbx)')
    sp_pr.add_argument('-l', '--retrieve-links', action='store_true', dest='getlinks', default=False,
                       help='Attempt to retrieve any Link: URLs (use with -e)')
    sp_pr.add_argument('-f', '--from-addr', dest='mailfrom', default=None,
                       help='Use this From: in exploded messages (use with -e)')
    sp_pr.add_argument('-s', '--send-as-identity', dest='sendidentity', default=None,
                       help=('Use git-send-email to send exploded series (use with -e);'
                             'the identity must match a [sendemail "identity"] config section'))
    sp_pr.add_argument('--dry-run', dest='dryrun', action='store_true', default=False,
                       help='Force a --dry-run on git-send-email invocation (use with -s)')
    sp_pr.add_argument('msgid', nargs='?',
                       help='Message ID to process, or pipe a raw message')
    sp_pr.set_defaults(func=cmd_pr)

    # b4 ty
    sp_ty = subparsers.add_parser('ty', help='Generate thanks email when something gets merged/applied')
    sp_ty.add_argument('-g', '--gitdir', default=None,
                       help='Operate on this git tree instead of current dir')
    sp_ty.add_argument('-o', '--outdir', default='.',
                       help='Write thanks files into this dir (default=.)')
    sp_ty.add_argument('-l', '--list', action='store_true', default=False,
                       help='List pull requests and patch series you have retrieved')
    sp_ty.add_argument('-s', '--send', default=None,
                       help='Generate thankyous for specific entries from -l (e.g.: 1,3-5,7-; or "all")')
    sp_ty.add_argument('-d', '--discard', default=None,
                       help='Discard specific messages from -l (e.g.: 1,3-5,7-; or "all")')
    sp_ty.add_argument('-a', '--auto', action='store_true', default=False,
                       help='Use the Auto-Thankanator to figure out what got applied/merged')
    sp_ty.add_argument('-b', '--branch', default=None,
                       help='The branch to check against, instead of current')
    sp_ty.add_argument('--since', default='1.week',
                       help='The --since option to use when auto-matching patches (default=1.week)')
    sp_ty.set_defaults(func=cmd_ty)

    # b4 diff
    sp_diff = subparsers.add_parser('diff', help='Show a range-diff to previous series revision')
    sp_diff.add_argument('msgid', nargs='?',
                         help='Message ID to process, or pipe a raw message')
    sp_diff.add_argument('-g', '--gitdir', default=None,
                         help='Operate on this git tree instead of current dir')
    sp_diff.add_argument('-p', '--use-project', dest='useproject', default=None,
                         help='Use a specific project instead of guessing (linux-mm, linux-hardening, etc)')
    sp_diff.add_argument('-C', '--no-cache', dest='nocache', action='store_true', default=False,
                         help='Do not use local cache')
    sp_diff.add_argument('-v', '--compare-versions', dest='wantvers', type=int, default=None, nargs='+',
                         help='Compare specific versions instead of latest and one before that, e.g. -v 3 5')
    sp_diff.add_argument('-n', '--no-diff', dest='nodiff', action='store_true', default=False,
                         help='Do not generate a diff, just show the command to do it')
    sp_diff.add_argument('-o', '--output-diff', dest='outdiff', default=None,
                         help='Save diff into this file instead of outputting to stdout')
    sp_diff.add_argument('-c', '--color', dest='color', action='store_true', default=False,
                         help='Force color output even when writing to file')
    sp_diff.add_argument('-m', '--compare-am-mboxes', dest='ambox', nargs=2, default=None,
                         help='Compare two mbx files prepared with "b4 am"')
    sp_diff.set_defaults(func=cmd_diff)

    # b4 kr
    sp_kr = subparsers.add_parser('kr', help='Keyring operations')
    cmd_retrieval_common_opts(sp_kr)
    sp_kr.add_argument('--show-keys', dest='showkeys', action='store_true', default=False,
                       help='Show all developer keys found in a thread')
    sp_kr.set_defaults(func=cmd_kr)

    cmdargs = parser.parse_args()

    # The logger itself passes everything; verbosity is controlled on the
    # stream handler below via -q/-d
    logger.setLevel(logging.DEBUG)

    ch = logging.StreamHandler()
    formatter = logging.Formatter('%(message)s')
    ch.setFormatter(formatter)

    if cmdargs.quiet:
        ch.setLevel(logging.CRITICAL)
    elif cmdargs.debug:
        ch.setLevel(logging.DEBUG)
    else:
        ch.setLevel(logging.INFO)

    logger.addHandler(ch)

    # No subcommand given: show usage and exit with an error
    if 'func' not in cmdargs:
        parser.print_help()
        sys.exit(1)

    cmdargs.func(cmdargs)


if __name__ == '__main__':
    # We're running from a checkout, so reflect git commit in the version
    import os
    # noinspection PyBroadException
    try:
        # Only -dev versions get the short commit hash appended
        if b4.__VERSION__.find('-dev') > 0:
            base = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
            dotgit = os.path.join(base, '.git')
            ecode, short = b4.git_run_command(dotgit, ['rev-parse', '--short', 'HEAD'])
            if ecode == 0:
                b4.__VERSION__ = '%s-%.5s' % (b4.__VERSION__, short.strip())
    except Exception as ex:
        # Any failures above are non-fatal
        pass
    cmd()
07070100000012000081A400000000000000000000000161F953D70000158E000000000000000000000000000000000000001600000000b4-0.8.0+2/b4/diff.py#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2020 by the Linux Foundation
#
__author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'

import os
import sys
import b4
import b4.mbox
import mailbox
import email
import shutil
import pathlib

logger = b4.logger


def diff_same_thread_series(cmdargs):
    """Find two revisions of the same series within a single lore thread.

    Retrieves the thread for the requested msgid (augmented with older
    revisions found in the archives), then picks the lower and upper
    revisions to compare -- either from cmdargs.wantvers or by taking the
    min/max revisions present in the thread.

    Returns a (lower, upper) tuple of series objects, or (None, None) when
    a usable pair of revisions could not be identified.
    """
    msgid = b4.get_msgid(cmdargs)
    wantvers = cmdargs.wantvers
    if wantvers and len(wantvers) > 2:
        logger.critical('Can only compare two versions at a time')
        sys.exit(1)

    # start by grabbing the mbox provided
    # Do we have a cache of this lookup?
    identifier = msgid
    if wantvers:
        identifier += '-' + '-'.join([str(x) for x in wantvers])
    if cmdargs.useproject:
        identifier += '-' + cmdargs.useproject

    cachedir = b4.get_cache_file(identifier, suffix='diff.msgs')
    if os.path.exists(cachedir) and not cmdargs.nocache:
        logger.info('Using cached copy of the lookup')
        msgs = list()
        # Messages were cached under zero-padded sequential filenames, but
        # os.listdir() returns entries in arbitrary order -- sort to restore
        # the order they were saved in.
        for msg in sorted(os.listdir(cachedir)):
            with open(os.path.join(cachedir, msg), 'rb') as fh:
                msgs.append(email.message_from_binary_file(fh))
    else:
        msgs = b4.get_pi_thread_by_msgid(msgid, useproject=cmdargs.useproject, nocache=cmdargs.nocache)
        if not msgs:
            logger.critical('Unable to retrieve thread: %s', msgid)
            return
        msgs = b4.mbox.get_extra_series(msgs, direction=-1, wantvers=wantvers, useproject=cmdargs.useproject)
        # Refresh the cache with the new lookup results
        if os.path.exists(cachedir):
            shutil.rmtree(cachedir)
        pathlib.Path(cachedir).mkdir(parents=True)
        at = 0
        for msg in msgs:
            with open(os.path.join(cachedir, '%04d' % at), 'wb') as fh:
                fh.write(msg.as_bytes(policy=b4.emlpolicy))
            at += 1

    count = len(msgs)
    logger.info('---')
    logger.info('Analyzing %s messages in the thread', count)
    lmbx = b4.LoreMailbox()
    for msg in msgs:
        lmbx.add_message(msg)

    # Work out which two revisions we are comparing
    if wantvers and len(wantvers) == 1:
        upper = max(lmbx.series.keys())
        lower = wantvers[0]
    elif wantvers and len(wantvers) == 2:
        upper = max(wantvers)
        lower = min(wantvers)
    else:
        upper = max(lmbx.series.keys())
        lower = min(lmbx.series.keys())

    if upper == lower:
        logger.critical('ERROR: Could not auto-find previous revision')
        logger.critical('       Run "b4 am -T" manually, then "b4 diff -m mbx1 mbx2"')
        return None, None

    if upper not in lmbx.series:
        return None, None

    if lower not in lmbx.series:
        return None, None

    # Attempt to complete partial rerolls before diffing
    if not lmbx.series[lower].complete:
        lmbx.partial_reroll(lower, sloppytrailers=False)

    if not lmbx.series[upper].complete:
        lmbx.partial_reroll(upper, sloppytrailers=False)

    return lmbx.series[lower], lmbx.series[upper]


def diff_mboxes(cmdargs):
    """Load the two mailboxes passed via -m and return their latest series.

    Each entry in cmdargs.ambox may be an mbox file or a maildir.  Returns
    a two-element list of series objects, or (None, None) when a mailbox
    path does not exist.  Exits with an error if a mailbox contains no
    valid patches.
    """
    chunks = []
    for ambox in cmdargs.ambox:
        if not os.path.exists(ambox):
            logger.critical('Cannot open %s', ambox)
            return None, None

        # A directory is assumed to be a maildir; anything else is an mbox
        if os.path.isdir(ambox):
            in_mbx = mailbox.Maildir(ambox)
        else:
            in_mbx = mailbox.mbox(ambox)
        logger.info('Loading %s messages from %s', len(in_mbx), ambox)
        lmbx = b4.LoreMailbox()
        for _, message in in_mbx.items():
            lmbx.add_message(message)
        if not lmbx.series:
            logger.critical('No valid patches found in %s', ambox)
            sys.exit(1)
        if len(lmbx.series) > 1:
            logger.critical('More than one series version in %s, will use latest', ambox)

        # Keep only the highest revision found in this mailbox
        chunks.append(lmbx.series[max(lmbx.series)])

    return chunks


def main(cmdargs):
    """Entry point for 'b4 diff'.

    Obtains the lower and upper series (from two mboxes via -m, or from a
    single thread lookup), prepares fake-am commit ranges for both, then
    runs 'git range-diff' and prints the result to stdout or writes it to
    the file given via -o.
    """
    if cmdargs.ambox is not None:
        lser, user = diff_mboxes(cmdargs)
    else:
        lser, user = diff_same_thread_series(cmdargs)

    if lser is None or user is None:
        sys.exit(1)

    # Prepare the lower fake-am range
    lsc, lec = lser.make_fake_am_range(gitdir=cmdargs.gitdir)
    if lsc is None or lec is None:
        logger.critical('---')
        logger.critical('Could not create fake-am range for lower series v%s', lser.revision)
        sys.exit(1)
    # Prepare the upper fake-am range
    usc, uec = user.make_fake_am_range(gitdir=cmdargs.gitdir)
    if usc is None or uec is None:
        logger.critical('---')
        logger.critical('Could not create fake-am range for upper series v%s', user.revision)
        sys.exit(1)
    grdcmd = 'git range-diff %.12s..%.12s %.12s..%.12s' % (lsc, lec, usc, uec)
    if cmdargs.nodiff:
        logger.info('Success, to compare v%s and v%s:', lser.revision, user.revision)
        logger.info(f'    {grdcmd}')
        sys.exit(0)
    logger.info('Diffing v%s and v%s', lser.revision, user.revision)
    logger.info('    Running: %s', grdcmd)
    gitargs = ['range-diff', f'{lsc}..{lec}', f'{usc}..{uec}']
    # Colorize when going to a terminal, or when explicitly requested
    if cmdargs.outdiff is None or cmdargs.color:
        gitargs.append('--color')
    ecode, rdiff = b4.git_run_command(cmdargs.gitdir, gitargs)
    if ecode > 0:
        logger.critical('Unable to generate diff')
        logger.critical('Try running it yourself:')
        logger.critical(f'    {grdcmd}')
        sys.exit(1)
    if cmdargs.outdiff is not None:
        logger.info('Writing %s', cmdargs.outdiff)
        # Use a context manager so the output file is flushed and closed
        # (previously the handle was opened and never closed)
        with open(cmdargs.outdiff, 'w') as fh:
            fh.write(rdiff)
    else:
        logger.info('---')
        sys.stdout.write(rdiff)
07070100000013000081A400000000000000000000000161F953D700000C4F000000000000000000000000000000000000001400000000b4-0.8.0+2/b4/kr.py#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2020-2021 by the Linux Foundation
#
__author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'

import os
import sys
import pathlib
import re

import b4
import b4.mbox


logger = b4.logger


def main(cmdargs):
    """Entry point for 'b4 kr' keyring operations.

    With --show-keys, scans the thread for x-developer-key /
    x-developer-signature header pairs and reports, for each unique key,
    where it would live in the local patatt keyring and how to fetch it.
    """
    msgid, msgs = b4.mbox.get_msgs(cmdargs)
    if cmdargs.showkeys:
        logger.info('---')
        try:
            import patatt
        except ModuleNotFoundError:
            logger.info('--show-keys requires the patatt library')
            sys.exit(1)

        # Unique (identity, algo, selector, keyinfo) tuples seen in the thread
        keydata = set()
        for msg in msgs:
            xdk = msg.get('x-developer-key')
            xds = msg.get('x-developer-signature')
            # Both headers must be present for the key to be usable
            if not xdk or not xds:
                continue
            # grab the selector they used
            kdata = b4.LoreMessage.get_parts_from_header(xdk)
            sdata = b4.LoreMessage.get_parts_from_header(xds)
            algo = kdata.get('a')
            identity = kdata.get('i')
            selector = sdata.get('s', 'default')
            if algo == 'openpgp':
                # For openpgp keys, keyinfo is the full key fingerprint
                keyinfo = kdata.get('fpr')
            elif algo == 'ed25519':
                # For ed25519 keys, keyinfo is the public key itself
                keyinfo = kdata.get('pk')
            else:
                logger.debug('Unknown key type: %s', algo)
                continue
            keydata.add((identity, algo, selector, keyinfo))

        if not keydata:
            logger.info('No keys found in the thread.')
            sys.exit(0)
        krpath = os.path.join(b4.get_data_dir(), 'keyring')
        # Track which key types we saw, so we only print relevant hints below
        pgp = False
        ecc = False
        for identity, algo, selector, keyinfo in keydata:
            keypath = patatt.make_pkey_path(algo, identity, selector)
            fullpath = os.path.join(krpath, keypath)
            if os.path.exists(fullpath):
                status = 'known'
            else:
                status = 'unknown'
                if algo == 'openpgp':
                    try:
                        # The key may already be in the default gnupg keyring
                        uids = b4.get_gpg_uids(keyinfo)
                        if len(uids):
                            status = 'in default keyring'
                    except KeyError:
                        pass
            # Pre-create the destination directory so the commands printed
            # below can be run as-is
            pathlib.Path(os.path.dirname(fullpath)).mkdir(parents=True, exist_ok=True)

            logger.info('%s: (%s)', identity, status)
            logger.info('    keytype: %s', algo)
            if algo == 'openpgp':
                pgp = True
                logger.info('      keyid: %s', keyinfo[-16:])
                logger.info('        fpr: %s', ':'.join(re.findall(r'.{4}', keyinfo)))
            else:
                ecc = True
                logger.info('     pubkey: %s', keyinfo)
            logger.info('     krpath: %s', keypath)
            logger.info('   fullpath: %s', fullpath)
        logger.info('---')
        if pgp:
            logger.info('For openpgp keys:')
            logger.info('    gpg --recv-key [keyid]')
            logger.info('    gpg -a --export [keyid] > [fullpath]')
        if ecc:
            logger.info('For ed25519 keys:')
            logger.info('    echo [pubkey] > [fullpath]')

        sys.exit(0)
07070100000014000081A400000000000000000000000161F953D700006163000000000000000000000000000000000000001600000000b4-0.8.0+2/b4/mbox.py#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2020 by the Linux Foundation
#
__author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'

import os
import sys
import mailbox
import email
import email.message
import email.utils
import re
import time
import json
import fnmatch
import shutil
import pathlib
import tempfile

import urllib.parse
import xml.etree.ElementTree

import b4

from typing import Optional, Tuple

logger = b4.logger


def make_am(msgs, cmdargs, msgid):
    """Turn a retrieved thread into an am-ready mailbox or maildir.

    Selects the wanted series revision from msgs, applies follow-up
    trailers, writes the result to cmdargs.outdir (or stdout when outdir
    is '-'), optionally saves the cover letter and quilt patches, prints
    various advisory warnings, and reports the lore link and base-commit
    (guessed if requested).  Finally records the series for 'b4 ty'
    tracking.
    """
    config = b4.get_main_config()
    outdir = cmdargs.outdir
    if outdir == '-':
        # Writing the mbox to stdout, so never emit a cover file
        cmdargs.nocover = True
    wantver = cmdargs.wantver
    wantname = cmdargs.wantname
    covertrailers = cmdargs.covertrailers
    count = len(msgs)
    logger.info('Analyzing %s messages in the thread', count)
    lmbx = b4.LoreMailbox()
    # Go through the mbox once to populate base series
    for msg in msgs:
        lmbx.add_message(msg)

    reroll = True
    if cmdargs.nopartialreroll:
        reroll = False

    lser = lmbx.get_series(revision=wantver, sloppytrailers=cmdargs.sloppytrailers, reroll=reroll)
    if lser is None and wantver is None:
        logger.critical('No patches found.')
        return
    if lser is None:
        logger.critical('Unable to find revision %s', wantver)
        return
    if len(lmbx.series) > 1 and not wantver:
        logger.info('Will use the latest revision: v%s', lser.revision)
        logger.info('You can pick other revisions using the -vN flag')

    # Resolve the -P/--cherry-pick argument into a list of patch positions
    if cmdargs.cherrypick:
        cherrypick = list()
        if cmdargs.cherrypick == '_':
            # Only grab the exact msgid provided
            at = 0
            for lmsg in lser.patches[1:]:
                at += 1
                if lmsg and lmsg.msgid == msgid:
                    cherrypick = [at]
                    cmdargs.cherrypick = f'<{msgid}>'
                    break
            if not len(cherrypick):
                logger.critical('Specified msgid is not present in the series, cannot cherrypick')
                sys.exit(1)
        elif cmdargs.cherrypick.find('*') >= 0:
            # Globbing on subject
            at = 0
            for lmsg in lser.patches[1:]:
                at += 1
                if fnmatch.fnmatch(lmsg.subject, cmdargs.cherrypick):
                    cherrypick.append(at)
            if not len(cherrypick):
                logger.critical('Could not match "%s" to any subjects in the series', cmdargs.cherrypick)
                sys.exit(1)
        else:
            # A numeric range expression like "1-3,5"
            cherrypick = list(b4.parse_int_range(cmdargs.cherrypick, upper=len(lser.patches)-1))
    else:
        cherrypick = None

    try:
        am_msgs = lser.get_am_ready(noaddtrailers=cmdargs.noaddtrailers,
                                    covertrailers=covertrailers, trailer_order=config['trailer-order'],
                                    addmysob=cmdargs.addmysob, addlink=cmdargs.addlink,
                                    linkmask=config['linkmask'], cherrypick=cherrypick,
                                    copyccs=cmdargs.copyccs)
    except KeyError:
        sys.exit(1)

    # Decide between maildir and mbox output formats
    if cmdargs.maildir or config.get('save-maildirs', 'no') == 'yes':
        save_maildir = True
        dftext = 'maildir'
    else:
        save_maildir = False
        dftext = 'mbx'

    if wantname:
        # Strip the extension (if any) to make the slug and branch name
        slug = wantname
        if wantname.find('.') > -1:
            slug = '.'.join(wantname.split('.')[:-1])
        gitbranch = slug
    else:
        slug = lser.get_slug(extended=True)
        gitbranch = lser.get_slug(extended=False)

    if outdir != '-':
        am_filename = os.path.join(outdir, f'{slug}.{dftext}')
        am_cover = os.path.join(outdir, f'{slug}.cover')

        # Remove any previous output at the same path before writing
        if os.path.exists(am_filename):
            if os.path.isdir(am_filename):
                shutil.rmtree(am_filename)
            else:
                os.unlink(am_filename)
        if save_maildir:
            b4.save_maildir(am_msgs, am_filename)
        else:
            with open(am_filename, 'w') as fh:
                b4.save_git_am_mbox(am_msgs, fh)
    else:
        am_filename = None
        am_cover = None
        b4.save_git_am_mbox(am_msgs, sys.stdout)

    logger.info('---')

    if cherrypick is None:
        logger.critical('Total patches: %s', len(am_msgs))
    else:
        logger.info('Total patches: %s (cherrypicked: %s)', len(am_msgs), cmdargs.cherrypick)
    # Check if any of the followup-trailers is an Obsoleted-by
    if not cmdargs.checknewer:
        warned = False
        for lmsg in lser.patches:
            # Only check cover letter or first patch
            if not lmsg or lmsg.counter > 1:
                continue
            # Iterate over a copy, since we remove entries as we go
            for trailer in list(lmsg.followup_trailers):
                if trailer[0].lower() == 'obsoleted-by':
                    lmsg.followup_trailers.remove(trailer)
                    if warned:
                        continue
                    logger.critical('---')
                    logger.critical('WARNING: Found an Obsoleted-by follow-up trailer!')
                    logger.critical('         Rerun with -c to automatically retrieve the new series.')
                    warned = True

    if lser.has_cover and lser.patches[0].followup_trailers and not covertrailers:
        # Warn that some trailers were sent to the cover letter
        logger.critical('---')
        logger.critical('NOTE: Some trailers were sent to the cover letter:')
        tseen = set()
        # De-duplicate on (name, value) pairs
        for trailer in lser.patches[0].followup_trailers:
            if tuple(trailer[:2]) not in tseen:
                logger.critical('      %s: %s', trailer[0], trailer[1])
                tseen.add(tuple(trailer[:2]))
        logger.critical('NOTE: Rerun with -t to apply them to all patches')
    if len(lser.trailer_mismatches):
        logger.critical('---')
        logger.critical('NOTE: some trailers ignored due to from/email mismatches:')
        for tname, tvalue, fname, femail in lser.trailer_mismatches:
            logger.critical('    ! Trailer: %s: %s', tname, tvalue)
            logger.critical('     Msg From: %s <%s>', fname, femail)
        logger.critical('NOTE: Rerun with -S to apply them anyway')

    topdir = b4.git_get_toplevel()

    if cmdargs.threeway:
        if not topdir:
            logger.critical('WARNING: cannot prepare 3-way (not in a git dir)')
        elif not lser.complete:
            logger.critical('WARNING: cannot prepare 3-way (series incomplete)')
        else:
            rstart, rend = lser.make_fake_am_range(gitdir=None)
            if rstart and rend:
                logger.info('Prepared a fake commit range for 3-way merge (%.12s..%.12s)', rstart, rend)

    logger.critical('---')
    if lser.partial_reroll:
        logger.critical('WARNING: v%s is a partial reroll from previous revisions', lser.revision)
        logger.critical('         Please carefully review the resulting series to ensure correctness')
        logger.critical('         Pass --no-partial-reroll to disable')
        logger.critical('---')
    if not lser.complete and not cmdargs.cherrypick:
        logger.critical('WARNING: Thread incomplete!')

    if lser.has_cover and not cmdargs.nocover:
        lser.save_cover(am_cover)

    # Find the first actual patch to get a msgid and body for link/base info
    top_msgid = None
    first_body = None
    for lmsg in lser.patches:
        if lmsg is not None:
            first_body = lmsg.body
            top_msgid = lmsg.msgid
            break
    if top_msgid is None:
        logger.critical('Could not find any patches in the series.')
        return

    linkurl = config['linkmask'] % top_msgid
    if cmdargs.quiltready:
        q_dirname = os.path.join(outdir, f'{slug}.patches')
        save_as_quilt(am_msgs, q_dirname)
        logger.critical('Quilt: %s', q_dirname)

    logger.critical(' Link: %s', linkurl)

    # Look for an explicit base-commit trailer in the first message body
    base_commit = None
    matches = re.search(r'base-commit: .*?([0-9a-f]+)', first_body, re.MULTILINE)
    if matches:
        base_commit = matches.groups()[0]
    else:
        # Try a more relaxed search
        matches = re.search(r'based on .*?([0-9a-f]{40})', first_body, re.MULTILINE)
        if matches:
            base_commit = matches.groups()[0]

    if base_commit:
        logger.critical(' Base: %s', base_commit)
    else:
        if topdir is not None:
            if cmdargs.guessbase:
                logger.critical('       attempting to guess base-commit...')
                try:
                    base_commit, nblobs, mismatches = lser.find_base(topdir, branches=cmdargs.guessbranch,
                                                                     maxdays=cmdargs.guessdays)
                    if mismatches == 0:
                        logger.critical(' Base: %s (exact match)', base_commit)
                    elif nblobs == mismatches:
                        logger.critical(' Base: failed to guess base')
                    else:
                        logger.critical(' Base: %s (best guess, %s/%s blobs matched)', base_commit,
                                        nblobs - mismatches, nblobs)
                except IndexError:
                    logger.critical(' Base: failed to guess base')
            else:
                checked, mismatches = lser.check_applies_clean(topdir, at=cmdargs.guessbranch)
                if checked and len(mismatches) == 0 and checked != mismatches:
                    logger.critical(' Base: applies clean to current tree')
                else:
                    logger.critical(' Base: not specified')
        else:
            logger.critical(' Base: not specified')

    if base_commit is not None:
        logger.critical('       git checkout -b %s %s', gitbranch, base_commit)
    if cmdargs.outdir != '-':
        logger.critical('       git am %s', am_filename)

    thanks_record_am(lser, cherrypick=cherrypick)


def thanks_record_am(lser, cherrypick=None):
    """Save series metadata to the data dir for later 'thanks' tracking.

    Writes a <slug>.am JSON file describing the applied series (subject,
    sender, recipients, per-patch hashes).  Bails out quietly if patch
    hashes are unavailable or no patches are present.
    """
    # Are we tracking this already?
    datadir = b4.get_data_dir()
    slug = lser.get_slug(extended=True)
    filename = '%s.am' % slug

    patches = []
    padlen = len(str(lser.expected))
    lmsg = None

    for at, pmsg in enumerate(lser.patches):
        if pmsg is None:
            continue

        # Remember the first non-empty message for the summary fields below
        if lmsg is None:
            lmsg = pmsg

        if not pmsg.has_diff:
            # Don't care about the cover letter
            continue

        if cherrypick is not None and at not in cherrypick:
            logger.debug('Skipped non-cherrypicked: %s', at)
            continue

        if pmsg.pwhash is None:
            logger.debug('Unable to get hashes for all patches, not tracking for thanks')
            return

        prefix = '%s/%s' % (str(pmsg.counter).zfill(padlen), pmsg.expected)
        patches.append((pmsg.subject, pmsg.pwhash, pmsg.msgid, prefix))

    if lmsg is None:
        logger.debug('All patches missing, not tracking for thanks')
        return

    allto = email.utils.getaddresses([str(x) for x in lmsg.msg.get_all('to', [])])
    allcc = email.utils.getaddresses([str(x) for x in lmsg.msg.get_all('cc', [])])

    out = {
        'msgid': lmsg.msgid,
        'subject': lmsg.full_subject,
        'fromname': lmsg.fromname,
        'fromemail': lmsg.fromemail,
        'to': b4.format_addrs(allto, clean=False),
        'cc': b4.format_addrs(allcc, clean=False),
        'references': b4.LoreMessage.clean_header(lmsg.msg['References']),
        'sentdate': b4.LoreMessage.clean_header(lmsg.msg['Date']),
        'quote': b4.make_quote(lmsg.body, maxlines=5),
        'cherrypick': cherrypick is not None,
        'patches': patches,
    }
    fullpath = os.path.join(datadir, filename)
    with open(fullpath, 'w', encoding='utf-8') as fh:
        json.dump(out, fh, ensure_ascii=False, indent=4)
        logger.debug('Wrote %s for thanks tracking', filename)


def save_as_quilt(am_msgs, q_dirname):
    """Write the am-ready messages as a quilt-compatible patch directory.

    Each message becomes NNNN_subject.patch, and a 'series' file listing
    them in order is written last.  Refuses to overwrite an existing
    directory.
    """
    if os.path.exists(q_dirname):
        logger.critical('ERROR: Directory %s exists, not saving quilt patches', q_dirname)
        return
    pathlib.Path(q_dirname).mkdir(parents=True)
    patch_filenames = []
    for message in am_msgs:
        lsubj = b4.LoreSubject(message.get('subject', ''))
        # Sanitize the subject into a filesystem-friendly slug
        sanitized = re.sub(r'\W+', '_', lsubj.subject).strip('_').lower()
        patch_filename = '%04d_%s.patch' % (lsubj.counter, sanitized)
        patch_filenames.append(patch_filename)
        quilt_out = os.path.join(q_dirname, patch_filename)
        i, m, p = b4.get_mailinfo(message.as_bytes(policy=b4.emlpolicy), scissors=True)
        with open(quilt_out, 'wb') as fh:
            author = i.get('Author')
            if author:
                fh.write(b'From: %s <%s>\n' % (author.encode(), i.get('Email').encode()))
            else:
                fh.write(b'From: %s\n' % i.get('Email').encode())
            fh.write(b'Subject: %s\n' % i.get('Subject').encode())
            fh.write(b'Date: %s\n' % i.get('Date').encode())
            fh.write(b'\n')
            fh.write(m)
            fh.write(p)
        logger.debug('  Wrote: %s', patch_filename)
    # Write the series file
    with open(os.path.join(q_dirname, 'series'), 'w') as sfh:
        for patch_filename in patch_filenames:
            sfh.write('%s\n' % patch_filename)


def get_extra_series(msgs: list, direction: int = 1, wantvers: Optional[int] = None, nocache: bool = False,
                     useproject: Optional[str] = None) -> list:
    """Augment a thread with newer (direction > 0) or older (direction < 0)
    revisions of the same series found in the public-inbox archives.

    Follows Obsoleted-by trailers when looking forward; otherwise queries
    the archive's atom feed for same-subject/same-author postings around
    the base message's date.  Returns msgs with any newly found messages
    appended (duplicates by message-id are skipped).
    """
    # The cover letter (or first patch) of the latest revision in msgs
    base_msg = None
    latest_revision = None
    seen_msgids = set()
    seen_covers = set()
    # Message-ids named in Obsoleted-by trailers, to fetch directly
    obsoleted = list()
    for msg in msgs:
        msgid = b4.LoreMessage.get_clean_msgid(msg)
        seen_msgids.add(msgid)
        lsub = b4.LoreSubject(msg['Subject'])
        if direction > 0 and lsub.reply:
            # Does it have an "Obsoleted-by: trailer?
            rmsg = b4.LoreMessage(msg)
            trailers, mismatches = rmsg.get_trailers()
            for tl in trailers:
                if tl[0].lower() == 'obsoleted-by':
                    # The trailer value may be a URL; take the path chunk
                    # that looks like a message-id
                    for chunk in tl[1].split('/'):
                        if chunk.find('@') > 0 and chunk not in seen_msgids:
                            obsoleted.append(chunk)
                            break
        # Ignore patches above 1
        if lsub.counter > 1:
            continue
        if base_msg is not None:
            logger.debug('Current base_msg: %s', base_msg['Subject'])
        logger.debug('Checking the subject on %s', lsub.full_subject)
        if latest_revision is None or lsub.revision >= latest_revision:
            latest_revision = lsub.revision
            if lsub.counter == 0 and not lsub.counters_inferred:
                # And a cover letter, nice. This is the easy case
                base_msg = msg
                seen_covers.add(latest_revision)
            elif lsub.counter == 1 and latest_revision not in seen_covers:
                # A patch/series without a cover letter
                base_msg = msg

    if base_msg is None:
        logger.debug('Could not find cover of 1st patch in mbox')
        return msgs

    # Derive the list archive location from the configured midmask
    config = b4.get_main_config()
    loc = urllib.parse.urlparse(config['midmask'])
    if not useproject:
        useproject = 'all'

    listarc = f'{loc.scheme}://{loc.netloc}/{useproject}/'
    # Make sure it exists
    queryurl = f'{listarc}_/text/config/raw'
    session = b4.get_requests_session()
    resp = session.get(queryurl)
    if not resp.status_code == 200:
        logger.info('Unable to figure out list archive location')
        return msgs

    nt_msgs = list()
    if len(obsoleted):
        # Forward lookup via explicit Obsoleted-by message-ids
        for nt_msgid in obsoleted:
            logger.info('Obsoleted-by: %s', nt_msgid)
            # Grab this thread from remote
            t_mbx_url = '%s/%s/t.mbox.gz' % (listarc.rstrip('/'), nt_msgid)
            potentials = b4.get_pi_thread_by_url(t_mbx_url, nocache=nocache)
            if potentials:
                potentials = b4.get_strict_thread(potentials, nt_msgid)
                nt_msgs += potentials
                logger.info('   Added %s messages from that thread', len(potentials))
            else:
                logger.info('   No messages added from that thread')

    else:
        # Get subject info from base_msg again
        lsub = b4.LoreSubject(base_msg['Subject'])
        if not len(lsub.prefixes):
            logger.debug('Not checking for new revisions: no prefixes on the cover letter.')
            return msgs
        if direction < 0 and latest_revision <= 1:
            logger.debug('This is the latest version of the series')
            return msgs
        if direction < 0 and wantvers is None:
            wantvers = [latest_revision - 1]

        base_msgid = b4.LoreMessage.get_clean_msgid(base_msg)
        fromeml = email.utils.getaddresses(base_msg.get_all('from', []))[0][1]
        msgdate = email.utils.parsedate_tz(str(base_msg['Date']))
        startdate = time.strftime('%Y%m%d', msgdate[:9])
        # Build a public-inbox search query limited by subject, author and date
        if direction > 0:
            q = 's:"%s" AND f:"%s" AND d:%s..' % (lsub.subject.replace('"', ''), fromeml, startdate)
            queryurl = '%s?%s' % (listarc, urllib.parse.urlencode({'q': q, 'x': 'A', 'o': '-1'}))
            logger.critical('Checking for newer revisions on %s', listarc)
        else:
            q = 's:"%s" AND f:"%s" AND d:..%s' % (lsub.subject.replace('"', ''), fromeml, startdate)
            queryurl = '%s?%s' % (listarc, urllib.parse.urlencode({'q': q, 'x': 'A', 'o': '1'}))
            logger.critical('Checking for older revisions on %s', listarc)

        logger.debug('Query URL: %s', queryurl)
        session = b4.get_requests_session()
        resp = session.get(queryurl)
        # try to parse it
        try:
            tree = xml.etree.ElementTree.fromstring(resp.content)
        except xml.etree.ElementTree.ParseError as ex:
            logger.debug('Unable to parse results, ignoring: %s', ex)
            resp.close()
            return msgs
        resp.close()
        ns = {'atom': 'http://www.w3.org/2005/Atom'}
        entries = tree.findall('atom:entry', ns)
        seen_urls = set()

        # Filter the atom results down to plausible other revisions
        for entry in entries:
            title = entry.find('atom:title', ns).text
            lsub = b4.LoreSubject(title)
            if lsub.reply or lsub.counter > 1:
                logger.debug('Ignoring result (not interesting): %s', title)
                continue
            link = entry.find('atom:link', ns).get('href')
            if direction > 0 and lsub.revision <= latest_revision:
                logger.debug('Ignoring result (not new revision): %s', title)
                continue
            elif direction < 0 and lsub.revision >= latest_revision:
                logger.debug('Ignoring result (not old revision): %s', title)
                continue
            elif direction < 0 and lsub.revision not in wantvers:
                logger.debug('Ignoring result (not revision we want): %s', title)
                continue
            if link.find('/%s/' % base_msgid) > 0:
                logger.debug('Ignoring result (same thread as ours):%s', title)
                continue
            if lsub.revision == 1 and lsub.revision == latest_revision:
                # Someone sent a separate message with an identical title but no new vX in the subject line
                if direction > 0:
                    # It's *probably* a new revision.
                    logger.debug('Likely a new revision: %s', title)
                else:
                    # It's *probably* an older revision.
                    logger.debug('Likely an older revision: %s', title)
            elif direction > 0 and lsub.revision > latest_revision:
                logger.debug('Definitely a new revision [v%s]: %s', lsub.revision, title)
            elif direction < 0 and lsub.revision < latest_revision:
                logger.debug('Definitely an older revision [v%s]: %s', lsub.revision, title)
            else:
                logger.debug('No idea what this is: %s', title)
                continue
            t_mbx_url = '%st.mbox.gz' % link
            if t_mbx_url in seen_urls:
                continue
            seen_urls.add(t_mbx_url)
            logger.info('New revision: %s', title)
            potentials = b4.get_pi_thread_by_url(t_mbx_url, nocache=nocache)
            if potentials:
                nt_msgs += potentials
                logger.info('   Added %s messages from that thread', len(potentials))

    # Append all of these to the existing mailbox
    for nt_msg in nt_msgs:
        nt_msgid = b4.LoreMessage.get_clean_msgid(nt_msg)
        if nt_msgid in seen_msgids:
            logger.debug('Duplicate message, skipping')
            continue
        nt_subject = re.sub(r'\s+', ' ', nt_msg['Subject'])
        logger.debug('Adding: %s', nt_subject)
        msgs.append(nt_msg)
        seen_msgids.add(nt_msgid)

    return msgs


def get_msgs(cmdargs) -> Tuple[Optional[str], Optional[list]]:
    """Obtain the thread messages to operate on, from remote or local sources.

    Without --use-local-mbox, looks up the thread by msgid in the
    public-inbox archives.  Otherwise reads messages from stdin ('-'), a
    maildir, or an mbox file.  Returns a (msgid, msgs) tuple; msgid may be
    None when it could not be determined, msgs may be None/empty when
    retrieval failed.
    """
    msgid = None
    if not cmdargs.localmbox:
        msgid = b4.get_msgid(cmdargs)
        if not msgid:
            logger.error('Error: pipe a message or pass msgid as parameter')
            sys.exit(1)

        pickings = set()
        try:
            # cherrypick may not exist on all subcommands' cmdargs
            if cmdargs.cherrypick == '_':
                # Just that msgid, please
                pickings = {msgid}
        except AttributeError:
            pass
        msgs = b4.get_pi_thread_by_msgid(msgid, useproject=cmdargs.useproject, nocache=cmdargs.nocache,
                                         onlymsgids=pickings)
        if not msgs:
            return None, msgs
    else:
        if cmdargs.localmbox == '-':
            # The entire mbox is passed via stdin, so mailsplit it and use the first message for our msgid
            with tempfile.TemporaryDirectory() as tfd:
                msgs = b4.mailsplit_bytes(sys.stdin.buffer.read(), tfd)
            if not len(msgs):
                logger.critical('Stdin did not contain any messages')
                sys.exit(1)

        elif os.path.exists(cmdargs.localmbox):
            msgid = b4.get_msgid(cmdargs)
            # A directory is assumed to be a maildir, anything else an mbox
            if os.path.isdir(cmdargs.localmbox):
                in_mbx = mailbox.Maildir(cmdargs.localmbox)
            else:
                in_mbx = mailbox.mbox(cmdargs.localmbox)

            if msgid:
                # Narrow the mailbox down to just the thread of interest
                msgs = b4.get_strict_thread(in_mbx, msgid)
                if not msgs:
                    logger.critical('Could not find %s in %s', msgid, cmdargs.localmbox)
                    sys.exit(1)
            else:
                msgs = in_mbx
        else:
            logger.critical('Mailbox %s does not exist', cmdargs.localmbox)
            sys.exit(1)

    # If we still have no msgid, take it from the first message that has one
    if not msgid and msgs:
        for msg in msgs:
            msgid = msg.get('Message-ID', None)
            if msgid:
                msgid = msgid.strip('<>')
                break

    return msgid, msgs


def main(cmdargs):
    """Entry point for the mbox/am subcommands.

    Retrieves the requested thread and, depending on cmdargs, converts it
    for git-am, prints it to stdout, appends it to an existing maildir, or
    saves it as a new mbox/maildir file.
    """
    if cmdargs.checknewer:
        # Force nocache mode
        cmdargs.nocache = True

    msgid, msgs = get_msgs(cmdargs)
    if not msgs:
        return

    if len(msgs) and cmdargs.checknewer:
        # Look for newer revisions of the same series on the server
        msgs = get_extra_series(msgs, direction=1, useproject=cmdargs.useproject)

    if cmdargs.subcmd == 'am':
        make_am(msgs, cmdargs, msgid)
        return

    logger.info('%s messages in the thread', len(msgs))
    if cmdargs.outdir == '-':
        logger.info('---')
        b4.save_git_am_mbox(msgs, sys.stdout)
        return

    # Check if outdir is a maildir
    if (os.path.isdir(os.path.join(cmdargs.outdir, 'new'))
            and os.path.isdir(os.path.join(cmdargs.outdir, 'cur'))
            and os.path.isdir(os.path.join(cmdargs.outdir, 'tmp'))):
        mdr = mailbox.Maildir(cmdargs.outdir)
        have_msgids = set()
        added = 0
        if cmdargs.filterdupes:
            # Collect msgids already present so we only add new messages
            for emsg in mdr:
                have_msgids.add(b4.LoreMessage.get_clean_msgid(emsg))
        for msg in msgs:
            if b4.LoreMessage.get_clean_msgid(msg) not in have_msgids:
                added += 1
                mdr.add(msg)
        logger.info('Added %s messages to maildir %s', added, cmdargs.outdir)
        return

    config = b4.get_main_config()
    if cmdargs.maildir or config.get('save-maildirs', 'no') == 'yes':
        save_maildir = True
        dftext = 'maildir'
    else:
        save_maildir = False
        dftext = 'mbx'

    if cmdargs.wantname:
        savename = os.path.join(cmdargs.outdir, cmdargs.wantname)
    else:
        # Build a filesystem-safe filename out of the msgid
        safe_msgid = re.sub(r'[^\w@.+%-]+', '_', msgid).strip('_')
        savename = os.path.join(cmdargs.outdir, f'{safe_msgid}.{dftext}')

    if save_maildir:
        if os.path.isdir(savename):
            # Replace any pre-existing maildir of the same name
            shutil.rmtree(savename)
        md = mailbox.Maildir(savename, create=True)
        for msg in msgs:
            md.add(msg)
        md.close()
        logger.info('Saved maildir %s', savename)
        return

    with open(savename, 'w') as fh:
        b4.save_git_am_mbox(msgs, fh)

    logger.info('Saved %s', savename)
07070100000015000081A400000000000000000000000161F953D700005B5C000000000000000000000000000000000000001400000000b4-0.8.0+2/b4/pr.py#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2020 by the Linux Foundation
#
__author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'

import os
import sys
import tempfile

import b4
import re
import mailbox
import json
import email
import gzip

import urllib.parse
import requests

from datetime import datetime, timedelta

from email import utils, charset
from email.mime.text import MIMEText
from email.mime.application import MIMEApplication
from email.mime.multipart import MIMEMultipart


charset.add_charset('utf-8', None)

logger = b4.logger

# Matches "changes since commit <sha>:" to find the PR base commit
PULL_BODY_SINCE_ID_RE = [
    re.compile(r'changes since commit ([0-9a-f]{5,40}):', re.M | re.I)
]

# I like these
# Matches "fetch changes up to <sha>:" to find the expected PR tip commit
PULL_BODY_WITH_COMMIT_ID_RE = [
    re.compile(r'fetch changes up to ([0-9a-f]{5,40}):', re.M | re.I),
]

# I don't like these
# Fallback: bare "repo-url ref" (first) or "repo-url" (second) lines in the body
PULL_BODY_REMOTE_REF_RE = [
    re.compile(r'^\s*([\w+-]+(?:://|@)[\w/.@:~-]+)[\s\\]+([\w/._-]+)\s*$', re.M | re.I),
    re.compile(r'^\s*([\w+-]+(?:://|@)[\w/.@~-]+)\s*$', re.M | re.I),
]


def format_addrs(pairs):
    # Render (name, email) tuples as a single comma-separated address header value.
    rendered = list()
    for pair in pairs:
        rendered.append(utils.formataddr(pair))
    return ', '.join(rendered)


def git_get_commit_id_from_repo_ref(repo, ref):
    """Resolve a remote ref to a commit-id using git ls-remote.

    Only git://, http:// and https:// repo URLs are supported. Short ref
    names are tried first as heads, then as tags. Returns the commit-id
    string, or None when the ref could not be resolved.
    """
    # We only handle git and http/s URLs (startswith is the idiomatic prefix test)
    if not repo.startswith(('git://', 'http://', 'https://')):
        logger.info('%s uses unsupported protocol', repo)
        return None

    logger.debug('getting commit-id from: %s %s', repo, ref)
    # Drop the leading "refs/", if any
    ref = re.sub(r'^refs/', '', ref)
    # Is it a full ref name or a shortname?
    if 'heads/' not in ref and 'tags/' not in ref:
        # Try grabbing it as a head first
        lines = b4.git_get_command_lines(None, ['ls-remote', repo, 'refs/heads/%s' % ref])
        if not lines:
            # try it as a tag, then (^{} dereferences an annotated tag to its commit)
            lines = b4.git_get_command_lines(None, ['ls-remote', repo, 'refs/tags/%s^{}' % ref])

    elif ref.startswith('tags/'):
        lines = b4.git_get_command_lines(None, ['ls-remote', repo, 'refs/%s^{}' % ref])

    else:
        # Grab it as a head and hope for the best
        lines = b4.git_get_command_lines(None, ['ls-remote', repo, 'refs/%s' % ref])

    if not lines:
        # Oh well, we tried
        logger.debug('did not find commit-id, ignoring pull request')
        return None

    # ls-remote output lines are "<commit-id>\t<refname>"
    commit_id = lines[0].split()[0]
    logger.debug('success, commit-id: %s', commit_id)
    return commit_id


def parse_pr_data(msg):
    """Extract pull request details from a mailed pull request message.

    Returns a b4.LoreMessage with pr_base_commit, pr_repo, pr_ref,
    pr_tip_commit and pr_remote_tip_commit filled in from the body (the
    remote tip is resolved via ls-remote), or None when the message has
    no usable plain-text body.
    """
    lmsg = b4.LoreMessage(msg)
    if lmsg.body is None:
        logger.critical('Could not find a plain part in the message body')
        return None

    logger.info('Looking at: %s', lmsg.full_subject)

    # "changes since commit <sha>" gives us the base commit
    for since_re in PULL_BODY_SINCE_ID_RE:
        matches = since_re.search(lmsg.body)
        if matches:
            lmsg.pr_base_commit = matches.groups()[0]
            break

    # Bare "repo ref" lines tell us where to fetch from
    for reporef_re in PULL_BODY_REMOTE_REF_RE:
        matches = reporef_re.search(lmsg.body)
        if matches:
            chunks = matches.groups()
            lmsg.pr_repo = chunks[0]
            if len(chunks) > 1:
                lmsg.pr_ref = chunks[1]
            else:
                # No ref given in the message; assume master
                lmsg.pr_ref = 'refs/heads/master'
            break

    # "fetch changes up to <sha>" gives us the expected tip commit
    for cid_re in PULL_BODY_WITH_COMMIT_ID_RE:
        matches = cid_re.search(lmsg.body)
        if matches:
            lmsg.pr_tip_commit = matches.groups()[0]
            break

    if lmsg.pr_repo and lmsg.pr_ref:
        # Ask the remote what commit the ref currently points at
        lmsg.pr_remote_tip_commit = git_get_commit_id_from_repo_ref(lmsg.pr_repo, lmsg.pr_ref)

    return lmsg


def attest_fetch_head(gitdir, lmsg):
    """Verify the GPG signature on FETCH_HEAD and report the result.

    Determines whether FETCH_HEAD is a signed tag or commit, verifies the
    signature, and logs a pass/fail attestation line. Exits the process
    with code 128 when the attestation policy is 'hardfail' and the
    verification did not pass.
    """
    config = b4.get_main_config()
    attpolicy = config['attestation-policy']
    if config['attestation-checkmarks'] == 'fancy':
        attpass = b4.ATT_PASS_FANCY
        attfail = b4.ATT_FAIL_FANCY
    else:
        attpass = b4.ATT_PASS_SIMPLE
        attfail = b4.ATT_FAIL_SIMPLE
    # Is FETCH_HEAD a tag or a commit?
    htype = b4.git_get_command_lines(gitdir, ['cat-file', '-t', 'FETCH_HEAD'])
    passing = False
    out = ''
    otype = 'unknown'
    if len(htype):
        otype = htype[0]
    if otype == 'tag':
        ecode, out = b4.git_run_command(gitdir, ['verify-tag', '--raw', 'FETCH_HEAD'], logstderr=True)
    elif otype == 'commit':
        ecode, out = b4.git_run_command(gitdir, ['verify-commit', '--raw', 'FETCH_HEAD'], logstderr=True)

    good, valid, trusted, keyid, sigtime = b4.check_gpg_status(out)
    signer = None
    if keyid:
        try:
            # Prefer the uid matching the sender's address, else the first uid
            uids = b4.get_gpg_uids(keyid)
            for uid in uids:
                if uid.find(f'<{lmsg.fromemail}') >= 0:
                    signer = uid
                    break
            if not signer:
                signer = uids[0]

        except KeyError:
            signer = f'{lmsg.fromname} <{lmsg.fromemail}>'

    if good and valid:
        passing = True

    out = out.strip()
    errors = set()
    if not len(out) and attpolicy != 'check':
        errors.add('Remote %s is not signed!' % otype)

    if passing:
        trailer = 'Signed: %s' % signer
        logger.info('  ---')
        logger.info('  %s %s', attpass, trailer)
        return

    if errors:
        logger.critical('  ---')
        if len(out):
            logger.critical('  Pull request is signed, but verification did not succeed:')
        else:
            logger.critical('  Pull request verification did not succeed:')
        for error in errors:
            logger.critical('    %s %s', attfail, error)

        if attpolicy == 'hardfail':
            # sys is already imported at module level; the former local
            # "import sys" here was redundant
            sys.exit(128)


def fetch_remote(gitdir, lmsg, branch=None, check_sig=True, ty_track=True):
    """Fetch the pull request's remote/ref into gitdir's FETCH_HEAD.

    Optionally verifies the signature on FETCH_HEAD (per the attestation
    policy), checks the result out into a new branch, and records the PR
    for thanks tracking. Returns 0 on success, non-zero otherwise.
    """
    # Do we know anything about this base commit?
    if lmsg.pr_base_commit and not b4.git_commit_exists(gitdir, lmsg.pr_base_commit):
        logger.critical('ERROR: git knows nothing about commit %s', lmsg.pr_base_commit)
        logger.critical('       Are you running inside a git checkout and is it up-to-date?')
        return 1

    # Guard against the remote ref having moved since the PR was sent
    if lmsg.pr_tip_commit != lmsg.pr_remote_tip_commit:
        logger.critical('ERROR: commit-id mismatch between pull request and remote')
        logger.critical('       msg=%s, remote=%s', lmsg.pr_tip_commit, lmsg.pr_remote_tip_commit)
        return 1

    # Fetch it now
    logger.info('  Fetching %s %s', lmsg.pr_repo, lmsg.pr_ref)
    gitargs = ['fetch', lmsg.pr_repo, lmsg.pr_ref]
    ecode, out = b4.git_run_command(gitdir, gitargs, logstderr=True)
    if ecode > 0:
        logger.critical('ERROR: Could not fetch remote:')
        logger.critical(out)
        return ecode

    config = b4.get_main_config()
    if check_sig and config['attestation-policy'] != 'off':
        attest_fetch_head(gitdir, lmsg)

    logger.info('---')
    if branch:
        gitargs = ['checkout', '-b', branch, 'FETCH_HEAD']
        logger.info('Fetched into branch %s', branch)
        ecode, out = b4.git_run_command(gitdir, gitargs)
        if ecode > 0:
            logger.critical('ERROR: Failed to create branch')
            logger.critical(out)
            return ecode
    else:
        logger.info('Successfully fetched into FETCH_HEAD')

    if ty_track:
        thanks_record_pr(lmsg)

    return 0


def thanks_record_pr(lmsg):
    """Record a fetched pull request for later "b4 ty" thanks tracking.

    Writes a JSON tracking file named <remote-tip-commit>.pr into the b4
    data dir. Does nothing when the tracking file already exists.
    """
    datadir = b4.get_data_dir()
    filename = '%s.pr' % lmsg.pr_remote_tip_commit
    fullpath = os.path.join(datadir, filename)
    # Check if we're tracking it already (direct existence check instead of
    # scanning the whole directory listing)
    if os.path.exists(fullpath):
        return
    allto = utils.getaddresses([str(x) for x in lmsg.msg.get_all('to', [])])
    allcc = utils.getaddresses([str(x) for x in lmsg.msg.get_all('cc', [])])
    out = {
        'msgid': lmsg.msgid,
        'subject': lmsg.full_subject,
        'fromname': lmsg.fromname,
        'fromemail': lmsg.fromemail,
        'to': b4.format_addrs(allto, clean=False),
        'cc': b4.format_addrs(allcc, clean=False),
        'references': b4.LoreMessage.clean_header(lmsg.msg['References']),
        'remote': lmsg.pr_repo,
        'ref': lmsg.pr_ref,
        'sentdate': b4.LoreMessage.clean_header(lmsg.msg['Date']),
        'quote': b4.make_quote(lmsg.body, maxlines=6)
    }
    with open(fullpath, 'w', encoding='utf-8') as fh:
        json.dump(out, fh, ensure_ascii=False, indent=4)
        logger.debug('Wrote %s for thanks tracking', filename)


def explode(gitdir, lmsg, mailfrom=None, retrieve_links=True, fpopts=None):
    """Turn a pull request into an exploded patch series.

    Fetches the PR remote, runs git-format-patch between the base commit
    and FETCH_HEAD, and rewrites the resulting messages so that they
    thread under a synthetic cover letter built from the PR body. When
    retrieve_links is True, linked threads are retrieved from public-inbox
    and attached to the cover letter as a gzipped mbox. Returns the list
    of generated messages; raises RuntimeError when fetching or patch
    generation fails.
    """
    ecode = fetch_remote(gitdir, lmsg, check_sig=False, ty_track=False)
    if ecode > 0:
        raise RuntimeError('Fetching unsuccessful')

    if not lmsg.pr_base_commit:
        # Use git merge-base between HEAD and FETCH_HEAD to find
        # where we should start
        logger.info('Running git merge-base to find common ancestry')
        gitargs = ['merge-base', 'HEAD', 'FETCH_HEAD']
        ecode, out = b4.git_run_command(gitdir, gitargs, logstderr=True)
        if ecode > 0:
            logger.critical('Could not find common ancestry.')
            logger.critical(out)
            raise RuntimeError('Could not find common ancestry')
        lmsg.pr_base_commit = out.strip()
        if lmsg.pr_base_commit == lmsg.pr_tip_commit:
            logger.critical('Cannot auto-discover merge-base on a merged pull request.')
            raise RuntimeError('Cannot find merge-base on a merged pull request')

    logger.info('Generating patches starting from the base-commit')

    msgs = list()

    # Keep the original subject prefixes, minus GIT/PULL
    prefixes = ['PATCH']
    for prefix in lmsg.lsubject.prefixes:
        if prefix.lower() not in ('git', 'pull'):
            prefixes.append(prefix)

    # get our to's and cc's
    allto = utils.getaddresses(lmsg.msg.get_all('to', []))
    allcc = utils.getaddresses(lmsg.msg.get_all('cc', []))

    if mailfrom is None:
        mailfrom = b4.LoreMessage.clean_header(lmsg.msg.get('From'))
    else:
        realname = None
        for fromaddr in utils.getaddresses(lmsg.msg.get_all('from', [])):
            realname = fromaddr[0]
            if not realname:
                realname = fromaddr[1]
            if fromaddr not in allcc:
                allcc.append(fromaddr)
        if realname:
            # Use "Name via Foo" notation
            if mailfrom.find('@') > 0 > mailfrom.find('<'):
                mailfrom = f'<{mailfrom}>'
            mailfrom = f'{realname} via {mailfrom}'

    config = b4.get_main_config()
    linked_ids = set()
    if retrieve_links:
        # Insert the pull request itself into linked_ids, so we preserve it as part
        # of the archived threads.
        linked_ids.add(lmsg.msgid)

    with b4.git_format_patches(gitdir, lmsg.pr_base_commit, 'FETCH_HEAD', prefixes=prefixes, extraopts=fpopts) as pdir:
        if pdir is None:
            raise RuntimeError('Could not run format-patches')

        for msgfile in sorted(os.listdir(pdir)):
            with open(os.path.join(pdir, msgfile), 'rb') as fh:
                msg = email.message_from_binary_file(fh)

            msubj = b4.LoreSubject(msg.get('subject', ''))

            # Is this the cover letter?
            if msubj.counter == 0:
                # We rebuild the message from scratch
                # The cover letter body is the pull request body, plus a few trailers
                body = '%s\n\nbase-commit: %s\nPR-Link: %s\n' % (
                    lmsg.body.strip(), lmsg.pr_base_commit, config['linkmask'] % lmsg.msgid)

                # Make it a multipart if we're doing retrieve_links
                if retrieve_links:
                    cmsg = MIMEMultipart()
                    cmsg.attach(MIMEText(body, 'plain'))
                else:
                    cmsg = email.message.EmailMessage()
                    cmsg.set_payload(body)

                cmsg.add_header('From', mailfrom)
                cmsg.add_header('Subject', '[' + ' '.join(msubj.prefixes) + '] ' + lmsg.subject)
                cmsg.add_header('Date', lmsg.msg.get('Date'))
                cmsg.set_charset('utf-8')
                cmsg.replace_header('Content-Transfer-Encoding', '8bit')

                msg = cmsg

            else:
                # Move the original From and Date into the body
                prepend = list()
                if msg.get('From') != mailfrom:
                    cleanfrom = b4.LoreMessage.clean_header(msg['from'])
                    prepend.append('From: %s' % ''.join(cleanfrom))
                    msg.replace_header('From', mailfrom)

                prepend.append('Date: %s' % msg['date'])
                body = '%s\n\n%s' % ('\n'.join(prepend), msg.get_payload(decode=True).decode('utf-8'))
                msg.set_payload(body)
                msg.replace_header('Subject', msubj.full_subject)

                if retrieve_links:
                    # Collect Link:/Message-ID trailers for later thread retrieval
                    matches = re.findall(r'^Link:\s+https?://.*/(\S+@\S+)[^/]', body, flags=re.M | re.I)
                    if matches:
                        linked_ids.update(matches)
                    matches = re.findall(r'^Message-ID:\s+(\S+@\S+)', body, flags=re.M | re.I)
                    if matches:
                        linked_ids.update(matches)

                # Add a number of seconds equalling the counter, in hopes it gets properly threaded
                newdate = lmsg.date + timedelta(seconds=msubj.counter)
                msg.replace_header('Date', utils.format_datetime(newdate))

                # Thread it to the cover letter
                msg.add_header('In-Reply-To', '<b4-exploded-0-%s>' % lmsg.msgid)
                msg.add_header('References', '<b4-exploded-0-%s>' % lmsg.msgid)

            msg.add_header('To', format_addrs(allto))
            if allcc:
                msg.add_header('Cc', format_addrs(allcc))

            # Set the message-id based on the original pull request msgid
            msg.add_header('Message-Id', '<b4-exploded-%s-%s>' % (msubj.counter, lmsg.msgid))

            if mailfrom != lmsg.msg.get('From'):
                # Preserve the real sender so replies still reach them
                msg.add_header('Reply-To', lmsg.msg.get('From'))
                msg.add_header('X-Original-From', lmsg.msg.get('From'))

            if lmsg.msg['List-Id']:
                msg.add_header('X-Original-List-Id', b4.LoreMessage.clean_header(lmsg.msg['List-Id']))
            logger.info('  %s', msg.get('Subject'))
            msg.set_charset('utf-8')
            msgs.append(msg)

    logger.info('Exploded %s messages', len(msgs))
    if retrieve_links and linked_ids:
        with tempfile.TemporaryDirectory() as tfd:
            # Create a single mbox file with all linked conversations
            mbf = os.path.join(tfd, 'linked.mbox')
            tmbx = mailbox.mbox(mbf)
            logger.info('---')
            logger.info('Retrieving %s linked conversations', len(linked_ids))

            seen_msgids = set()
            for msgid in linked_ids:
                # Did we already retrieve it as part of a previous tread?
                if msgid in seen_msgids:
                    continue
                lmsgs = b4.get_pi_thread_by_msgid(msgid)
                if lmsgs:
                    # Append any messages we don't yet have
                    for lmsg in lmsgs:
                        amsgid = b4.LoreMessage.get_clean_msgid(lmsg)
                        if amsgid not in seen_msgids:
                            seen_msgids.add(amsgid)
                            logger.debug('Added linked: %s', lmsg.get('Subject'))
                            tmbx.add(lmsg.as_string(policy=b4.emlpolicy).encode())

            if len(tmbx):
                tmbx.close()
                # gzip the mailbox and attach it to the cover letter
                with open(mbf, 'rb') as fh:
                    mbz = gzip.compress(fh.read())
                    fname = 'linked-threads.mbox.gz'
                    att = MIMEApplication(mbz, 'x-gzip')
                    att.add_header('Content-Disposition', f'attachment; filename={fname}')
                    msgs[0].attach(att)

        logger.info('---')
        if len(seen_msgids):
            logger.info('Attached %s messages as linked-threads.mbox.gz', len(seen_msgids))
        else:
            logger.info('Could not retrieve any linked threads')

    return msgs


def get_pr_from_github(ghurl: str):
    """Fetch pull request info from the GitHub API and fake up a PR message.

    Builds an email message from the PR metadata so the rest of the code
    can treat a GitHub PR URL the same as a mailed pull request. Returns a
    b4.LoreMessage with pr_* attributes set, or None on API errors.
    """
    loc = urllib.parse.urlparse(ghurl)
    chunks = loc.path.strip('/').split('/')
    rproj = chunks[0]
    rrepo = chunks[1]
    rpull = chunks[-1]
    apiurl = f'https://api.github.com/repos/{rproj}/{rrepo}/pulls/{rpull}'
    req = requests.session()
    # Do we have a github API key?
    config = b4.get_main_config()
    ghkey = config.get('gh-api-key')
    if ghkey:
        req.headers.update({'Authorization': f'token {ghkey}'})
    req.headers.update({'Accept': 'application/vnd.github.v3+json'})
    resp = req.get(apiurl)
    if resp.status_code != 200:
        logger.critical('Server returned an error: %s', resp.status_code)
        return None
    prdata = resp.json()

    msg = email.message.EmailMessage()
    # The API returns JSON null for a PR without a description (and for
    # head.repo of a deleted fork), so a plain .get() default is not
    # enough -- guard against explicit None values with "or"
    msg.set_payload(prdata.get('body') or '(no body)')
    head = prdata.get('head') or {}
    repo = head.get('repo') or {}
    base = prdata.get('base') or {}
    user = prdata.get('user') or {}

    ulogin = user.get('login')
    fake_email = f'{ulogin}@github.com'
    apiurl = f'https://api.github.com/users/{ulogin}'
    resp = req.get(apiurl)
    if resp.status_code == 200:
        # Prefer the user's real name/email when their profile exposes them
        udata = resp.json()
        uname = udata.get('name')
        if not uname:
            uname = ulogin
        uemail = udata.get('email')
        if not uemail:
            uemail = fake_email
    else:
        uname = ulogin
        uemail = fake_email

    msg['From'] = f'{uname} <{uemail}>'
    title = prdata.get('title', '')
    msg['Subject'] = f'[GIT PULL] {title}'
    msg['To'] = fake_email
    msg['Message-Id'] = utils.make_msgid(idstring=f'{rproj}-{rrepo}-pr-{rpull}', domain='github.com')
    created_at = utils.format_datetime(datetime.strptime(prdata.get('created_at'), '%Y-%m-%dT%H:%M:%SZ'))
    msg['Date'] = created_at
    # We are going to turn it into bytes and then parse again
    # in order to avoid bugs with python's message parsing routines that
    # end up not doing the right thing when decoding 8bit message bodies
    msg.set_charset('utf-8')
    msg.replace_header('Content-Transfer-Encoding', '8bit')
    bug_avoidance = msg.as_string(policy=b4.emlpolicy).encode()
    cmsg = email.message_from_bytes(bug_avoidance)
    lmsg = b4.LoreMessage(cmsg)
    lmsg.pr_base_commit = base.get('sha')
    lmsg.pr_repo = repo.get('clone_url')
    lmsg.pr_ref = head.get('ref')
    lmsg.pr_tip_commit = head.get('sha')
    lmsg.pr_remote_tip_commit = head.get('sha')
    return lmsg


def main(cmdargs):
    """Entry point for the pr subcommand.

    Obtains pull request info from stdin, a GitHub PR URL, or public-inbox,
    then either explodes it into a patch series (--explode) or fetches it
    into the local tree, with optional signature checking.
    """
    gitdir = cmdargs.gitdir
    lmsg = None

    if not sys.stdin.isatty():
        logger.debug('Getting PR message from stdin')
        msg = email.message_from_bytes(sys.stdin.buffer.read())
        cmdargs.msgid = b4.LoreMessage.get_clean_msgid(msg)
        lmsg = parse_pr_data(msg)
    else:
        if cmdargs.msgid and 'github.com' in cmdargs.msgid and '/pull/' in cmdargs.msgid:
            logger.debug('Getting PR info from Github')
            lmsg = get_pr_from_github(cmdargs.msgid)
        else:
            logger.debug('Getting PR message from public-inbox')

            msgid = b4.get_msgid(cmdargs)
            msgs = b4.get_pi_thread_by_msgid(msgid)
            if not msgs:
                return
            # Only parse the message that actually matches the requested msgid
            for msg in msgs:
                mmsgid = b4.LoreMessage.get_clean_msgid(msg)
                if mmsgid == msgid:
                    lmsg = parse_pr_data(msg)
                    break

    if lmsg is None or lmsg.pr_remote_tip_commit is None:
        logger.critical('ERROR: Could not find pull request info in %s', cmdargs.msgid)
        sys.exit(1)

    if not lmsg.pr_tip_commit:
        lmsg.pr_tip_commit = lmsg.pr_remote_tip_commit

    if cmdargs.explode:
        # Set up a temporary clone
        with b4.git_temp_clone(gitdir) as tc:
            try:
                msgs = explode(tc, lmsg, mailfrom=cmdargs.mailfrom, retrieve_links=cmdargs.getlinks)
            except RuntimeError:
                logger.critical('Nothing exploded.')
                sys.exit(1)

        if msgs:
            if cmdargs.sendidentity:
                # Pass exploded series via git-send-email
                config = b4.get_config_from_git(rf'sendemail\.{cmdargs.sendidentity}\..*')
                if not len(config):
                    logger.critical('Not able to find sendemail.%s configuration', cmdargs.sendidentity)
                    sys.exit(1)
                # Make sure from is not overridden by current user
                mailfrom = msgs[0].get('from')
                gitargs = ['send-email', '--identity', cmdargs.sendidentity, '--from', mailfrom]
                if cmdargs.dryrun:
                    gitargs.append('--dry-run')
                # Write out everything into a temporary dir
                counter = 0
                with tempfile.TemporaryDirectory() as tfd:
                    for msg in msgs:
                        outfile = os.path.join(tfd, '%04d' % counter)
                        with open(outfile, 'wb') as tfh:
                            tfh.write(msg.as_string(policy=b4.emlpolicy).encode())
                        gitargs.append(outfile)
                        counter += 1
                    ecode, out = b4.git_run_command(cmdargs.gitdir, gitargs, logstderr=True)
                    if cmdargs.dryrun:
                        logger.info(out)
                    sys.exit(ecode)

            config = b4.get_main_config()
            if config.get('save-maildirs', 'no') == 'yes':
                save_maildir = True
                dftext = 'maildir'
            else:
                save_maildir = False
                dftext = 'mbx'
            savefile = cmdargs.outmbox
            if savefile is None:
                savefile = f'{lmsg.msgid}.{dftext}'
            if os.path.exists(savefile):
                # Refuse to clobber an existing file
                logger.info('File exists: %s', savefile)
                sys.exit(1)

            if save_maildir:
                b4.save_maildir(msgs, savefile)
            else:
                with open(savefile, 'w') as fh:
                    b4.save_git_am_mbox(msgs, fh)
            logger.info('---')
            logger.info('Saved %s', savefile)
            sys.exit(0)
        else:
            logger.critical('Nothing exploded.')
            sys.exit(1)

    exists = b4.git_commit_exists(gitdir, lmsg.pr_tip_commit)
    if exists:
        # Is it in any branch, or just flapping in the wind?
        branches = b4.git_branch_contains(gitdir, lmsg.pr_tip_commit)
        if len(branches):
            logger.info('Pull request tip commit exists in the following branches:')
            for branch in branches:
                logger.info('  %s', branch)
            if cmdargs.check:
                sys.exit(0)
            sys.exit(1)

        # Is it at the tip of FETCH_HEAD?
        loglines = b4.git_get_command_lines(gitdir, ['log', '-1', '--pretty=oneline', 'FETCH_HEAD'])
        if len(loglines) and loglines[0].find(lmsg.pr_tip_commit) == 0:
            logger.info('Pull request is at the tip of FETCH_HEAD')
            if cmdargs.check:
                attest_fetch_head(gitdir, lmsg)
                sys.exit(0)

    elif cmdargs.check:
        logger.info('Pull request does not appear to be in this tree.')
        sys.exit(0)

    fetch_remote(gitdir, lmsg, branch=cmdargs.branch)
07070100000016000081A400000000000000000000000161F953D700005647000000000000000000000000000000000000001400000000b4-0.8.0+2/b4/ty.py#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: GPL-2.0-or-later
# Copyright (C) 2020 by the Linux Foundation
#
__author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'

import os
import sys
import b4
import re
import email
import email.message
import json

from string import Template
from email import utils
from pathlib import Path

logger = b4.logger

# Reply template for merged pull requests; ${...} placeholders are filled
# from the tracking json via string.Template.safe_substitute
DEFAULT_PR_TEMPLATE = """
On ${sentdate}, ${fromname} wrote:
${quote}

Merged, thanks!

${summary}

Best regards,
-- 
${signature}
"""

# Reply template for applied patch series
DEFAULT_AM_TEMPLATE = """
On ${sentdate}, ${fromname} wrote:
${quote}

Applied, thanks!

${summary}

Best regards,
-- 
${signature}
"""

# Used to track commits created by current user
MY_COMMITS = None
# Used to track additional branch info
BRANCH_INFO = None


def git_get_merge_id(gitdir, commit_id, branch=None):
    # Find the merge commit that brought commit_id into history: the oldest
    # ancestry-path commit after commit_id (optionally limited to branch).
    gitargs = ['rev-list', '%s..' % commit_id, '--ancestry-path']
    if branch is not None:
        gitargs.append(branch)
    revlist = b4.git_get_command_lines(gitdir, gitargs)
    if revlist:
        return revlist[-1]
    return None


def git_get_rev_diff(gitdir, rev):
    # Diff of a single commit against its first parent; returns (ecode, out).
    gitargs = ['diff', '{0}~..{0}'.format(rev)]
    return b4.git_run_command(gitdir, gitargs)


def git_get_commit_message(gitdir, rev):
    # Full commit message body (%B) of a single revision; returns (ecode, out).
    gitargs = ['log', '--format=%B', '-1', rev]
    return b4.git_run_command(gitdir, gitargs)


def make_reply(reply_template, jsondata):
    """Build a thank-you reply message from a template and tracking data.

    Substitutes ${...} placeholders from jsondata, addresses the reply to
    the original sender (removing ourselves and them from To/Cc), and
    threads it into the original conversation. Returns an email.message
    object ready to send.
    """
    body = Template(reply_template).safe_substitute(jsondata)
    # Conform to email standards
    body = body.replace('\n', '\r\n')
    msg = email.message_from_string(body)
    msg['From'] = '%s <%s>' % (jsondata['myname'], jsondata['myemail'])
    allto = utils.getaddresses([jsondata['to']])
    allcc = utils.getaddresses([jsondata['cc']])
    # Remove ourselves and original sender from allto or allcc
    for entry in list(allto):
        if entry[1] == jsondata['myemail'] or entry[1] == jsondata['fromemail']:
            allto.remove(entry)
    for entry in list(allcc):
        if entry[1] == jsondata['myemail'] or entry[1] == jsondata['fromemail']:
            allcc.remove(entry)

    # Add original sender to the To
    allto.append((jsondata['fromname'], jsondata['fromemail']))

    msg['To'] = b4.format_addrs(allto)
    if allcc:
        msg['Cc'] = b4.format_addrs(allcc)
    # Thread the reply into the original conversation
    msg['In-Reply-To'] = '<%s>' % jsondata['msgid']
    if len(jsondata['references']):
        msg['References'] = '%s <%s>' % (jsondata['references'], jsondata['msgid'])
    else:
        msg['References'] = '<%s>' % jsondata['msgid']

    # Strip any existing Re: prefix before adding our own
    subject = re.sub(r'^Re:\s+', '', jsondata['subject'], flags=re.I)
    if jsondata.get('cherrypick'):
        # Only a subset of the series was applied
        msg['Subject'] = 'Re: (subset) ' + subject
    else:
        msg['Subject'] = 'Re: ' + subject

    mydomain = jsondata['myemail'].split('@')[1]
    msg['Message-Id'] = email.utils.make_msgid(idstring='b4-ty', domain=mydomain)
    msg['Date'] = email.utils.formatdate(localtime=True)
    return msg


def auto_locate_pr(gitdir, jsondata, branch):
    """Find the merge commit that merged a tracked pull request into branch.

    Returns the merge commit-id when the PR tip commit is on the given
    branch and the merge commit was authored by the current user; returns
    None otherwise.
    """
    pr_commit_id = jsondata['pr_commit_id']
    logger.debug('Checking %s', jsondata['pr_commit_id'])
    if not b4.git_commit_exists(gitdir, pr_commit_id):
        return None

    onbranches = b4.git_branch_contains(gitdir, pr_commit_id)
    if not len(onbranches):
        logger.debug('%s is not on any branches', pr_commit_id)
        return None
    if branch not in onbranches:
        logger.debug('%s is not on branch %s', pr_commit_id, branch)
        return None

    # Get the merge commit
    merge_commit_id = git_get_merge_id(gitdir, pr_commit_id, branch)
    if not merge_commit_id:
        logger.debug('Could not get a merge commit-id for %s', pr_commit_id)
        return None

    # Check that we are the author of the merge commit
    gitargs = ['show', '--format=%ae', merge_commit_id]
    out = b4.git_get_command_lines(gitdir, gitargs)
    if not out:
        logger.debug('Could not get merge commit author for %s', pr_commit_id)
        return None

    usercfg = b4.get_user_config()
    if usercfg['email'] not in out:
        # Merged by someone else; not ours to send thanks for
        logger.debug('Merged by a different author, ignoring %s', pr_commit_id)
        logger.debug('Author: %s', out[0])
        return None

    return merge_commit_id


def get_all_commits(gitdir, branch, since='1.week', committer=None):
    """Return the current user's recent commits on branch, keyed by patch hash.

    Results are cached in the module-level MY_COMMITS dict; once populated,
    the cache is returned as-is on subsequent calls regardless of
    arguments. Each value is a (commit_id, subject, trackers) tuple, where
    trackers holds any Message-Id/Link trailers found in the commit message.
    """
    global MY_COMMITS
    if MY_COMMITS is not None:
        return MY_COMMITS

    MY_COMMITS = dict()
    if committer is None:
        usercfg = b4.get_user_config()
        committer = usercfg['email']

    gitargs = ['log', '--committer', committer, '--no-abbrev', '--oneline', '--since', since, branch]
    lines = b4.git_get_command_lines(gitdir, gitargs)
    if not len(lines):
        logger.debug('No new commits from the current user --since=%s', since)
        return MY_COMMITS

    logger.info('Found %s of your commits since %s', len(lines), since)
    logger.info('Calculating patch hashes, may take a moment...')
    # Get patch hash of each commit
    for line in lines:
        # --oneline format: "<commit-id> <subject>"
        commit_id, subject = line.split(maxsplit=1)
        ecode, out = git_get_rev_diff(gitdir, commit_id)
        pwhash = b4.LoreMessage.get_patchwork_hash(out)
        logger.debug('phash=%s', pwhash)
        # get all message-id or link trailers
        ecode, out = git_get_commit_message(gitdir, commit_id)
        matches = re.findall(r'^\s*(?:message-id|link):[ \t]+(\S+)\s*$', out, flags=re.I | re.M)
        trackers = list()
        if matches:
            for tvalue in matches:
                trackers.append(tvalue)

        MY_COMMITS[pwhash] = (commit_id, subject, trackers)

    return MY_COMMITS


def auto_locate_series(gitdir, jsondata, branch, since='1.week'):
    """Match each patch in jsondata['patches'] against local commits.

    Returns a list of (position, commit_id) tuples, one per patch in the
    series; commit_id is None for patches that could not be located.
    """
    commits = get_all_commits(gitdir, branch, since)
    known_hashes = set(commits.keys())

    results = list()
    for pos, patch in enumerate(jsondata['patches'], start=1):
        logger.debug('Checking %s', patch)
        # Fast path: exact patchwork-hash match
        if patch[1] in known_hashes:
            logger.debug('Found: %s', patch[0])
            results.append((pos, commits[patch[1]][0]))
            continue

        # Slow path: try subject match, then recorded message-id trackers
        located = None
        for pwhash, commit in commits.items():
            if commit[1] == patch[0]:
                logger.debug('Matched using subject')
                located = commit[0]
                break
            if len(patch) > 2 and len(patch[2]) and len(commit[2]):
                if any(tracker.find(patch[2]) >= 0 for tracker in commit[2]):
                    logger.debug('Matched using recorded message-id')
                    located = commit[0]
                    break

        if located is None:
            logger.debug('  Failed to find a match for: %s', patch[0])
        results.append((pos, located))

    return results


def read_template(tptfile):
    """Read a reply template from tptfile, skipping '#' comment lines.

    Supports ~ and $VAR expansion in the path (not in the contents).
    Bubbles up FileNotFoundError if the file does not exist.
    """
    if '~' in tptfile:
        tptfile = os.path.expanduser(tptfile)
    if '$' in tptfile:
        tptfile = os.path.expandvars(tptfile)
    with open(tptfile, 'r', encoding='utf-8') as fh:
        return ''.join(line for line in fh if not line.startswith('#'))


def set_branch_details(gitdir, branch, jsondata, config):
    """Fold per-branch/remote overrides into jsondata and config.

    Reads branch info via get_branch_info() and maps any b4-* settings
    onto the thanks-* config keys, then derives a human-readable tree
    name for templates. Returns the updated (jsondata, config) pair.
    """
    binfo = get_branch_info(gitdir, branch)
    jsondata['branch'] = branch
    # Map per-remote b4-* settings onto the config keys consumed by the
    # thanks-generating code
    for key, val in binfo.items():
        if key == 'b4-treename':
            config['thanks-treename'] = val
        elif key == 'b4-commit-url-mask':
            config['thanks-commit-url-mask'] = val
        elif key == 'b4-pr-template':
            config['thanks-pr-template'] = val
        elif key == 'b4-am-template':
            config['thanks-am-template'] = val
        elif key == 'branch':
            jsondata['branch'] = val

    if 'thanks-treename' in config:
        jsondata['treename'] = config['thanks-treename']
    elif 'url' in binfo:
        try:
            # Try to grab the last two chunks of the path
            purl = Path(binfo['url'])
            jsondata['treename'] = os.path.join(purl.parts[-2], purl.parts[-1])
        except Exception:
            # FIX: was a bare "except:", which also swallowed SystemExit and
            # KeyboardInterrupt. Something went wrong parsing the URL (e.g.
            # fewer than two path components), so just use the whole URL.
            jsondata['treename'] = binfo['url']
    else:
        jsondata['treename'] = 'local tree'

    return jsondata, config


def generate_pr_thanks(gitdir, jsondata, branch):
    """Build a templated thank-you reply for a merged pull request."""
    config = b4.get_main_config()
    jsondata, config = set_branch_details(gitdir, branch, jsondata, config)
    thanks_template = DEFAULT_PR_TEMPLATE
    if config['thanks-pr-template']:
        # A custom template was configured -- try to load it instead
        try:
            thanks_template = read_template(config['thanks-pr-template'])
        except FileNotFoundError:
            logger.critical('ERROR: thanks-pr-template says to use %s, but it does not exist',
                            config['thanks-pr-template'])
            sys.exit(2)

    if 'merge_commit_id' not in jsondata:
        merge_commit_id = git_get_merge_id(gitdir, jsondata['pr_commit_id'])
        if not merge_commit_id:
            logger.critical('Could not get merge commit id for %s', jsondata['subject'])
            logger.critical('Was it actually merged?')
            sys.exit(1)
        jsondata['merge_commit_id'] = merge_commit_id

    # Make a summary line pointing at the merge commit
    cidmask = config['thanks-commit-url-mask'] or 'merge commit: %s'
    jsondata['summary'] = cidmask % jsondata['merge_commit_id']
    return make_reply(thanks_template, jsondata)


def generate_am_thanks(gitdir, jsondata, branch, since):
    """Build a templated thank-you reply for an applied patch series."""
    config = b4.get_main_config()
    jsondata, config = set_branch_details(gitdir, branch, jsondata, config)
    thanks_template = DEFAULT_AM_TEMPLATE
    if config['thanks-am-template']:
        # A custom template was configured -- try to load it instead
        try:
            thanks_template = read_template(config['thanks-am-template'])
        except FileNotFoundError:
            logger.critical('ERROR: thanks-am-template says to use %s, but it does not exist',
                            config['thanks-am-template'])
            sys.exit(2)
    if 'commits' in jsondata:
        commits = jsondata['commits']
    else:
        commits = auto_locate_series(gitdir, jsondata, branch, since)

    cidmask = config['thanks-commit-url-mask'] or 'commit: %s'
    summary = list()
    missing = 0
    width = len(str(len(commits)))
    patches = jsondata['patches']
    for at, cid in commits:
        try:
            # Prefer the bracketed prefix recorded with the patch itself
            prefix = '[%s] ' % patches[at-1][3]
        except IndexError:
            # Fall back to a zero-padded [n/total] prefix
            prefix = '[%s/%s] ' % (str(at).zfill(width), len(commits))
        summary.append('%s%s' % (prefix, patches[at-1][0]))
        pad = ' ' * len(prefix)
        if cid is None:
            summary.append('%s(no commit info)' % pad)
            missing += 1
        else:
            summary.append('%s%s' % (pad, cidmask % cid))
    jsondata['summary'] = '\n'.join(summary)
    if missing == len(commits):
        logger.critical('  WARNING: None of the patches matched for: %s', jsondata['subject'])
        logger.critical('           Please review the resulting message')
    elif missing > 0:
        logger.critical('  WARNING: Could not match %s of %s patches in: %s',
                        missing, len(commits), jsondata['subject'])
        logger.critical('           Please review the resulting message')

    return make_reply(thanks_template, jsondata)


def auto_thankanator(cmdargs):
    """Locate applied/merged tracked submissions and write thank-yous."""
    gitdir = cmdargs.gitdir
    wantbranch = get_wanted_branch(cmdargs)
    logger.info('Auto-thankanating commits in %s', wantbranch)
    tracked = list_tracked()
    if not tracked:
        logger.info('Nothing to do')
        sys.exit(0)

    applied = list()
    for jsondata in tracked:
        if 'pr_commit_id' in jsondata:
            # this is a pull request
            merge_commit_id = auto_locate_pr(gitdir, jsondata, wantbranch)
            if merge_commit_id is None:
                continue
            jsondata['merge_commit_id'] = merge_commit_id
        else:
            # This is a patch series
            commits = auto_locate_series(gitdir, jsondata, wantbranch, since=cmdargs.since)
            # Weed out series where not a single patch matched
            if all(commit[1] is None for commit in commits):
                continue
            jsondata['commits'] = commits
        applied.append(jsondata)
        logger.info('  Located: %s', jsondata['subject'])

    if not applied:
        logger.info('Nothing to do')
        sys.exit(0)

    logger.info('---')
    send_messages(applied, cmdargs.gitdir, cmdargs.outdir, wantbranch, since=cmdargs.since)
    sys.exit(0)


def send_messages(listing, gitdir, outdir, branch, since='1.week'):
    """Write thank-you letters for each tracked entry into outdir.

    Not really sending, but writing them out to be sent on your own.
    We'll probably gain ability to send these once the feature is
    more mature and we're less likely to mess things up.

    Side effects: creates outdir if missing, writes one .thanks file per
    message, and renames each entry's tracking file to <name>.sent.
    """
    datadir = b4.get_data_dir()
    logger.info('Generating %s thank-you letters', len(listing))
    # Check if the outdir exists and if it has any .thanks files in it
    if not os.path.exists(outdir):
        os.mkdir(outdir)

    usercfg = b4.get_user_config()
    # Use ~/.signature if present, otherwise default to "Name <email>"
    sigfile = os.path.join(str(Path.home()), '.signature')
    if os.path.exists(sigfile):
        with open(sigfile, 'r', encoding='utf-8') as fh:
            signature = fh.read()
    else:
        signature = '%s <%s>' % (usercfg['name'], usercfg['email'])

    outgoing = 0
    for jsondata in listing:
        # Build a filesystem-safe slug from the sender and subject
        slug_from = re.sub(r'\W', '_', jsondata['fromemail'])
        slug_subj = re.sub(r'\W', '_', jsondata['subject'])
        slug = '%s_%s' % (slug_from.lower(), slug_subj.lower())
        slug = re.sub(r'_+', '_', slug)
        jsondata['myname'] = usercfg['name']
        jsondata['myemail'] = usercfg['email']
        jsondata['signature'] = signature
        if 'pr_commit_id' in jsondata:
            # This is a pull request
            msg = generate_pr_thanks(gitdir, jsondata, branch)
        else:
            # This is a patch series
            msg = generate_am_thanks(gitdir, jsondata, branch, since)

        if msg is None:
            continue

        outgoing += 1
        outfile = os.path.join(outdir, '%s.thanks' % slug)
        logger.info('  Writing: %s', outfile)
        msg.set_charset('utf-8')
        msg.replace_header('Content-Transfer-Encoding', '8bit')
        # FIX: write with an explicit utf-8 encoding. The message is
        # declared utf-8/8bit above; relying on the locale's default
        # encoding could raise UnicodeEncodeError on non-UTF-8 systems.
        with open(outfile, 'w', encoding='utf-8') as fh:
            fh.write(msg.as_string(policy=b4.emlpolicy))
        # Rename the tracking file so this entry is not processed again
        logger.debug('Cleaning up: %s', jsondata['trackfile'])
        fullpath = os.path.join(datadir, jsondata['trackfile'])
        os.rename(fullpath, '%s.sent' % fullpath)
    logger.info('---')
    if not outgoing:
        logger.info('No thanks necessary.')
        return

    logger.debug('Wrote %s thank-you letters', outgoing)
    logger.info('You can now run:')
    logger.info('  git send-email %s/*.thanks', outdir)


def list_tracked():
    """Load all tracked .pr/.am submissions from the data dir, oldest first."""
    tracked = list()
    datadir = b4.get_data_dir()
    for fullpath in sorted(Path(datadir).iterdir(), key=os.path.getmtime):
        if fullpath.suffix not in ('.pr', '.am'):
            continue
        with fullpath.open('r', encoding='utf-8') as fh:
            jsondata = json.load(fh)
        jsondata['trackfile'] = fullpath.name
        if fullpath.suffix == '.pr':
            # For pull requests, the file stem is the PR tip commit id
            jsondata['pr_commit_id'] = fullpath.stem
        tracked.append(jsondata)
    return tracked


def write_tracked(tracked):
    """Print a numbered listing of all tracked submissions."""
    config = b4.get_main_config()
    logger.info('Currently tracking:')
    for counter, entry in enumerate(tracked, start=1):
        logger.info('%3d: %s', counter, entry['subject'])
        logger.info('       From: %s <%s>', entry['fromname'], entry['fromemail'])
        logger.info('       Date: %s', entry['sentdate'])
        logger.info('       Link: %s', config['linkmask'] % entry['msgid'])


def send_selected(cmdargs):
    """Generate thank-you letters for the entries chosen with -s (or 'all')."""
    tracked = list_tracked()
    if not tracked:
        logger.info('Nothing to do')
        sys.exit(0)

    if cmdargs.send == 'all':
        listing = tracked
    else:
        listing = list()
        for num in b4.parse_int_range(cmdargs.send, upper=len(tracked)):
            try:
                listing.append(tracked[int(num) - 1])
            except ValueError:
                logger.critical('Please provide the number of the message')
                logger.info('---')
                write_tracked(tracked)
                sys.exit(1)
            except IndexError:
                logger.critical('Invalid index: %s', num)
                logger.info('---')
                write_tracked(tracked)
                sys.exit(1)

    if not listing:
        logger.info('Nothing to do')
        sys.exit(0)

    wantbranch = get_wanted_branch(cmdargs)
    send_messages(listing, cmdargs.gitdir, cmdargs.outdir, wantbranch, cmdargs.since)
    sys.exit(0)


def discard_selected(cmdargs):
    """Discard tracked entries chosen with -d (or 'all').

    Renames each selected tracking file to <name>.discarded.
    """
    tracked = list_tracked()
    if not tracked:
        logger.info('Nothing to do')
        sys.exit(0)

    if cmdargs.discard == 'all':
        listing = tracked
    else:
        listing = list()
        for num in b4.parse_int_range(cmdargs.discard, upper=len(tracked)):
            try:
                listing.append(tracked[int(num) - 1])
            except ValueError:
                logger.critical('Please provide the number of the message')
                logger.info('---')
                write_tracked(tracked)
                sys.exit(1)
            except IndexError:
                logger.critical('Invalid index: %s', num)
                logger.info('---')
                write_tracked(tracked)
                sys.exit(1)

    if not listing:
        logger.info('Nothing to do')
        sys.exit(0)

    datadir = b4.get_data_dir()
    logger.info('Discarding %s messages', len(listing))
    for jsondata in listing:
        trackpath = os.path.join(datadir, jsondata['trackfile'])
        os.rename(trackpath, '%s.discarded' % trackpath)
        logger.info('  Discarded: %s', jsondata['subject'])

    sys.exit(0)


def check_stale_thanks(outdir):
    """Exit with an error if outdir still holds unsent .thanks files."""
    if not os.path.exists(outdir):
        return
    for entry in Path(outdir).iterdir():
        if entry.suffix != '.thanks':
            continue
        logger.critical('ERROR: Found existing .thanks files in: %s', outdir)
        logger.critical('       Please send them first (or delete if already sent).')
        logger.critical('       Refusing to run to avoid potential confusion.')
        sys.exit(1)


def get_wanted_branch(cmdargs):
    """Return the branch to operate on: -b if given, else the current HEAD."""
    gitdir = cmdargs.gitdir
    if cmdargs.branch:
        # Make sure the requested branch actually exists
        gitargs = ['branch', '--format=%(refname)', '--list', '--all', cmdargs.branch]
        lines = b4.git_get_command_lines(gitdir, gitargs)
        if not lines:
            logger.critical('Requested branch not found in git branch --list --all %s', cmdargs.branch)
            sys.exit(1)
        return cmdargs.branch

    # No branch requested -- find out what HEAD points at
    gitargs = ['symbolic-ref', '-q', 'HEAD']
    ecode, out = b4.git_run_command(gitdir, gitargs)
    if ecode > 0:
        logger.critical('Not able to get current branch (git symbolic-ref HEAD)')
        sys.exit(1)
    wantbranch = re.sub(r'^refs/heads/', '', out.strip())
    logger.debug('will check branch=%s', wantbranch)
    return wantbranch


def get_branch_info(gitdir, branch):
    """Figure out the remote (and remote branch) behind ``branch``.

    Returns a dict that may contain 'remote' and 'branch' keys plus any
    remote.<name>.* settings (used by set_branch_details for b4-*
    template overrides). Results are cached in the module-global
    BRANCH_INFO, so the first call's branch wins for the process
    lifetime.
    """
    global BRANCH_INFO
    if BRANCH_INFO is not None:
        return BRANCH_INFO

    BRANCH_INFO = dict()

    # First, look for a branch.<name>.* config section with a remote set
    remotecfg = b4.get_config_from_git('branch\\.%s\\.*' % branch)
    if remotecfg is None or 'remote' not in remotecfg:
        # Did not find a matching branch entry, so look at remotes
        gitargs = ['remote', 'show']
        lines = b4.git_get_command_lines(gitdir, gitargs)
        if not len(lines):
            # No remotes? Hmm...
            return BRANCH_INFO

        # Match a remote whose name is a prefix of the branch,
        # e.g. branch "origin/master" matches remote "origin"
        remote = None
        for entry in lines:
            if branch.find(f'{entry}/') == 0:
                remote = entry
                break

        if remote is None:
            # Not found any matching remotes
            return BRANCH_INFO

        BRANCH_INFO['remote'] = remote
        BRANCH_INFO['branch'] = branch.replace(f'{remote}/', '')

    else:
        BRANCH_INFO['remote'] = remotecfg['remote']
        if 'merge' in remotecfg:
            BRANCH_INFO['branch'] = re.sub(r'^refs/heads/', '', remotecfg['merge'])

    # Grab template overrides from the remote.<name>.* config section
    # NOTE(review): presumably get_config_from_git strips the section
    # prefix from the returned keys -- confirm against b4's implementation
    remotecfg = b4.get_config_from_git('remote\\.%s\\..*' % BRANCH_INFO['remote'])
    BRANCH_INFO.update(remotecfg)

    return BRANCH_INFO


def main(cmdargs):
    """Entry point for 'b4 ty'.

    Dispatches on the parsed command-line flags: --auto runs the
    auto-thankanator, --send/--discard operate on selected tracked
    entries, and with no action flag we just list what is tracked.
    """
    usercfg = b4.get_user_config()
    # user.email is required both for matching commits and for templates
    if 'email' not in usercfg:
        logger.critical('Please set user.email in gitconfig to use this feature.')
        sys.exit(1)

    if cmdargs.auto:
        # Refuse to run if unsent .thanks files are still lying around
        check_stale_thanks(cmdargs.outdir)
        auto_thankanator(cmdargs)
    elif cmdargs.send:
        check_stale_thanks(cmdargs.outdir)
        send_selected(cmdargs)
    elif cmdargs.discard:
        discard_selected(cmdargs)
    else:
        # No action requested -- just show the tracked submissions
        tracked = list_tracked()
        if not len(tracked):
            logger.info('No thanks necessary.')
            sys.exit(0)
        write_tracked(tracked)
        logger.info('---')
        logger.info('You can send them using number ranges, e.g:')
        logger.info('  b4 ty -s 1-3,5,7-')
07070100000017000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000000F00000000b4-0.8.0+2/man07070100000018000081A400000000000000000000000161F953D700003635000000000000000000000000000000000000001400000000b4-0.8.0+2/man/b4.5.\" Man page generated from reStructuredText.
.
.TH B4 5 "2021-09-01" "0.8.0" ""
.SH NAME
B4 \- Work with code submissions in a public-inbox archive
.
.nr rst2man-indent-level 0
.
.de1 rstReportMargin
\\$1 \\n[an-margin]
level \\n[rst2man-indent-level]
level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
-
\\n[rst2man-indent0]
\\n[rst2man-indent1]
\\n[rst2man-indent2]
..
.de1 INDENT
.\" .rstReportMargin pre:
. RS \\$1
. nr rst2man-indent\\n[rst2man-indent-level] \\n[an-margin]
. nr rst2man-indent-level +1
.\" .rstReportMargin post:
..
.de UNINDENT
. RE
.\" indent \\n[an-margin]
.\" old: \\n[rst2man-indent\\n[rst2man-indent-level]]
.nr rst2man-indent-level -1
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.SH SYNOPSIS
.sp
b4 {mbox,am,attest,pr,ty,diff} [options]
.SH DESCRIPTION
.sp
This is a helper utility to work with patches and pull requests made
available via a public\-inbox archive like lore.kernel.org. It is
written to make it easier to participate in patch\-based workflows,
like those used in the Linux kernel development.
.sp
The name "b4" was chosen for ease of typing and because B\-4 was the
precursor to Lore and Data in the Star Trek universe.
.SH SUBCOMMANDS
.INDENT 0.0
.IP \(bu 2
\fIb4 mbox\fP: Download a thread as an mbox file
.IP \(bu 2
\fIb4 am\fP: Create an mbox file that is ready to git\-am
.IP \(bu 2
\fIb4 pr\fP: Work with pull requests
.IP \(bu 2
\fIb4 diff\fP: Show range\-diff style diffs between patch versions
.IP \(bu 2
\fIb4 ty\fP: Create templated replies for processed patches and pull requests
.IP \(bu 2
\fIb4 attest\fP: (EXPERIMENTAL) Add cryptographic attestation to patches
.IP \(bu 2
\fIb4 kr\fP: (EXPERIMENTAL) Operate on patatt\-compatible keyrings
.UNINDENT
.SH OPTIONS
.INDENT 0.0
.TP
.B \-h\fP,\fB  \-\-help
show this help message and exit
.TP
.B \-d\fP,\fB  \-\-debug
Add more debugging info to the output (default: False)
.TP
.B \-q\fP,\fB  \-\-quiet
Output critical information only (default: False)
.UNINDENT
.SH SUBCOMMAND OPTIONS
.SS b4 mbox
.INDENT 0.0
.TP
.B usage:
b4 mbox [\-h] [\-o OUTDIR] [\-p USEPROJECT] [\-c] [\-n WANTNAME] [\-m LOCALMBOX] [msgid]
.TP
.B positional arguments:
msgid                 Message ID to process, or pipe a raw message
.TP
.B optional arguments:
.INDENT 7.0
.TP
.B \-h\fP,\fB  \-\-help
show this help message and exit
.TP
.BI \-o \ OUTDIR\fR,\fB \ \-\-outdir \ OUTDIR
Output into this directory (or use \- to output mailbox contents to stdout)
.TP
.BI \-p \ USEPROJECT\fR,\fB \ \-\-use\-project \ USEPROJECT
Use a specific project instead of guessing (linux\-mm, linux\-hardening, etc)
.TP
.B \-c\fP,\fB  \-\-check\-newer\-revisions
Check if newer patch revisions exist
.TP
.BI \-n \ WANTNAME\fR,\fB \ \-\-mbox\-name \ WANTNAME
Filename to name the mbox file
.TP
.BI \-m \ LOCALMBOX\fR,\fB \ \-\-use\-local\-mbox \ LOCALMBOX
Instead of grabbing a thread from lore, process this mbox file
(or use \- for stdin)
.TP
.B \-C\fP,\fB  \-\-no\-cache
Do not use local cache
.TP
.B \-f\fP,\fB  \-\-filter\-dupes
When adding messages to existing maildir, filter out duplicates
.TP
.B \-M\fP,\fB  \-\-save\-as\-maildir
Save as maildir (avoids mbox format ambiguities)
.UNINDENT
.UNINDENT
.sp
\fIExample\fP: b4 mbox \fI\%20200313231252.64999\-1\-keescook@chromium.org\fP
.SS b4 am
.INDENT 0.0
.TP
.B usage:
b4 am [\-h] [\-o OUTDIR] [\-p USEPROJECT] [\-c] [\-n WANTNAME] [\-m LOCALMBOX] [\-v WANTVER] [\-t] [\-T] [\-s] [\-l] [\-Q] [msgid]
.TP
.B positional arguments:
msgid                 Message ID to process, or pipe a raw message
.TP
.B optional arguments:
.INDENT 7.0
.TP
.B \-h\fP,\fB  \-\-help
show this help message and exit
.TP
.BI \-o \ OUTDIR\fR,\fB \ \-\-outdir \ OUTDIR
Output into this directory (or use \- to output mailbox contents to stdout)
.TP
.BI \-p \ USEPROJECT\fR,\fB \ \-\-use\-project \ USEPROJECT
Use a specific project instead of guessing (linux\-mm, linux\-hardening, etc)
.TP
.B \-c\fP,\fB  \-\-check\-newer\-revisions
Check if newer patch revisions exist
.TP
.BI \-n \ WANTNAME\fR,\fB \ \-\-mbox\-name \ WANTNAME
Filename to name the mbox file
.TP
.BI \-m \ LOCALMBOX\fR,\fB \ \-\-use\-local\-mbox \ LOCALMBOX
Instead of grabbing a thread from lore, process this mbox file
(or use \- for stdin)
.TP
.B \-M\fP,\fB  \-\-save\-as\-maildir
Save as maildir (avoids mbox format ambiguities)
.TP
.B \-C\fP,\fB  \-\-no\-cache
Do not use local cache
.TP
.BI \-v \ WANTVER\fR,\fB \ \-\-use\-version \ WANTVER
Get a specific version of the patch/series
.TP
.B \-t\fP,\fB  \-\-apply\-cover\-trailers
Apply trailers sent to the cover letter to all patches
.TP
.B \-S\fP,\fB  \-\-sloppy\-trailers
Apply trailers without email address match checking
.TP
.B \-T\fP,\fB  \-\-no\-add\-trailers
Do not add or sort any trailers
.TP
.B \-s\fP,\fB  \-\-add\-my\-sob
Add your own signed\-off\-by to every patch
.TP
.B \-l\fP,\fB  \-\-add\-link
Add a lore.kernel.org/r/ link to every patch
.TP
.B \-Q\fP,\fB  \-\-quilt\-ready
Save patches in a quilt\-ready folder
.TP
.BI \-P \ CHERRYPICK\fR,\fB \ \-\-cherry\-pick \ CHERRYPICK
Cherry\-pick a subset of patches (e.g. "\-P 1\-2,4,6\-", "\-P _" to use just the msgid specified, or "\-P *globbing*" to match on commit subject)
.TP
.B \-g\fP,\fB  \-\-guess\-base
Try to guess the base of the series (if not specified)
.TP
.B \-3\fP,\fB  \-\-prep\-3way
Prepare for a 3\-way merge (tries to ensure that all index blobs exist by making a fake commit range)
.TP
.B \-\-cc\-trailers
Copy all Cc\(aqd addresses into Cc: trailers, if not already present
.TP
.B \-\-no\-cover
Do not save the cover letter (on by default when using \-o \-)
.TP
.B \-\-no\-partial\-reroll
Do not reroll partial series when detected
.UNINDENT
.UNINDENT
.sp
\fIExample\fP: b4 am \fI\%20200313231252.64999\-1\-keescook@chromium.org\fP
.SS b4 attest
.sp
usage: b4 attest [\-h] patchfile [patchfile ...]
.INDENT 0.0
.TP
.B positional arguments:
patchfile             Patches to attest
.UNINDENT
.sp
\fIExample\fP: b4 attest outgoing/*.patch
.SS b4 pr
.INDENT 0.0
.TP
.B usage:
command.py pr [\-h] [\-g GITDIR] [\-b BRANCH] [\-c] [\-e] [\-o OUTMBOX] [msgid]
.TP
.B positional arguments:
msgid                 Message ID to process, or pipe a raw message
.TP
.B optional arguments:
.INDENT 7.0
.TP
.B \-h\fP,\fB  \-\-help
show this help message and exit
.TP
.BI \-g \ GITDIR\fR,\fB \ \-\-gitdir \ GITDIR
Operate on this git tree instead of current dir
.TP
.BI \-b \ BRANCH\fR,\fB \ \-\-branch \ BRANCH
Check out FETCH_HEAD into this branch after fetching
.TP
.B \-c\fP,\fB  \-\-check
Check if pull request has already been applied
.TP
.B \-e\fP,\fB  \-\-explode
Convert a pull request into an mbox full of patches
.TP
.BI \-o \ OUTMBOX\fR,\fB \ \-\-output\-mbox \ OUTMBOX
Save exploded messages into this mailbox (default: msgid.mbx)
.TP
.B \-l\fP,\fB  \-\-retrieve\-links
Attempt to retrieve any Link: URLs (use with \-e)
.TP
.BI \-f \ MAILFROM\fR,\fB \ \-\-from\-addr \ MAILFROM
Use this From: in exploded messages (use with \-e)
.UNINDENT
.UNINDENT
.sp
\fIExample\fP: b4 pr \fI\%202003292120.2BDCB41@keescook\fP
.SS b4 ty
.INDENT 0.0
.TP
.B usage:
b4 ty [\-h] [\-g GITDIR] [\-o OUTDIR] [\-l] [\-s SEND [SEND ...]] [\-d DISCARD [DISCARD ...]] [\-a] [\-b BRANCH] [\-\-since SINCE]
.TP
.B optional arguments:
.INDENT 7.0
.TP
.B \-h\fP,\fB  \-\-help
show this help message and exit
.TP
.BI \-g \ GITDIR\fR,\fB \ \-\-gitdir \ GITDIR
Operate on this git tree instead of current dir
.TP
.BI \-o \ OUTDIR\fR,\fB \ \-\-outdir \ OUTDIR
Write thanks files into this dir (default=.)
.TP
.B \-l\fP,\fB  \-\-list
List pull requests and patch series you have retrieved
.TP
.BI \-s \ SEND\fR,\fB \ \-\-send \ SEND
Generate thankyous for specific entries from \-l (e.g.: 1,3\-5,7\-; or "all")
.TP
.BI \-d \ DISCARD\fR,\fB \ \-\-discard \ DISCARD
Discard specific messages from \-l (e.g.: 1,3\-5,7\-; or "all")
.TP
.B \-a\fP,\fB  \-\-auto
Use the Auto\-Thankanator to figure out what got applied/merged
.TP
.BI \-b \ BRANCH\fR,\fB \ \-\-branch \ BRANCH
The branch to check against, instead of current
.TP
.BI \-\-since \ SINCE
The \-\-since option to use when auto\-matching patches (default=1.week)
.UNINDENT
.UNINDENT
.sp
\fIExample\fP: b4 ty \-\-auto
.SS b4 diff
.sp
usage: b4 diff [\-h] [\-g GITDIR] [\-p USEPROJECT] [\-C] [\-v WANTVERS [WANTVERS ...]] [\-n] [\-o OUTDIFF] [\-c] [\-m AMBOX AMBOX] [msgid]
.INDENT 0.0
.TP
.B positional arguments:
msgid                 Message ID to process, pipe a raw message, or use \-m
.UNINDENT
.sp
optional arguments:
.INDENT 0.0
.INDENT 3.5
.INDENT 0.0
.TP
.B \-h\fP,\fB  \-\-help
show this help message and exit
.TP
.BI \-g \ GITDIR\fR,\fB \ \-\-gitdir \ GITDIR
Operate on this git tree instead of current dir
.TP
.BI \-p \ USEPROJECT\fR,\fB \ \-\-use\-project \ USEPROJECT
Use a specific project instead of guessing (linux\-mm, linux\-hardening, etc)
.TP
.B \-C\fP,\fB  \-\-no\-cache
Do not use local cache
.UNINDENT
.INDENT 0.0
.TP
.B \-v WANTVERS [WANTVERS ...], \-\-compare\-versions WANTVERS [WANTVERS ...]
Compare specific versions instead of latest and one before that, e.g. \-v 3 5
.UNINDENT
.INDENT 0.0
.TP
.B \-n\fP,\fB  \-\-no\-diff
Do not generate a diff, just show the command to do it
.TP
.BI \-o \ OUTDIFF\fR,\fB \ \-\-output\-diff \ OUTDIFF
Save diff into this file instead of outputting to stdout
.TP
.B \-c\fP,\fB  \-\-color
Force color output even when writing to file
.UNINDENT
.INDENT 0.0
.TP
.B \-m AMBOX AMBOX, \-\-compare\-am\-mboxes AMBOX AMBOX
Compare two mbx files prepared with "b4 am"
.UNINDENT
.UNINDENT
.UNINDENT
.sp
\fIExample\fP: b4 diff \fI\%20200526205322.23465\-1\-mic@digikod.net\fP
.SS b4 kr
.sp
usage: b4 kr [\-h] [\-p USEPROJECT] [\-m LOCALMBOX] [\-C] [\-\-show\-keys] [msgid]
.INDENT 0.0
.TP
.B positional arguments:
msgid                 Message ID to process, or pipe a raw message
.TP
.B optional arguments:
.INDENT 7.0
.TP
.B \-h\fP,\fB  \-\-help
show this help message and exit
.TP
.BI \-p \ USEPROJECT\fR,\fB \ \-\-use\-project \ USEPROJECT
Use a specific project instead of guessing (linux\-mm, linux\-hardening, etc)
.TP
.BI \-m \ LOCALMBOX\fR,\fB \ \-\-use\-local\-mbox \ LOCALMBOX
Instead of grabbing a thread from lore, process this mbox file (or \- for stdin)
.TP
.B \-C\fP,\fB  \-\-no\-cache
Do not use local cache
.TP
.B \-\-show\-keys
Show all developer keys from the thread
.UNINDENT
.UNINDENT
.sp
\fIExample\fP: b4 kr \-\-show\-keys \fI\%20210521184811.617875\-1\-konstantin@linuxfoundation.org\fP
.SH CONFIGURATION
.sp
B4 configuration is handled via git\-config(1), so you can store it in
either the toplevel $HOME/.gitconfig file, or in a per\-repository
\&.git/config file if your workflow changes per project.
.sp
Default configuration, with explanations:
.INDENT 0.0
.INDENT 3.5
.sp
.nf
.ft C
[b4]
   # Where to look up threads by message id
   midmask = https://lore.kernel.org/r/%s
   #
   # When recording Link: trailers, use this mask
   linkmask = https://lore.kernel.org/r/%s
   #
   # When duplicate messages exist, use the following order to decide
   # which list\-id is likely to have the least mangled version. Default
   # preference is listed below, in the order of lists most likely to
   # preserve proper DKIM validation. Use shell\-style globbing and
   # separate multiple entries with commas. Must end with ,*
   listid\-preference = *.feeds.kernel.org,*.linux.dev,*.kernel.org,*
   #
   # Set to "yes" to save maildirs instead of mailboxes
   # This will help avoid mboxo/mboxrd format inconsistencies between
   # public\-inbox, python, and git
   save\-maildirs = no
   #
   # When processing thread trailers, sort them in this order.
   # Can use shell\-globbing and must end with ,*
   # Some sorting orders:
   #trailer\-order=link*,fixes*,cc*,reported*,suggested*,original*,co\-*,tested*,reviewed*,acked*,signed\-off*,*
   #trailer\-order = fixes*,reported*,suggested*,original*,co\-*,signed\-off*,tested*,reviewed*,acked*,cc*,link*,*
   trailer\-order = _preserve_
   #
   # Attestation\-checking configuration parameters
   # off: do not bother checking attestation
   # check: print an attaboy when attestation is found
   # softfail: print a warning when no attestation found
   # hardfail: exit with an error when no attestation found
   attestation\-policy = softfail
   #
   # Perform DKIM attestation?
   attestation\-check\-dkim = yes
   #
   # When showing attestation check results, do you like "fancy" (color, unicode)
   # or simple markers?
   attestation\-checkmarks = fancy
   #
   # How long before we consider attestation to be too old?
   attestation\-staleness\-days = 30
   #
   # You can point this at a non\-default home dir, if you like, or leave out to
   # use the OS default.
   attestation\-gnupghome = None
   #
   # If this is not set, we\(aqll use what we find in
   # git\-config for gpg.program; and if that\(aqs not set,
   # we\(aqll use "gpg" and hope for the best
   gpgbin = None
   #
   # How long to keep downloaded threads in cache (minutes)?
   cache\-expire = 10
   # Used when creating summaries for b4 ty, and can be set to a value like
   # thanks\-commit\-url\-mask = https://git.kernel.org/username/c/%.12s
   # See this page for more info on convenient git.kernel.org shorteners:
   # https://korg.wiki.kernel.org/userdoc/git\-url\-shorterners
   thanks\-commit\-url\-mask = None
   # See thanks\-pr\-template.example. If not set, a default template will be used.
   thanks\-pr\-template = None
   # See thanks\-am\-template.example. If not set, a default template will be used.
   thanks\-am\-template = None
.ft P
.fi
.UNINDENT
.UNINDENT
.SH SUPPORT
.sp
Please email \fI\%tools@linux.kernel.org\fP with support requests,
or browse the list archive at \fI\%https://lore.kernel.org/tools\fP\&.
.SH AUTHOR
mricon@kernel.org

License: GPLv2+
.SH COPYRIGHT
The Linux Foundation and contributors
.\" Generated by docutils manpage writer.
.
07070100000019000081A400000000000000000000000161F953D70000317A000000000000000000000000000000000000001800000000b4-0.8.0+2/man/b4.5.rstB4
==
----------------------------------------------------
Work with code submissions in a public-inbox archive
----------------------------------------------------

:Author:    mricon@kernel.org
:Date:      2021-09-01
:Copyright: The Linux Foundation and contributors
:License:   GPLv2+
:Version:   0.8.0
:Manual section: 5

SYNOPSIS
--------
b4 {mbox,am,attest,pr,ty,diff} [options]

DESCRIPTION
-----------
This is a helper utility to work with patches and pull requests made
available via a public-inbox archive like lore.kernel.org. It is
written to make it easier to participate in patch-based workflows,
like those used in the Linux kernel development.

The name "b4" was chosen for ease of typing and because B-4 was the
precursor to Lore and Data in the Star Trek universe.

SUBCOMMANDS
-----------
* *b4 mbox*: Download a thread as an mbox file
* *b4 am*: Create an mbox file that is ready to git-am
* *b4 pr*: Work with pull requests
* *b4 diff*: Show range-diff style diffs between patch versions
* *b4 ty*: Create templated replies for processed patches and pull requests
* *b4 attest*: (EXPERIMENTAL) Add cryptographic attestation to patches
* *b4 kr*: (EXPERIMENTAL) Operate on patatt-compatible keyrings

OPTIONS
-------
-h, --help            show this help message and exit
-d, --debug           Add more debugging info to the output (default: False)
-q, --quiet           Output critical information only (default: False)

SUBCOMMAND OPTIONS
------------------
b4 mbox
~~~~~~~
usage:
  b4 mbox [-h] [-o OUTDIR] [-p USEPROJECT] [-c] [-n WANTNAME] [-m LOCALMBOX] [msgid]

positional arguments:
  msgid                 Message ID to process, or pipe a raw message

optional arguments:
  -h, --help            show this help message and exit
  -o OUTDIR, --outdir OUTDIR
                        Output into this directory (or use - to output mailbox contents to stdout)
  -p USEPROJECT, --use-project USEPROJECT
                        Use a specific project instead of guessing (linux-mm, linux-hardening, etc)
  -c, --check-newer-revisions
                        Check if newer patch revisions exist
  -n WANTNAME, --mbox-name WANTNAME
                        Filename to name the mbox file
  -m LOCALMBOX, --use-local-mbox LOCALMBOX
                        Instead of grabbing a thread from lore, process this mbox file
                        (or use - for stdin)
  -C, --no-cache        Do not use local cache
  -f, --filter-dupes    When adding messages to existing maildir, filter out duplicates
  -M, --save-as-maildir
                        Save as maildir (avoids mbox format ambiguities)

*Example*: b4 mbox 20200313231252.64999-1-keescook@chromium.org

b4 am
~~~~~
usage:
  b4 am [-h] [-o OUTDIR] [-p USEPROJECT] [-c] [-n WANTNAME] [-m LOCALMBOX] [-v WANTVER] [-t] [-T] [-s] [-l] [-Q] [msgid]

positional arguments:
  msgid                 Message ID to process, or pipe a raw message

optional arguments:
  -h, --help            show this help message and exit
  -o OUTDIR, --outdir OUTDIR
                        Output into this directory (or use - to output mailbox contents to stdout)
  -p USEPROJECT, --use-project USEPROJECT
                        Use a specific project instead of guessing (linux-mm, linux-hardening, etc)
  -c, --check-newer-revisions
                        Check if newer patch revisions exist
  -n WANTNAME, --mbox-name WANTNAME
                        Filename to name the mbox file
  -m LOCALMBOX, --use-local-mbox LOCALMBOX
                        Instead of grabbing a thread from lore, process this mbox file
                        (or use - for stdin)
  -M, --save-as-maildir
                        Save as maildir (avoids mbox format ambiguities)
  -C, --no-cache        Do not use local cache
  -v WANTVER, --use-version WANTVER
                        Get a specific version of the patch/series
  -t, --apply-cover-trailers
                        Apply trailers sent to the cover letter to all patches
  -S, --sloppy-trailers
                        Apply trailers without email address match checking
  -T, --no-add-trailers
                        Do not add or sort any trailers
  -s, --add-my-sob      Add your own signed-off-by to every patch
  -l, --add-link        Add a lore.kernel.org/r/ link to every patch
  -Q, --quilt-ready     Save patches in a quilt-ready folder
  -P CHERRYPICK, --cherry-pick CHERRYPICK
                        Cherry-pick a subset of patches (e.g. "-P 1-2,4,6-", "-P _" to use just the msgid specified, or "-P \*globbing\*" to match on commit subject)
  -g, --guess-base
                        Try to guess the base of the series (if not specified)
  -3, --prep-3way
                        Prepare for a 3-way merge (tries to ensure that all index blobs exist by making a fake commit range)
  --cc-trailers
                        Copy all Cc'd addresses into Cc: trailers, if not already present
  --no-cover
                        Do not save the cover letter (on by default when using -o -)
  --no-partial-reroll
                        Do not reroll partial series when detected


*Example*: b4 am 20200313231252.64999-1-keescook@chromium.org

b4 attest
~~~~~~~~~
usage: b4 attest [-h] patchfile [patchfile ...]

positional arguments:
  patchfile             Patches to attest

*Example*: b4 attest outgoing/\*.patch

b4 pr
~~~~~
usage:
  b4 pr [-h] [-g GITDIR] [-b BRANCH] [-c] [-e] [-o OUTMBOX] [msgid]

positional arguments:
  msgid                 Message ID to process, or pipe a raw message

optional arguments:
  -h, --help            show this help message and exit
  -g GITDIR, --gitdir GITDIR
                        Operate on this git tree instead of current dir
  -b BRANCH, --branch BRANCH
                        Check out FETCH_HEAD into this branch after fetching
  -c, --check           Check if pull request has already been applied
  -e, --explode         Convert a pull request into an mbox full of patches
  -o OUTMBOX, --output-mbox OUTMBOX
                        Save exploded messages into this mailbox (default: msgid.mbx)
  -l, --retrieve-links  Attempt to retrieve any Link: URLs (use with -e)
  -f MAILFROM, --from-addr MAILFROM
                        Use this From: in exploded messages (use with -e)

*Example*: b4 pr 202003292120.2BDCB41@keescook

b4 ty
~~~~~
usage:
  b4 ty [-h] [-g GITDIR] [-o OUTDIR] [-l] [-s SEND [SEND ...]] [-d DISCARD [DISCARD ...]] [-a] [-b BRANCH] [--since SINCE]

optional arguments:
  -h, --help            show this help message and exit
  -g GITDIR, --gitdir GITDIR
                        Operate on this git tree instead of current dir
  -o OUTDIR, --outdir OUTDIR
                        Write thanks files into this dir (default=.)
  -l, --list            List pull requests and patch series you have retrieved
  -s SEND, --send SEND  Generate thankyous for specific entries from -l (e.g.: 1,3-5,7-; or "all")
  -d DISCARD, --discard DISCARD
                        Discard specific messages from -l (e.g.: 1,3-5,7-; or "all")
  -a, --auto            Use the Auto-Thankanator to figure out what got applied/merged
  -b BRANCH, --branch BRANCH
                        The branch to check against, instead of current
  --since SINCE         The --since option to use when auto-matching patches (default=1.week)

*Example*: b4 ty --auto

b4 diff
~~~~~~~
usage: b4 diff [-h] [-g GITDIR] [-p USEPROJECT] [-C] [-v WANTVERS [WANTVERS ...]] [-n] [-o OUTDIFF] [-c] [-m AMBOX AMBOX] [msgid]

positional arguments:
  msgid                 Message ID to process, pipe a raw message, or use -m

optional arguments:

  -h, --help            show this help message and exit
  -g GITDIR, --gitdir GITDIR
                        Operate on this git tree instead of current dir
  -p USEPROJECT, --use-project USEPROJECT
                        Use a specific project instead of guessing (linux-mm, linux-hardening, etc)
  -C, --no-cache        Do not use local cache

  -v WANTVERS [WANTVERS ...], --compare-versions WANTVERS [WANTVERS ...]
                        Compare specific versions instead of latest and one before that, e.g. -v 3 5

  -n, --no-diff
                        Do not generate a diff, just show the command to do it

  -o OUTDIFF, --output-diff OUTDIFF
                        Save diff into this file instead of outputting to stdout
  -c, --color
                        Force color output even when writing to file

  -m AMBOX AMBOX, --compare-am-mboxes AMBOX AMBOX
                        Compare two mbx files prepared with "b4 am"

*Example*: b4 diff 20200526205322.23465-1-mic@digikod.net

b4 kr
~~~~~
usage: b4 kr [-h] [-p USEPROJECT] [-m LOCALMBOX] [-C] [--show-keys] [msgid]

positional arguments:
  msgid                 Message ID to process, or pipe a raw message

optional arguments:
  -h, --help            show this help message and exit
  -p USEPROJECT, --use-project USEPROJECT
                        Use a specific project instead of guessing (linux-mm, linux-hardening, etc)
  -m LOCALMBOX, --use-local-mbox LOCALMBOX
                        Instead of grabbing a thread from lore, process this mbox file (or - for stdin)
  -C, --no-cache        Do not use local cache
  --show-keys           Show all developer keys from the thread

*Example*: b4 kr --show-keys 20210521184811.617875-1-konstantin@linuxfoundation.org

CONFIGURATION
-------------
B4 configuration is handled via git-config(1), so you can store it in
either the toplevel $HOME/.gitconfig file, or in a per-repository
.git/config file if your workflow changes per project.

Default configuration, with explanations::

   [b4]
      # Where to look up threads by message id
      midmask = https://lore.kernel.org/r/%s
      #
      # When recording Link: trailers, use this mask
      linkmask = https://lore.kernel.org/r/%s
      #
      # When duplicate messages exist, use the following order to decide
      # which list-id is likely to have the least mangled version. Default
      # preference is listed below, in the order of lists most likely to
      # preserve proper DKIM validation. Use shell-style globbing and
      # separate multiple entries with commas. Must end with ,*
      listid-preference = *.feeds.kernel.org,*.linux.dev,*.kernel.org,*
      #
      # Set to "yes" to save maildirs instead of mailboxes
      # This will help avoid mboxo/mboxrd format inconsistencies between
      # public-inbox, python, and git
      save-maildirs = no
      #
      # When processing thread trailers, sort them in this order.
      # Can use shell-globbing and must end with ,*
      # Some sorting orders:
      #trailer-order=link*,fixes*,cc*,reported*,suggested*,original*,co-*,tested*,reviewed*,acked*,signed-off*,*
      #trailer-order = fixes*,reported*,suggested*,original*,co-*,signed-off*,tested*,reviewed*,acked*,cc*,link*,*
      trailer-order = _preserve_
      #
      # Attestation-checking configuration parameters
      # off: do not bother checking attestation
      # check: print an attaboy when attestation is found
      # softfail: print a warning when no attestation found
      # hardfail: exit with an error when no attestation found
      attestation-policy = softfail
      #
      # Perform DKIM attestation?
      attestation-check-dkim = yes
      #
      # When showing attestation check results, do you like "fancy" (color, unicode)
      # or simple markers?
      attestation-checkmarks = fancy
      #
      # How long before we consider attestation to be too old?
      attestation-staleness-days = 30
      #
      # You can point this at a non-default home dir, if you like, or leave out to
      # use the OS default.
      attestation-gnupghome = None
      #
      # If this is not set, we'll use what we find in
      # git-config for gpg.program; and if that's not set,
      # we'll use "gpg" and hope for the best
      gpgbin = None
      #
      # How long to keep downloaded threads in cache (minutes)?
      cache-expire = 10
      # Used when creating summaries for b4 ty, and can be set to a value like
      # thanks-commit-url-mask = https://git.kernel.org/username/c/%.12s
      # See this page for more info on convenient git.kernel.org shorteners:
      # https://korg.wiki.kernel.org/userdoc/git-url-shorterners
      thanks-commit-url-mask = None
      # See thanks-pr-template.example. If not set, a default template will be used.
      thanks-pr-template = None
      # See thanks-am-template.example. If not set, a default template will be used.
      thanks-am-template = None


SUPPORT
-------
Please email tools@linux.kernel.org with support requests,
or browse the list archive at https://lore.kernel.org/tools.
0707010000001A000041ED00000000000000000000000661F953D700000000000000000000000000000000000000000000001200000000b4-0.8.0+2/patatt0707010000001B000081A400000000000000000000000161F953D700000046000000000000000000000000000000000000001D00000000b4-0.8.0+2/patatt/.gitignore*~
*.pyc
*.swp
__pycache__
.venv
.idea
*.egg-info
build
dist
outgoing
0707010000001C000041ED00000000000000000000000461F953D700000000000000000000000000000000000000000000001800000000b4-0.8.0+2/patatt/.keys0707010000001D000041ED00000000000000000000000361F953D700000000000000000000000000000000000000000000002000000000b4-0.8.0+2/patatt/.keys/ed255190707010000001E000041ED00000000000000000000000361F953D700000000000000000000000000000000000000000000002B00000000b4-0.8.0+2/patatt/.keys/ed25519/kernel.org0707010000001F000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000003200000000b4-0.8.0+2/patatt/.keys/ed25519/kernel.org/mricon07070100000020000081A400000000000000000000000161F953D70000002C000000000000000000000000000000000000003B00000000b4-0.8.0+2/patatt/.keys/ed25519/kernel.org/mricon/20210505i+0Am6o59VU+dAfK4WhkCl56BrA+rY4cXlq3AbO5M8c=07070100000021000041ED00000000000000000000000361F953D700000000000000000000000000000000000000000000002000000000b4-0.8.0+2/patatt/.keys/openpgp07070100000022000041ED00000000000000000000000361F953D700000000000000000000000000000000000000000000003400000000b4-0.8.0+2/patatt/.keys/openpgp/linuxfoundation.org07070100000023000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000003F00000000b4-0.8.0+2/patatt/.keys/openpgp/linuxfoundation.org/konstantin07070100000024000081A400000000000000000000000161F953D700005C66000000000000000000000000000000000000004700000000b4-0.8.0+2/patatt/.keys/openpgp/linuxfoundation.org/konstantin/default-----BEGIN PGP PUBLIC KEY BLOCK-----

mQINBE64XOsBEAC2CVgfiUwDHSqYPFtWxAEwHMoVDRQL5+Oz5NrvJsGRusoGMi4v
wnToaNgD4ETPaaXHUAJdyy19BY+TCIZxDd+LR1zmMfzNxgePFjIZ6x4XIUMMyH6u
jDnDkKJW/RBv262P0CRM9UXHUqyS6z3ijHowReo1FcYOp/isN9piPrKzTNLNoHM2
re1V5kI8p8rwTuuQD/0xMPs4eqMBlIr7/1E2ePVryHYs5pPGkHIKbC9BN83iV2La
YhDXqn3E9XhA1G5+nPYFNRrTSEcykoRwDhCuEA51wu2+jj0L09OO4MbzBkSZKASe
LndRVyI6t0x8ovYXcb7A4u0jiH7gVjcNcJ5NfwFUqaOQOxSluahhI497SJULbKIP
Pu3cv4/O/3Urn3fQsa689xbbUkSPhfGKG73FYnAuC5vxzBSkOB7iFRBhA37NfN5V
OhCbWfXipdBDxSYunac6FjArBG1tfaF8BflkQmKLiBuiH5zwkgju5kOzrko5iISL
0CM4zUTAUWbg1QnPvRjPzoT6tlsCOBY6jZK921Ft+uVjHg424/CVZ9A+kA33+Dfq
otnzNK4CLNnLT4OEPM6ETxLnA6PyldUjSTUekZ75/Rp+aJHt5v7Q2mqOcB/5ZA6A
+vaBgZAMfCZbU+D1FeXD8NNEQcRDWdqe0S/ZgXdU+IyqyQ3Ie4vqGGYpkQARAQAB
tDVLb25zdGFudGluIFJ5YWJpdHNldiA8a29uc3RhbnRpbkBsaW51eGZvdW5kYXRp
b24ub3JnPokCOwQTAQIAJQIbAwYLCQgHAwIGFQgCCQoLBBYCAwECHgECF4AFAlON
4fQCGQEACgkQ5j7cqTKd0H50bA//Q80DRvvB/cJjayynTjkX5rbL6MPS1X3+QRL9
AdhXp6NxsFAU8k/yScVNDnA9FpTiEwmz2SVyGA2zd7ldd14S8rSw8mzrWq0J9Ltk
guhUqbWDit+/5uvWpg97pNq3b6bEvUlFijn20NHtwr4Qz6cwSdor8BQInGqRUr/j
/lO1wYGhk2MdPXzmXdGw4FRNsaNNIoF/48kNb1OLKztBtl0feuA04OcVYN3vQn3Q
SS+1qhV4HTZGAoZlZG66bqEPFjxetZbZW2Zwi3/2Ad7fYaoyeI7B3SJ/a8l3rn7P
jRQrdgoykB1qK8lSM7GwOVRZ7LMTaf+Mz2g/48DzBG+hyV4yZDTB45xm5j49vEHk
dW1QvU1s9NjCUWB7OtC1DOyJcKD8VxO+mVxfEuPDiXeumNFi7NevUCVC8ktBO2yO
Kznyx776X8mo2d9SiUVP02rUM0+hWFrmQKuYsY9G+Phac7oPbWw0IlHoCgz8oHrb
8UVNAl2G/vMAYabCcELigcomQNXMQDd0xvPuSII7QthiHeLGmSgE6c285V8PNgJ0
QgxehxJbM8pAFFV+DDG1yaurKuQkuGZ+GhLVe4nuKpK8PbVMIrcc+oH4MeWDEIWz
z3RXWIP8+dZCp9HyzSPbA53IvyaaFvAWl/nL/1/Wq6zT2d2o8lKIe/vEKOenrArw
wHW0/AC0KEtvbnN0YW50aW4gUnlhYml0c2V2IDxtcmljb25Aa2VybmVsLm9yZz6J
AjgEEwECACICGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheABQJTjeHzAAoJEOY+
3KkyndB+3G0P/0LxLEIYD2EG8/ZQEj25FMNbw9n6rk7Fe/PgMKe8MZpNjpcyuuo6
ZW+c1B4Dew79rOu3kKJVgUWGS/6TQR97vQeVRLvBh68FSeaKyVDu3a9jL5ocWgZX
wzgoF9rSjrRhxIQllMPrB/I/GQnbta91DWSnvD24r9vg+m6LmvQhW2ZDY0DbJrOj
zlH8DsEYg+FmxtUQGj6gQfb/n1WWHhYuM3OIOzHJgSnlCCYLxnjf5iK7LgtEzeGt
0VepiUUdZk5IxI/nFYv8ouXHZrt76HM4SaRowq8Sm5YP+4mX0cVUPBZZIQnrsbVq
CfQwr2zaxTExlJ3kEeH74JO8e7nBK9YxuLq0dkwuHfROh03rrOlJXcxHvd+s7U9D
1wrz4SOFMWaUgFGwNhA+ToW3T5Dd7Oomusze4I5HGQUVHXK4zc65u+WuU4ZXDBWG
+5Y8y31IAwqX6qIwgoEHewFd1qLCZUVJCi2MCcR1MiIsVhjPGK+C1SWdNErVlq5b
8B/3IbzcHDFTV/RHENYoq0D4fyMBmyoS+erNy2+UsOy3pDhrGxbg2VWVkbTCssj3
pirNae9gNIQrZA9NdvHEeCrrA7W14zsgKZqWjjcJQLixjCxWPTfYq7PzAydSTa4f
RlGyHb6wTteLgJmQLdjULH2zyGO9xh7sjCVj4AycyNvnpBWRUPaDf7ABtDZLb25z
dGFudGluIFJ5YWJpdHNldiAoRmVkb3JhKSA8aWNvbkBmZWRvcmFwcm9qZWN0Lm9y
Zz6JAjgEEwECACIFAk7NMp8CGwMGCwkIBwMCBhUIAgkKCwQWAgMBAh4BAheAAAoJ
EOY+3KkyndB+EsEP/0DBPiCacPeBY6gWVc67Bt5KXbsJkonSr6ti+plXHhqe4mG7
Q0qBl7udgx4zYpUFVeuSPJ2rcQzOmVFpLWdDkkpPVqSESqwBAA9bgVfYDYtgSNwn
3lRuTzoaJJ742qpn+WNwg3K3WY3Xd5Ggs+xpLStLFI18Mz7cDhOB5kM9HGgxyDxA
8jGsz+5vGlDp8GHlJrG8jB8n/LamzjvQNlOZYyWCF7G+RAX9yoL39dHZz35SqcDU
9PdI4b239ihMPe01xQnoCjKxvhIcAQxwU3LenR1NDuj/BPD7k6g/OPKY1sWrlk+l
MLR8mIYRlWYstMNs+ztIsuIgtjbeewM8H58CF+W124Iib4r07YAyn8umtrL3KijI
lMUymOmuQrXGALiVdlqyH8u7kr4IHrtS0Am9v5ENyHWs/ExHXT7EvgLsRr0T+aKD
JOgVg0EdR7wT+FgSTv0QlQfGL+p2RTTrbFobtlr9mucBwELonPNWijOgDTa/wI9o
mu27NVjjsSP+zLhhjY73SSOFMT7cwHymRgGMo8fxFdkJB4xCfcE3KT7yaV+aafYN
IkxStPYFTvQZbU6BvHBATObg/ZYtTyS1M4fJOkfJGYUqBVwhB+B8Ijo/2iofwGon
XNtwO9Z6Bt9wBLxWiheQY1Ky/UIXJcMsYC/WgIhYx+Dlm8Exaoyc9MPdClLY0cop
yicBEAABAQAAAAAAAAAAAAAAAP/Y/+AAEEpGSUYAAQIAAAEAAQAA/9sAQwAFAwQE
BAMFBAQEBQUFBgcMCAcHBwcPCgsJDBEPEhIRDxEQExYcFxMUGhUQERghGBocHR8f
HxMXIiQiHiQcHh8e/9sAQwEFBQUHBgcOCAgOHhQRFB4eHh4eHh4eHh4eHh4eHh4e
Hh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4eHh4e/8AAEQgAZABkAwEiAAIR
AQMRAf/EAB8AAAEFAQEBAQEBAAAAAAAAAAABAgMEBQYHCAkKC//EALUQAAIBAwMC
BAMFBQQEAAABfQECAwAEEQUSITFBBhNRYQcicRQygZGhCCNCscEVUtHwJDNicoIJ
ChYXGBkaJSYnKCkqNDU2Nzg5OkNERUZHSElKU1RVVldYWVpjZGVmZ2hpanN0dXZ3
eHl6g4SFhoeIiYqSk5SVlpeYmZqio6Slpqeoqaqys7S1tre4ubrCw8TFxsfIycrS
09TV1tfY2drh4uPk5ebn6Onq8fLz9PX29/j5+v/EAB8BAAMBAQEBAQEBAQEAAAAA
AAABAgMEBQYHCAkKC//EALURAAIBAgQEAwQHBQQEAAECdwABAgMRBAUhMQYSQVEH
YXETIjKBCBRCkaGxwQkjM1LwFWJy0QoWJDThJfEXGBkaJicoKSo1Njc4OTpDREVG
R0hJSlNUVVZXWFlaY2RlZmdoaWpzdHV2d3h5eoKDhIWGh4iJipKTlJWWl5iZmqKj
pKWmp6ipqrKztLW2t7i5usLDxMXGx8jJytLT1NXW19jZ2uLj5OXm5+jp6vLz9PX2
9/j5+v/aAAwDAQACEQMRAD8A+y6KKKACiiory4gtLWW6uZVihhQvI7HAVQMkn8KA
JaxvEPivw34fXdrWt2Nj6LLMAx+i9TXyl8bf2kNb1G7n0jwSz6fYqxT7SBiaUeoP
8I+nNfP13b+IPEN0097qshkckszuWLfnQB+g4+M3w4M4hXxJEzHoRE5H8q6HSvGf
hfVHVLLWbWRm+6CSufzxX53aL4dmtCHlvd7A54Y816r4JvprZESQs6AjkHnHrQB9
tg5GRRXAfCHxBNqNlJpt05kaFBJC5OSUPBB+h/nXf0AFFFFABRRRQAUUUUAFcJ+0
BLND8HvEbwOUk+y4BB9WUEfka5r4nftFfDnwLfzabc30upahCdssNmAwjb0ZyQAf
YZrw74tftV+F/GXgbUfDVno2oWTXgRftDOrgKHDH5RjrjHWgDwwmJrlpT97uc8fh
U6X0SOpjkAI64NctHdaHNcyTXOr3JTf8sIh8vK+7c4P4Vct77QwcW+nRXOAeZL1x
k54P3R+X60AdjYapaGVVMjOOpx2Ndb4av1O4xSlVJ43L09q85025aYDbYeHICVx+
+vApB9e/Htz9a6zR4tZZg1rH4UYEYYJfE/iBjj6cigD6B+FfjCLSdXtpXdXib91L
g8bT1x+hr6TjdZI1kRgysAQR3Br4o8OWPiMeWWstBbJyfKmbOc/7np/kV9H/AAp8
XTtY2+h69CLe4jUJbyrlo3UDoW/hI7ZxmgD0qio2mhVdzSoF9SwxTba6trnd9nni
l2nDbGDY/KgCaiiigAryv9qvxpd+Bfgtq+q6dKYr+4KWVtIOsbSHBYe4UMR716pX
k/7Wfgu+8c/BLV9L0uJptQtSl7bRL1kaM5Kj1JUtj3xQB+ZF5cy3ErSyuzsxySxy
a0NK8M65qmmT6hYafLPbQEh2XGcgZOB1OMjpWTKjRuyOpVlOCCMEGve/gTclfh3d
TWVsJ7u2vPLZcE4VsNuIHOO34UAeTeBvDE3iDWhYsGjA+9kYxXtmmeBrTS9L8iGI
Fc5Zm4ya7nTPBNnF4rbX4BFGJ4RuiQcbu5FbPiDQ3vNOaCFASR8qkkDPbpQB483h
S0vneKKRN6nDANnFOh+F6SsTvRgBnit25+GOut5tz9rtYicFEt/MRvfc2ea67wXo
slvPFZ3EjMBj5gxI/M0Acp4Z+FYa/jZpjFGDyA2M1z3xM8N+PIvila+HvBNzrcou
bSOSOG0uHIzllJ4OAPl5PavXfFHgXxrqPisWul38Wn6QBuEqNJ5ztjIBwMAZ4+lf
Q3wm8KQeGfDEEbRo1/KM3E/3mY56bjzjvj3oA8p+C3wF1yyhh1P4keJdR1K4wGXT
EvHMSf8AXRs/MfYce5r6BsLO1sbdbezt4reFRgJGgUD8BU9FABRRRQAUUUUAeC/G
z9l/wR8Qb+fW9Oml8Oa3MS0s9tGHhnb+88XHPqVIJ6nNeM+Cfgf8SvhH4rmuL3+z
NT8N3SmOe6trjGwgHY2xsMG5I4BHzHmvuCvO/jdqccGl2dhvAeWXzCCew4/qfyoA
8msBGtrEYY/LToFxjGD6VoI6LzkDPSswXcG3bvUBT61C87TujI/yh9tAHUslq9iz
uQABzWRpflSXMUkKHYXG1sYB5rIvtdg0+6Swu/PLOCeIzsA926VFpOn6be6gJrXW
NmwAiMXeAB6AZoA98truwiazhkIV5o/lbsSOCPryK66xAFqmDkdR+deMzLaXN7YW
GmXUk13a7ZM7iwIJwQT06f0r2e0j8q2jjPVVANAEtFFFABRRRQAUUVT1zUrbR9Iu
tTvGCwW0TSOc4yAOn1PSgDz34ofFaHwbrg0iPT1u5jb7y7SldrnO1doBJ457da+d
/GfjTUfFGuZ1nUbh7iTAjs7bgomePlX7o92JrM8W6vr3j/xVf6nZypaR3D7Zb9+R
GgxiOAfxEAAbvb8K2vDnh7TtGh8m0QvKxBlnkO6SU+pagBbPRop4Mytdh/8AbnYn
9DWlaaRdWiutvcvNGwyUkOcHtg+v51ahU+ZlW4HGF5P61p2TKhIVTjPTjmgC7YpF
qtosMiL5ycMpGGBre8J+FZ/t0cyPD5YOTujBYfTIrjfEM1ytqL/RQjX0C72jZgol
j3bWGT0wTnJ/rWnoPjy7s9dh0i/ihtpwMM4mV0zgH7ynB6igD23RtEg/tIXpjUeU
oAwOprpqzfDV5bXmkxSW77sDD+obvWlQAUUUUAFFFFABXyp+1N8Tv7W8RReANFlL
2sJ8y/eM8OQcbc9xk4x659K9q/aE8cJ4A+Fuq62rhbxozBaDPPmMOD+Ayfwr4d+H
JuL69bWNRLTXUw3Mz888nP0GfzPtQB6foduYreJ5lw235I/4Yx6Yrfi5jwrDPris
CxusRjaN36kmteORCi7gwz70AXYz+8xgdM5B6GpluPKkhJ/ilVfwJqohBbOeBkAk
VDqU3ly6ep3fNcoOfr3oAwrLXXbxXb2WcxPJqduw/wBkeScfma5tHLmGUzmSYX21
hwNo2gY+hGKwIta+yfa9ZVeYpdXkjPXc5Nuij/voio9HuS16qKxJ8tLgD1KsVP6f
yoA+tP2Wtfkv9M1XRbuQvdae6rljksnO0/liva6+S/gzrY8N/F3T5HfbaayhsZee
PM6xn8wBX1pQAUUUUAFFFFAHyH/wUOv7rb4c0oSkWjq0rIP4mLqMn8B+tec+CoEg
8OWzxFlZwCTnnrRRQB0lsdybsAHGeB3rQ09y6DPGTjgkds0UUAakIPkwkMRkEmsj
xdK6X+j4OcXg6/7poooA8e1DK/DWzlB+eW9uFc+oNzEx/WNag8H3Er+IdF3H/WQS
o3uMmiigD07U5pI9KsryNis0Sxyow/hdSCDX3HpsrT6fbzPjdJErNj1IBoooAsUU
UUAFFFFAH//ZiQI4BBMBAgAiBQJPIXkoAhsDBgsJCAcDAgYVCAIJCgsEFgIDAQIe
AQIXgAAKCRDmPtypMp3QfkDLD/0bYj1H0kZDY1HprMqAhugR9Pi61ZSEkBof1fOf
qZuz55cKdZxsQCVMRLz3P5WFv9dzqeb6WP2khy9xDm4aMQ5nf2kMSKrkiXKcy9S+
r3m6BdR1dt3i2Y6HB9JLV/IzESsUJDEvO17mNMIW8YZeev5xO8QwV2zWUuUvYjKg
4/3yXmByrsvfWG1ew7sMJwgDMCCI8bXzVUC0TkTzgDmjvE/GHPqcPsGVkKFGqptc
yBWcZmEKuJFzAAgqwmMUCZF6Cmej4wDbt1WeXpsjNigFl8gWqGiCZTFHEuFJtVJe
3Mj0vWBAoIre9MzOoUgHpX5ke1q3KXC/pAfe71gQZvekfMss4yk7NzLygrRS2BKy
b12Hl7JWUpxVZm6YsL/h3DLGA6MGwjDA+99vZPjJbLfnPVjhFlKlu5kiwlFbnImY
0jvqK7KyNO7vnKp3Zct/gbGq1/wSsbRHn2ZkdvVgWH8+S2kq+2ZGL22hdIOx0CkC
DUqWIFTLkgqX+AyPmTFiZ16E/A8aXRf0+1Pu+k7xjwJ+zkAVQ7cVBieaqAZc8vvo
grUaSDjk0XWLD2dngD5g10KXN4OCvIkUlccOWc0vTYJczRayb8I+2AJ2Lf5zG8we
kf01ughgngP/3/iUSy3XI+xwA2HJsuCg7mawHTO2UE0ldQW1l98+k+R+29diERyI
6cMC8bRZS29uc3RhbnRpbiBSeWFiaXRzZXYgKExpbnV4Rm91bmRhdGlvbi5vcmcg
ZW1haWwgYWRkcmVzcykgPGtvbnN0YW50aW5AbGludXhmb3VuZGF0aW9uLm9yZz6J
AicEMAECABEFAlON4esKHQBEdXBsY2F0ZQAKCRDmPtypMp3Qfj9ID/43HgJWx83R
3spmufpl5vqpIUHK4uFeuzGfHDUl2TmheoXnTbYb+qhqowmjAy4WcVzrcGjp8uJ3
TxBr2xZTlMaRn4a/aVNORlV3hgM/nAk9RoA9wti3CaJ3GlRkx3w/qG9toznWSK4u
5JnCzrcfBr/FKKCmw7oeGHBQkPnGfXJxjG+4Iuknn5sdV24k075wpXL4uZRsG3U/
N0cPO8Nf/8YMzeVkiTmM3W6Zy7ubKl4RpizSWnRaYl7zxJqQ5GxSK9PtyTPCHTik
HFXABipRpIWGozS1McrUp1gAM3mQSoeL7qsxfoN0Zxn0WqQFqKCrAzcwsgbWRAMI
uH2ndIeP0DET6fyFRYI/XTOF/Kda8XbqAqKkyDqWiQJ2CUl146Whkdsa2M64BLr7
VBhE7QTx7pjMyEISBc2weMSvrAaH9bNLSEH0GiSPFBTAo+DF4wr8Gy6E0bHZ/k5+
MFpwPU5hgfi2Uflo2IhmwLOpXR1UvQKJ/OPsVQNMePNx6ItJob24NjK+vXks81nL
E36Tgknq4i8yp5Tf1ifWthdXYuAygxb0L4dVhzs4ddDPyJROT099R1Nfp/bKknyS
gegxnDoVMANHtJFGvfMLmz8BGS4JkDDK3k5vl7i4D2abd36IZ+M68WRmI9V64jZf
TTp2VpivHKlaDE1iX+6ESSrbF2PlTYCj47QmS29uc3RhbnRpbiBSeWFiaXRzZXYg
PGljb25AbXJpY29uLmNvbT6JAk4EEwEIADgWIQTeDmbjLx/dCQJma5bmPtypMp3Q
fgUCWunU2gIbAwULCQgHAgYVCgkICwIEFgIDAQIeAQIXgAAKCRDmPtypMp3QfsFw
EACUcFAleVyqsMuCFC61n/mOeapk6TsNCop9sfP64a2bhYM31DRkZHco8xrUB0dZ
6OHozzIzIK/v0SzurS3n7gHKfuktbSTvAbJMPubM8iXJyaKL/+DGHt6qJynD3tHt
SSR4c9aFrlnrn3Gefa3eQrgdNcieQcMCXOdePDHZyWKQ4gfe6zxb63SbMv3Ms25h
cmOf+HA1S8fM9bKrHEvebm23+2WOrQR/d5OPRXnWDz9yz+++eWQfdG+FUfxUz7ul
OG+C8jxzGjrAWgsvrAq48625GUrvuU2u5BJD2P1IWvEpQtFm3XnWvqP0hy5oT2i4
hHvPxumY6XuZsBvEQygGajj94xZS5Gn0kqGV5XV/I1Z4kY00Ig0KHEG+LL1O+eu2
ntfaqS2CZSlwbnfluqdgNNKs6lYsolvpqSCAXVVV27pkWo/To3E2RFvU7v33468K
ijBEHAjWlacmC6Ixs7PRmHiNGWK5Ewn0suzmPBy8lFtKBhT0JUyK12vkfrSFHs48
5TDk3uDQiyYh8lMkSuQIlBN9wfFMyPZTlfInNc7Aumczplkl6I5qz5rfaxz1uWg9
zI7deYAEoOJnaJG74stAXPx+iih2PbOpviXcr/ASL33Xg7A6ZF9Q3mmHPLym4q47
2VOaNj0AjLIUZC76oQdEXJz7Is3A/YSdgEIomBvrCGU3R7QuS29uc3RhbnRpbiBS
eWFiaXRzZXYgPGhiaWNAcGFyYW5vaWRiZWF2ZXJzLmNhPokCTgQTAQgAOBYhBN4O
ZuMvH90JAmZrluY+3KkyndB+BQJa6dUAAhsDBQsJCAcCBhUKCQgLAgQWAgMBAh4B
AheAAAoJEOY+3KkyndB+w1IQAJIXCI5iJSSvX0AP3JuTwU1IOXBXMrwOlaltpWFC
s3Md6slh4gD6bruTYYhbOjRmJuMKDPzxo7WaQ3ru29M0HftQxQKhhi7DAfi/7Kp3
F33t5d2mpoimK8Gc4D5kXGFQmKGuuNjs7hrOol8GUds8RIgQpplZ+4GItNLXzOpt
3O4iYkIQQrVpqdeT3xQv4OGjloDzoEk3skMgTyXWyI6wa2sqsptA6ocLdzCmF5PS
U7Uidm/TYBM+TneJPsvYOBpKxngWDTmgMXxUWIkkU+Wf2nNecTnWIcfq1e2786zg
rSeBCD3yxhfy1AUaWgwJf4v4ogbj8vBQ2EGJT9i+nQnNnW4RVRjY/uouCedrFr2C
49obuW97zi6lOyhfJPOsRDD5ODEn4BM5R9TrN7uKCMcPbb8tbg3ZjaMXv7z6KCrA
d7hLRgUTorO8uEFVIIY9TUc90NXYKrWc6/or+W/NTforIox4A5qAZkVcQBSLC7t+
6v+7wYz4DRP3oLlFPpbT7+gjrU6ub1j+/MAw8Vamonf0+2xnP8P9I8k8qU86Uir3
zAovZ3LRjdxVv0BEL8ydYK/Ye9CUVDmtyd84V7Ii2/yXZlrOYxy3QzoBVH+QjhDQ
huQkbIWRiC9LTjCbhPr7HJbAZNUGnODd4mpn/KrvDOXSvWV5RRpP/lGKV3asFMrH
4sqXuQINBE64XOsBEADWJbYsPaPfoe3VAKPRgUnkQOaXmnbVaBfGnVlpV/1gx+Px
uYKZJscxGeL9102hZtMJXW5CZfH8fCHtoAkUtvpSFAN+cx2Y6Hf/j7CKFnzDYgJJ
vccopHA2b0XRQPPmjz5Q7KUF5OK9UhCTXx7qLumPgdXbEoGwMGJt5Foxn9XD8I8h
7W0SwV30hRnYGcBv1dkRKrFvR6j8ve/wykJ6Tl5gzBIFb/4D1CUJJ4ni3yqX/JUr
QQjEasJYt5YXH+XB/J5FOvAHT9b1WfcBw/UqXw0OHNkcq9CIbbJfxc6qWIsKiISU
HiJDW+RhcRVv/kecin3VBkQHSlKpMXRdu45o6ZASPCyZ6db/PbbcU/RY1kKgJcy/
xncHEa6rwfZVnI0aGGDxgdsLUAuR2O1p7GO4wBJSGMrhln6ZfUCOlCy/Yrw3olCF
XkBW/NozRbfMf01K9T+M0o70SDDBlh6rKfMPxYP/Zm3YnrLcLPD60IgozPp496Bk
nck/5UAoR+GECgnSLPFxlBYhmtKP0CsJL1PTPYQ1ftgV7AnlNvfIW7NuIppZd5Qk
PUqUi66w1jtPM/o0fG+6yncFqdDr7uiT/M132pS3nHfO/u3FU9Cpfb2MfwaCoCpR
FjRNKeVLElGilUKGunzrNb8r3QOExx1KcTk4sfz53M9pNL5iUaFhHIzDFO6rGQAR
AQABiQKDBCgBAgBtBQJPIxK7Zh0BUmVwbGFjZWQgaW4gZmF2b3VyIG9mIGEga2V5
IHN0b3JlZCBvbiBPcGVuUEdQIGNhcmQuCih3aGljaCBkb2Vzbid0IHN1cHBvcnQg
NDA5Ni1iaXQgZW5jcnlwdGlvbiBrZXlzKQAKCRDmPtypMp3QfmuSD/9EpqWU+jXQ
mj5h4rMSwxRppIJ8SxfjlwHik6xaqtR3BaDRPfGvioJJ4MylbICvlW20mymgi0hP
RSSVEV56bq0PRzKQnEd2n/9m9BdOH9r+kshaj1jL87iDjblluM+iVr05Idi7iJFc
GTE94qk7ZBNk4tMGNBs/0fxqO5IUI56YKZcuKLDhHLRtlvq+OZPmNxjeou14StvJ
COi3EC4W9plEIybZolHRI4xa9+mnxk7y70kGeofZlFNU0ZUBkvVFqi3wA4IngrvM
ITllBAgZA831qo04CqZYaR0PfaUh+sVx/XaDi2ZIm48X5p6cttYVygZo5a8+VOby
vvo9LdVaZQI9++KMCti0qU+b2Ynhbs1Zf6JEYQeYH7UGSk3ZYJOF0FmcMQfD8pSZ
2SyJYJmXY3iDKyx9OHl9PYXpGlDjZHWoaZx+PHUqtOUvBF6TpYbm/+UnvMyo2BLO
8G4SEv0crekobWZLkw+rPEqnlzgN+o/BXRfykEjCNHuugBMeB6brf7PKyZDrQs/i
wmUowqFUjrLC/7HbbqOankoaTtZRf89TYtE0IfUNWzf2SOBG2A8HIkzZzD4YIM3O
AtFryen+rHvU4KnAyQRDyZqztlm4zlRbsrePw2PMRYRdWMXk3OlDc/lcLnohM02/
t2fb+hOws7yrdmfpFPatFr2QE/4n0cydg4kCHwQYAQIACQUCTrhc6wIbDAAKCRDm
PtypMp3QflR9D/9Z/Q2Ahoe1fX00xyApCWliJtJWwz85b+KXMe158jKzuGrcMRw1
2N3HdzgbZgzqS24M3ayRcSaXJSyKS0WmKW241uxkIZap00j1aT74DKLelelXjeuj
MX8DTbxKI58zOkbTHhcJmqnoL2zRPRUbX4f2zn+wiEB4UUO1oFaeqVKKoZMBESbm
BJkKPP6Y8Lu3s9VkyZTBxvCuenPiN5rDvEP8epj0mclOv3A3t4Kz5ihHPjKMNXl3
phtCS5RlriE9cV+b/5mgzbkz4roHkZYbeuFVoccUCckUkq1KsvnAHETGaxkSZZiA
rBY47sqbEvypSF/yGGdojPKtRz72Hoi7Sm+YLqAwPjMj7UZ+6lnMFs+5LYtzOxwf
2V1E72vdlp/LKCtWpdqd7z9fA/X7JTswwKR/F1kSfiLONVytL9URNSnYOji+UJKa
/Ex1Dr5A/M03hPVPavJV3iohQLM9p4xddLOuE05hR6GqPyij3B4ZwzNDFjb6tVxx
8i8QjPBEnqGJgwJ4LWwMZJ8g9KYHTPLFlh0YXGQn1K9IM/N/MtGnvGXEen0/6wEC
sxkJNHqVjwUaHxfCC7l6rT/eB8o6jeiWeTGHT1VhxWaOKikiTagyuAg0x+AUo6kZ
yblA0LaYJ2nwyoXRqFmQV3NgHo6vS8Jy8XAJtI0IV0KIC+kM8s2vfeAKQLkBjQRP
IXQ/AQwAmcDqQfXeItD/hQKTYSmup2PfWerCSZAG0Pjfumf2x9WykqpmhuGYftFF
ExhVJthmRixsK7KmBVNfW0icEtlBybD7HHFV1Lk7hwduVnwGFWmzCQMmEnq98M8J
XwpVXueThUrpwzOPBUEjTHy7QkNdX0Uh7p1DzbGF9WreMaoQktsMeb+UWsGV8KfC
x5xAz8IScUZm6yTtawu58/+DRZRa5/kpBjAZY7aWAzFqTtHJ/KsRu3fajL++BuBM
sKbD09+2CNJALWn8Bxr8TXMXbPwfCxoi3wJ7pU+dw/KvbKqNHKTi6OeQZSKc6RG8
IVA0E2n8P8VmU9+veN9L4FxgMUs9ry1/3tQOTrSVvC6HbUVSZw0gXvnDccdOwQEc
agNHyiWX5ga8EDJlS/LWn/HKsn/ook1ztS0pw8nNlRKSSILusVl3GCc+PaBKxEac
+JJtRVQAL2p/8sBvX3x3AQeAyAEOo/jJ4OEHZXJ+zwxChGFLDliHGiJKWvuz2UWE
o6x6wsZHABEBAAGJAjsEKAECACUFAlON4vUeHQFTdXBlcnNlZGVkIGJ5IDIwNDhS
Lzk5RTA1NTU3AAoJEOY+3KkyndB+Qc4P/3+auQq3bSIT4taigjAhiPldoDlFk2B+
7t8tgn+aNroRKKUF1j1dN6bwtRctAA7RcXEZeYn+VktQdu/vo+OGVsKnlqRhLlop
prI9LAzgVCSYIEPkGbxHiwE5ghVa4h3o92oJVuM21Xbfz6iER2GZKFm3moakMaFk
1LKClkPKx/sIbGSzzzgdewHH2ufc+u346I8z9EuI5CqvP0aD7CP0JmK8Pj/sg6c0
NqYupxJRuIK+6F2+7TcY50KRshQKyMrKLs21yt4iaOkFenBiIRJvGVcOpuMSpfho
6XxdMKdhQK2hgIMdqef2eBtBGBW1Dr9vGn7Y2yGNjfuv3goLIHyrrP3W5YdQ5LmS
YaRxAUXiUhTXPn/cAzQCtzYUQvj2Sg55BditRkLPV1BbLHbWwDRFOCzxnXWjTEfJ
DiH5x/vnobSuBj6yT8aH2T7W6dACyTUjVJ2zxlMakl6h/DrzWHk2A8hvgPDZOo4h
VEo/sfOlvnfstN83b1g7981+BUn4F5WSRKz0BPlaRkfZWBo8ezsa/MQUg8XILH4T
hgOWonqFCFk6r/KyXx0dmYhnlMguWM+Z3SGWRUq7N1ByzZZ+2uvImLUofkl+pEf+
H9Zrx0bctWylBGkGvaVnxUidn7bYx25Hc7CeflPL0SiT0OaWGDrzejMbXKgL3bce
tIWj3S5Mzr6miQO+BBgBAgAJBQJPIXQ/AhsCAakJEOY+3KkyndB+wN0gBBkBAgAG
BQJPIXQ/AAoJEI6WedmufEletnIL/0wGtZj/RTGbJmfg0orpA6EiQ/7WWKdPm1rV
Em6XPKayVZHEtiRtd4YXr1ZlrbB31OSxpjt3N1yk2vDim+xrzz+B9By/wbPzCkLL
f5f/SO4d2hNm16IiYiwBr4xPVz3b2F8QAInfiEZu69CJuXkGZNM8eJjZQcu0l7Ps
e8Fs9ShfeLZFVdFgt7C2DXuvcKALF27oINzeywD1M8wGtFgEr3OlbtihwRm8FBxV
W6e/BlMBT/ZISoHETR/TKMlmp2tlIeSJRRBz52ID7QkCCNQdQa3/T+zUAXrl+qOw
62tsAvxPNAtJy+CHU46CS3rlDtvCgJWBRpCYrZdv4VcTAEg1BsVYihkCEZlkMpa5
Mo+ydbagjR9UfOZH8nMMrKVjSF3B7WGb5qNs7BwL49CDhJAvrkD4FaluDhK7tezW
Nn7E++X8jwDoxfQekkdb/zlNTQ5Z7H0cfj4OUTVD0xSIvwOVrlh2UA9iHtHSfEG5
aRKNFlelzUW1gYvDpe405vm2ii8ANvu4D/9tG4gUAMP7E48r9wSPTuTf+Ew+7BJt
UJ3dm/oYdDni8cRp2cvqn92YNerlU0GLlLAs2aM9KNmWR68mlsjzWme7QihK2Trv
yyPrLAsvl/zLfbkNmqNo7JQiA05qv9UnD0NxhmRxxL3aTEYgRoBCfn64N4P9pmAl
rSnh9YwaBuvH9dewprCYWjrJnU+whBeH5UdmH3clqkQDo2JyO7WUKXkLv2UwSe8N
VqAHbZbnROo1yibdwMRgxO2dZu+yPcx3NUAWlIjAQySDtEUnk0LcfugsEyueGDYl
UKPgZ3b52IS1wAnpxkA4eFIFMs4+7dDJhDDo2tjkIc5sTo3UyiH0K1V8rjY0+lcz
1m3NmmfmomLA51jwSWXHJ15x4qj13IQi/HP1I5Kz//8aOb3qBeMmQXBjZzFvZUr5
tteuNYL+Tor7/QMtXp8ShcWao5CVpQlGlIOfIxkjmokwYdsC35KhaJXu7KrYTdCJ
ZNA/RKt6DoZ65Jr3atauV+WohjzGASolt2dXbXns+YKe38YIkhv4l8E9gDj3vmZ4
m6y4XWywMFqGgIXJysottpSFqddztSulCm4QllpAKrJZqbut3WJZwxucJsyAIkUQ
tVxSdf+atAvrlewGBGfyFsHo570lkEHCH93UH001TxU/rbjHSnirekJ9GmMPL6KP
rqEfT/OeHrl/mbkBDQRPIXUfAQgAodOkpJWWOBKZx1jISO2k+zTqpWZi860S0XPC
PZd6xmMGGksUgckagJoNvP6glO8/SwbyRkhL5AfOl7qSM7buOn/UUnbzRHTjuIPS
LSYRVw2KcLQWwOWjKvF7sQ0jiTQHdN8diXXJLK2Pn92W+WbEnv0Bv+9odVS8qxuj
XabVFvLo9u4mV1r5H95UVhnKbIwMUrqYIQtojdAINmHuEAt2nTYvsyb8sSiX9WXs
5/Ku44ItPg7qnsL7+mf74sfUMg2XWoCfM4vJEQMyfONfQ7wZS4RIbFrsVy26zaB0
fnoovAnlahVPflFsjox99WrCLnbIbmqy9U6tHCQyueGWMGpCLwARAQABiQI7BCgB
AgAlBQJTjeNlHh0BU3VwZXJzZWRlZCBieSAyMDQ4Ui9CNEUyNzFDRQAKCRDmPtyp
Mp3Qfi2FD/9Po7TmdEGvGC8j9E6VjqSmqWEiQfShAhM7V4PzsEm4Z19yiVtnFMvC
zKJI1ch8zHlGdEFMfYPEV6fq9LCXWOwps9CbyDMrvQi5JMv8DoAwBSenV2IZyI2s
uD9y5WrBl9Scnqx9uPNWRw9RnOUSsxFpBNa9FcqvXOWSkSSTk8Obv5QQiZ51ChTi
u+9VIU2h1S312RAXu7rGT8MUzc7O2zntuegtCxSJOhCVjPuugA8BobhzHJx4+Tkc
8j9tlX/R/1WYnqmk8EiINy6gQQjPbHf+5dRhjTg2j1sEaUt+lESU7U3v6xK4eS8C
7lmJPyPNI4Af2nT++yolN1DWy7ihP9yHqzsZDnD+IXdmtJqz/Lnbarh9M5zHG2F9
TAPWSZMnS1nm04XZ50EGHC92BhTNkE1gP0Oq0FiBu3dtRuapxksqElijvIYApPk+
IGQzsPT1DRf4gX5vPJQnqghCJ6/pBKgmR6c6R1MuhrufHUMd6ZXepzh7L6YI8Afm
a3f47Wa7AP7gGX4XTIrkU4co4ssuavMnMGtfRruSVI8wIL2Hbdfcz9pjLQdVkZXx
mCYggpVqcjkVa8ycam3iynZ0ZE+rYLtON+rrrl4PeRPsXD9CrPk8p5LIAo+Ver//
hio0k7rSn7zwhJa1NmOJ1ezAyggQCeXnE4h6ppnZKLs7pBgJ3OIzy4kCHwQYAQIA
CQUCTyF1HwIbDAAKCRDmPtypMp3QfploD/989emn/GRN/44xq187bHlEbZnBL9hO
JWptQKXTL/OsQARVUH32M3IL1cO1erTZdCxIX27vszNIdbgb2UadEb4n7TIw7Vm9
X+qD5y3e1mwfX6iNgvIcVYK4U8KMkfg+JJbZlo868H7LRuFM2FPKij0x6UALLITe
ois8CLGc6D9nE72ClngG3MVwn1i2RTtDxuBuAdY49hmsbX1tXS/52vXn6fXHyC2z
QTq2misKc/xXeHzyAzBhftiT5pL6iEwd+PF6udnPxvJNYYwgYhr+UEfoYn3HSnA1
4WtAhG4+VnvQgUsD4V8UoTkae7CvdnoltLeXD5CxMaAFsnTA8l+a+71wnulxafCn
No2wowJJmVELWDNlPbI0cuP16r74VLXzlqpx0erd/bkhDCUCFF8L2bTKfNwg+kJQ
usTuWHVxTWirnUFhO3QZ7s02WjCv2SPQxWcKHlnV/P14YCcjPaLqGn6e4Kn3AveF
bRvEjsS9DKm5z0JogaYFv7ZLjN1F4myl+PzwqNAQ1YwU4APk8AJcE9q0gowpOtZ1
Ro8+I0yiv1qbZbw0TocPnAYBvX3k2NNBN8LlIlDekJRp+VkUOkjJ4DFP87Xk498U
T7scvlWg1fv8m5t0KmdpDtbFf3SwoHjsmEnF5I45tbBj9DexM1WQC3cBV8dxyyAz
l6pjYsZJPOpEcLkBDQRTjd1CAQgAkk1yOTLf8rqq663n3Xflvo/SrVG5kVROhQas
Xo2lphuhD3aqTB7g65JJxvIaeRAyBitJWNAguryLJmxl3xUg9ZIQzP8Op+qyjYwU
WIVDozwiH9KoBkKaH3i3Wo05lpW3FgmW1hmB/iP+5qvK44RPW8ejBUwlgg+smH/3
1puqseIFbilQe5PF7DfDzCnC6NuClODpDV/q5qPTetyeYySFcO8J5/8CFFnf5rR1
NHw9qPnl9D+6WdKm7X2prZLUrSd1sHPPaGxkHE5/sgtiCiE7E8S8IeQmR6IQIZun
9N4MwSTEho8atayThraH+qbV9dV6SD5Fljr3brd9a17gXJs8ZQARAQABiQIfBBgB
AgAJBQJTjd1CAhsgAAoJEOY+3KkyndB+mg0P/1ZKUL6Vx71P69A4qvBdMUKIxr+c
phzLnVt5ZaAx8Ri+q/JiH8zAS65gxbbvic0g26CVqCjpXH/SuJTFmoW0pR6u+qq0
vxWhJhobNEgTarc3cj/soG9+hsWi4/Eavx7NLHVI9jRDesN1aCWzSpczqvbjZfDe
/zIFWahOWEYnhDQNkB3zdFi3DQ3SuGm30QngZbm7L5rG1f4MODTEH59a+LH8pcCI
nk7Zg63pDkpR7C8YhFtz2bHGviWMpYM52Rt2DRn1ia442qG3IMdA2kr4m0/391CB
hVKnvDbS2KR4HUAtt9T4D+KovSrEU82CZuScZ6BJi+V6fJoWAdLeE9jB7KxBoaXl
sQonr0XvnjYHlFW8WtFB58v1XKmonGkaOIGwjs+TPMqqpH4cj5YktOJ35a5T9No8
cyA3xOdyf78Pi69mPTvsyQrrzLKZ6uWDj/f/dsE0ihX4ubgQwzh3z64w8jQDEh9y
HGf6oVTverKgB8K9p2BEEMKj2k9z4iz5D76vrm+myF0b9OmdRs+Qfpz0h2ThgZ0F
xTKFYVCuHPGDiy+lsRQhj62vxP7bLeXMg+bhVWPvyvxAeULbZv4LSb9HCnI0yQBt
YSaisslvr2sPT12j1/H+x4L2C2WNMWXlkY21ImPLTlgyKatBMfpaGoyjGCbZ8Foa
EBa8wVxl9Gp6N/DQiQI+BCgBCAAoFiEE3g5m4y8f3QkCZmuW5j7cqTKd0H4FAlrp
3kkKHQFPYnNvbGV0ZQAKCRDmPtypMp3QfkPCD/kBpnQIAsjLqHU7N2nmtNKNqXNN
2OwHOVlvfj629b336UiuxWHZPt6cjQNwVibMw9WBqmWXctOj6tycZgR9oJKfh9sm
FoBxkRavR7LViGFWT7UYgECo3x8chtHD1goYLlJjKi2AsVIE6CAWKYXHbGh1t8EW
sbkALaFk6LbvudWHbFDha0EjfNCFFS/10TItm4BguCdtIeUP2OylWCW58YppzO9n
imsY1nz3jpJn9RF44S5/A7dY5jteE/5c8a3hO9CH74g+vlqirmSh3SLNUEoNBUKT
j+1BdBEYn2GKWnGryg+83+76dYjs+9GfvNj5f6ytyVpkfc8kZVl126Z4mV/Nvz9g
niicFGb3Ruvvlg58NyWeQClMiUMJj3unpeFEof34lG4C2wi8rPeepxfBuOsj2nm6
I/BAddAE0bNOLeSfWvsHEY3oW2Lq80Ej4Ojs43SqiX7Ld+mVUAQBsetIb3jS2Ol0
qTJ9gY/7B9oKDXOhxJgp4rugHarevVVAG4gaJTk04zlXZUz1a+cEcYDfMEGJ3DW6
W/P5/X9rHd7o3vBjEFYvVvKGdB1f7tyKUnOUvgd0Zknu1gEN73qYp6t6HmMrWT35
4T4D0cmEekmhGJsV48WH+ot3Hq0d3S/1hzoLRwM293k+G+fUpswAdYk0egwamZ56
6F4BVlC3NfMMTiyf+LkCDQRWN5NiARAA2HrOyogaOCI+bjh+6Ua71CuPURvj0dHC
+DEUqgowKPSxw+lrd8q3AIPv055BXXgd8UPZ4qPZDst/AAikJ/n1jmW8jPUZsaCr
S76Uuo/kwShOznnlqTE2ZPMWloiuGchhpAuvAQjMrJ6GVpLskyZp5KhKSpu4+sR6
VMXmK5FjwRqAaoKBBt59FgyW5bJsUpJNJoLUEGx3PBvRbKN+yLWhGs5P9NjQ/0wq
UBYqLnMfnSqeSf361r9dKp5XQS4kyGYjpvFOpByCEJbiTrtbVsIU6f4/1NMbq4z+
dfpfdlZSPCYNWUalgzM8A0XU7xd8uAQRzndYZZZNmyr8jDem0+OKUWfqz03U91ot
BzjZ2JZ6epfBc4IM5WkWGfsWOjWnvI89FSYqT3f7EaAjV8rhvv3Dv6gWJ4E0GbaL
sXrTqBIDcAOdcsot7sUTe6Ajtxo6HwnGJlwzaReicpXAmJ9xZHxt7+8bLqWQY4mt
KTzvdnlWs8b4OGL7UazU/oI0Cfmvts3CuorSu1gJQ493GO5OZmRSXKTLZsCU+bDT
qBISDP2H7bZQ25VgFEuhrhxJokGBcEAGIdtqrhwUvBxOR7AngxSp8nbhvhFfZZD/
Tf56krQOtXfc8Gqxk22/q1PIk2dZqtNJvFpHh6EAez0MuJsVIBxmH3u8M/r0Ul3c
wufPTHyjROUAEQEAAYkCHwQYAQgACQUCVjeTYgIbIAAKCRDmPtypMp3QfrWSEACH
+sAr1ok7zipU9vhWQZ2zn/FCMd/aAV87juGe6MKEN0tgxiG/aRGNzHCr1LnTp4Oa
Oim0faYVAVgSDiEYeQK2ZTiSWWOXLdZ9gGaNONKAhWhjWKawx2OrKFCMcDkl2AHT
ao1nnYnUGs8mx33HFasy32Z8AeBMZZxYIO1J29vMev7BkjE8pP8tJ9P0SJljS/Zm
4oeiMGY21EtvLusZym7BzqT63W0kqQ9KNRcllPkxXslKaZ6On9EZn3y6cxMgrYSe
+bGIwPncgBMfc6CJrAU0sbsMGquI3RII3EZdH7QH5eIjrSGBjMsZoEJmGLtrEjEo
6ms+jBJjHVWMNp6qGnbkjtKp1t4OXAP2Zeu3TjeRqjLzjsd9SFmFGjF5FJ4haR29
7dmlinAMxKtY0OKHbLBj7jiV2f9TPWqva3LCPsX0vYACvOFlsJiAV3dXG1JHuIaZ
Di/wIo0QPeZI1u2fXGXZ5clA7lIcw+/SvJI0klCf7n8F07evS3jyiaNq+EF+MjRb
YLTL9lzRuo/yxOpcjONp3w9zE2n6BjfzAWCGA1SB9mvRVHQtyk87Z2QFHA0l4Qii
OP4UI7aMzZ/iygo7U8f0uKKnhnSkmvpZGVVK1TJVOZmmvlOPTT0rLosHiF9w5+60
5VocorfbUkt2oihoqBg7gnwq0SG9AnNsZWf1uCOIo7gzBFp952gWCSsGAQQB2kcP
AQEHQPVtSuFnhMmRe46yyGKpN35sCZ96RZEMD9PYfgY23NT3iQKtBBgBCAAgFiEE
3g5m4y8f3QkCZmuW5j7cqTKd0H4FAlp952gCGwIAgQkQ5j7cqTKd0H52IAQZFggA
HRYhBHa+XbJSceFIHmeMNbbEHONWZJlsBQJafedoAAoJELbEHONWZJls60wA/2MV
lKqzJFUdje9B9lIPCMS1bVgt2s6N1F4aKYH+zJ3rAP9GC2b7IRlj6yqVqhIr7zy9
5KEHR2J+BANSiVJ7/7V9DcA4EACymPJNqnblefv04GsXXTbwYcTPwZ5FmuooM4l/
Ry8GB5f2S6CslyGUe75rZzdVrkl27VTlaFxkE27alB8NG148xttuhJqKD+O/hE6E
6x13ffoG7iL2nkUolr5hyJitN/JOocbc/1IIZtyJNEVBrVwtAtoy402NR/fYlB6s
ZrTtPiX0GA8eH8HxLwdqsjxH8Cjsm0wJJs/bqQ1VpBheiUHyGw2qIWEfl12wLWNH
iAHtD2RzFWTnRw2NLA1O2AqQ8ONaWLiU26MsSgraH7wVeEP1K2vQNZiN2Shn/+OE
LHeIno2MbD2M/FPdybSek/YshnJindRqrfcIsoJMQzDZQYmB8yj0MMsifoFTd7BX
8fQqWn68ADk40VMXvC+TZPEVQKquveSj67bsuuzJmMvPGKooKPTyOi9HL24X+von
PPEPwkIH5esSWFmoUDsFX4t3HTFlNetqeUz9RhuIZV9yV7HJN2mIseSJ7lhj0Xay
0m1Fka+A3RvGxb9tENnq6MJgg3E2Ubi8ZFI7fKOehuPOQxGhnohNHXMaZqcdedP/
Aku/5lBeOW4FGUWzFwRjnooONa8EblZsaoR9JHNeJKFW5+shaKOjJTIiBjoASt/2
zJxTWW3B7kA1PXqplvvwtCCnmMGkXICwLL7VGSX1Y5V6pA0yr777eXCsNgNUbwu1
KjYnoLg4BFrp358SCisGAQQBl1UBBQEBB0BrjZj+KTDK8qeug5u/uwXZ2DwlHR51
NCDcVYJGkFVbMwMBCAeJAjYEGAEIACAWIQTeDmbjLx/dCQJma5bmPtypMp3QfgUC
WunfnwIbDAAKCRDmPtypMp3QfgjpEACwiXruEVFvV8k9Vq2mx3GKedVwAp675Z35
UO6MaGKZ5XpSQojM2dI86QeZPgbFkY/JS3OWccjW5hAmy5DciRgXHQsAJsBRXubk
A8sfX0ySRUbEmLi6bxIzbm2md75IlP4rC/b3tdtSOTKlfDpa80mFpHFRtm20lS9T
8Eyz1RobpGIOIoSmcWG4UWdv0W4ioeMmVLnl0iR8DI6h+U7nApBFwSAZUu6nituk
CYmwu8AxlnWv3F2UgcdwLLuI9KnL98BB/gkxoxMk1X6SnQMvPPAWksyz+mPXgdCK
ylKkkzwQXo8a7CzDDExxku8hRk9oiGMjCZRnOYxC7RFkP/psUcJbv5t4uFqysyAh
+SSibfw4/cI7WVatzb9t0eBmsAOlmxA7sd9jdnu2xMCYQKHiLo8foMR+mHNM5q0T
E+K33cwTRiXVgqcAkfheI+A4oyzqzddxsxdYwXpoceWEcs+di9Qcwg5h0XmZ/6wI
vwj5SDUg1gQtnly+aFIwHjd4ggIbhOze03dN8KKivEs2EKzaXImTR0foY+lyq9bo
IWu6i3X9bxmmcpp4h8vKrKJcWrFG+q0ENaZoYqEuXiFJ9zxfJ1TdScPSOlZLVkKP
x/uBtR1RU2+//2yV7jJWK6raVXZ9hB4km3EuAQts8+UCsXM9jsD1Jlw1fEuMQEBp
vtlgqCEcWrkBDQRa6d/DAQgA1RDvHPo5wd72mXB1ztBCN9jPCrtlwXGRbwN/Kdbw
ANd99X4Ctr5m9wKMK5078Zbj8C2Yr6e4+1vxzXqBSzKWZohswpPPVC5B96RNmQrL
jJ5V8/TLU7ckI4MtCw+2K03i9l1srwxwXw8c56k4jjmk88PlMVTcr/urjx5unYH1
uHN3Sk3n1gAbEOTRrrPZWaZviyheEHe86nnQKDsBu3yiV9BepIxYkYxZm8sI7qKQ
lzpgwHaudNf+rKPiza9D6d8pgsd/5rlzKTintEcgN9x74AHJqaFj5HAxjyg/wgTr
ndNcWeB3Eu7G8nZGjDfR+upSNjmP8evufT6A8w4d8tzdfwARAQABiQI2BBgBCAAg
FiEE3g5m4y8f3QkCZmuW5j7cqTKd0H4FAlrp38MCGyAACgkQ5j7cqTKd0H4uCRAA
l8ygwpx87vrM9EtSt8Q7cOIj2tJfYCqhdUp32IV9OE3EPAWOV5hoSR325OTLdmv1
cE2aa35oK9eevkayAmBsprhfZD20tHB1P4wBUgcsxShJLxXxZsWLym7AU7xwCXv9
3G/fk5AqgZZjsYtWaulxzaBXo1Yr0sdUhSK0PJtqtMmJE2Q8nmOwpjx6XhO8MZxg
aRV4ktx5HyNchWKr52CcZh3y5xXxh6YUlf86k8kuN/exBzkAM581U66KP8fMFMre
pM2Z5IDm43VvHGVOa4shAmR9jIjqSXOrvgEfg2ys78aKe/fSu3GfR7lMVPD0ZKX4
lqXTCo3+4Xd7N+uPxPcEkOX2jevYdXRoHhcxH/++mSoNgV9pj/dGiBkDKUM/WOhZ
VZ9uvmDMEvprjSOlYFACkD/TNhW/O4Zi09snENWX3wDAU/u2VlySjz732YBF438q
JOycw/36tKCZlDlTorGhzODpxx9bSDJ7w7CsetB19lVoe0zEJY/bEHLxy9QA527g
1TGgzvIvC48l69WJTv1CLIiFcqEs4jgB3ynC/TPL/HpzBldicVVMddn5cZqkJOO8
9qTVgBckOmoBeLDSSKsURwXI9BQtSdfG9PpaRt2GPXUW5p7ipHjsI+4wEXTrOylu
hjAqNyQU6VSX0D6woKyUHVFkapTDnExtGkY+3M7NAYQ=
=chX+
-----END PGP PUBLIC KEY BLOCK-----
07070100000025000081A400000000000000000000000161F953D70000039C000000000000000000000000000000000000001A00000000b4-0.8.0+2/patatt/COPYINGThe MIT-Zero License

Copyright (c) 2021 by the Linux Foundation

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
07070100000026000081A400000000000000000000000161F953D70000058D000000000000000000000000000000000000001600000000b4-0.8.0+2/patatt/DCODeveloper Certificate of Origin
Version 1.1

Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
1 Letterman Drive
Suite D4700
San Francisco, CA, 94129

Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.


Developer's Certificate of Origin 1.1

By making a contribution to this project, I certify that:

(a) The contribution was created in whole or in part by me and I
    have the right to submit it under the open source license
    indicated in the file; or

(b) The contribution is based upon previous work that, to the best
    of my knowledge, is covered under an appropriate open source
    license and I have the right under that license to submit that
    work with modifications, whether created in whole or in part
    by me, under the same open source license (unless I am
    permitted to submit under a different license), as indicated
    in the file; or

(c) The contribution was provided directly to me by some other
    person who certified (a), (b) or (c) and I have not modified
    it.

(d) I understand and agree that this project and the contribution
    are public and that a record of the contribution (including all
    personal information I submit with it, including my sign-off) is
    maintained indefinitely and may be redistributed consistent with
    this project or the open source license(s) involved.
07070100000027000081A400000000000000000000000161F953D700000041000000000000000000000000000000000000001E00000000b4-0.8.0+2/patatt/MANIFEST.ininclude COPYING
include DCO
include README.rst
include man/*.rst
07070100000028000081A400000000000000000000000161F953D700005141000000000000000000000000000000000000001D00000000b4-0.8.0+2/patatt/README.rstpatatt: cryptographic patch attestation for the masses
======================================================

This utility allows an easy way to add end-to-end cryptographic
attestation to patches sent via mail. It does so by adapting the DKIM
email signature standard to include cryptographic signatures via the
X-Developer-Signature email header.

If your project workflow doesn't use patches sent via email, then you
don't need this and should simply start signing your tags and commits.

Basic concepts
--------------
DKIM is a widely adopted standard for domain-level attestation of email
messages. It works by hashing the message body and certain individual
headers, and then creating a cryptographic signature of the resulting
hash. The receiving side obtains the public key of the sending domain
from its DNS record and checks the signature and header/body hashes. If
the signature verifies and the resulting hashes are identical, then
there is a high degree of assurance that neither the body of the message
nor any of the signed headers were modified in transit.

This utility uses the exact same DKIM standard to hash the headers and
the body of the patch message, but uses a different set of fields and
canonicalization routines:

- the d= field is not used (no domain signatures involved)
- the q= field is not used (key lookup is left to the client)
- the c= field is not used (see below for canonicalization)
- the i= field is optional, but MUST be the canonical email address of
  the sender, if not the same as the From: field

Canonicalization
~~~~~~~~~~~~~~~~
Patatt uses the "relaxed/simple" canonicalization as defined by the DKIM
standard, but the message is first parsed by the "git-mailinfo" command
in order to achieve the following:

- normalize any content-transfer-encoding modifications (convert back
  from base64/quoted-printable/etc into 8-bit)
- use any encountered in-body From: and Subject: headers to
  rewrite the outer message headers
- perform the subject-line normalization in order to strip content not
  considered by git-am when applying the patch (i.e. drop [PATCH .*] and
  other bracketed prefix content)

To achieve this, the message is passed through git-mailinfo with the
following flags::

    cat orig.msg | git mailinfo --encoding=utf-8 --no-scissors m p > i

Patatt then uses the data found in "i" to replace the From: and Subject:
headers of the original message, and concatenates "m" and "p" back
together to form the body of the message, which is then normalized using
CRLF line endings and the DKIM "simple" body canonicalization (any
trailing blank lines are removed).

Any other headers included in signing are modified using the "relaxed"
header canonicalization routines as defined in the DKIM RFC.

In other words, the body and some of the headers are normalized and
reconstituted using the "git-mailinfo" command, and then canonicalized
using DKIM's relaxed/simple standard.

Supported Signature Algorithms
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
DKIM standard mostly relies on RSA signatures, though RFC 8463 extends
it to support ED25519 keys as well. While it is possible to use any of
the DKIM-defined algorithms, patatt only supports the following
two signing/hashing schemes:

- ed25519-sha256: exactly as defined in RFC8463
- openpgp-sha256: uses OpenPGP to create the signature

Note: Since GnuPG supports multiple signing key algorithms,
openpgp-sha256 signatures can be done using EDDSA keys as well. However,
since OpenPGP output includes additional headers, the "ed25519-sha256"
and "openpgp-sha256" schemes are not interchangeable even when ed25519
keys are used in both cases.

In the future, patatt may add support for more algorithms, especially if
that allows incorporating TPM and U2F devices (e.g. for offloading
credential storage and crypto operations into a sandboxed environment).

X-Developer-Key header
~~~~~~~~~~~~~~~~~~~~~~
Patatt adds a separate ``X-Developer-Key:`` header with public key
information. It is merely informational and ISN'T and SHOULDN'T be used
for performing any kind of message validation (for obvious reasons). It
is included to make it easier for maintainers to obtain the
contributor's public key before performing whatever necessary
verification steps prior to its inclusion into their individual or
project-wide keyrings.

This also allows keeping a historical record of contributor keys via
list archive services such as lore.kernel.org and others.

Getting started as contributor
------------------------------
It is very easy to start signing your patches with patatt.

Installing
~~~~~~~~~~
You can install from pip::

    pip install --user patatt

Make sure your PATH includes $HOME/.local/bin.

Alternatively, you can clone this repository and symlink patatt.sh into
your path::

    cd bin
    ln -s ~/path/to/patatt/patatt.sh patatt

After this, you should be able to run ``patatt --help`` without
specifying the full path to the repository.

Using PGP
~~~~~~~~~
If you already have a PGP key, you can simply start using it to sign
patches. Add the following to your ~/.gitconfig::

    [patatt]
        signingkey = openpgp:KEYID

The KEYID should be the 16-character identifier of your key, for
example::

    [patatt]
        signingkey = openpgp:E63EDCA9329DD07E

Using ed25519
~~~~~~~~~~~~~
If you don't already have a PGP key, you can opt to generate and use a
new ed25519 key instead (see below for some considerations on pros and
cons of PGP vs ed25519 keys).

To generate a new keypair, run::

    patatt genkey

You will see an output similar to the following::

    Generating a new ed25519 keypair
    Wrote: /home/user/.local/share/patatt/private/20210505.key
    Wrote: /home/user/.local/share/patatt/public/20210505.pub
    Wrote: /home/user/.local/share/patatt/public/ed25519/example.org/user/default
    Add the following to your .git/config (or global ~/.gitconfig):
    ---
    [patatt]
        signingkey = ed25519:20210505
    ---
    Next, communicate the contents of the following file to the
    repository keyring maintainers for inclusion into the project:
    /home/user/.local/share/patatt/public/20210505.pub

Please make sure to back up your new private key, located in
``~/.local/share/patatt/private``. It is short enough to simply
print/write out for storing offline.

Next, just do as instructions say. If the project for which you are
contributing patches already uses patatt attestation, please work with
the project maintainers to add your public key to the repository. If
they aren't yet using patatt, just start signing your patches and
hopefully the project will start keeping its own keyring in the future.

Testing if it's working
~~~~~~~~~~~~~~~~~~~~~~~
To test if it's working::

    $ git format-patch -1 --stdout | patatt sign > /tmp/test

If you didn't get an error message, then the process was successful. You
can review /tmp/test to see that ``X-Developer-Signature`` and
``X-Developer-Key`` headers were successfully added.

You can now validate your own message::

    $ patatt validate /tmp/test

Automatic signing via the sendemail-validate hook
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If everything is working well, you can start automatically signing all
outgoing patches sent via git-send-email::

    $ echo 'patatt sign --hook "${1}"' > "$(git rev-parse --git-dir)/hooks/sendemail-validate"
    $ chmod a+x "$(git rev-parse --git-dir)/hooks/sendemail-validate"

PGP vs ed25519 keys considerations
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
If you don't already have a PGP key, you may wonder whether it makes
sense to create a new PGP key or start using standalone ed25519 keys.

Reasons to choose PGP:

- you can protect the PGP private key with a passphrase (gpg-agent will
  manage it for you so you only need to enter it once per session)
- you can move your PGP key to an OpenPGP-compliant smartcard to further
  protect your key from being leaked/stolen
- you can use PGP keys to sign git tags/commits, not just mailed patches

If you choose to create a new PGP key, you can use the following guide:
https://github.com/lfit/itpol/blob/master/protecting-code-integrity.md

Reasons to choose a standalone ed25519 key:

- much smaller signatures, especially compared to PGP RSA keys
- implements the DKIM ed25519 signing standard
- faster operation

If you choose ed25519 keys, you will need to make sure that PyNaCl is
installed (pip install should have already taken care of it for you).

Getting started as a project maintainer
---------------------------------------
Patatt implements basic signature validation, but it's a tool aimed
primarily at contributors. If you are processing mailed-in patches, then
you should look into using b4, which aims at making the entire process
easier. B4 properly recognizes X-Developer-Signature headers starting
with version 0.7.0 and uses the patatt library as well.

- https://pypi.org/project/b4/

That said, keyring management as discussed below applies both to patatt
and b4, so you can read on for an overview.

In-git pubkey management
~~~~~~~~~~~~~~~~~~~~~~~~
The trickiest part of all decentralized PKI schemes is not the crypto
itself, but public key distribution and management. PGP famously tried
to solve this problem by relying on cross-key certification and
keyservers, but the results were not encouraging.

On the other hand, within the context of git repositories, we already
have a suitable mechanism for distributing developer public keys, which
is the repository itself. Consider this:

- git is already decentralized and can be mirrored to multiple
  locations, avoiding any single points of failure
- all contents are already versioned and key additions/removals can be
  audited and "git blame'd"
- git commits themselves can be cryptographically signed, which allows a
  small subset of developers to act as "trusted introducers" to many
  other contributors (mimicking the "keysigning" process)

The idea of using git itself for keyring management was originally
suggested by the did:git project, though we do not currently implement
the proposed standard itself.

- https://github.com/dhuseby/did-git-spec/blob/master/did-git-spec.md

Keyring structure
~~~~~~~~~~~~~~~~~
The keyring is structured as follows::

    - dir: topdir (e.g. ".keys")
      |
      - dir: keytype (e.g. "ed25519" or "openpgp")
        |
        - dir: address-domainname (e.g. "example.org")
          |
          - dir: address-localpart (e.g. "developer")
            |
            - file: selector (e.g. "default")

The main reasoning behind this structure was to make it easy for
multiple project maintainers to manage keys without causing any
unnecessary git merge complications. Keeping all public keys in
individual files helps achieve this goal.

For example, let's take the following signature::

    From: Konstantin Ryabitsev <konstantin@linuxfoundation.org>
    X-Developer-Signature: v=1; a=ed25519-sha256; t=1620240207; l=2577;
     h=from:subject; bh=yqviDBgyf3/dQgHcBe3B7fTP39SuKnYInPBxnOiuGcA=;
     b=Xzd0287MvPE9NLX7xbQ6xnyrvqQOMK01mxHnrPmm1f6O7KKyogc8YH6IAlwIPdo+jk1CkdYYQsyZ
     sS0cJdX2B4uTmV9mxOe7hssjtjLcj5/NU9zAw6WJARybaNAKH8rv

The key would be found in the following subpath::

    .keys/ed25519/linuxfoundation.org/konstantin/default

If i= and s= fields are specified in the signature, as below::

    X-Developer-Signature: v=1; a=ed25519-sha256; t=1620244687; l=12645;
     i=mricon@kernel.org; s=20210505; h=from:subject;
     bh=KRCBcYiMdeoSX0l1XJ2YzP/uJhmym3Pi6CmbN9fs4aM=;
     b=sSY2vXzju7zU3KK4VQ5vFa5iPpDr3nrf221lnpq2+uuXmCODlAsgoqDmjKUBmbPtlY1Bcb2N0XZQ
     0KX+OShCAAwB5U1dtFtRnB/mgVibMxwl68A7OivGIVYe491yll5q

Then the path would reflect those parameters::

    .keys/ed25519/kernel.org/mricon/20210505

In the case of ed25519 keys, the contents of the file are just the
base64-encoded public key itself. For openpgp keys, the format should be
the ascii-armored public key export, for example obtained by using the
following command::

    gpg -a --export --export-options export-minimal keyid

Whose keys to add to the keyring
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
It does not really make sense to require cryptographic attestation for
patches submitted by occasional contributors. The only keys added to the
keyring should be those of the core maintainers who have push access to
the "canonical" repository location, plus the keys belonging to regular
contributors with a long-term ongoing relationship with the project.

Managing the keyring: small teams
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For smaller repositories with a handful of core maintainers, it makes
sense to keep the keyring in the main branch, together with all other
project files.

Managing the keyring: large teams
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
For large teams with thousands of regular contributors and teams of
subsystem maintainers (e.g. the Linux kernel), it does not make sense to
have a centrally managed keyring tracked in the main repository.
Instead, each subsystem maintainer team should manage their own keyring
in a separate ref of their own repository.

For example, to create a blank new ref called ``refs/meta/keyring``::

    git symbolic-ref HEAD refs/meta/keyring
    git reset --hard
    mkdir ed25519 openpgp

Individual public key files can then be added and committed following
the same structure as described above. Keeping the keyring outside the
regular development branch ensures that it doesn't interfere with
submitted pull requests or git-format-patch operations. Keeping the ref
under ``refs/meta/`` will hide it from most GUI interfaces, but if that
is not the goal, then it can be stored in ``refs/heads`` just like any
other branch.

To commit and push the files after adding them, regular git operations
should be used::

    git commit -asS
    git push origin HEAD:refs/meta/keyring
    # Switch back to the development environment
    git checkout regular-branch

To make changes to an existing keyring ref, a similar workflow can be
used::

    git fetch origin refs/meta/keyring
    # Verify that the commit is signed
    git verify-commit FETCH_HEAD
    git checkout FETCH_HEAD
    # make any changes to the keys
    git commit -asS
    git push origin HEAD:refs/meta/keyring
    git checkout regular-branch

Alternatively, if key additions/updates are frequent enough, the remote
ref can be checked out into its own workdir and set up for proper
remote tracking.

Telling patatt where to find the keyring(s)
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
To use the keyring with patatt or b4, just tell them which paths to
check, via the ``keyringsrc`` setting (can be specified multiple
times and will be checked in the listed order)::

    [patatt]
        # Empty ref means "use currently checked out ref in this repo"
        keyringsrc = ref:::.keys
        # Use a dedicated ref in this repo called refs/meta/keyring
        keyringsrc = ref::refs/meta/keyring:
        # Use a ref in a different repo
        keyringsrc = ref:~/path/to/another/repo:refs/heads/main:.keys
        # Use a regular dir on disk
        keyringsrc = ~/git/korg-pgpkeys/.keyring

For b4, use the same configuration under the ``[b4]`` section.

External and local-only keyrings
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Any path on disk can be used for a keyring location, and some will
always be checked just in case. The following locations are added by
default::

    ref:::.keys
    ref:::.local-keys
    ref::refs/meta/keyring:
    $XDG_DATA_HOME/patatt/public

The ":::" means "whatever ref is checked out in the current repo",
and $XDG_DATA_HOME usually points at $HOME/.local/share.

Getting support and contributing patches
----------------------------------------
Please send patches and support requests to tools@linux.kernel.org.

Submissions must be made under the terms of the Linux Foundation
certificate of contribution and should include a Signed-off-by: line.
Please read the DCO file for full legal definition of what that implies.

Frequently seen commentary
--------------------------
Why is this library even needed? Why not...

Why not simply PGP-sign all patches?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
PGP-signing patches causes important problems for reviewers. If a patch
is inline-signed, then this not only adds textual headers/footers, but
adds additional escaping in the protected body, converting all '^-'
sequences into '^- -', which corrupts patches.

MIME-signing is better, but has several other downsides:

- messages are now sent as multipart mime structures, which causes some
  tooling to no longer properly handle the patch content
- the signature attachments may be stripped/quarantined by email
  gateways that don't properly recognize OpenPGP mime signatures
- the From/Subject headers are rarely included into protected content,
  even though they are crucial parts of what ends up going into a git
  commit

These considerations have resulted in many projects specifically
requesting that patches should NOT be sent PGP-signed.

Why not just rely on proper code review?
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Code review is a crucial step of the development process and patatt does
not aim to replace it. However, there are several areas where the
process can be abused by malicious parties in the absence of end-to-end
cryptographic attestation:

1. A maintainer who struggles with code review volume may delegate parts
   of their duties to a submaintainer. If that person submits aggregated
   patch series to the maintainer after performing that work, there must
   be a mechanism to ensure that none of the reviewed patches have been
   modified between when they were reviewed by the trusted submaintainer
   and when the upstream developer applies them to their tree. Up to
   now, the only mechanism to ensure this was via signed pull requests
   -- with patatt this is now also possible with regular patch series.

2. It is important to ensure that what developer reviews is what
   actually ends up being applied to their git tree. Linux development
   process consists of collecting follow-up trailers (Tested-by,
   Reviewed-by, etc), so various tooling exists to aggregate these
   trailers and create the collated patch series containing all
   follow-up tags (see b4, patchwork, etc). Patatt signing provides a
   mechanism to ensure that what that developer reviewed and approved
   and what they applied to their tree is the exact same code and hasn't
   been maliciously modified in-between review and "git am" (e.g. by
   archival services such as lore.kernel.org, mail hosting providers,
   someone with access to the developer's inbox, etc).

3. An attacker may attempt to impersonate a well-known developer by
   submitting malicious code, perhaps with the hope that it receives
   less scrutiny and is accepted without rigorous code review. Even if
   this attempt is unsuccessful (and it most likely would be), this may
   cause unnecessary reputation damage to the person being impersonated.
   Cryptographic signatures (and lack thereof) will help the developer
   quickly establish that the attack was performed without their
   involvement.

Why not just rely on DKIM?
~~~~~~~~~~~~~~~~~~~~~~~~~~
DKIM standard is great, but there are several places where it falls a
bit short when it comes to patch attestation:

1. The signing is done by the mail gateways that may or may not be
   properly checking that the "From:" header matches the identity of the
   authenticated user. For example, a service that allows free account
   registration may not check that alice@example.org sends outgoing
   email with "bob@example.org" in the "From:" field, which would allow
   Alice to impersonate Bob and have the messages arrive with a valid
   DKIM signature.

2. DKIM is usually seen as merely a spam reduction mechanism, so there's
   usually little incentive for infrastructure administrators to be too
   strict about how they handle the private keys used for DKIM signing.
   Most likely, they are just stored on disk without a passphrase and
   accessible by the SMTP daemon.

3. DKIM's "relaxed" canonicalization standard for message bodies
   replaces all multiple whitespace characters with a single space
   before the body hash is signed. This poses significant problems for
   patches where whitespace is syntactically significant (Python,
   Makefiles, etc). A "return True" with a different indent will pass
   DKIM signature check and may introduce a serious security
   vulnerability.

4. DKIM doesn't prevent typosquatting attacks. For example, an attacker
   attempting to impersonate known.developer@companyname.com may send an
   email from known.developer@company-name.com or any other
   similar-looking address or domain, with valid DKIM signatures in
   every case.
07070100000029000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000001600000000b4-0.8.0+2/patatt/man0707010000002A000081A400000000000000000000000161F953D700000ACF000000000000000000000000000000000000001F00000000b4-0.8.0+2/patatt/man/patatt.5.\" Man page generated from reStructuredText.
.
.TH PATATT 5 "2021-05-21" "0.4.0" ""
.SH NAME
PATATT \- DKIM-like cryptographic patch attestation
.
.nr rst2man-indent-level 0
.
.de1 rstReportMargin
\\$1 \\n[an-margin]
level \\n[rst2man-indent-level]
level margin: \\n[rst2man-indent\\n[rst2man-indent-level]]
-
\\n[rst2man-indent0]
\\n[rst2man-indent1]
\\n[rst2man-indent2]
..
.de1 INDENT
.\" .rstReportMargin pre:
. RS \\$1
. nr rst2man-indent\\n[rst2man-indent-level] \\n[an-margin]
. nr rst2man-indent-level +1
.\" .rstReportMargin post:
..
.de UNINDENT
. RE
.\" indent \\n[an-margin]
.\" old: \\n[rst2man-indent\\n[rst2man-indent-level]]
.nr rst2man-indent-level -1
.\" new: \\n[rst2man-indent\\n[rst2man-indent-level]]
.in \\n[rst2man-indent\\n[rst2man-indent-level]]u
..
.SH SYNOPSIS
.sp
patatt {sign,validate,genkey} [options]
.SH DESCRIPTION
.sp
This tool allows cryptographically signing patches sent via email
by using DKIM\-like message headers. This approach is both effective and
doesn\(aqt interfere with other code review tools the way inline or
detached PGP signatures do. For a full overview of core concepts and
considerations, please see README.
.sp
If you already have a PGP key configured for signing git tags or
commits, then you should be able to use patatt without any additional
configuration. Try running the following in any git repository:
.INDENT 0.0
.INDENT 3.5
.sp
.nf
.ft C
git format\-patch \-1 \-\-stdout | patatt sign
.ft P
.fi
.UNINDENT
.UNINDENT
.sp
If patatt is not finding your PGP key, try adding the following to your
~/.gitconfig:
.INDENT 0.0
.INDENT 3.5
.sp
.nf
.ft C
[user]
    signingkey = [yourkeyid]
.ft P
.fi
.UNINDENT
.UNINDENT
.sp
To find out your keyid, run \fBgpg \-\-list\-secret\-keys\fP\&. If you want to
use a specific subkey, you can specify the subkey ID with a \fB!\fP at the
end.
.SH USING AS A GIT HOOK
.sp
If you use \fBgit\-send\-email\fP for sending patches, then you can get
them automatically signed via the \fBsendemail\-validate\fP hook:
.INDENT 0.0
.INDENT 3.5
.sp
.nf
.ft C
$ echo \(aqpatatt sign \-\-hook "${1}"\(aq >> .git/hooks/sendemail\-validate
$ chmod a+x .git/hooks/sendemail\-validate
.ft P
.fi
.UNINDENT
.UNINDENT
.SH SUBCOMMANDS
.INDENT 0.0
.IP \(bu 2
\fIpatatt sign\fP: sign stdin or RFC2822 files passed as arguments
.IP \(bu 2
\fIpatatt validate\fP: basic validation for signed messages
.IP \(bu 2
\fIpatatt genkey\fP: generate a new ed25519 keypair
.UNINDENT
.sp
You can run \fBpatatt [subcommand] \-\-help\fP to see a summary of flags for
each subcommand.
.SH SUPPORT
.sp
Please email \fI\%tools@linux.kernel.org\fP with support requests.
.SH AUTHOR
mricon@kernel.org

License: MIT-0
.SH COPYRIGHT
The Linux Foundation and contributors
.\" Generated by docutils manpage writer.
.
0707010000002B000081A400000000000000000000000161F953D700000758000000000000000000000000000000000000002300000000b4-0.8.0+2/patatt/man/patatt.5.rstPATATT
======
-----------------------------------------
DKIM-like cryptographic patch attestation
-----------------------------------------

:Author:    mricon@kernel.org
:Date:      2021-05-21
:Copyright: The Linux Foundation and contributors
:License:   MIT-0
:Version:   0.4.0
:Manual section: 5

SYNOPSIS
--------
patatt {sign,validate,genkey} [options]

DESCRIPTION
-----------
This tool allows cryptographically signing patches sent via email
by using DKIM-like message headers. This approach is both effective and
doesn't interfere with other code review tools the way inline or
detached PGP signatures do. For a full overview of core concepts and
considerations, please see README.

If you already have a PGP key configured for signing git tags or
commits, then you should be able to use patatt without any additional
configuration. Try running the following in any git repository::

    git format-patch -1 --stdout | patatt sign

If patatt is not finding your PGP key, try adding the following to your
~/.gitconfig::

    [user]
        signingkey = [yourkeyid]

To find out your keyid, run ``gpg --list-secret-keys``. If you want to
use a specific subkey, you can specify the subkey ID with a ``!`` at the
end.

USING AS A GIT HOOK
-------------------
If you use ``git-send-email`` for sending patches, then you can get
them automatically signed via the ``sendemail-validate`` hook::

    $ echo 'patatt sign --hook "${1}"' >> .git/hooks/sendemail-validate
    $ chmod a+x .git/hooks/sendemail-validate

SUBCOMMANDS
-----------
* *patatt sign*: sign stdin or RFC2822 files passed as arguments
* *patatt validate*: basic validation for signed messages
* *patatt genkey*: generate a new ed25519 keypair

You can run ``patatt [subcommand] --help`` to see a summary of flags for
each subcommand.

SUPPORT
-------
Please email tools@linux.kernel.org with support requests.
0707010000002C000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000001900000000b4-0.8.0+2/patatt/patatt0707010000002D000081ED00000000000000000000000161F953D7000000F5000000000000000000000000000000000000001C00000000b4-0.8.0+2/patatt/patatt.sh#!/usr/bin/env bash
#
# Run patatt from a git checkout.
#

# Resolve the real location of this script (-e requires the path to exist).
# Quote the expansions so checkouts in paths containing spaces still work;
# unquoted ${BASH_SOURCE[0]} and ${REAL_SCRIPT} would undergo word splitting.
REAL_SCRIPT=$(realpath -e "${BASH_SOURCE[0]}")
SCRIPT_TOP="${SCRIPT_TOP:-$(dirname "${REAL_SCRIPT}")}"

# Make the checkout importable and run the in-tree module directly.
exec env PYTHONPATH="${SCRIPT_TOP}" python3 "${SCRIPT_TOP}/patatt/__init__.py" "${@}"
0707010000002E000081A400000000000000000000000161F953D70000ADA7000000000000000000000000000000000000002500000000b4-0.8.0+2/patatt/patatt/__init__.py# -*- coding: utf-8 -*-
#
# Copyright (C) 2021 by The Linux Foundation
# SPDX-License-Identifier: MIT-0
#
__author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'

import sys
import os
import re

import hashlib
import base64
import subprocess
import logging
import tempfile
import time
import datetime

import urllib.parse
import email.utils
import email.header

from pathlib import Path
from typing import Optional, Tuple, Union
from io import BytesIO

# Module-level logger; handlers/levels are configured by the CLI entry point
logger = logging.getLogger(__name__)

# Overridable via [patatt] parameters
GPGBIN = 'gpg'

# Hardcoded defaults
# Header carrying the DKIM-like signature itself
DEVSIG_HDR = b'X-Developer-Signature'
# Informational header describing the key that was used
DEVKEY_HDR = b'X-Developer-Key'

# Result and severity levels (higher value = worse outcome)
RES_VALID = 0
RES_NOKEY = 8
RES_ERROR = 16
RES_BADSIG = 32

# Headers that must always be signed (REQ) vs. signed only when present (OPT)
REQ_HDRS = [b'from', b'subject']
OPT_HDRS = [b'message-id']

# Quick cache for key info
KEYCACHE = dict()

# My version
__VERSION__ = '0.4.6'
# Highest v= header format version this implementation understands
MAX_SUPPORTED_FORMAT_VERSION = 1


class SigningError(Exception):
    """Raised when a message cannot be signed; .errors carries tool output lines."""

    def __init__(self, message: str, errors: Optional[list] = None):
        self.errors = errors
        super().__init__(message)


class ConfigurationError(Exception):
    """Raised for invalid patatt configuration; .errors carries extra detail lines."""

    def __init__(self, message: str, errors: Optional[list] = None):
        self.errors = errors
        super().__init__(message)


class ValidationError(Exception):
    """Base error for signature validation failures; .errors carries detail lines."""

    def __init__(self, message: str, errors: Optional[list] = None):
        self.errors = errors
        super().__init__(message)


class NoKeyError(ValidationError):
    # Raised when no matching public key could be found for the identity/selector.
    def __init__(self, message: str, errors: Optional[list] = None):
        super().__init__(message)
        self.errors = errors


class BodyValidationError(ValidationError):
    # Raised when the body hash (bh=) does not match the message payload.
    def __init__(self, message: str, errors: Optional[list] = None):
        super().__init__(message, errors)


class DevsigHeader:
    """One X-Developer-Signature header: parse, build, sign and validate.

    Stores the header's key=value fields in self.hdata (bytes values) and
    knows how to DKIM-canonicalize headers, hash the body, and produce or
    verify the b= signature via ed25519 (PyNaCl) or openpgp (gpg).
    """

    def __init__(self, hval: Optional[bytes] = None):
        """Parse an existing header value, or start a fresh v=1 header."""
        # Canonicalized "name:value\r\n" strings of the signed headers
        self._headervals = list()
        # base64 sha256 of the canonicalized body, set by set_body()
        self._body_hash = None
        # it doesn't need to be in any particular order,
        # but that's just anarchy, anarchy, I say!
        self._order = ['v', 'a', 't', 'l', 'i', 's', 'h', 'bh']
        self.hval = None
        self.hdata = dict()

        if hval:
            self.from_bytes(hval)
        else:
            self.hdata['v'] = b'1'

    def from_bytes(self, hval: bytes) -> None:
        """Parse a raw header value into self.hdata fields.

        Keeps the canonicalized original in self.hval (needed later to
        recompute the signed digest byte-for-byte in validate()).
        """
        self.hval = DevsigHeader._dkim_canonicalize_header(hval)
        # Strip all whitespace before splitting into key=value chunks
        hval = re.sub(rb'\s*', b'', self.hval)
        for chunk in hval.split(b';'):
            parts = chunk.split(b'=', 1)
            if len(parts) < 2:
                continue
            self.set_field(parts[0].decode(), parts[1])

    def get_field(self, field: str, decode: bool = False) -> Union[None, str, bytes]:
        """Return a field value (bytes by default, str when decode=True)."""
        value = self.hdata.get(field)
        if isinstance(value, bytes) and decode:
            return value.decode()
        return value

    def set_field(self, field: str, value: Union[None, str, bytes]) -> None:
        """Set a field; None deletes it (raises KeyError if not present)."""
        if value is None:
            del self.hdata[field]
            return
        if isinstance(value, str):
            value = value.encode()
        self.hdata[field] = value

    # do any git-mailinfo normalization prior to calling this
    def set_body(self, body: bytes, maxlen: Optional[int] = None) -> None:
        """Hash the (canonicalized) body; optionally trim to maxlen first.

        When maxlen is given, the l= field records how many bytes were
        actually hashed, per DKIM body-length semantics.
        """
        if maxlen:
            if maxlen > len(body):
                raise ValidationError('maxlen is larger than payload')
            if maxlen < len(body):
                body = body[:maxlen]

            # BUGFIX: bytes(int) creates a zero-filled buffer of that many
            # NUL bytes; we want the decimal length as ASCII bytes.
            self.hdata['l'] = str(len(body)).encode()

        hashed = hashlib.sha256()
        hashed.update(body)
        self._body_hash = base64.b64encode(hashed.digest())

    # do any git-mailinfo normalization prior to calling this
    def set_headers(self, headers: list, mode: str) -> None:
        """Select and canonicalize the headers covered by the signature.

        mode='sign' builds the h= list from REQ_HDRS plus any OPT_HDRS
        present; mode='validate' takes the h= list from the parsed header
        and checks it covers all REQ_HDRS.
        """
        parsed = list()
        allhdrs = set()
        # DKIM operates on headers in reverse order
        for header in reversed(headers):
            try:
                left, right = header.split(b':', 1)
                hname = left.strip().lower()
                parsed.append((hname, right))
                allhdrs.add(hname)
            except ValueError:
                continue

        reqset = set(REQ_HDRS)
        optset = set(OPT_HDRS)
        self._headervals = list()
        if mode == 'sign':
            # Make sure REQ_HDRS is a subset of allhdrs
            if not reqset.issubset(allhdrs):
                raise SigningError('The following required headers not present: %s'
                                   % (b', '.join(reqset.difference(allhdrs)).decode()))
            # Add optional headers that are actually present
            optpresent = list(allhdrs.intersection(optset))
            signlist = REQ_HDRS + sorted(optpresent)
            self.hdata['h'] = b':'.join(signlist)

        elif mode == 'validate':
            hfield = self.get_field('h')
            signlist = [x.strip() for x in hfield.split(b':')]
            # Make sure REQ_HEADERS are in this set
            if not reqset.issubset(set(signlist)):
                raise ValidationError('The following required headers not signed: %s'
                                      % (b', '.join(reqset.difference(set(signlist))).decode()))
        else:
            raise RuntimeError('Unknown set_header mode: %s' % mode)

        for shname in signlist:
            if shname not in allhdrs:
                # Per RFC:
                # Nonexistent header fields do not contribute to the signature computation (that is, they are
                # treated as the null input, including the header field name, the separating colon, the header field
                # value, and any CRLF terminator).
                continue
            at = 0
            for hname, rawval in list(parsed):
                if hname == shname:
                    self._headervals.append(hname + b':' + DevsigHeader._dkim_canonicalize_header(rawval))
                    parsed.pop(at)
                    break
                at += 1

    def sanity_check(self) -> None:
        """Ensure the a= field, body hash and header set were all prepared."""
        if 'a' not in self.hdata:
            raise RuntimeError('Must set "a" field first')
        if not self._body_hash:
            raise RuntimeError('Must use set_body first')
        if not self._headervals:
            raise RuntimeError('Must use set_headers first')

    def validate(self, keyinfo: Union[str, bytes, None]) -> Tuple[str, str]:
        """Verify this header's signature; return (signkey, signtime).

        Raises BodyValidationError on bh= mismatch, NoKeyError when no key
        is available, ValidationError on any other failure.
        """
        self.sanity_check()
        # Start by validating the body hash. If it fails to match, we can
        # bail early, before needing to do any signature validation.
        if self.get_field('bh') != self._body_hash:
            raise BodyValidationError('Body content validation failed')
        # Check that we have a b= field
        if not self.get_field('b'):
            raise RuntimeError('Missing "b=" value')
        # The signed digest covers everything up to and including "b="
        pts = self.hval.rsplit(b'b=', 1)
        dshdr = pts[0] + b'b='
        bdata = re.sub(rb'\s*', b'', pts[1])
        algo = self.get_field('a', decode=True)
        if algo.startswith('ed25519'):
            sdigest = DevsigHeader._validate_ed25519(bdata, keyinfo)
            signtime = self.get_field('t', decode=True)
            signkey = keyinfo
            if not signtime:
                raise ValidationError('t= field is required for ed25519 sigs')
        elif algo.startswith('openpgp'):
            sdigest, (good, valid, trusted, signkey, signtime) = DevsigHeader._validate_openpgp(bdata, keyinfo)
        else:
            raise ValidationError('Unknown algorithm: %s', algo)

        # Now we calculate our own digest
        hashed = hashlib.sha256()
        # Add in our _headervals first (they already have CRLF endings)
        hashed.update(b''.join(self._headervals))
        # and the devsig header now, without the trailing CRLF
        hashed.update(DEVSIG_HDR.lower() + b':' + dshdr)
        vdigest = hashed.digest()
        if sdigest != vdigest:
            raise ValidationError('Header validation failed')

        return signkey, signtime

    def sign(self, keyinfo: bytes, split: bool = True) -> Tuple[bytes, bytes]:
        """Produce the signed header value and public-key info.

        Returns (header_value, pkinfo); when split=True the b= value is
        wrapped for 78-column header folding.
        """
        self.sanity_check()
        self.set_field('bh', self._body_hash)
        algo = self.get_field('a', decode=True)
        hparts = list()
        for fn in self._order:
            fv = self.get_field(fn)
            if fv is not None:
                hparts.append(b'%s=%s' % (fn.encode(), fv))

        hparts.append(b'b=')
        dshval = b'; '.join(hparts)
        hashed = hashlib.sha256()
        # Add in our _headervals first (they already have CRLF endings)
        hashed.update(b''.join(self._headervals))
        # and ourselves now, without the trailing CRLF
        hashed.update(DEVSIG_HDR.lower() + b':' + dshval)
        digest = hashed.digest()

        if algo.startswith('ed25519'):
            bval, pkinfo = DevsigHeader._sign_ed25519(digest, keyinfo)
        elif algo.startswith('openpgp'):
            bval, pkinfo = DevsigHeader._sign_openpgp(digest, keyinfo)
        else:
            raise RuntimeError('Unknown a=%s' % algo)

        if split:
            return dshval + DevsigHeader.splitter(bval), pkinfo

        return dshval + bval, pkinfo

    @staticmethod
    def _sign_ed25519(payload: bytes, privkey: bytes) -> Tuple[bytes, bytes]:
        """Sign payload with a base64 ed25519 private key; return (sig, pubkey)."""
        global KEYCACHE
        try:
            from nacl.signing import SigningKey
            from nacl.encoding import Base64Encoder
        except ModuleNotFoundError:
            raise RuntimeError('This operation requires PyNaCl libraries')

        # Cache the derived verify key so repeated signs don't redo the work
        if privkey not in KEYCACHE:
            sk = SigningKey(privkey, encoder=Base64Encoder)
            vk = base64.b64encode(sk.verify_key.encode())
            KEYCACHE[privkey] = (sk, vk)
        else:
            sk, vk = KEYCACHE[privkey]

        bdata = sk.sign(payload, encoder=Base64Encoder)

        return bdata, vk

    @staticmethod
    def _validate_ed25519(sigdata: bytes, pubkey: bytes) -> bytes:
        """Verify an ed25519 signature; return the signed payload bytes."""
        try:
            from nacl.signing import VerifyKey
            from nacl.encoding import Base64Encoder
            from nacl.exceptions import BadSignatureError
        except ModuleNotFoundError:
            raise RuntimeError('This operation requires PyNaCl libraries')

        vk = VerifyKey(pubkey, encoder=Base64Encoder)
        try:
            return vk.verify(sigdata, encoder=Base64Encoder)
        except BadSignatureError:
            raise ValidationError('Failed to validate signature')

    @staticmethod
    def _sign_openpgp(payload: bytes, keyid: bytes) -> Tuple[bytes, bytes]:
        """Sign payload with gpg key keyid; return (b64 sig, key fingerprint)."""
        global KEYCACHE
        gpgargs = ['-s', '-u', keyid]
        ecode, out, err = gpg_run_command(gpgargs, payload)
        if ecode > 0:
            raise SigningError('Running gpg failed', errors=err.decode().split('\n'))
        bdata = base64.b64encode(out)
        # Now get the fingerprint of this keyid
        if keyid not in KEYCACHE:
            gpgargs = ['--with-colons', '--fingerprint', keyid]
            ecode, out, err = gpg_run_command(gpgargs)
            if ecode > 0:
                raise SigningError('Running gpg failed', errors=err.decode().split('\n'))
            pkid = None
            keyfp = None
            for line in out.split(b'\n'):
                if line.startswith(b'pub:'):
                    fields = line.split(b':')
                    pkid = fields[4]
                elif line.startswith(b'fpr:') and pkid:
                    fields = line.split(b':')
                    # NOTE(review): '> 0' assumes the short keyid is never a
                    # prefix of the fingerprint (it is its suffix) -- confirm
                    if fields[9].find(pkid) > 0:
                        keyfp = fields[9]
                        break
            KEYCACHE[keyid] = keyfp
        else:
            keyfp = KEYCACHE[keyid]

        return bdata, keyfp

    @staticmethod
    def _validate_openpgp(sigdata: bytes, pubkey: Optional[bytes]) -> Tuple[bytes, tuple]:
        """Verify a base64 PGP signature, optionally against a provided pubkey.

        Returns (signed payload, (good, valid, trusted, signkey, signtime)).
        """
        global KEYCACHE
        bsigdata = base64.b64decode(sigdata)
        vrfyargs = ['--verify', '--output', '-', '--status-fd=2']
        if pubkey:
            # Import the supplied key into a throwaway keyring so validation
            # does not depend on (or pollute) the user's own keyring.
            with tempfile.TemporaryFile(suffix='.patch-attest-poc') as temp_keyring:
                keyringargs = ['--no-default-keyring', f'--keyring={temp_keyring.name}']
                if pubkey in KEYCACHE:
                    logger.debug('Reusing cached keyring')
                    temp_keyring.write(KEYCACHE[pubkey])
                else:
                    logger.debug('Importing into new keyring')
                    gpgargs = keyringargs + ['--status-fd=1', '--import']
                    ecode, out, err = gpg_run_command(gpgargs, stdin=pubkey)
                    # look for IMPORT_OK
                    if out.find(b'[GNUPG:] IMPORT_OK') < 0:
                        raise ValidationError('Could not import GnuPG public key')
                    KEYCACHE[pubkey] = temp_keyring.read()
                gpgargs = keyringargs + vrfyargs
                ecode, out, err = gpg_run_command(gpgargs, stdin=bsigdata)

        else:
            logger.debug('Verifying using default keyring')
            ecode, out, err = gpg_run_command(vrfyargs, stdin=bsigdata)

        if ecode > 0:
            # BUGFIX: find() returns -1 (truthy) when the marker is absent,
            # so the bare "if err.find(...)" raised NoKeyError for every
            # failure; only raise it when NO_PUBKEY is actually present.
            if err.find(b'[GNUPG:] NO_PUBKEY ') >= 0:
                raise NoKeyError('No matching key found')
            raise ValidationError('Failed to validate PGP signature')

        good, valid, trusted, signkey, signtime = DevsigHeader._check_gpg_status(err)
        if good and valid:
            return out, (good, valid, trusted, signkey, signtime)

        raise ValidationError('Failed to validate PGP signature')

    @staticmethod
    def _check_gpg_status(status: bytes) -> Tuple[bool, bool, bool, str, str]:
        """Parse gpg --status-fd output into (good, valid, trusted, key, time)."""
        good = False
        valid = False
        trusted = False
        signtime = ''
        signkey = ''

        gs_matches = re.search(rb'^\[GNUPG:] GOODSIG ([0-9A-F]+)\s+(.*)$', status, flags=re.M)
        if gs_matches:
            good = True
        vs_matches = re.search(rb'^\[GNUPG:] VALIDSIG ([0-9A-F]+) (\d{4}-\d{2}-\d{2}) (\d+)', status, flags=re.M)
        if vs_matches:
            valid = True
            signkey = vs_matches.groups()[0].decode()
            signtime = vs_matches.groups()[2].decode()
        ts_matches = re.search(rb'^\[GNUPG:] TRUST_(FULLY|ULTIMATE)', status, flags=re.M)
        if ts_matches:
            trusted = True

        return good, valid, trusted, signkey, signtime

    @staticmethod
    def splitter(longstr: bytes, limit: int = 78) -> bytes:
        """Split a long value into space-joined chunks for header folding.

        The first chunk is 2 bytes shorter to leave room for the header name
        continuation indent.
        """
        splitstr = list()
        first = True
        while len(longstr) > limit:
            at = limit
            if first:
                first = False
                at -= 2
            splitstr.append(longstr[:at])
            longstr = longstr[at:]
        splitstr.append(longstr)
        return b' '.join(splitstr)

    @staticmethod
    def _dkim_canonicalize_header(hval: bytes) -> bytes:
        """Apply DKIM 'relaxed' canonicalization to a header value."""
        # Handle MIME encoded-word syntax or other types of header encoding if
        # present. The decode_header() function requires a str argument (not
        # bytes) so we must decode our bytes first, this is easy as RFC2822 (sec
        # 2.2) says header fields must be composed of US-ASCII characters. The
        # resulting string is re-encoded to allow further processing.
        if b'?q?' in hval:
            hval = hval.decode('ascii', errors='ignore')
            hval = str(email.header.make_header(email.header.decode_header(hval)))
            hval = hval.encode('utf-8')
        # We only do relaxed for headers
        #    o  Unfold all header field continuation lines as described in
        #       [RFC5322]; in particular, lines with terminators embedded in
        #       continued header field values (that is, CRLF sequences followed by
        #       WSP) MUST be interpreted without the CRLF.  Implementations MUST
        #       NOT remove the CRLF at the end of the header field value.
        hval = re.sub(rb'[\r\n]', b'', hval)
        #    o  Convert all sequences of one or more WSP characters to a single SP
        #       character.  WSP characters here include those before and after a
        #       line folding boundary.
        hval = re.sub(rb'\s+', b' ', hval)
        #    o  Delete all WSP characters at the end of each unfolded header field
        #       value.
        #    o  Delete any WSP characters remaining before and after the colon
        #       separating the header field name from the header field value.  The
        #       colon separator MUST be retained.
        hval = hval.strip() + b'\r\n'
        return hval


class PatattMessage:
    """A minimally-parsed RFC2822 message with patatt sign/validate support.

    Headers are kept as raw byte lines (folded continuations re-attached),
    the body as a single bytes blob. Canonicalization is delegated to
    git-mailinfo so signatures survive transit-level reformatting.
    """

    def __init__(self, msgdata: bytes):
        # Raw header lines (each still carries its line terminator)
        self.headers = list()
        self.body = b''
        # Line terminator detected from the headers/body separator line
        self.lf = b'\n'
        # True if an X-Developer-Signature header was seen during parsing
        self.signed = False

        # Lazily filled by git_canonicalize()
        self.canon_headers = None
        self.canon_body = None
        self.canon_identity = None

        # Lazily filled by get_sigs()
        self.sigs = None

        self.load_from_bytes(msgdata)

    def git_canonicalize(self):
        """Normalize body and headers via git-mailinfo (idempotent).

        Produces canon_body (CRLF line endings, trailing blank lines
        trimmed), canon_identity (the From address as seen by git), and
        canon_headers (headers with From/Subject replaced by mailinfo's
        normalized versions).
        """
        if self.canon_body is not None:
            return

        # Generate a new payload using m and p and canonicalize with \r\n endings,
        # trimming any excess blank lines ("simple" DKIM canonicalization).
        m, p, i = PatattMessage._get_git_mailinfo(b''.join(self.headers) + self.lf + self.body)
        self.canon_body = b''
        for line in re.sub(rb'[\r\n]*$', b'', m + p).split(b'\n'):
            self.canon_body += re.sub(rb'[\r\n]*$', b'', line) + b'\r\n'

        # Parse mailinfo's "Key: value" info output into a dict
        idata = dict()
        for line in re.sub(rb'[\r\n]*$', b'', i).split(b'\n'):
            left, right = line.split(b':', 1)
            idata[left.lower()] = right.strip()

        # Theoretically, we should always see an "Email" line
        self.canon_identity = idata.get(b'email', b'').decode()

        # Now substituting headers returned by mailinfo
        self.canon_headers = list()
        for header in self.headers:
            try:
                left, right = header.split(b':', 1)
                lleft = left.lower()
                if lleft == b'from':
                    right = b' ' + idata.get(b'author', b'') + b' <' + idata.get(b'email', b'') + b'>'
                elif lleft == b'subject':
                    right = b' ' + idata.get(b'subject', b'')
                self.canon_headers.append(left + b':' + right)
            except ValueError:
                # Header line without a colon -- keep it untouched
                self.canon_headers.append(header)

    def sign(self, algo: str, keyinfo: Union[str, bytes], identity: Optional[str], selector: Optional[str]) -> None:
        """Sign this message in place, appending signature and key headers.

        algo is 'ed25519' or 'openpgp'; keyinfo is the private key material
        or gpg keyid; identity/selector become the i=/s= fields.
        """
        # Remove any devsig headers
        for header in list(self.headers):
            if header.startswith(DEVSIG_HDR) or header.startswith(DEVKEY_HDR):
                self.headers.remove(header)
        self.git_canonicalize()
        ds = DevsigHeader()
        ds.set_headers(self.canon_headers, mode='sign')
        ds.set_body(self.canon_body)
        ds.set_field('l', str(len(self.canon_body)))
        if identity and identity != self.canon_identity:
            ds.set_field('i', identity)
        if selector:
            ds.set_field('s', selector)

        if algo not in ('ed25519', 'openpgp'):
            raise SigningError('Unsupported algorithm: %s' % algo)

        ds.set_field('a', '%s-sha256' % algo)
        if algo == 'ed25519':
            # Set signing time for ed25519 sigs
            ds.set_field('t', str(int(time.time())))
        hv, pkinfo = ds.sign(keyinfo)

        dshdr = email.header.make_header([(DEVSIG_HDR + b': ' + hv, 'us-ascii')], maxlinelen=78)
        self.headers.append(dshdr.encode().encode() + self.lf)

        # Make informational header about the key used
        # NOTE(review): identity.encode() assumes identity is never None
        # here -- callers appear to always supply it; verify against cmd_sign
        idata = [
            b'i=%s' % identity.encode(),
            b'a=%s' % algo.encode(),
        ]
        if algo == 'openpgp':
            idata.append(b'fpr=%s' % pkinfo)
        else:
            idata.append(b'pk=%s' % pkinfo)

        dkhdr = email.header.make_header([(DEVKEY_HDR + b': ' + b'; '.join(idata), 'us-ascii')], maxlinelen=78)
        self.headers.append(dkhdr.encode().encode() + self.lf)

    def validate(self, identity: str, pkey: Union[bytes, str, None], trim_body: bool = False) -> str:
        """Validate the signature matching identity; return (signkey, signtime).

        trim_body honors the l= field and only hashes that many body bytes.
        Raises ValidationError when no signature matches or checks fail.
        """
        vds = None
        for ds in self.sigs:
            if ds.get_field('i', decode=True) == identity:
                vds = ds
                break
        if vds is None:
            raise ValidationError('No signatures matching identity %s' % identity)

        self.git_canonicalize()
        vds.set_headers(self.canon_headers, mode='validate')

        if trim_body:
            lfield = vds.get_field('l')
            if lfield:
                try:
                    maxlen = int(lfield)
                    vds.set_body(self.canon_body, maxlen=maxlen)
                except ValueError:
                    # Unparseable l= field -- fall back to hashing everything
                    vds.set_body(self.canon_body)
        else:
            vds.set_body(self.canon_body)

        return vds.validate(pkey)

    def as_bytes(self):
        """Reassemble the (possibly re-signed) message as raw bytes."""
        return b''.join(self.headers) + self.lf + self.body

    def as_string(self, encoding='utf-8'):
        """Reassemble the message and decode it to str."""
        return self.as_bytes().decode(encoding)

    def load_from_bytes(self, msgdata: bytes) -> None:
        """Split raw message bytes into header lines and body.

        Raises RuntimeError when the input is not a parseable RFC2822
        message (no headers, no body, or a continuation with no header).
        """
        # We use simplest parsing -- using Python's email module would be overkill
        ldshn = DEVSIG_HDR.lower()
        with BytesIO(msgdata) as fh:
            while True:
                line = fh.readline()
                if not len(line):
                    break

                # First blank line separates headers from body
                if not len(line.strip()):
                    self.lf = line
                    self.body = fh.read()
                    break

                # is it a wrapped header?
                # (indexing bytes yields an int, so both str and int forms
                # of TAB/SPACE are listed for safety)
                if line[0] in ("\x09", "\x20", 0x09, 0x20):
                    if not len(self.headers):
                        raise RuntimeError('Not a valid RFC2822 message')
                    # attach it to the previous header
                    self.headers[-1] += line
                    continue
                # Is it a signature header?
                if line.lower().startswith(ldshn):
                    self.signed = True
                self.headers.append(line)

        if not len(self.headers) or not len(self.body):
            raise RuntimeError('Not a valid RFC2822 message')

    def get_sigs(self) -> list:
        """Return DevsigHeader objects for all signature headers (cached).

        Signatures lacking an explicit i= field inherit the From address.
        """
        if self.sigs is not None:
            return self.sigs

        ldshn = DEVSIG_HDR.lower()
        self.sigs = list()
        from_id = None

        for header in self.headers:
            try:
                left, right = header.split(b':', 1)
                hn = left.strip().lower()
                hv = right
                if hn == ldshn:
                    self.sigs.append(DevsigHeader(hv))
                elif hn == b'from':
                    parts = email.utils.parseaddr(hv.decode().strip())
                    from_id = parts[1]
            except ValueError:
                raise RuntimeError('Error parsing headers')

        if from_id:
            for ds in self.sigs:
                if 'i' not in ds.hdata:
                    ds.set_field('i', from_id)

        return self.sigs

    @staticmethod
    def _get_git_mailinfo(payload: bytes) -> Tuple[bytes, bytes, bytes]:
        """Run git-mailinfo on payload; return (message, patch, info) bytes."""
        with tempfile.TemporaryDirectory(suffix='.git-mailinfo') as td:
            mf = os.path.join(td, 'm')
            pf = os.path.join(td, 'p')
            cmdargs = ['git', 'mailinfo', '--encoding=utf-8', '--no-scissors', mf, pf]
            ecode, i, err = _run_command(cmdargs, stdin=payload)
            if ecode > 0:
                logger.debug('FAILED  : Failed running git-mailinfo:')
                logger.debug(err.decode())
                raise RuntimeError('Failed to run git-mailinfo: %s' % err.decode())

            with open(mf, 'rb') as mfh:
                m = mfh.read()
            with open(pf, 'rb') as pfh:
                p = pfh.read()
            return m, p, i


def get_data_dir():
    """Return the patatt data directory, creating it if necessary.

    Honors $XDG_DATA_HOME when set; otherwise falls back to
    ~/.local/share per the XDG base directory convention.
    """
    datahome = os.environ.get('XDG_DATA_HOME')
    if datahome is None:
        datahome = os.path.join(str(Path.home()), '.local', 'share')
    datadir = os.path.join(datahome, 'patatt')
    Path(datadir).mkdir(parents=True, exist_ok=True)
    return datadir


def _run_command(cmdargs: list, stdin: bytes = None, env: Optional[dict] = None) -> Tuple[int, bytes, bytes]:
    """Execute an external command and return (returncode, stdout, stderr)."""
    logger.debug('Running %s', ' '.join(cmdargs))
    proc = subprocess.Popen(cmdargs, stdout=subprocess.PIPE, stdin=subprocess.PIPE,
                            stderr=subprocess.PIPE, env=env)
    stdout, stderr = proc.communicate(input=stdin)
    return proc.returncode, stdout, stderr


def git_run_command(gitdir: Optional[str], args: list, stdin: Optional[bytes] = None,
                    env: Optional[dict] = None) -> Tuple[int, bytes, bytes]:
    """Run git (optionally against an explicit --git-dir) and return (ecode, out, err)."""
    prefix = ['git', '--no-pager']
    if gitdir:
        prefix = ['git', '--git-dir', gitdir, '--no-pager']
    return _run_command(prefix + args, stdin=stdin, env=env)


def get_config_from_git(regexp: str, section: Optional[str] = None, defaults: Optional[dict] = None,
                        multivals: Optional[list] = None):
    """Read matching git-config values into a dict.

    :param regexp: regexp handed to ``git config -z --get-regexp``
    :param section: when set, only keys inside that subsection are kept;
        when unset, keys inside any subsection are skipped
    :param defaults: starting values; never mutated (a copy is returned)
    :param multivals: keys whose values are accumulated into lists
    :returns: dict of config key -> value (or list of values)
    """
    if multivals is None:
        multivals = list()

    args = ['config', '-z', '--get-regexp', regexp]
    ecode, out, err = git_run_command(None, args)
    if defaults is None:
        defaults = dict()

    if not len(out):
        # Return a copy so the caller's dict is never handed back for mutation
        return dict(defaults)

    # BUGFIX: previously this aliased the caller's defaults dict and
    # mutated it in place; work on a copy instead.
    gitconfig = dict(defaults)
    out = out.decode()

    for line in out.split('\x00'):
        if not line:
            continue
        try:
            # BUGFIX: the split is now inside the try block, so a value-less
            # entry (key with no newline) is skipped instead of raising an
            # uncaught ValueError.
            key, value = line.split('\n', 1)
            chunks = key.split('.')
            # Drop the starting part (e.g. "patatt")
            chunks.pop(0)
            cfgkey = chunks.pop(-1).lower()
            if len(chunks):
                if not section:
                    # Ignore it
                    continue
                # We're in a subsection
                sname = '.'.join(chunks)
                if sname != section:
                    # Not our section
                    continue
            elif section:
                # We want config from a subsection specifically
                continue

            if cfgkey in multivals:
                if cfgkey not in gitconfig:
                    gitconfig[cfgkey] = list()
                gitconfig[cfgkey].append(value)
            else:
                gitconfig[cfgkey] = value
        except ValueError:
            logger.debug('Ignoring git config entry %s', line)

    return gitconfig


def gpg_run_command(cmdargs: list, stdin: bytes = None) -> Tuple[int, bytes, bytes]:
    """Run gpg in batch mode with safe defaults; return (ecode, stdout, stderr)."""
    fullargs = [GPGBIN, '--batch', '--no-auto-key-retrieve', '--no-auto-check-trustdb']
    fullargs.extend(cmdargs)
    return _run_command(fullargs, stdin)


def get_git_toplevel(gitdir: str = None) -> str:
    """Return the working-tree top-level directory, or '' when not in a repo."""
    cmdargs = ['git']
    if gitdir:
        cmdargs.extend(['--git-dir', gitdir])
    cmdargs.extend(['rev-parse', '--show-toplevel'])
    ecode, out, err = _run_command(cmdargs)
    return out.decode().strip() if ecode == 0 else ''


def make_pkey_path(keytype: str, identity: str, selector: str) -> str:
    """Build the relative lookup path keytype/domain/local/selector.

    The identity must contain both local and domain parts; domain, local
    and selector are lowercased, and every component is urlencoded so
    untrusted input cannot perform path traversal.
    """
    parts = identity.split('@', 1)
    if len(parts) != 2:
        raise ValidationError('identity must include both local and domain parts')
    # urlencode all potentially untrusted bits to make sure nobody tries path-based badness
    return os.path.join(urllib.parse.quote_plus(keytype),
                        urllib.parse.quote_plus(parts[1].lower()),
                        urllib.parse.quote_plus(parts[0].lower()),
                        urllib.parse.quote_plus(selector.lower()))


def get_public_key(source: str, keytype: str, identity: str, selector: str) -> Tuple[bytes, str]:
    """Locate and load a public key for identity/selector from source.

    source is either a 'ref:<repo>:<ref>:<subpath>' git reference or a
    plain directory path (~ and $VARS are expanded). Returns (key bytes,
    human-readable key source). Raises KeyError when no key is found and
    ConfigurationError for a malformed ref: source.
    """
    keypath = make_pkey_path(keytype, identity, selector)
    logger.debug('Looking for %s in %s', keypath, source)

    # ref:refs/heads/someref:in-repo/path
    if source.startswith('ref:'):
        # split by :
        parts = source.split(':', 4)
        if len(parts) < 4:
            raise ConfigurationError('Invalid ref, must have at least 3 colons: %s' % source)
        gitrepo = parts[1]
        gitref = parts[2]
        gitsub = parts[3]
        if not gitrepo:
            # Empty repo part means "the repository we are currently in"
            gitrepo = get_git_toplevel()
        if not gitrepo:
            raise KeyError('Not in a git tree, so cannot use a ref:: source')

        gitrepo = os.path.expanduser(gitrepo)
        if gitrepo.find('$') >= 0:
            gitrepo = os.path.expandvars(gitrepo)
        # Support both working-tree checkouts and bare repositories
        if os.path.isdir(os.path.join(gitrepo, '.git')):
            gittop = os.path.join(gitrepo, '.git')
        else:
            gittop = gitrepo

        # it could omit the refspec, meaning "whatever the current ref"
        # grab the key from a fully ref'ed path
        subpath = os.path.join(gitsub, keypath)

        if not gitref:
            # What is our current ref?
            cmdargs = ['symbolic-ref', 'HEAD']
            ecode, out, err = git_run_command(gittop, cmdargs)
            if ecode == 0:
                gitref = out.decode().strip()
        if not gitref:
            raise KeyError('Could not figure out current ref in %s' % gittop)

        keysrc = f'{gitref}:{subpath}'
        cmdargs = ['show', keysrc]
        ecode, out, err = git_run_command(gittop, cmdargs)
        if ecode == 0:
            # Handle one level of symlinks
            # (a single-line blob containing a '/' is treated as a link target)
            if out.find(b'\n') < 0 < out.find(b'/'):
                # Check this path as well
                linktgt = os.path.normpath(os.path.join(os.path.dirname(subpath), out.decode()))
                keysrc = f'{gitref}:{linktgt}'
                cmdargs = ['show', keysrc]
                ecode, out, err = git_run_command(gittop, cmdargs)
                if ecode == 0:
                    logger.debug('KEYSRC  : %s (symlinked)', keysrc)
                    return out, 'ref:%s:%s' % (gittop, keysrc)
            logger.debug('KEYSRC  : %s', keysrc)
            return out, 'ref:%s:%s' % (gittop, keysrc)

        # Does it exist on disk but hasn't been committed yet?
        fullpath = os.path.join(gitrepo, subpath)
        if os.path.exists(fullpath):
            with open(fullpath, 'rb') as fh:
                logger.debug('KEYSRC  : %s', fullpath)
                return fh.read(), fullpath

        raise KeyError('Could not find %s in %s:%s' % (subpath, gittop, gitref))

    # It's a disk path, then
    # Expand ~ and env vars
    source = os.path.expanduser(source)
    if source.find('$') >= 0:
        source = os.path.expandvars(source)
    fullpath = os.path.join(source, keypath)
    if os.path.exists(fullpath):
        with open(fullpath, 'rb') as fh:
            logger.debug('Loaded key from %s', fullpath)
            return fh.read(), fullpath

    raise KeyError('Could not find %s' % fullpath)


def _load_messages(cmdargs) -> dict:
    """Collect the message payloads to operate on.

    When stdin is not a terminal, the piped data becomes a single message
    keyed by '-'.  Otherwise, each file named in cmdargs.msgfile is read
    in binary mode and keyed by its filename.

    :param cmdargs: parsed argparse namespace (only .msgfile is used)
    :returns: mapping of source name -> raw message bytes
    :raises RuntimeError: when there is neither piped input nor any files
    """
    import sys
    if not sys.stdin.isatty():
        # Piped input takes precedence over any filenames
        return {'-': sys.stdin.buffer.read()}
    if len(cmdargs.msgfile):
        # Load all message from the files passed to make sure they all parse correctly
        loaded = dict()
        for fname in cmdargs.msgfile:
            with open(fname, 'rb') as fh:
                loaded[fname] = fh.read()
        return loaded
    logger.critical('E: Pipe a message to sign or pass filenames with individual messages')
    raise RuntimeError('Nothing to do')


def sign_message(msgdata: bytes, algo: str, keyinfo: Union[str, bytes],
                 identity: Optional[str], selector: Optional[str]) -> bytes:
    """Sign a message and return it with the signature headers added.

    Convenience wrapper: parses msgdata into a PatattMessage, signs it
    using the given algorithm, key material, identity and selector, then
    serializes the signed message back to bytes.
    """
    signed = PatattMessage(msgdata)
    signed.sign(algo, keyinfo, identity=identity, selector=selector)
    return signed.as_bytes()


def cmd_sign(cmdargs, config: dict) -> None:
    """Entry point for 'patatt sign': sign messages from stdin or files.

    Resolves the signing identity and key from config (falling back to
    git's user.email and user.signingkey), loads the messages, signs each
    one, and writes the result back — to stdout for piped input, or
    in-place for file arguments.  Exits the process with status 1 on any
    failure.
    """
    # Do we have the signingkey defined?
    usercfg = get_config_from_git(r'user\..*')
    if not config.get('identity') and usercfg.get('email'):
        # Use user.email
        config['identity'] = usercfg.get('email')
    if not config.get('signingkey'):
        if usercfg.get('signingkey'):
            logger.info('N: Using pgp key %s defined by user.signingkey', usercfg.get('signingkey'))
            logger.info('N: Override by setting patatt.signingkey')
            config['signingkey'] = 'openpgp:%s' % usercfg.get('signingkey')
        else:
            logger.critical('E: patatt.signingkey is not set')
            logger.critical('E: Perhaps you need to run genkey first?')
            sys.exit(1)

    try:
        messages = _load_messages(cmdargs)
    except IOError as ex:
        logger.critical('E: %s', ex)
        sys.exit(1)

    sk = config.get('signingkey')
    if sk.startswith('ed25519:'):
        algo = 'ed25519'
        # Everything after the 8-char 'ed25519:' prefix is either an
        # absolute key path or a key identifier to search for
        identifier = sk[8:]
        keysrc = None
        if identifier.startswith('/') and os.path.exists(identifier):
            keysrc = identifier
        else:
            # datadir/private/%s.key
            ddir = get_data_dir()
            skey = os.path.join(ddir, 'private', '%s.key' % identifier)
            if os.path.exists(skey):
                keysrc = skey
            else:
                # finally, try .git/%s.key
                gtdir = get_git_toplevel()
                if gtdir:
                    skey = os.path.join(gtdir, '.git', '%s.key' % identifier)
                    if os.path.exists(skey):
                        keysrc = skey

        if not keysrc:
            logger.critical('E: Could not find the key matching %s', identifier)
            sys.exit(1)

        logger.info('N: Using ed25519 key: %s', keysrc)
        with open(keysrc, 'r') as fh:
            keydata = fh.read()

    elif sk.startswith('openpgp:'):
        algo = 'openpgp'
        # The remainder after the 'openpgp:' prefix is the pgp key id itself
        keydata = sk[8:]
    else:
        logger.critical('E: Unknown key type: %s', sk)
        sys.exit(1)

    for fn, msgdata in messages.items():
        try:
            pm = PatattMessage(msgdata)
            pm.sign(algo, keydata, identity=config.get('identity'), selector=config.get('selector'))
            logger.debug('--- SIGNED MESSAGE STARTS ---')
            logger.debug(pm.as_string())
            if fn == '-':
                # Piped input: emit the signed message to stdout
                sys.stdout.buffer.write(pm.as_bytes())
            else:
                # File input: overwrite the file with its signed version
                with open(fn, 'wb') as fh:
                    fh.write(pm.as_bytes())

                logger.critical('SIGN | %s', os.path.basename(fn))

        except SigningError as ex:
            logger.critical('E: %s', ex)
            sys.exit(1)

        except RuntimeError as ex:
            logger.critical('E: %s: %s' % (fn, ex))
            sys.exit(1)


def validate_message(msgdata: bytes, sources: list, trim_body: bool = False) -> list:
    """Validate all developer signatures found in a message.

    :param msgdata: raw RFC2822 message bytes
    :param sources: ordered list of keyring sources to search for matching
                    public keys (first match wins)
    :param trim_body: pass trim_body through to signature validation
    :returns: list of (result, identity, signtime, keysrc, algo, errors)
              tuples, one per signature; empty when the message carries
              no signatures at all
    """
    attestations = list()
    pm = PatattMessage(msgdata)
    if not pm.signed:
        logger.debug('message is not signed')
        return attestations

    # Find all identities for which we have public keys
    for ds in pm.get_sigs():
        errors = list()
        a = ds.get_field('a', decode=True)
        i = ds.get_field('i', decode=True)
        s = ds.get_field('s', decode=True)
        t = ds.get_field('t', decode=True)
        if not s:
            s = 'default'
        # Guard against a missing a= field (None would crash startswith);
        # also fixes the misspelled 'algorigthm' in the error message
        if a and a.startswith('ed25519'):
            algo = 'ed25519'
        elif a and a.startswith('openpgp'):
            algo = 'openpgp'
        else:
            errors.append('%s/%s Unknown algorithm: %s' % (i, s, a))
            attestations.append((RES_ERROR, i, t, None, a, errors))
            continue

        # Try each keyring source in order until one yields a key
        pkey = keysrc = None
        for source in sources:
            try:
                pkey, keysrc = get_public_key(source, algo, i, s)
                break
            except KeyError:
                pass

        if not pkey and algo == 'ed25519':
            # ed25519 has no fallback keyring, so a missing key is final
            errors.append('%s/%s no matching ed25519 key found' % (i, s))
            attestations.append((RES_NOKEY, i, t, None, algo, errors))
            continue

        try:
            signkey, signtime = pm.validate(i, pkey, trim_body=trim_body)
            if keysrc is None:
                # Default keyring used
                keysrc = '(default keyring)/%s' % signkey
            attestations.append((RES_VALID, i, signtime, keysrc, algo, errors))
        except NoKeyError:
            # Not in default keyring
            errors.append('%s/%s no matching openpgp key found' % (i, s))
            attestations.append((RES_NOKEY, i, t, None, algo, errors))
        except ValidationError:
            if keysrc is None:
                errors.append('failed to validate using default keyring')
            else:
                errors.append('failed to validate using %s' % keysrc)
            attestations.append((RES_BADSIG, i, t, keysrc, algo, errors))

    return attestations


def cmd_validate(cmdargs, config: dict):
    """Entry point for 'patatt validate': verify signatures on messages.

    Accepts a single mbox file, multiple individual message files, or a
    message piped via stdin.  Every message's signatures are checked
    against the configured keyring sources, results are logged, and the
    process exits with the highest severity code seen (0 = all valid).
    """
    import mailbox
    if len(cmdargs.msgfile) == 1:
        # Try to open as an mbox file
        try:
            mbox = mailbox.mbox(cmdargs.msgfile[0])
        except IOError as ex:
            logger.critical('E: %s', ex)
            sys.exit(1)

        messages = dict()
        for msg in mbox:
            subject = msg.get('Subject', 'No subject')
            messages[subject] = msg.as_bytes()
    else:
        try:
            messages = _load_messages(cmdargs)
        except IOError as ex:
            logger.critical('E: %s', ex)
            sys.exit(1)

    ddir = get_data_dir()
    pdir = os.path.join(ddir, 'public')
    # keyringsrc may be absent when invoked with a minimal config;
    # fall back to an empty list instead of crashing on None below
    sources = config.get('keyringsrc')
    if not sources:
        sources = list()

    if pdir not in sources:
        sources.append(pdir)

    if config.get('trimbody', 'no') == 'yes':
        trim_body = True
    else:
        trim_body = False

    highest_err = 0
    for fn, msgdata in messages.items():
        try:
            attestations = validate_message(msgdata, sources, trim_body=trim_body)
            for result, identity, signtime, keysrc, algo, errors in attestations:
                # Track the worst result for the final exit code
                if result > highest_err:
                    highest_err = result

                if result == RES_VALID:
                    logger.critical('  PASS | %s, %s', identity, fn)
                    if keysrc:
                        logger.info('       | key: %s', keysrc)
                    else:
                        logger.info('       | key: default GnuPG keyring')
                elif result <= RES_NOKEY:
                    logger.critical(' NOKEY | %s, %s', identity, fn)
                    for error in errors:
                        logger.critical('       | %s', error)
                elif result <= RES_ERROR:
                    logger.critical(' ERROR | %s, %s', identity, fn)
                    for error in errors:
                        logger.critical('       | %s', error)
                else:
                    logger.critical('BADSIG | %s, %s', identity, fn)
                    for error in errors:
                        logger.critical('       | %s', error)

        except RuntimeError as ex:
            highest_err = RES_ERROR
            logger.critical(' ERROR | err: %s | %s', ex, fn)

    sys.exit(highest_err)


def cmd_genkey(cmdargs, config: dict) -> None:
    """Entry point for 'patatt genkey': create a new ed25519 keypair.

    Writes the private key (mode 0600) and public key under the patatt
    data dir, copies the public key into the local keyring for the
    configured identity, and prints the git config snippet needed to
    start signing with the new key.  Requires PyNaCl.
    """
    try:
        from nacl.signing import SigningKey
    except ModuleNotFoundError:
        raise RuntimeError('This operation requires PyNaCl libraries')

    # Do we have the signingkey defined?
    usercfg = get_config_from_git(r'user\..*')
    if not config.get('identity'):
        if not usercfg.get('email'):
            logger.critical('This operation requires user.email to be set')
            sys.exit(1)
        # Use user.email
        config['identity'] = usercfg.get('email')

    # Default the key name to today's date, e.g. 20210505
    identifier = cmdargs.keyname
    if not identifier:
        identifier = datetime.datetime.today().strftime('%Y%m%d')

    ddir = get_data_dir()
    sdir = os.path.join(ddir, 'private')
    pdir = os.path.join(ddir, 'public')
    # Private dir is owner-only; public dir is world-readable
    if not os.path.exists(sdir):
        os.mkdir(sdir, mode=0o0700)
    if not os.path.exists(pdir):
        os.mkdir(pdir, mode=0o0755)
    skey = os.path.join(sdir, '%s.key' % identifier)
    pkey = os.path.join(pdir, '%s.pub' % identifier)
    # Do we have a key with this identifier already present?
    if os.path.exists(skey) and not cmdargs.force:
        logger.critical('Key already exists: %s', skey)
        logger.critical('Use a different -n or pass -f to overwrite it')
        raise RuntimeError('Key already exists')

    logger.critical('Generating a new ed25519 keypair')
    newkey = SigningKey.generate()

    # Make sure we write it as 0600
    def priv_opener(path, flags):
        return os.open(path, flags, 0o0600)

    with open(skey, 'wb', opener=priv_opener) as fh:
        fh.write(base64.b64encode(bytes(newkey)))
        logger.critical('Wrote: %s', skey)

    with open(pkey, 'wb') as fh:
        fh.write(base64.b64encode(newkey.verify_key.encode()))
        logger.critical('Wrote: %s', pkey)

    # Also copy it into our local keyring
    dpkey = os.path.join(pdir, make_pkey_path('ed25519', config.get('identity'), 'default'))
    Path(os.path.dirname(dpkey)).mkdir(parents=True, exist_ok=True)
    if not os.path.exists(dpkey):
        # First key for this identity also becomes the 'default' selector
        with open(dpkey, 'wb') as fh:
            fh.write(base64.b64encode(newkey.verify_key.encode()))
            logger.critical('Wrote: %s', dpkey)
    else:
        # A default already exists; store under the key's own selector name
        spkey = os.path.join(pdir, make_pkey_path('ed25519', config.get('identity'), identifier))
        with open(spkey, 'wb') as fh:
            fh.write(base64.b64encode(newkey.verify_key.encode()))
            logger.critical('Wrote: %s', spkey)

    logger.critical('Add the following to your .git/config (or global ~/.gitconfig):')
    logger.critical('---')
    if cmdargs.section:
        logger.critical('[patatt "%s"]', cmdargs.section)
    else:
        logger.critical('[patatt]')
    logger.critical('    signingkey = ed25519:%s', identifier)
    logger.critical('    selector = %s', identifier)
    logger.critical('---')
    logger.critical('Next, communicate the contents of the following file to the')
    logger.critical('repository keyring maintainers for inclusion into the project:')
    logger.critical(pkey)


def command() -> None:
    """Command-line entry point: parse args, set up logging, dispatch.

    Defines the 'sign', 'validate' and 'genkey' subcommands, reads
    patatt.* settings from git config (honoring -s/--section), appends
    the default in-repo keyring locations, and invokes the handler bound
    to the chosen subcommand.
    """
    import argparse
    # noinspection PyTypeChecker
    parser = argparse.ArgumentParser(
        prog='patatt',
        description='Cryptographically attest patches before sending out',
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument('-v', '--verbose', action='store_true', default=False,
                        help='Be a bit more verbose')
    parser.add_argument('-d', '--debug', action='store_true', default=False,
                        help='Show debugging output')
    parser.add_argument('-s', '--section', dest='section', default=None,
                        help='Use config section [patatt "sectionname"]')
    parser.add_argument('--version', action='version', version=__VERSION__)

    subparsers = parser.add_subparsers(help='sub-command help', dest='subcmd')

    sp_sign = subparsers.add_parser('sign', help='Cryptographically attest an RFC2822 message')
    sp_sign.add_argument('--hook', dest='hookmode', action='store_true', default=False,
                         help='Git hook mode')
    sp_sign.add_argument('msgfile', nargs='*', help='RFC2822 message files to sign')
    sp_sign.set_defaults(func=cmd_sign)

    sp_val = subparsers.add_parser('validate', help='Validate a devsig-signed message')
    sp_val.add_argument('msgfile', nargs='*', help='Individual signed message files to validate or an mbox')
    sp_val.set_defaults(func=cmd_validate)

    sp_gen = subparsers.add_parser('genkey', help='Generate a new ed25519 keypair')
    sp_gen.add_argument('-n', '--keyname', default=None,
                        help='Name to use for the key, e.g. "workstation", or "default"')
    sp_gen.add_argument('-f', '--force', action='store_true', default=False,
                        help='Overwrite any existing keys, if found')
    sp_gen.set_defaults(func=cmd_genkey)

    _args = parser.parse_args()

    # Logger itself stays at DEBUG; the handler level controls verbosity
    logger.setLevel(logging.DEBUG)

    ch = logging.StreamHandler()
    formatter = logging.Formatter('%(message)s')
    try:
        # Only the 'sign' subparser defines hookmode; when running as a
        # git hook, prefix output so its origin is clear
        if _args.hookmode:
            formatter = logging.Formatter('patatt: %(message)s')
    except AttributeError:
        pass
    ch.setFormatter(formatter)

    if _args.verbose:
        ch.setLevel(logging.INFO)
    elif _args.debug:
        ch.setLevel(logging.DEBUG)
    else:
        ch.setLevel(logging.CRITICAL)

    logger.addHandler(ch)
    config = get_config_from_git(r'patatt\..*', section=_args.section, multivals=['keyringsrc'])
    # Append some extra keyring locations
    if 'keyringsrc' not in config:
        config['keyringsrc'] = list()
    config['keyringsrc'] += ['ref:::.keys', 'ref:::.local-keys', 'ref::refs/meta/keyring:']
    logger.debug('config: %s', config)

    # No subcommand chosen: show usage and exit with an error
    if 'func' not in _args:
        parser.print_help()
        sys.exit(1)

    try:
        _args.func(_args, config)
    except RuntimeError:
        sys.exit(1)


# Allow running this module directly (python3 patatt/__init__.py) in
# addition to the installed console_script entry point.
if __name__ == '__main__':
    command()
0707010000002F000081A400000000000000000000000161F953D700000007000000000000000000000000000000000000002300000000b4-0.8.0+2/patatt/requirements.txtPyNaCl
07070100000030000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000001A00000000b4-0.8.0+2/patatt/samples07070100000031000081A400000000000000000000000161F953D7000003E8000000000000000000000000000000000000002D00000000b4-0.8.0+2/patatt/samples/ed25519-signed.txtFrom 82d3e4a03a72b787849fd406e985f3027fa04907 Mon Sep 17 00:00:00 2001
From: Konstantin Ryabitsev <konstantin@linuxfoundation.org>
Date: Wed, 5 May 2021 17:11:46 -0400
Subject: [PATCH] Specify subset of the world
X-Developer-Signature: v=1; a=ed25519-sha256; t=1620249230; l=403;
 i=mricon@kernel.org; s=20210505; h=from:subject;
 bh=aWNA6NFmS5xpRH5Gpy45nWiKCOnDOKHOYOV7Y6lyLcU=;
 b=6eJfjTMpYzbUgSeNZf3OqQDjzFVooz6WfaEitTMIpYOWLCXRU8qCR3cjUTmLET5S8prJVMypxnZl
 C1/hsoX8DWfyZmyquQSjCCNv2ISvZ8vEKCjXt2g0xmgK+XstajvB
X-Developer-Key: i=mricon@kernel.org; a=ed25519;
 pk=i+0Am6o59VU+dAfK4WhkCl56BrA+rY4cXlq3AbO5M8c=

We don't want to say hello to the *whole* world, do we? Just the
attested world, please.

Signed-off-by: Konstantin Ryabitsev <konstantin@linuxfoundation.org>
---
 hello.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hello.txt b/hello.txt
index 18249f3..977f79b 100644
--- a/hello.txt
+++ b/hello.txt
@@ -1 +1 @@
-Hello world.
+Hello attested world.
-- 
2.30.2

07070100000032000081A400000000000000000000000161F953D700000428000000000000000000000000000000000000002900000000b4-0.8.0+2/patatt/samples/pgp-signed.txtFrom 82d3e4a03a72b787849fd406e985f3027fa04907 Mon Sep 17 00:00:00 2001
From: Konstantin Ryabitsev <konstantin@linuxfoundation.org>
Date: Wed, 5 May 2021 17:11:46 -0400
Subject: [PATCH] Specify subset of the world
X-Developer-Signature: v=1; a=openpgp-sha256; l=403; h=from:subject;
 bh=aWNA6NFmS5xpRH5Gpy45nWiKCOnDOKHOYOV7Y6lyLcU=;
 b=owGbwMvMwCG27YjM47CUmTmMp9WSGBImc6WkxnPMSdRqXmTvUr/vDJNAxRutvTblG+aIdndskFKK
 9zDtKGVhEONgkBVTZCnbF7spqPChh1x6jynMHFYmkCEMXJwCMJH+Lob/MStfTr7/tPydwKmcytxN82
 0ObFx4Uf6ftazRnu0sKpsc3jH893rSXHbuxoXrL0SkeeN/75lzb/u39zrtrNMU9gVbqZfqcAEA
X-Developer-Key: i=konstantin@linuxfoundation.org; a=openpgp;
 fpr=DE0E66E32F1FDD0902666B96E63EDCA9329DD07E

We don't want to say hello to the *whole* world, do we? Just the
attested world, please.

Signed-off-by: Konstantin Ryabitsev <konstantin@linuxfoundation.org>
---
 hello.txt | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/hello.txt b/hello.txt
index 18249f3..977f79b 100644
--- a/hello.txt
+++ b/hello.txt
@@ -1 +1 @@
-Hello world.
+Hello attested world.
-- 
2.30.2

07070100000033000081ED00000000000000000000000161F953D7000001BA000000000000000000000000000000000000002A00000000b4-0.8.0+2/patatt/sendemail-validate-hook#!/usr/bin/env bash
# Prefer a system-installed patatt; otherwise assume this hook script is
# symlinked from inside a patatt git checkout and run it from source.
if which patatt>/dev/null 2>&1; then
    # We have it in path, so just execute it
    patatt sign --hook "${1}"
else
    # Assume we're symlinked into a git checkout
    # Resolve symlinks to locate the checkout directory holding this script
    REAL_SCRIPT=$(realpath -e ${BASH_SOURCE[0]})
    SCRIPT_TOP="${SCRIPT_TOP:-$(dirname ${REAL_SCRIPT})}"
    PATATT_TOP=$(realpath -e ${SCRIPT_TOP})
    # Run the module directly with PYTHONPATH pointed at the checkout
    exec env PYTHONPATH="${PATATT_TOP}" python3 "${PATATT_TOP}/patatt/__init__.py" sign --hook "${1}"
fi
07070100000034000081A400000000000000000000000161F953D70000057D000000000000000000000000000000000000001B00000000b4-0.8.0+2/patatt/setup.py#!/usr/bin/env python3

import os
import re
from setuptools import setup

# Utility function to read the README file.
# Used for the long_description.  It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...


def read(fname):
    """Return the text contents of fname, resolved relative to this script.

    Uses a context manager so the file handle is closed deterministically
    instead of leaking until garbage collection.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()


def find_version(source):
    """Extract the __VERSION__ string from the named source file.

    Raises RuntimeError when no version assignment can be located.
    """
    matched = re.search(r"^__VERSION__ = ['\"]([^'\"]*)['\"]", read(source), re.M)
    if not matched:
        raise RuntimeError("Unable to find version string.")
    return matched.group(1)


NAME = 'patatt'

setup(
    version=find_version('patatt/__init__.py'),
    url='https://git.kernel.org/pub/scm/utils/patatt/patatt.git/about/',
    name=NAME,
    description='A simple library to add cryptographic attestation to patches sent via email',
    author='Konstantin Ryabitsev',
    author_email='mricon@kernel.org',
    packages=['patatt'],
    license='MIT-0',
    long_description=read('README.rst'),
    long_description_content_type='text/x-rst',
    data_files = [('share/man/man5', ['man/patatt.5'])],
    keywords=['git', 'patches', 'attestation'],
    install_requires=[
        'pynacl',
    ],
    python_requires='>=3.6',
    entry_points={
        'console_scripts': [
            'patatt=patatt:command'
        ],
    },
)
07070100000035000081A400000000000000000000000161F953D700000077000000000000000000000000000000000000001C00000000b4-0.8.0+2/requirements.txtrequests~=2.25.0
# These are optional, needed for attestation features
dnspython~=2.1.0
dkimpy~=1.0.5
patatt>=0.4,<2.0
07070100000036000081A400000000000000000000000161F953D7000005FD000000000000000000000000000000000000001400000000b4-0.8.0+2/setup.py#!/usr/bin/env python3

import os
import re
from setuptools import setup

# Utility function to read the README file.
# Used for the long_description.  It's nice, because now 1) we have a top level
# README file and 2) it's easier to type in the README file than to put a raw
# string in below ...


def read(fname):
    """Return the text contents of fname, resolved relative to this script.

    Uses a context manager so the file handle is closed deterministically
    instead of leaking until garbage collection.
    """
    with open(os.path.join(os.path.dirname(__file__), fname)) as fh:
        return fh.read()


def find_version(source):
    """Extract the __VERSION__ string from the named source file.

    Raises RuntimeError when no version assignment can be located.
    """
    matched = re.search(r"^__VERSION__ = ['\"]([^'\"]*)['\"]", read(source), re.M)
    if not matched:
        raise RuntimeError("Unable to find version string.")
    return matched.group(1)


NAME = 'b4'

setup(
    version=find_version('b4/__init__.py'),
    url='https://git.kernel.org/pub/scm/utils/b4/b4.git/tree/README.rst',
    project_urls={
        'Community': 'https://lore.kernel.org/tools'
    },
    name=NAME,
    description='A tool to work with public-inbox and patch archives',
    author='Konstantin Ryabitsev',
    author_email='mricon@kernel.org',
    packages=['b4'],
    license='GPLv2+',
    long_description=read('man/b4.5.rst'),
    long_description_content_type='text/x-rst',
    data_files = [('share/man/man5', ['man/b4.5'])],
    keywords=['git', 'lore.kernel.org', 'patches'],
    install_requires=[
        'requests~=2.24',
        'dkimpy~=1.0',
        'dnspython~=2.0',
        'patatt>=0.4,<2.0',
    ],
    python_requires='>=3.6',
    entry_points={
        'console_scripts': [
            'b4=b4.command:cmd'
        ],
    },
)
07070100000037000041ED00000000000000000000000361F953D700000000000000000000000000000000000000000000001100000000b4-0.8.0+2/tests07070100000038000081A400000000000000000000000161F953D700000000000000000000000000000000000000000000001D00000000b4-0.8.0+2/tests/__init__.py07070100000039000041ED00000000000000000000000261F953D700000000000000000000000000000000000000000000001900000000b4-0.8.0+2/tests/samples0707010000003A000081A400000000000000000000000161F953D700000169000000000000000000000000000000000000002800000000b4-0.8.0+2/tests/samples/gpg-badsig.txt[GNUPG:] NEWSIG
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] BADSIG B6C41CE35664996C Konstantin Ryabitsev <konstantin@linuxfoundation.org>
0707010000003B000081A400000000000000000000000161F953D70000034F000000000000000000000000000000000000003600000000b4-0.8.0+2/tests/samples/gpg-good-invalid-notrust.txt[GNUPG:] NEWSIG
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] SIG_ID 5clUiMzlfE8KIyEu++mBk6I0Rnc 2021-06-09 1623274836
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] GOODSIG B6C41CE35664996C Konstantin Ryabitsev <konstantin@linuxfoundation.org>
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] TRUST_UNDEFINED 0 tofu
0707010000003C000081A400000000000000000000000161F953D7000003D7000000000000000000000000000000000000003400000000b4-0.8.0+2/tests/samples/gpg-good-valid-notrust.txt[GNUPG:] NEWSIG
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] SIG_ID 5clUiMzlfE8KIyEu++mBk6I0Rnc 2021-06-09 1623274836
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] GOODSIG B6C41CE35664996C Konstantin Ryabitsev <konstantin@linuxfoundation.org>
[GNUPG:] VALIDSIG 76BE5DB25271E1481E678C35B6C41CE35664996C 2021-06-09 1623274836 0 4 0 22 8 01 DE0E66E32F1FDD0902666B96E63EDCA9329DD07E
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] TRUST_UNDEFINED 0 tofu
0707010000003D000081A400000000000000000000000161F953D7000003D6000000000000000000000000000000000000003400000000b4-0.8.0+2/tests/samples/gpg-good-valid-trusted.txt[GNUPG:] NEWSIG
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] SIG_ID 5clUiMzlfE8KIyEu++mBk6I0Rnc 2021-06-09 1623274836
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] GOODSIG B6C41CE35664996C Konstantin Ryabitsev <konstantin@linuxfoundation.org>
[GNUPG:] VALIDSIG 76BE5DB25271E1481E678C35B6C41CE35664996C 2021-06-09 1623274836 0 4 0 22 8 01 DE0E66E32F1FDD0902666B96E63EDCA9329DD07E
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] KEYEXPIRED 1446574742
[GNUPG:] KEYEXPIRED 1525881230
[GNUPG:] KEY_CONSIDERED DE0E66E32F1FDD0902666B96E63EDCA9329DD07E 0
[GNUPG:] TRUST_ULTIMATE 0 tofu
0707010000003E000081A400000000000000000000000161F953D700000093000000000000000000000000000000000000002B00000000b4-0.8.0+2/tests/samples/gpg-no-pubkey.txt[GNUPG:] NEWSIG
[GNUPG:] ERRSIG B6C41CE35664996C 22 8 01 1623274836 9 76BE5DB25271E1481E678C35B6C41CE35664996C
[GNUPG:] NO_PUBKEY B6C41CE35664996C
0707010000003F000081A400000000000000000000000161F953D7000006D4000000000000000000000000000000000000002200000000b4-0.8.0+2/tests/test___init__.pyimport pytest  # noqa
import b4
import re
import os


@pytest.mark.parametrize('source,expected', [
    ('good-valid-trusted', (True, True, True, 'B6C41CE35664996C', '1623274836')),
    ('good-valid-notrust', (True, True, False, 'B6C41CE35664996C', '1623274836')),
    ('good-invalid-notrust', (True, False, False, 'B6C41CE35664996C', None)),
    ('badsig', (False, False, False, 'B6C41CE35664996C', None)),
    ('no-pubkey', (False, False, False, None, None)),
])
def test_check_gpg_status(source, expected):
    """Check b4.check_gpg_status against canned gnupg status samples.

    Each sample file under tests/samples/gpg-<source>.txt is parsed and
    the returned tuple compared with the expected parametrized value.
    """
    with open(f'tests/samples/gpg-{source}.txt', 'r') as fh:
        status = fh.read()
    assert b4.check_gpg_status(status) == expected


@pytest.mark.parametrize('source,regex,flags,ismbox', [
    (None, r'^From git@z ', 0, False),
    (None, r'\n\nFrom git@z ', 0, False),
])
def test_save_git_am_mbox(tmpdir, source, regex, flags, ismbox):
    """Verify that b4.save_git_am_mbox writes mbox-formatted output.

    Messages are loaded from a sample file (mbox or single message) or
    synthesized in-line, then saved via save_git_am_mbox and the output
    is matched against the given regex (mbox 'From git@z ' separators).
    """
    # NOTE: 're' is imported at module level; the redundant function-local
    # import that shadowed it has been removed.
    if source is not None:
        if ismbox:
            import mailbox
            mbx = mailbox.mbox(f'tests/samples/{source}.txt')
            msgs = list(mbx)
        else:
            import email
            with open(f'tests/samples/{source}.txt', 'rb') as fh:
                msg = email.message_from_binary_file(fh)
            msgs = [msg]
    else:
        # No sample given: synthesize three trivial messages
        import email.message
        msgs = list()
        for x in range(0, 3):
            msg = email.message.EmailMessage()
            msg.set_payload(f'Hello world {x}\n')
            msg['Subject'] = f'Hello world {x}'
            msg['From'] = f'Me{x} <me{x}@foo.bar>'
            msgs.append(msg)
    dest = os.path.join(tmpdir, 'out')
    with open(dest, 'w') as fh:
        b4.save_git_am_mbox(msgs, fh)
    with open(dest, 'r') as fh:
        res = fh.read()
    assert re.search(regex, res, flags=flags)
07070100000040000081A400000000000000000000000161F953D700000595000000000000000000000000000000000000002600000000b4-0.8.0+2/thanks-am-template.example# Lines starting with '#' will be removed
# You can have two different templates for responding to
# pull requests and for responding to patch series, though
# in reality the difference will probably be one word:
# "merged/pulled" vs. "applied".
# Keeping it short and sweet is preferred.
#
On ${sentdate}, ${fromname} wrote:
# quote will be limited to 5-6 lines, respecting paragraphs
${quote}

# You can also use ${branch} and ${treename} if you set
# b4.thanks-treename in your config, e.g.:
#Applied to ${treename} (${branch}), thanks!
#
# If you track multiple remotes in the same repo, then you can add
# the following values to [remote], to be loaded when you run
# b4 ty -b foo/branchname:
# [remote "foo"]
#   url = https://...
#   fetch = ...
#   b4-treename = uname/sound.git
#   b4-commit-url-mask = https://git.kernel.org/uname/sound/c/%.8s
Applied to ${branch}, thanks!

# for patch series, the summary is a list of each patch with a link
# to the commit id in your tree, so you probably want to set
# b4.thanks-commit-url-mask in gitconfig to a value like:
# [b4]
#   thanks-commit-url-mask = https://git.kernel.org/username/c/%.12s
#
# Check this page for info on convenient URL shorteners:
# https://korg.wiki.kernel.org/userdoc/git-url-shorterners
${summary}

Best regards,
-- 
# if ~/.signature exists, it will be put here, otherwise
# the contents will be "user.name <user.email>" from gitconfig
${signature}
07070100000041000081A400000000000000000000000161F953D70000057B000000000000000000000000000000000000002600000000b4-0.8.0+2/thanks-pr-template.example# Lines starting with '#' will be removed
# You can have two different templates for responding to
# pull requests and for responding to patch series, though
# in reality the difference will probably be one word:
# "merged/pulled" vs. "applied".
# Keeping it short and sweet is preferred.
#
On ${sentdate}, ${fromname} wrote:
# quote will be limited to 5-6 lines, respecting paragraphs
${quote}

# You can also use ${branch} and ${treename} if you set
# b4.thanks-treename in your config, e.g.:
#Merged into ${treename} (${branch}), thanks!
#
# If you track multiple remotes in the same repo, then you can add
# the following values to [remote], to be loaded when you run
# b4 ty -b foo/branchname:
# [remote "foo"]
#   url = https://...
#   fetch = ...
#   b4-treename = uname/sound.git
#   b4-commit-url-mask = https://git.kernel.org/uname/sound/c/%.8s
Merged into ${branch}, thanks!

# for pull requests, the summary is a one-liner with the merge commit,
# so you probably want to set b4.thanks-commit-url-mask in gitconfig
# to a value like:
# [b4]
#   thanks-commit-url-mask = https://git.kernel.org/username/c/%.12s
#
# Check this page for info on convenient URL shorteners:
# https://korg.wiki.kernel.org/userdoc/git-url-shorterners
${summary}

Best regards,
-- 
# if ~/.signature exists, it will be put here, otherwise
# the contents will be "user.name <user.email>" from gitconfig
${signature}
07070100000000000000000000000000000000000000010000000000000000000000000000000000000000000000000000000B00000000TRAILER!!!747 blocks
openSUSE Build Service is sponsored by