author     Konstantin Ryabitsev <konstantin@linuxfoundation.org>  2022-07-15 16:15:00 -0400
committer  Konstantin Ryabitsev <konstantin@linuxfoundation.org>  2022-07-15 16:15:00 -0400
commit     a2f81bdad0c4a3cbc2dca4e78424030310219ba4 (patch)
tree       757be06c011ec73e2afb1fd36dc7ed218338303c
parent     05523677e7574eec399c8842f7191e1df1638d50 (diff)
download   b4-a2f81bdad0c4a3cbc2dca4e78424030310219ba4.tar.gz
Initial implementation of b4 submit
This is the first rough implementation of "b4 submit". Currently implemented:

 - b4 submit --new : to start a new branch
 - b4 submit --edit-cover : to edit the cover message
 - b4 submit --update-trailers : to receive latest trailer updates from
   the mailing lists
 - b4 submit --send : sends the messages using existing git.sendemail
   configs

For details, see "b4 submit --help".

Signed-off-by: Konstantin Ryabitsev <konstantin@linuxfoundation.org>
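A typical workflow with the new subcommand, assembled from the options listed above (a rough sketch; the exact flags and behavior are described in "b4 submit --help"):

    b4 submit --new descriptive-series-name   # create a b4/ work branch with an empty cover-letter commit
    b4 submit --edit-cover                     # fill in the cover letter in your editor
    # ...commit patches on the branch as usual...
    b4 submit --send --dry-run                 # inspect the raw outgoing messages without sending
    b4 submit --send                           # send via the configured git sendemail settings
    b4 submit --update-trailers --signoff      # later: fold in trailers received on the list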
-rw-r--r--   b4/__init__.py         412
-rw-r--r--   b4/command.py           52
-rw-r--r--   b4/pr.py               148
-rw-r--r--   b4/submit.py           761
-rw-r--r--   b4/ty.py                96
-rw-r--r--   misc/send-receive.py   170
-rw-r--r--   misc/test.sqlite       bin 0 -> 24576 bytes
m---------   patatt                   0
-rw-r--r--   requirements.txt         1
9 files changed, 1427 insertions, 213 deletions
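For orientation when reading the b4/submit.py diff below: the cover letter is kept in an empty commit at the base of the b4/ work branch, and its commit message ends in a machine-readable tracking section delimited by the "--- b4-submit-tracking ---" marker. Judging from start_new_series() and make_magic_json(), a freshly created series would carry roughly this block (the change-id value here is illustrative):

    --- b4-submit-tracking ---
    # This section is used internally by b4 submit for tracking purposes.
    {
      "series": {
        "revision": 1,
        "change-id": "20220715-descriptive-series-name-0123456789ab"
      }
    }

After a successful --send, a "history" map keyed by "v1", "v2", ... is added under "series" to record the cover-letter message-id of each sent revision, and --reroll bumps "revision" and prepends a changelog template to the cover letter.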
diff --git a/b4/__init__.py b/b4/__init__.py
index c07ee91..b5f2aa6 100644
--- a/b4/__init__.py
+++ b/b4/__init__.py
@@ -15,6 +15,7 @@ import email.generator
import tempfile
import pathlib
import argparse
+import smtplib
import urllib.parse
import datetime
@@ -27,8 +28,9 @@ import pwd
import requests
+from pathlib import Path
from contextlib import contextmanager
-from typing import Optional, Tuple, Set, List, TextIO
+from typing import Optional, Tuple, Set, List, TextIO, Union
from email import charset
charset.add_charset('utf-8', None)
@@ -92,6 +94,7 @@ LOREADDR = 'https://lore.kernel.org'
DEFAULT_CONFIG = {
'midmask': LOREADDR + '/all/%s',
'linkmask': LOREADDR + '/r/%s',
+ 'searchmask': LOREADDR + '/all/?x=m&t=1&q=%s',
'listid-preference': '*.feeds.kernel.org,*.linux.dev,*.kernel.org,*',
'save-maildirs': 'no',
# off: do not bother checking attestation
@@ -132,6 +135,8 @@ USER_CONFIG = None
REQSESSION = None
# Indicates that we've cleaned cache already
_CACHE_CLEANED = False
+# Used to track mailmap replacements
+MAILMAP_INFO = dict()
class LoreMailbox:
@@ -341,13 +346,15 @@ class LoreMailbox:
def add_message(self, msg):
msgid = LoreMessage.get_clean_msgid(msg)
- if msgid in self.msgid_map:
+ if msgid and msgid in self.msgid_map:
logger.debug('Already have a message with this msgid, skipping %s', msgid)
return
lmsg = LoreMessage(msg)
logger.debug('Looking at: %s', lmsg.full_subject)
- self.msgid_map[lmsg.msgid] = lmsg
+
+ if msgid:
+ self.msgid_map[lmsg.msgid] = lmsg
if lmsg.reply:
# We'll figure out where this belongs later
@@ -479,6 +486,16 @@ class LoreSeries:
return slug[:100]
+ def add_extra_trailers(self, trailers: tuple) -> None:
+ for lmsg in self.patches[1:]:
+ if lmsg is None:
+ continue
+ lmsg.followup_trailers += trailers
+
+ def add_cover_trailers(self) -> None:
+ if self.patches[0] and self.patches[0].followup_trailers: # noqa
+ self.add_extra_trailers(self.patches[0].followup_trailers) # noqa
+
def get_am_ready(self, noaddtrailers=False, covertrailers=False, addmysob=False, addlink=False,
linkmask=None, cherrypick=None, copyccs=False, allowbadchars=False) -> list:
@@ -521,6 +538,9 @@ class LoreSeries:
logger.debug('Attestation info is not the same')
break
+ if covertrailers:
+ self.add_cover_trailers()
+
at = 1
msgs = list()
logger.info('---')
@@ -535,8 +555,6 @@ class LoreSeries:
raise KeyError('Cherrypick not in series')
if lmsg is not None:
- if self.has_cover and covertrailers and self.patches[0].followup_trailers: # noqa
- lmsg.followup_trailers += self.patches[0].followup_trailers # noqa
if addlink:
lmsg.followup_trailers.append(('Link', linkmask % lmsg.msgid, None, None))
if addmysob:
@@ -837,6 +855,7 @@ class LoreMessage:
# Body and body-based info
self.body = None
+ self.message = None
self.charset = 'utf-8'
self.has_diff = False
self.has_diffstat = False
@@ -1303,6 +1322,14 @@ class LoreMessage:
return msg2
@staticmethod
+ def get_patch_id(diff: str) -> Optional[str]:
+ gitargs = ['patch-id', '--stable']
+ ecode, out = git_run_command(None, gitargs, stdin=diff.encode())
+ if ecode > 0:
+ return None
+ return out.split(maxsplit=1)[0]
+
+ @staticmethod
def get_patchwork_hash(diff: str) -> str:
"""Generate a hash from a diff. Lifted verbatim from patchwork."""
@@ -1362,7 +1389,7 @@ class LoreMessage:
return indexes
@staticmethod
- def find_trailers(body, followup=False):
+ def find_trailers(body: str, followup: bool = False) -> Tuple[List[Tuple], List[str]]:
ignores = {'phone', 'email'}
headers = {'subject', 'date', 'from'}
nonperson = {'fixes', 'subject', 'date', 'link', 'buglink', 'obsoleted-by'}
@@ -1418,7 +1445,9 @@ class LoreMessage:
was_trailer = False
others.append(line)
- return trailers, others
+ # convert to tuples for ease of matching
+ ttrailers = [tuple(x) for x in trailers]
+ return ttrailers, others
@staticmethod
def get_body_parts(body):
@@ -1484,7 +1513,7 @@ class LoreMessage:
return githeaders, message, trailers, basement, signature
- def fix_trailers(self, copyccs=False):
+ def fix_trailers(self, copyccs=False, signoff=None):
config = get_main_config()
attpolicy = config['attestation-policy']
@@ -1512,11 +1541,21 @@ class LoreMessage:
trailers.append(('Cc', pair[1], None, None)) # noqa
fixtrailers = list()
+ # If we received a signoff trailer:
+ # - if it's already present, we move it to the bottom
+ # - if not already present, we add it
+ new_signoff = True
for trailer in trailers:
if list(trailer[:3]) in fixtrailers:
# Dupe
continue
+ if signoff and tuple(trailer[:3]) == tuple(signoff):
+ # Skip it, we'll add it at the bottom
+ new_signoff = False
+ logger.debug(' . %s: %s', signoff[0], signoff[1])
+ continue
+
fixtrailers.append(list(trailer[:3]))
if trailer[:3] not in btrailers:
extra = ''
@@ -1537,6 +1576,12 @@ class LoreMessage:
else:
logger.debug(' . %s: %s', trailer[0], trailer[1])
+ if signoff:
+ # Tack on our signoff at the bottom
+ fixtrailers.append(list(signoff))
+ if new_signoff:
+ logger.info(' + %s: %s', signoff[0], signoff[1])
+
# Reconstitute the message
self.body = ''
if bheaders:
@@ -1545,16 +1590,21 @@ class LoreMessage:
self.body += '%s: %s\n' % (bheader[0], bheader[1])
self.body += '\n'
+ newmessage = ''
if len(message):
- self.body += message.rstrip('\r\n') + '\n'
+ newmessage += message.rstrip('\r\n') + '\n'
if len(fixtrailers):
- self.body += '\n'
+ newmessage += '\n'
if len(fixtrailers):
for trailer in fixtrailers:
- self.body += '%s: %s\n' % (trailer[0], trailer[1])
+ newmessage += '%s: %s\n' % (trailer[0], trailer[1])
if trailer[2]:
- self.body += '%s\n' % trailer[2]
+ newmessage += '%s\n' % trailer[2]
+
+ self.message = self.subject + '\n\n' + newmessage
+ self.body += newmessage
+
if len(basement):
self.body += '---\n'
self.body += basement.rstrip('\r\n') + '\n\n'
@@ -1702,9 +1752,9 @@ class LoreSubject:
subject = re.sub(r'^\s*\[[^]]*]\s*', '', subject)
self.subject = subject
- def get_slug(self):
- unsafe = '%04d_%s' % (self.counter, self.subject)
- return re.sub(r'\W+', '_', unsafe).strip('_').lower()
+ def get_slug(self, sep='_'):
+ unsafe = '%04d%s%s' % (self.counter, sep, self.subject)
+ return re.sub(r'\W+', sep, unsafe).strip(sep).lower()
def __repr__(self):
out = list()
@@ -1871,7 +1921,7 @@ def gpg_run_command(args: List[str], stdin: Optional[bytes] = None) -> Tuple[int
def git_run_command(gitdir: Optional[str], args: List[str], stdin: Optional[bytes] = None,
- logstderr: bool = False) -> Tuple[int, str]:
+ logstderr: bool = False, decode: bool = True) -> Tuple[int, Union[str, bytes]]:
cmdargs = ['git', '--no-pager']
if gitdir:
if os.path.exists(os.path.join(gitdir, '.git')):
@@ -1881,10 +1931,12 @@ def git_run_command(gitdir: Optional[str], args: List[str], stdin: Optional[byte
ecode, out, err = _run_command(cmdargs, stdin=stdin)
- out = out.decode(errors='replace')
+ if decode:
+ out = out.decode(errors='replace')
if logstderr and len(err.strip()):
- err = err.decode(errors='replace')
+ if decode:
+ err = err.decode(errors='replace')
logger.debug('Stderr: %s', err)
out += err
@@ -1903,6 +1955,11 @@ def git_get_command_lines(gitdir: Optional[str], args: list) -> List[str]:
return lines
+def git_get_repo_status(gitdir: Optional[str] = None) -> List[str]:
+ args = ['status', '--porcelain=v1']
+ return git_get_command_lines(gitdir, args)
+
+
@contextmanager
def git_temp_worktree(gitdir=None, commitish=None):
"""Context manager that creates a temporary work tree and chdirs into it. The
@@ -2229,32 +2286,48 @@ def mailsplit_bytes(bmbox: bytes, outdir: str) -> list:
return msgs
-def get_pi_thread_by_url(t_mbx_url, nocache=False):
+def get_pi_search_results(query: str, nocache: bool = False):
+ config = get_main_config()
+ searchmask = config.get('searchmask')
+ if not searchmask:
+ logger.critical('b4.searchmask is not defined')
+ return None
msgs = list()
- cachedir = get_cache_file(t_mbx_url, 'pi.msgs')
+ query = urllib.parse.quote_plus(query)
+ query_url = searchmask % query
+ cachedir = get_cache_file(query_url, 'pi.msgs')
if os.path.exists(cachedir) and not nocache:
logger.debug('Using cached copy: %s', cachedir)
for msg in os.listdir(cachedir):
with open(os.path.join(cachedir, msg), 'rb') as fh:
msgs.append(email.message_from_binary_file(fh))
- else:
- logger.critical('Grabbing thread from %s', t_mbx_url.split('://')[1])
- session = get_requests_session()
- resp = session.get(t_mbx_url)
- if resp.status_code == 404:
- logger.critical('That message-id is not known.')
- return None
- if resp.status_code != 200:
- logger.critical('Server returned an error: %s', resp.status_code)
- return None
- t_mbox = gzip.decompress(resp.content)
- resp.close()
- if not len(t_mbox):
- logger.critical('No messages found for that query')
- return None
- # Convert into individual files using git-mailsplit
- with tempfile.TemporaryDirectory(suffix='-mailsplit') as tfd:
- msgs = mailsplit_bytes(t_mbox, tfd)
+ return msgs
+
+ loc = urllib.parse.urlparse(query_url)
+ logger.info('Grabbing search results from %s', loc.netloc)
+ session = get_requests_session()
+ # For the query to retrieve a mbox file, we need to send a POST request
+ resp = session.post(query_url, data='')
+ if resp.status_code == 404:
+ logger.info('Nothing matching that query.')
+ return None
+ if resp.status_code != 200:
+ logger.info('Server returned an error: %s', resp.status_code)
+ return None
+ t_mbox = gzip.decompress(resp.content)
+ resp.close()
+ if not len(t_mbox):
+ logger.critical('No messages found for that query')
+ return None
+
+ return split_and_dedupe_pi_results(t_mbox, cachedir=cachedir)
+
+
+def split_and_dedupe_pi_results(t_mbox: bytes, cachedir: Optional[str] = None) -> List[email.message.Message]:
+ # Convert into individual files using git-mailsplit
+ with tempfile.TemporaryDirectory(suffix='-mailsplit') as tfd:
+ msgs = mailsplit_bytes(t_mbox, tfd)
+ if cachedir:
if os.path.exists(cachedir):
shutil.rmtree(cachedir)
shutil.copytree(tfd, cachedir)
@@ -2269,6 +2342,34 @@ def get_pi_thread_by_url(t_mbx_url, nocache=False):
return list(deduped.values())
+def get_pi_thread_by_url(t_mbx_url: str, nocache: bool = False):
+ msgs = list()
+ cachedir = get_cache_file(t_mbx_url, 'pi.msgs')
+ if os.path.exists(cachedir) and not nocache:
+ logger.debug('Using cached copy: %s', cachedir)
+ for msg in os.listdir(cachedir):
+ with open(os.path.join(cachedir, msg), 'rb') as fh:
+ msgs.append(email.message_from_binary_file(fh))
+ return msgs
+
+ logger.critical('Grabbing thread from %s', t_mbx_url.split('://')[1])
+ session = get_requests_session()
+ resp = session.get(t_mbx_url)
+ if resp.status_code == 404:
+ logger.critical('That message-id is not known.')
+ return None
+ if resp.status_code != 200:
+ logger.critical('Server returned an error: %s', resp.status_code)
+ return None
+ t_mbox = gzip.decompress(resp.content)
+ resp.close()
+ if not len(t_mbox):
+ logger.critical('No messages found for that query')
+ return None
+
+ return split_and_dedupe_pi_results(t_mbox, cachedir=cachedir)
+
+
def get_pi_thread_by_msgid(msgid: str, useproject: Optional[str] = None, nocache: bool = False,
onlymsgids: Optional[set] = None) -> Optional[list]:
qmsgid = urllib.parse.quote_plus(msgid)
@@ -2317,21 +2418,96 @@ def get_pi_thread_by_msgid(msgid: str, useproject: Optional[str] = None, nocache
return strict
-@contextmanager
-def git_format_patches(gitdir, start, end, prefixes=None, extraopts=None):
- with tempfile.TemporaryDirectory() as tmpd:
- gitargs = ['format-patch', '--cover-letter', '-o', tmpd, '--signature', f'b4 {__VERSION__}']
- if prefixes is not None and len(prefixes):
- gitargs += ['--subject-prefix', ' '.join(prefixes)]
- if extraopts:
- gitargs += extraopts
- gitargs += ['%s..%s' % (start, end)]
- ecode, out = git_run_command(gitdir, gitargs)
+def git_range_to_patches(gitdir: Optional[str], start: str, end: str,
+ covermsg: Optional[email.message.EmailMessage] = None,
+ prefixes: Optional[List[str]] = None,
+ msgid_tpt: Optional[str] = None,
+ seriests: Optional[int] = None,
+ mailfrom: Optional[Tuple[str, str]] = None,
+ extrahdrs: Optional[List[Tuple[str, str]]] = None,
+ keepdate: bool = False) -> List[Tuple[str, email.message.Message]]:
+ patches = list()
+ commits = git_get_command_lines(gitdir, ['rev-list', f'{start}..{end}'])
+ if not commits:
+ raise RuntimeError(f'Could not run rev-list {start}..{end}')
+ for commit in commits:
+ ecode, out = git_run_command(gitdir, ['show', '--format=email', commit], decode=False)
if ecode > 0:
- logger.critical('ERROR: Could not convert pull request into patches')
- logger.critical(out)
- yield None
- yield tmpd
+ raise RuntimeError(f'Could not get a patch out of {commit}')
+ msg = email.message_from_bytes(out)
+ logger.debug(' %s', msg.get('Subject'))
+
+ patches.append((commit, msg))
+
+ startfrom = 1
+ fullcount = len(patches)
+ patches.insert(0, (None, covermsg))
+ if covermsg:
+ startfrom = 0
+
+ # Go through and apply any outstanding fixes
+ if prefixes:
+ prefixes = ' ' + ' '.join(prefixes)
+ else:
+ prefixes = ''
+
+ for counter in range(startfrom, fullcount+1):
+ msg = patches[counter][1]
+ subject = msg.get('Subject')
+ csubject = re.sub(r'^\[PATCH]\s*', '', subject)
+ pline = '[PATCH%s %s/%s]' % (prefixes, str(counter).zfill(len(str(fullcount))), fullcount)
+ msg.replace_header('Subject', f'{pline} {csubject}')
+ inbodyhdrs = list()
+ if mailfrom:
+ # Move the original From and Date into the body
+ origfrom = msg.get('From')
+ if origfrom:
+ origpair = email.utils.parseaddr(origfrom)
+ if origpair[1] != mailfrom[1]:
+ msg.replace_header('From', format_addrs([mailfrom]))
+ inbodyhdrs.append(f'From: {origfrom}')
+ else:
+ msg.add_header('From', format_addrs([mailfrom]))
+
+ if seriests:
+ patchts = seriests + counter
+ origdate = msg.get('Date')
+ if origdate:
+ if keepdate:
+ inbodyhdrs.append(f'Date: {origdate}')
+ msg.replace_header('Date', email.utils.formatdate(patchts, localtime=True))
+ else:
+ msg.add_header('Date', email.utils.formatdate(patchts, localtime=True))
+
+ payload = msg.get_payload()
+ if inbodyhdrs:
+ payload = '\n'.join(inbodyhdrs) + '\n\n' + payload
+ if not payload.find('\n-- \n') > 0:
+ payload += f'\n-- \nb4 {__VERSION__}\n'
+ msg.set_payload(payload)
+
+ if extrahdrs is None:
+ extrahdrs = list()
+ for hdrname, hdrval in extrahdrs:
+ try:
+ msg.replace_header(hdrname, hdrval)
+ except KeyError:
+ msg.add_header(hdrname, hdrval)
+
+ if msgid_tpt:
+ msg.add_header('Message-Id', msgid_tpt % str(counter))
+ refto = None
+ if counter > 0 and covermsg:
+ # Thread to the cover letter
+ refto = msgid_tpt % str(0)
+ if counter > 1 and not covermsg:
+            # Thread to the first patch
+ refto = msgid_tpt % str(1)
+ if refto:
+ msg.add_header('References', refto)
+ msg.add_header('In-Reply-To', refto)
+
+ return patches
def git_commit_exists(gitdir, commit_id):
@@ -2529,8 +2705,8 @@ def read_template(tptfile):
return tpt
-def get_smtp(identity: Optional[str] = None):
- import smtplib
+def get_smtp(identity: Optional[str] = None,
+ dryrun: bool = False) -> Tuple[Union[smtplib.SMTP, smtplib.SMTP_SSL, None], str]:
if identity:
sconfig = get_config_from_git(rf'sendemail\.{identity}\..*')
sectname = f'sendemail.{identity}'
@@ -2550,6 +2726,9 @@ def get_smtp(identity: Optional[str] = None):
raise smtplib.SMTPException('Invalid smtpport entry in %s' % sectname)
encryption = sconfig.get('smtpencryption')
+ if dryrun:
+ return None, fromaddr
+
logger.info('Connecting to %s:%s', server, port)
# We only authenticate if we have encryption
if encryption:
@@ -2647,3 +2826,128 @@ def patchwork_set_state(msgids: List[str], state: str) -> bool:
logger.info(' -> %s : %s', state, title)
except requests.exceptions.RequestException as ex:
logger.debug('Patchwork REST error: %s', ex)
+
+
+def send_smtp(smtp: Union[smtplib.SMTP, smtplib.SMTP_SSL, None], msg: email.message.Message,
+ fromaddr: str, destaddrs: Optional[Union[Tuple, Set]] = None,
+ patatt_sign: bool = False, dryrun: bool = False,
+ maxheaderlen: Optional[int] = None) -> bool:
+ if not msg.get('X-Mailer'):
+ msg.add_header('X-Mailer', f'b4 {__VERSION__}')
+ msg.set_charset('utf-8')
+ msg.replace_header('Content-Transfer-Encoding', '8bit')
+ msg.policy = email.policy.EmailPolicy(utf8=True, cte_type='8bit')
+ # Python's sendmail implementation seems to have some logic problems where 8-bit messages are involved.
+ # As far as I understand the difference between 8BITMIME (supported by nearly all smtp servers) and
+ # SMTPUTF8 (supported by very few), SMTPUTF8 is only required when the addresses specified in either
+ # "MAIL FROM" or "RCPT TO" lines of the _protocol exchange_ themselves have 8bit characters, not
+ # anything in the From: header of the DATA payload. Python's smtplib seems to always try to encode
+    # strings as ascii regardless of what policy was specified.
+ # Work around this by getting the payload as string and then encoding to bytes ourselves.
+ if maxheaderlen is None:
+ if dryrun:
+ # Make it fit the terminal window, but no wider than 120 minus visual padding
+ ts = shutil.get_terminal_size((120, 20))
+ maxheaderlen = ts.columns - 8
+ if maxheaderlen > 112:
+ maxheaderlen = 112
+ else:
+ # Use a sane-ish default (we don't need to stick to 80, but
+ # we need to make sure it's shorter than 255)
+ maxheaderlen = 120
+
+ emldata = msg.as_string(maxheaderlen=maxheaderlen)
+ # Force compliant eols
+ emldata = re.sub(r'\r\n|\n|\r(?!\n)', '\r\n', emldata)
+ bdata = emldata.encode()
+ if patatt_sign:
+ import patatt
+ # patatt.logger = logger
+ bdata = patatt.rfc2822_sign(bdata)
+ if dryrun or smtp is None:
+ logger.info(' --- DRYRUN: message follows ---')
+ logger.info(' | ' + bdata.decode().rstrip().replace('\n', '\n | '))
+ logger.info(' --- DRYRUN: message ends ---')
+ return True
+ if not destaddrs:
+ alldests = email.utils.getaddresses([str(x) for x in msg.get_all('to', [])])
+ alldests += email.utils.getaddresses([str(x) for x in msg.get_all('cc', [])])
+ destaddrs = {x[1] for x in alldests}
+ smtp.sendmail(fromaddr, destaddrs, bdata)
+ # TODO: properly catch exceptions on sending
+ return True
+
+
+def git_get_current_branch(gitdir: Optional[str] = None, short: bool = True) -> Optional[str]:
+ gitargs = ['symbolic-ref', '-q', 'HEAD']
+ ecode, out = git_run_command(gitdir, gitargs)
+ if ecode > 0:
+ logger.critical('Not able to get current branch (git symbolic-ref HEAD)')
+ return None
+ mybranch = out.strip()
+ if short:
+ return re.sub(r'^refs/heads/', '', mybranch)
+ return mybranch
+
+
+def get_excluded_addrs() -> Set[str]:
+ config = get_main_config()
+ excludes = set()
+ c_excludes = config.get('email-exclude')
+ if c_excludes:
+ for entry in c_excludes.split(','):
+ excludes.add(entry.strip())
+
+ return excludes
+
+
+def cleanup_email_addrs(addresses: List[Tuple[str, str]], excludes: Set[str],
+ gitdir: Optional[str]) -> List[Tuple[str, str]]:
+ global MAILMAP_INFO
+ for entry in list(addresses):
+ # Check if it's in excludes
+ removed = False
+ for exclude in excludes:
+ if fnmatch.fnmatch(entry[1], exclude):
+ logger.debug('Removed %s due to matching %s', entry[1], exclude)
+ addresses.remove(entry)
+ removed = True
+ break
+ if removed:
+ continue
+ # Check if it's mailmap-replaced
+ if entry[1] in MAILMAP_INFO:
+ if MAILMAP_INFO[entry[1]]:
+ addresses.remove(entry)
+ addresses.append(MAILMAP_INFO[entry[1]])
+ continue
+ logger.debug('Checking if %s is mailmap-replaced', entry[1])
+ args = ['check-mailmap', f'<{entry[1]}>']
+ ecode, out = git_run_command(gitdir, args)
+ if ecode != 0:
+ MAILMAP_INFO[entry[1]] = None
+ continue
+ replacement = email.utils.getaddresses([out.strip()])
+ if len(replacement) == 1:
+ if entry[1] == replacement[0][1]:
+ MAILMAP_INFO[entry[1]] = None
+ continue
+ logger.debug('Replaced %s with mailmap-updated %s', entry[1], replacement[0][1])
+ MAILMAP_INFO[entry[1]] = replacement[0]
+ addresses.remove(entry)
+ addresses.append(replacement[0])
+
+ return addresses
+
+
+def get_email_signature() -> str:
+ usercfg = get_user_config()
+ # Do we have a .signature file?
+ sigfile = os.path.join(str(Path.home()), '.signature')
+ if os.path.exists(sigfile):
+ with open(sigfile, 'r', encoding='utf-8') as fh:
+ signature = fh.read()
+ else:
+ signature = '%s <%s>' % (usercfg['name'], usercfg['email'])
+
+ return signature
diff --git a/b4/command.py b/b4/command.py
index 6a29f0a..49ce767 100644
--- a/b4/command.py
+++ b/b4/command.py
@@ -71,6 +71,11 @@ def cmd_kr(cmdargs):
b4.kr.main(cmdargs)
+def cmd_submit(cmdargs):
+ import b4.submit
+ b4.submit.main(cmdargs)
+
+
def cmd_am(cmdargs):
import b4.mbox
b4.mbox.main(cmdargs)
@@ -235,6 +240,53 @@ def cmd():
help='Show all developer keys found in a thread')
sp_kr.set_defaults(func=cmd_kr)
+ # b4 submit
+ sp_submit = subparsers.add_parser('submit', help='Submit patches for review')
+ # xg_submit = sp_submit.add_mutually_exclusive_group()
+ # xg_submit.add_argument('--web-auth-new', action='store_true', default=False,
+ # help='Register a new email and pubkey with a web submission endpoint')
+ # xg_submit.add_argument('--web-auth-verify',
+ # help='Submit a response to a challenge received from a web submission endpoint')
+ sp_submit.add_argument('--edit-cover', action='store_true', default=False,
+ help='Edit the cover letter in your defined $EDITOR (or core.editor)')
+ sp_submit.add_argument('--reroll', action='store_true', default=False,
+ help='Increment revision and add changelog templates to the cover letter')
+ nn_submit = sp_submit.add_argument_group('New series', 'Set up a new work branch for a new patch series')
+ nn_submit.add_argument('-n', '--new', dest='new_series_name',
+ help='Create a new branch and start working on new series')
+ nn_submit.add_argument('-f', '--fork-point', dest='fork_point',
+ help='Create new branch at this fork point instead of HEAD')
+ ag_submit = sp_submit.add_argument_group('Sync trailers', 'Update series with latest received trailers')
+ ag_submit.add_argument('-u', '--update-trailers', action='store_true', default=False,
+ help='Update commits with latest received trailers')
+ ag_submit.add_argument('-s', '--signoff', action='store_true', default=False,
+ help='Add my Signed-off-by trailer, if not already present')
+ ag_submit.add_argument('-S', '--sloppy-trailers', dest='sloppytrailers', action='store_true', default=False,
+ help='Apply trailers without email address match checking')
+ ag_submit.add_argument('-F', '--trailers-from', dest='thread_msgid',
+ help='Look for new trailers in the thread with this msgid instead of using the change-id')
+ se_submit = sp_submit.add_argument_group('Send series', 'Submits your series for review')
+ se_submit.add_argument('--send', action='store_true', default=False,
+ help='Submit the series for review')
+ se_submit.add_argument('-d', '--dry-run', dest='dryrun', action='store_true', default=False,
+ help='Do not actually send, just dump out raw smtp messages to the stdout')
+ se_submit.add_argument('-o', '--output-dir',
+ help='Do not send, just write patches into this directory (git-format-patch mode)')
+ se_submit.add_argument('--prefixes', nargs='+', choices=['RFC', 'WIP', 'RESEND'],
+ help='Prefixes to add to PATCH (e.g. RFC, WIP, RESEND)')
+ se_submit.add_argument('--no-auto-to-cc', action='store_true', default=False,
+ help='Do not automatically collect To: and Cc: addresses')
+ se_submit.add_argument('--to', nargs='+',
+ help='Addresses to add to the automatically collected To: list')
+ se_submit.add_argument('--cc', nargs='+',
+ help='Addresses to add to the automatically collected Cc: list')
+ se_submit.add_argument('--not-me-too', action='store_true', default=False,
+ help='Remove yourself from the To: or Cc: list')
+ se_submit.add_argument('--no-sign', action='store_true', default=False,
+ help='Do not cryptographically sign your patches with patatt')
+
+ sp_submit.set_defaults(func=cmd_submit)
+
cmdargs = parser.parse_args()
logger.setLevel(logging.DEBUG)
diff --git a/b4/pr.py b/b4/pr.py
index 825391e..596d3ad 100644
--- a/b4/pr.py
+++ b/b4/pr.py
@@ -318,84 +318,78 @@ def explode(gitdir, lmsg, mailfrom=None, retrieve_links=True, fpopts=None):
# of the archived threads.
linked_ids.add(lmsg.msgid)
- with b4.git_format_patches(gitdir, lmsg.pr_base_commit, 'FETCH_HEAD', prefixes=prefixes, extraopts=fpopts) as pdir:
- if pdir is None:
- raise RuntimeError('Could not run format-patches')
-
- for msgfile in sorted(os.listdir(pdir)):
- with open(os.path.join(pdir, msgfile), 'rb') as fh:
- msg = email.message_from_binary_file(fh)
-
- msubj = b4.LoreSubject(msg.get('subject', ''))
-
- # Is this the cover letter?
- if msubj.counter == 0:
- # We rebuild the message from scratch
- # The cover letter body is the pull request body, plus a few trailers
- body = '%s\n\nbase-commit: %s\nPR-Link: %s\n' % (
- lmsg.body.strip(), lmsg.pr_base_commit, config['linkmask'] % lmsg.msgid)
-
- # Make it a multipart if we're doing retrieve_links
- if retrieve_links:
- cmsg = MIMEMultipart()
- cmsg.attach(MIMEText(body, 'plain'))
- else:
- cmsg = email.message.EmailMessage()
- cmsg.set_payload(body)
-
- cmsg.add_header('From', mailfrom)
- cmsg.add_header('Subject', '[' + ' '.join(msubj.prefixes) + '] ' + lmsg.subject)
- cmsg.add_header('Date', lmsg.msg.get('Date'))
- cmsg.set_charset('utf-8')
- cmsg.replace_header('Content-Transfer-Encoding', '8bit')
-
- msg = cmsg
-
+ # FIXME: This is currently broken due to changes to git_range_to_patches
+ msgs = b4.git_range_to_patches(gitdir, lmsg.pr_base_commit, 'FETCH_HEAD', with_cover=True,
+ prefixes=prefixes, extraopts=fpopts)
+ for msg in msgs:
+ msubj = b4.LoreSubject(msg.get('subject', ''))
+
+ # Is this the cover letter?
+ if msubj.counter == 0:
+ # We rebuild the message from scratch
+ # The cover letter body is the pull request body, plus a few trailers
+ body = '%s\n\nbase-commit: %s\nPR-Link: %s\n' % (
+ lmsg.body.strip(), lmsg.pr_base_commit, config['linkmask'] % lmsg.msgid)
+
+ # Make it a multipart if we're doing retrieve_links
+ if retrieve_links:
+ cmsg = MIMEMultipart()
+ cmsg.attach(MIMEText(body, 'plain'))
else:
- # Move the original From and Date into the body
- prepend = list()
- if msg.get('From') != mailfrom:
- cleanfrom = b4.LoreMessage.clean_header(msg['from'])
- prepend.append('From: %s' % ''.join(cleanfrom))
- msg.replace_header('From', mailfrom)
-
- prepend.append('Date: %s' % msg['date'])
- body = '%s\n\n%s' % ('\n'.join(prepend), msg.get_payload(decode=True).decode('utf-8'))
- msg.set_payload(body)
- msg.replace_header('Subject', msubj.full_subject)
-
- if retrieve_links:
- matches = re.findall(r'^Link:\s+https?://.*/(\S+@\S+)[^/]', body, flags=re.M | re.I)
- if matches:
- linked_ids.update(matches)
- matches = re.findall(r'^Message-ID:\s+(\S+@\S+)', body, flags=re.M | re.I)
- if matches:
- linked_ids.update(matches)
-
- # Add a number of seconds equalling the counter, in hopes it gets properly threaded
- newdate = lmsg.date + timedelta(seconds=msubj.counter)
- msg.replace_header('Date', utils.format_datetime(newdate))
-
- # Thread it to the cover letter
- msg.add_header('In-Reply-To', '<b4-exploded-0-%s>' % lmsg.msgid)
- msg.add_header('References', '<b4-exploded-0-%s>' % lmsg.msgid)
-
- msg.add_header('To', format_addrs(allto))
- if allcc:
- msg.add_header('Cc', format_addrs(allcc))
-
- # Set the message-id based on the original pull request msgid
- msg.add_header('Message-Id', '<b4-exploded-%s-%s>' % (msubj.counter, lmsg.msgid))
-
- if mailfrom != lmsg.msg.get('From'):
- msg.add_header('Reply-To', lmsg.msg.get('From'))
- msg.add_header('X-Original-From', lmsg.msg.get('From'))
-
- if lmsg.msg['List-Id']:
- msg.add_header('X-Original-List-Id', b4.LoreMessage.clean_header(lmsg.msg['List-Id']))
- logger.info(' %s', msg.get('Subject'))
- msg.set_charset('utf-8')
- msgs.append(msg)
+ cmsg = email.message.EmailMessage()
+ cmsg.set_payload(body)
+
+ cmsg.add_header('From', mailfrom)
+ cmsg.add_header('Subject', '[' + ' '.join(msubj.prefixes) + '] ' + lmsg.subject)
+ cmsg.add_header('Date', lmsg.msg.get('Date'))
+ cmsg.set_charset('utf-8')
+ cmsg.replace_header('Content-Transfer-Encoding', '8bit')
+
+ msg = cmsg
+
+ else:
+ # Move the original From and Date into the body
+ prepend = list()
+ if msg.get('From') != mailfrom:
+ cleanfrom = b4.LoreMessage.clean_header(msg['from'])
+ prepend.append('From: %s' % ''.join(cleanfrom))
+ msg.replace_header('From', mailfrom)
+
+ prepend.append('Date: %s' % msg['date'])
+ body = '%s\n\n%s' % ('\n'.join(prepend), msg.get_payload(decode=True).decode('utf-8'))
+ msg.set_payload(body)
+ msg.replace_header('Subject', msubj.full_subject)
+
+ if retrieve_links:
+ matches = re.findall(r'^Link:\s+https?://.*/(\S+@\S+)[^/]', body, flags=re.M | re.I)
+ if matches:
+ linked_ids.update(matches)
+ matches = re.findall(r'^Message-ID:\s+(\S+@\S+)', body, flags=re.M | re.I)
+ if matches:
+ linked_ids.update(matches)
+
+ # Add a number of seconds equalling the counter, in hopes it gets properly threaded
+ newdate = lmsg.date + timedelta(seconds=msubj.counter)
+ msg.replace_header('Date', utils.format_datetime(newdate))
+
+ # Thread it to the cover letter
+ msg.add_header('In-Reply-To', '<b4-exploded-0-%s>' % lmsg.msgid)
+ msg.add_header('References', '<b4-exploded-0-%s>' % lmsg.msgid)
+
+ msg.add_header('To', format_addrs(allto))
+ if allcc:
+ msg.add_header('Cc', format_addrs(allcc))
+
+ # Set the message-id based on the original pull request msgid
+ msg.add_header('Message-Id', '<b4-exploded-%s-%s>' % (msubj.counter, lmsg.msgid))
+
+ if mailfrom != lmsg.msg.get('From'):
+ msg.add_header('Reply-To', lmsg.msg.get('From'))
+ msg.add_header('X-Original-From', lmsg.msg.get('From'))
+
+ if lmsg.msg['List-Id']:
+ msg.add_header('X-Original-List-Id', b4.LoreMessage.clean_header(lmsg.msg['List-Id']))
+ logger.info(' %s', msg.get('Subject'))
logger.info('Exploded %s messages', len(msgs))
if retrieve_links and linked_ids:
diff --git a/b4/submit.py b/b4/submit.py
new file mode 100644
index 0000000..b82b022
--- /dev/null
+++ b/b4/submit.py
@@ -0,0 +1,761 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+# SPDX-License-Identifier: GPL-2.0-or-later
+# Copyright (C) 2020 by the Linux Foundation
+#
+__author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'
+
+import email.message
+import os
+import sys
+import b4
+import re
+import argparse
+import uuid
+import time
+import datetime
+import json
+import tempfile
+import subprocess
+import shlex
+import email
+import pathlib
+
+# from nacl.signing import SigningKey
+# from nacl.encoding import Base64Encoder
+
+from typing import Optional, Tuple, List
+from email import utils
+from string import Template
+
+try:
+ import patatt
+ can_patatt = True
+except ModuleNotFoundError:
+ can_patatt = False
+
+try:
+ import git_filter_repo as fr # noqa
+ can_gfr = True
+except ModuleNotFoundError:
+ can_gfr = False
+
+logger = b4.logger
+
+MAGIC_MARKER = '--- b4-submit-tracking ---'
+
+DEFAULT_COVER_TEMPLATE = """
+${cover}
+
+---
+${shortlog}
+
+${diffstat}
+---
+base-commit: ${base_commit}
+change-id: ${change_id}
+
+Best regards,
+--
+${signature}
+"""
+
+DEFAULT_CHANGELOG_TEMPLATE = """
+Changes in v${newrev}:
+- EDITME: describe what is new in this series revision.
+- EDITME: use bulletpoints and terse descriptions.
+- Link to v${oldrev}: ${oldrev_link}
+
+"""
+
+# def auth_new(cmdargs: argparse.Namespace) -> None:
+# # Check if we have a patatt signingkey already defined
+# endpoint, name, email, ptskey = get_configs()
+# skey, pkey = get_patatt_ed25519keys(ptskey)
+# logger.info('Will submit a new email authorization request to:')
+# logger.info(' Endpoint: %s', endpoint)
+# logger.info(' Name: %s', name)
+# logger.info(' Email: %s', email)
+# logger.info(' Key: %s (%s)', pkey, ptskey)
+# logger.info('---')
+# confirm = input('Confirm selection [y/N]: ')
+# if confirm != 'y':
+# logger.info('Exiting')
+# sys.exit(0)
+# req = {
+# 'action': 'auth-new',
+# 'name': name,
+# 'email': email,
+# 'key': pkey,
+# }
+# ses = b4.get_requests_session()
+# res = ses.post(endpoint, json=req)
+# logger.info('---')
+# if res.status_code == 200:
+# try:
+# rdata = res.json()
+# if rdata.get('result') == 'success':
+# logger.info('Challenge generated and sent to %s', email)
+# logger.info('Once you receive it, run b4 submit --web-auth-verify [challenge-string]')
+# sys.exit(0)
+#
+# except Exception as ex: # noqa
+# logger.critical('Odd response from the endpoint: %s', res.text)
+# sys.exit(1)
+#
+# logger.critical('500 response from the endpoint: %s', res.text)
+# sys.exit(1)
+#
+#
+# def auth_verify(cmdargs: argparse.Namespace) -> None:
+# endpoint, name, email, ptskey = get_configs()
+# skey, pkey = get_patatt_ed25519keys(ptskey)
+# challenge = cmdargs.auth_verify
+# logger.info('Signing challenge using key %s', ptskey)
+# sk = SigningKey(skey.encode(), encoder=Base64Encoder)
+# bdata = sk.sign(challenge.encode(), encoder=Base64Encoder)
+# req = {
+# 'action': 'auth-verify',
+# 'name': name,
+# 'email': email,
+# 'challenge': challenge,
+# 'sigdata': bdata.decode(),
+# }
+# ses = b4.get_requests_session()
+# res = ses.post(endpoint, json=req)
+# logger.info('---')
+# if res.status_code == 200:
+# try:
+# rdata = res.json()
+# if rdata.get('result') == 'success':
+# logger.info('Challenge successfully verified for %s', email)
+# logger.info('You may now use this endpoint for submitting patches.')
+# sys.exit(0)
+#
+# except Exception as ex: # noqa
+# logger.critical('Odd response from the endpoint: %s', res.text)
+# sys.exit(1)
+#
+# logger.critical('500 response from the endpoint: %s', res.text)
+# sys.exit(1)
+
+
+def start_new_series(cmdargs: argparse.Namespace) -> None:
+ status = b4.git_get_repo_status()
+ if len(status):
+ logger.critical('CRITICAL: Repository contains uncommitted changes.')
+ logger.critical(' Stash or commit them first.')
+ sys.exit(1)
+
+ usercfg = b4.get_user_config()
+ if 'name' not in usercfg or 'email' not in usercfg:
+ logger.critical('CRITICAL: Unable to add your Signed-off-by: git returned no user.name or user.email')
+ sys.exit(1)
+
+ if not cmdargs.fork_point:
+ cmdargs.fork_point = 'HEAD'
+ slug = re.sub(r'\W+', '-', cmdargs.new_series_name).strip('-').lower()
+ branchname = 'b4/%s' % slug
+ args = ['checkout', '-b', branchname, cmdargs.fork_point]
+ ecode, out = b4.git_run_command(None, args, logstderr=True)
+ if ecode > 0:
+ logger.critical('CRITICAL: Failed to create a new branch %s', branchname)
+ logger.critical(out)
+ sys.exit(ecode)
+ logger.info('Created new branch %s', branchname)
+ # create an empty commit containing basic cover letter details
+ msgdata = ('EDITME: cover title for %s' % cmdargs.new_series_name,
+ '',
+ '# Lines starting with # will be removed from the cover letter. You can use',
+ '# them to add notes or reminders to yourself.',
+ '',
+ 'EDITME: describe the purpose of this series. The information you put here',
+ 'will be used by the project maintainer to make a decision whether your',
+ 'patches should be reviewed, and in what priority order. Please be very',
+ 'detailed and link to any relevant discussions or sites that the maintainer',
+ 'can review to better understand your proposed changes.',
+ '',
+ 'Signed-off-by: %s <%s>' % (usercfg.get('name', ''), usercfg.get('email', '')),
+ '',
+ '# You can add other trailers to the cover letter. Any email addresses found in',
+ '# these trailers will be added to the addresses specified/generated during',
+ '# the --send stage.',
+ '',
+ '',
+ )
+ # We don't need all the entropy of uuid, just some of it
+ changeid = '%s-%s-%s' % (datetime.date.today().strftime('%Y%m%d'), slug, uuid.uuid4().hex[:12])
+ tracking = {
+ 'series': {
+ 'revision': 1,
+ 'change-id': changeid,
+ },
+ }
+ message = '\n'.join(msgdata) + make_magic_json(tracking)
+ args = ['commit', '--allow-empty', '-F', '-']
+ ecode, out = b4.git_run_command(None, args, stdin=message.encode(), logstderr=True)
+ if ecode > 0:
+ logger.critical('CRITICAL: Generating cover letter commit failed:')
+ logger.critical(out)
+ logger.info('Created empty commit with the cover letter.')
+ logger.info('You can prepare your commits now.')
+
+
+def make_magic_json(data: dict) -> str:
+ mj = (f'{MAGIC_MARKER}\n'
+ '# This section is used internally by b4 submit for tracking purposes.\n')
+ return mj + json.dumps(data, indent=2)
+
+
+def load_cover(cover_commit: str, strip_comments: bool = False) -> Tuple[str, dict]:
+ # Grab the cover contents
+ gitargs = ['show', '-s', '--format=%B', cover_commit]
+ ecode, out = b4.git_run_command(None, gitargs)
+ if ecode > 0:
+ logger.critical('CRITICAL: unable to load cover letter')
+ sys.exit(1)
+ # Split on MAGIC_MARKER
+ cover, magic_json = out.split(MAGIC_MARKER)
+ # drop everything until the first {
+ junk, mdata = magic_json.split('{', maxsplit=1)
+ jdata = json.loads('{' + mdata)
+ logger.debug('tracking data: %s', jdata)
+ if strip_comments:
+ cover = re.sub(r'^#.*$', '', cover, flags=re.M)
+ while '\n\n\n' in cover:
+ cover = cover.replace('\n\n\n', '\n\n')
+ return cover.strip(), jdata
+
+
+def update_cover(commit: str, content: str, tracking: dict) -> None:
+ cover_message = content + '\n\n' + make_magic_json(tracking)
+ fred = FRCommitMessageEditor()
+ fred.add(commit, cover_message)
+ args = fr.FilteringOptions.parse_args(['--force', '--quiet', '--refs', f'{commit}~1..HEAD'])
+ args.refs = [f'{commit}~1..HEAD']
+ frf = fr.RepoFilter(args, commit_callback=fred.callback)
+ logger.info('Invoking git-filter-repo to update the cover letter.')
+ frf.run()
+
+
+def check_our_branch() -> bool:
+ mybranch = b4.git_get_current_branch()
+ if mybranch.startswith('b4/'):
+ return True
+ logger.info('CRITICAL: This does not look like a b4-managed branch.')
+ logger.info(' "%s" does not start with "b4/"', mybranch)
+ return False
+
+
+def find_cover_commit() -> Optional[str]:
+ # Walk back commits until we find the cover letter
+ # Our covers always contain the MAGIC_MARKER line
+ logger.debug('Looking for the cover letter commit with magic marker "%s"', MAGIC_MARKER)
+ gitargs = ['log', '--grep', MAGIC_MARKER, '-F', '--pretty=oneline', '--max-count=1']
+ lines = b4.git_get_command_lines(None, gitargs)
+ if not lines:
+ return None
+ found = lines[0].split()[0]
+ logger.debug('Cover commit found in %s', found)
+ return found
+
+
+class FRCommitMessageEditor:
+ edit_map: dict
+
+ def __init__(self, edit_map: Optional[dict] = None):
+ if edit_map:
+ self.edit_map = edit_map
+ else:
+ self.edit_map = dict()
+
+ def add(self, commit: str, message: str):
+ self.edit_map[commit.encode()] = message.encode()
+
+ def callback(self, commit, metadata): # noqa
+ if commit.original_id in self.edit_map:
+ commit.message = self.edit_map[commit.original_id]
+
+
+def edit_cover(cover_commit: str) -> None:
+ cover, tracking = load_cover(cover_commit)
+ # What's our editor? And yes, the default is vi, bite me.
+ corecfg = b4.get_config_from_git(r'core\..*', {'editor': os.environ.get('EDITOR', 'vi')})
+ editor = corecfg.get('editor')
+ logger.debug('editor=%s', editor)
+ # We give it a suffix .rst in hopes that editors autoload restructured-text rules
+ with tempfile.NamedTemporaryFile(suffix='.rst') as temp_cover:
+ temp_cover.write(cover.encode())
+ temp_cover.seek(0)
+ sp = shlex.shlex(editor, posix=True)
+ sp.whitespace_split = True
+ cmdargs = list(sp) + [temp_cover.name]
+ logger.debug('Running %s' % ' '.join(cmdargs))
+ sp = subprocess.Popen(cmdargs)
+ sp.wait()
+ new_cover = temp_cover.read().decode(errors='replace').strip()
+
+ if new_cover == cover:
+ logger.info('Cover letter unchanged.')
+ return
+ if not len(new_cover.strip()):
+ logger.info('New cover letter blank, leaving current one unchanged.')
+ return
+
+ update_cover(cover_commit, new_cover, tracking)
+ logger.info('Cover letter updated.')
+
+
+def update_trailers(cover_commit: str, cmdargs: argparse.Namespace) -> None:
+ if cmdargs.signoff:
+ usercfg = b4.get_user_config()
+ if 'name' not in usercfg or 'email' not in usercfg:
+ logger.critical('CRITICAL: Unable to add your Signed-off-by: git returned no user.name or user.email')
+ sys.exit(1)
+ signoff = ('Signed-off-by', f"{usercfg['name']} <{usercfg['email']}>", None)
+ else:
+ signoff = None
+
+ try:
+ patches = b4.git_range_to_patches(None, cover_commit, 'HEAD')
+ except RuntimeError as ex:
+ logger.critical('CRITICAL: Failed to convert range to patches: %s', ex)
+ sys.exit(1)
+
+ logger.info('Calculating patch-ids from %s commits', len(patches)-1)
+ msg_map = dict()
+ commit_map = dict()
+ # Ignore the cover letter
+ for commit, msg in patches[1:]:
+ body = msg.get_payload()
+ patchid = b4.LoreMessage.get_patch_id(body)
+ msg_map[patchid] = msg
+ commit_map[patchid] = commit
+
+ if cmdargs.thread_msgid:
+ cmdargs.msgid = cmdargs.thread_msgid
+ msgid = b4.get_msgid(cmdargs)
+ logger.info('Retrieving thread matching %s', msgid)
+ list_msgs = b4.get_pi_thread_by_msgid(msgid, nocache=True)
+ else:
+ cover, tracking = load_cover(cover_commit, strip_comments=True)
+ changeid = tracking['series'].get('change-id')
+ logger.info('Checking change-id "%s"', changeid)
+ query = f'"change-id: {changeid}"'
+ list_msgs = b4.get_pi_search_results(query, nocache=True)
+
+ bbox = b4.LoreMailbox()
+ for list_msg in list_msgs:
+ bbox.add_message(list_msg)
+
+ updates = dict()
+ lser = bbox.get_series(sloppytrailers=cmdargs.sloppytrailers)
+ mismatches = list(lser.trailer_mismatches)
+ for lmsg in lser.patches[1:]:
+ addtrailers = list(lmsg.followup_trailers)
+ if lser.has_cover and len(lser.patches[0].followup_trailers):
+ addtrailers += list(lser.patches[0].followup_trailers)
+ if not addtrailers:
+ logger.debug('No follow-up trailers received to the %s', lmsg.subject)
+ continue
+ patchid = b4.LoreMessage.get_patch_id(lmsg.body)
+ if patchid not in commit_map:
+ logger.debug('No match for patchid %s', patchid)
+ continue
+ parts = b4.LoreMessage.get_body_parts(msg_map[patchid].get_payload())
+ if signoff and signoff not in parts[2]:
+ updates[patchid] = list()
+ for ftrailer in addtrailers:
+ if ftrailer[:3] not in parts[2]:
+ if patchid not in updates:
+ updates[patchid] = list()
+ updates[patchid].append(ftrailer)
+ # Check if we've applied mismatched trailers already
+ if not cmdargs.sloppytrailers and mismatches:
+ for mtrailer in list(mismatches):
+ check = (mtrailer[0], mtrailer[1], None)
+ if check in parts[2]:
+ logger.debug('Removing already-applied mismatch %s', check)
+ mismatches.remove(mtrailer)
+
+ if not updates:
+ logger.info('No trailer updates found.')
+ return
+
+ if len(mismatches):
+ logger.critical('---')
+ logger.critical('NOTE: some trailers ignored due to from/email mismatches:')
+ for tname, tvalue, fname, femail in lser.trailer_mismatches:
+ logger.critical(' ! Trailer: %s: %s', tname, tvalue)
+ logger.critical(' Msg From: %s <%s>', fname, femail)
+ logger.critical('NOTE: Rerun with -S to apply them anyway')
+
+ logger.info('---')
+ # Create the map of new messages
+ fred = FRCommitMessageEditor()
+ for patchid, newtrailers in updates.items():
+ # Make it a LoreMessage, so we can run attestation on received trailers
+ cmsg = b4.LoreMessage(msg_map[patchid])
+ logger.info(' %s', cmsg.subject)
+ cmsg.followup_trailers = newtrailers
+ cmsg.fix_trailers(signoff=signoff)
+ fred.add(commit_map[patchid], cmsg.message)
+ logger.info('---')
+ args = fr.FilteringOptions.parse_args(['--force', '--quiet', '--refs', f'{cover_commit}..HEAD'])
+ args.refs = [f'{cover_commit}..HEAD']
+ frf = fr.RepoFilter(args, commit_callback=fred.callback)
+ logger.info('Invoking git-filter-repo to update trailers.')
+ frf.run()
+ logger.info('Trailers updated.')
+
+
+def get_addresses_from_cmd(cmdargs: List[str], msgbytes: bytes) -> List[Tuple[str, str]]:
+ ecode, out, err = b4._run_command(cmdargs, stdin=msgbytes) # noqa
+ if ecode > 0:
+ logger.critical('CRITICAL: Running %s failed:', ' '.join(cmdargs))
+ logger.critical(err.decode())
+ raise RuntimeError('Running command failed: %s' % ' '.join(cmdargs))
+ addrs = out.strip().decode()
+ if not addrs:
+ return list()
+ return utils.getaddresses(addrs.split('\n'))
+
+
+def get_series_details(cover_commit: str) -> Tuple[str, str, str]:
+ # Not sure if we can reasonably expect all automation to handle this correctly
+ # gitargs = ['describe', '--long', f'{cover_commit}~1']
+ gitargs = ['rev-parse', f'{cover_commit}~1']
+ lines = b4.git_get_command_lines(None, gitargs)
+ base_commit = lines[0]
+ gitargs = ['shortlog', f'{cover_commit}..']
+ ecode, shortlog = b4.git_run_command(None, gitargs)
+ gitargs = ['diff', '--stat', f'{cover_commit}..']
+ ecode, diffstat = b4.git_run_command(None, gitargs)
+ return base_commit, shortlog.rstrip(), diffstat.rstrip()
+
+
+def send(cover_commit: str, cmdargs: argparse.Namespace) -> None:
+ # Check if the cover letter has 'EDITME' in it
+ cover, tracking = load_cover(cover_commit, strip_comments=True)
+ if 'EDITME' in cover:
+ logger.critical('CRITICAL: Looks like the cover letter needs to be edited first.')
+ logger.info('---')
+ logger.info(cover)
+ logger.info('---')
+ sys.exit(1)
+
+ config = b4.get_main_config()
+ cover_template = DEFAULT_COVER_TEMPLATE
+ if config.get('submit-cover-template'):
+ # Try to load this template instead
+ try:
+ cover_template = b4.read_template(config['submit-cover-template'])
+ except FileNotFoundError:
+ logger.critical('ERROR: submit-cover-template says to use %s, but it does not exist',
+ config['submit-cover-template'])
+ sys.exit(2)
+
+ # Generate the patches and collect all the addresses from trailers
+ parts = b4.LoreMessage.get_body_parts(cover)
+ trailers = set()
+ trailers.update(parts[2])
+
+ # Put together the cover letter
+ csubject, cbody = cover.split('\n', maxsplit=1)
+ base_commit, shortlog, diffstat = get_series_details(cover_commit)
+ change_id = tracking['series'].get('change-id')
+ revision = tracking['series'].get('revision')
+ tptvals = {
+ 'subject': csubject,
+ 'cover': cbody.strip(),
+ 'shortlog': shortlog,
+ 'diffstat': diffstat,
+ 'change_id': change_id,
+ 'base_commit': base_commit,
+ 'signature': b4.get_email_signature(),
+ }
+ body = Template(cover_template.lstrip()).safe_substitute(tptvals)
+ cmsg = email.message.EmailMessage()
+ cmsg.set_payload(body)
+ cmsg.add_header('Subject', csubject)
+ if cmdargs.prefixes:
+ prefixes = list(cmdargs.prefixes)
+ else:
+ prefixes = list()
+
+ prefixes.append(f'v{revision}')
+ seriests = int(time.time())
+ usercfg = b4.get_user_config()
+ myemail = usercfg.get('email')
+ myname = usercfg.get('name')
+ if myemail:
+ msgdomain = re.sub(r'^[^@]*@', '', myemail)
+ else:
+ # Use the hostname of the system
+ import platform
+ msgdomain = platform.node()
+ chunks = change_id.rsplit('-', maxsplit=1)
+ stablepart = chunks[0]
+ # Message-IDs must not be predictable to avoid stuffing attacks
+ randompart = uuid.uuid4().hex[:12]
+ msgid_tpt = f'<{stablepart}-v{revision}-%s-{randompart}@{msgdomain}>'
+
+ try:
+ patches = b4.git_range_to_patches(None, cover_commit, 'HEAD',
+ covermsg=cmsg, prefixes=prefixes,
+ msgid_tpt=msgid_tpt,
+ seriests=seriests,
+ mailfrom=(myname, myemail))
+ except RuntimeError as ex:
+ logger.critical('CRITICAL: Failed to convert range to patches: %s', ex)
+ sys.exit(1)
+
+ logger.info('Converted the branch to %s patches', len(patches)-1)
+ seen = set()
+ todests = list()
+ if config.get('submit-to'):
+ for pair in utils.getaddresses([config.get('submit-to')]):
+ if pair[1] not in seen:
+ seen.add(pair[1])
+ todests.append(pair)
+ ccdests = list()
+ if config.get('submit-cc'):
+ for pair in utils.getaddresses([config.get('submit-cc')]):
+ if pair[1] not in seen:
+ seen.add(pair[1])
+ ccdests.append(pair)
+ excludes = set()
+ # These override config values
+ if cmdargs.to:
+ todests = [('', x) for x in cmdargs.to]
+ seen.update(set(cmdargs.to))
+ if cmdargs.cc:
+ ccdests = [('', x) for x in cmdargs.cc]
+ seen.update(set(cmdargs.cc))
+
+ if not cmdargs.no_auto_to_cc:
+ logger.info('Populating the To: and Cc: fields with automatically collected addresses')
+
+ # Use a sane tocmd and cccmd for the kernel
+ # TODO: make it definable in the config
+ tocmdstr = tocmd = None
+ cccmdstr = cccmd = None
+ topdir = b4.git_get_toplevel()
+ getm = os.path.join(topdir, 'scripts', 'get_maintainer.pl')
+ if os.access(getm, os.X_OK):
+ logger.debug('Using kernel get_maintainer.pl for to and cc list')
+ tocmdstr = f'{getm} --nogit --nogit-fallback --nogit-chief-penguins --norolestats --nol'
+ cccmdstr = f'{getm} --nogit --nogit-fallback --nogit-chief-penguins --norolestats --nom'
+ if tocmdstr:
+ sp = shlex.shlex(tocmdstr, posix=True)
+ sp.whitespace_split = True
+ tocmd = list(sp)
+ if cccmdstr:
+ sp = shlex.shlex(cccmdstr, posix=True)
+ sp.whitespace_split = True
+ cccmd = list(sp)
+
+ seen = set()
+ # Go through them again to make to/cc headers
+ for commit, msg in patches:
+ if not msg:
+ continue
+ body = msg.get_payload()
+ parts = b4.LoreMessage.get_body_parts(body)
+ trailers.update(parts[2])
+ msgbytes = msg.as_bytes()
+ if tocmd:
+ for pair in get_addresses_from_cmd(tocmd, msgbytes):
+ if pair[1] not in seen:
+ seen.add(pair[1])
+ todests.append(pair)
+ if cccmd:
+ for pair in get_addresses_from_cmd(cccmd, msgbytes):
+ if pair[1] not in seen:
+ seen.add(pair[1])
+ ccdests.append(pair)
+
+ # add addresses seen in trailers
+ for trailer in trailers:
+ if '@' in trailer[1]:
+ for pair in utils.getaddresses([trailer[1]]):
+ if pair[1] not in seen:
+ seen.add(pair[1])
+ ccdests.append(pair)
+
+ excludes = b4.get_excluded_addrs()
+ if cmdargs.not_me_too:
+ excludes.add(myemail)
+
+ allto = list()
+ allcc = list()
+ alldests = set()
+
+ if todests:
+ allto = b4.cleanup_email_addrs(todests, excludes, None)
+ alldests.update(set([x[1] for x in allto]))
+ if ccdests:
+ allcc = b4.cleanup_email_addrs(ccdests, excludes, None)
+ alldests.update(set([x[1] for x in allcc]))
+
+ if not len(allto):
+ # Move all cc's into the To field if there's nothing in "To"
+ allto = list(allcc)
+ allcc = list()
+
+ if cmdargs.output_dir:
+ pathlib.Path(cmdargs.output_dir).mkdir(parents=True, exist_ok=True)
+ for commit, msg in patches:
+ if not msg:
+ continue
+ msg.add_header('To', b4.format_addrs(allto))
+ if allcc:
+ msg.add_header('Cc', b4.format_addrs(allcc))
+ msg.set_charset('utf-8')
+ msg.replace_header('Content-Transfer-Encoding', '8bit')
+ msg.policy = email.policy.EmailPolicy(utf8=True, cte_type='8bit')
+ subject = msg.get('Subject', '')
+ ls = b4.LoreSubject(subject)
+ filen = '%s.patch' % ls.get_slug(sep='-')
+ with open(os.path.join(cmdargs.output_dir, filen), 'w') as fh:
+ fh.write(msg.as_string(unixfrom=True, maxheaderlen=80))
+ logger.info(' %s', filen)
+ return
+
+ # And now we go through each message to set addressees and send them off
+ sign = True
+ if cmdargs.no_sign or config.get('submit-no-sign', '').lower() in {'yes', 'true', 'y'}:
+ sign = False
+ identity = config.get('sendemail-identity')
+ try:
+ smtp, fromaddr = b4.get_smtp(identity, dryrun=cmdargs.dryrun)
+ except Exception as ex: # noqa
+ logger.critical('Failed to configure the smtp connection:')
+ logger.critical(ex)
+ sys.exit(1)
+
+ counter = 0
+ cover_msgid = None
+ # TODO: Need to send obsoleted-by follow-ups, just need to figure out where.
+ for commit, msg in patches:
+ if not msg:
+ continue
+ if cover_msgid is None:
+ cover_msgid = b4.LoreMessage.get_clean_msgid(msg)
+ msg.add_header('To', b4.format_addrs(allto))
+ if allcc:
+ msg.add_header('Cc', b4.format_addrs(allcc))
+ logger.info(' %s', msg.get('Subject'))
+ if b4.send_smtp(smtp, msg, fromaddr=fromaddr, destaddrs=alldests, patatt_sign=sign,
+ dryrun=cmdargs.dryrun):
+ counter += 1
+
+ logger.info('---')
+ if cmdargs.dryrun:
+ logger.info('DRYRUN: Would have sent %s messages', counter)
+ return
+ else:
+ logger.info('Sent %s messages', counter)
+
+ if not cover_msgid:
+ return
+
+ logger.info('Recording series message-id in cover letter tracking')
+ cover, tracking = load_cover(cover_commit, strip_comments=False)
+ vrev = f'v{revision}'
+ if 'history' not in tracking['series']:
+ tracking['series']['history'] = dict()
+ if vrev not in tracking['series']['history']:
+ tracking['series']['history'][vrev] = list()
+ tracking['series']['history'][vrev].append(cover_msgid)
+ update_cover(cover_commit, cover, tracking)
+
+
+def reroll(cover_commit: str, cmdargs: argparse.Namespace) -> None:
+ cover, tracking = load_cover(cover_commit, strip_comments=False)
+ oldrev = tracking['series']['revision']
+ newrev = oldrev + 1
+ tracking['series']['revision'] = newrev
+ sections = cover.split('---\n')
+ vrev = f'v{oldrev}'
+ if 'history' in tracking['series'] and vrev in tracking['series']['history']:
+ # Use the latest link we have
+ config = b4.get_main_config()
+ oldrev_link = config.get('linkmask') % tracking['series']['history'][vrev][-1]
+ else:
+ oldrev_link = 'EDITME (not found in tracking)'
+ tptvals = {
+ 'oldrev': oldrev,
+ 'newrev': newrev,
+ 'oldrev_link': oldrev_link,
+ }
+ prepend = Template(DEFAULT_CHANGELOG_TEMPLATE.lstrip()).safe_substitute(tptvals)
+ found = False
+ new_sections = list()
+ for section in sections:
+ if re.search(r'^changes in v\d+', section, flags=re.I | re.M):
+ # This is our section
+ new_sections.append(prepend + section)
+ found = True
+ else:
+ new_sections.append(section)
+ if found:
+ new_cover = '---\n'.join(new_sections)
+ else:
+ new_cover = cover + '\n\n---\n' + prepend
+ logger.info('Created new revision v%s', newrev)
+ logger.info('Updating cover letter with templated changelog entries.')
+ update_cover(cover_commit, new_cover, tracking)
+ logger.info('You may now edit the cover letter using "b4 submit --edit-cover"')
+
+
+def main(cmdargs: argparse.Namespace) -> None:
+ if not can_gfr:
+ logger.critical('ERROR: b4 submit requires git-filter-repo. You should be able')
+ logger.critical(' to install it from your distro packages, or from pip.')
+ sys.exit(1)
+
+ config = b4.get_main_config()
+ if 'submit-endpoint' not in config:
+ config['submit-endpoint'] = 'https://lkml.kernel.org/_b4_submit'
+
+ if cmdargs.new_series_name:
+ start_new_series(cmdargs)
+
+ if not check_our_branch():
+ return
+
+ cover_commit = find_cover_commit()
+ if not cover_commit:
+ logger.critical('CRITICAL: Unable to find cover letter commit')
+ sys.exit(1)
+
+ if cmdargs.edit_cover:
+ edit_cover(cover_commit)
+ return
+
+ elif cmdargs.update_trailers:
+ update_trailers(cover_commit, cmdargs)
+ return
+
+ elif cmdargs.send:
+ send(cover_commit, cmdargs)
+ return
+
+ elif cmdargs.reroll:
+ reroll(cover_commit, cmdargs)
+ return
+
+ logger.critical('No action requested, please see "b4 submit --help"')
+ sys.exit(1)
+
+ # if not can_patatt:
+ # logger.critical('ERROR: b4 submit requires patatt library. See:')
+ # logger.critical(' https://git.kernel.org/pub/scm/utils/patatt/patatt.git/about/')
+ # sys.exit(1)
+
+ # if cmdargs.web_auth_new:
+ # auth_new(cmdargs)
+ #
+ # if cmdargs.web_auth_verify:
+ # auth_verify(cmdargs)
diff --git a/b4/ty.py b/b4/ty.py
index 3c607b0..dae5206 100644
--- a/b4/ty.py
+++ b/b4/ty.py
@@ -7,13 +7,13 @@ __author__ = 'Konstantin Ryabitsev <konstantin@linuxfoundation.org>'
import os
import sys
+
import b4
import re
import email
import email.message
import email.policy
import json
-import fnmatch
from string import Template
from email import utils
@@ -51,8 +51,6 @@ ${signature}
MY_COMMITS = None
# Used to track additional branch info
BRANCH_INFO = None
-# Used to track mailmap replacements
-MAILMAP_INFO = dict()
def git_get_merge_id(gitdir, commit_id, branch=None):
@@ -76,63 +74,21 @@ def git_get_commit_message(gitdir, rev):
return b4.git_run_command(gitdir, args)
-def fix_dests(addresses, excludes, gitdir):
- global MAILMAP_INFO
- for entry in list(addresses):
- # Check if it's in excludes
- removed = False
- for exclude in excludes:
- if fnmatch.fnmatch(entry[1], exclude):
- logger.debug('Removed %s due to matching %s', entry[1], exclude)
- addresses.remove(entry)
- removed = True
- break
- if removed:
- continue
- # Check if it's mailmap-replaced
- if entry[1] in MAILMAP_INFO:
- if MAILMAP_INFO[entry[1]]:
- addresses.remove(entry)
- addresses.append(MAILMAP_INFO[entry[1]])
- continue
- logger.debug('Checking if %s is mailmap-replaced', entry[1])
- args = ['check-mailmap', f'<{entry[1]}>']
- ecode, out = b4.git_run_command(gitdir, args)
- if ecode != 0:
- MAILMAP_INFO[entry[1]] = None
- continue
- replacement = utils.getaddresses([out.strip()])
- if len(replacement) == 1:
- if entry == replacement[0]:
- MAILMAP_INFO[entry[1]] = None
- continue
- logger.debug('Replaced %s with mailmap-updated %s', entry[1], replacement[0][1])
- MAILMAP_INFO[entry[1]] = replacement[0]
- addresses.remove(entry)
- addresses.append(replacement[0])
-
- return addresses
-
-
def make_reply(reply_template, jsondata, gitdir):
body = Template(reply_template).safe_substitute(jsondata)
# Conform to email standards
body = body.replace('\n', '\r\n')
msg = email.message_from_string(body)
msg['From'] = '%s <%s>' % (jsondata['myname'], jsondata['myemail'])
- config = b4.get_main_config()
- # Remove ourselves and original sender from allto or allcc
- excludes = [jsondata['myemail'], jsondata['fromemail']]
- c_excludes = config.get('email-exclude')
- if c_excludes:
- for entry in c_excludes.split(','):
- excludes.append(entry.strip())
-
- allto = fix_dests(utils.getaddresses([jsondata['to']]), excludes, gitdir)
- allcc = fix_dests(utils.getaddresses([jsondata['cc']]), excludes, gitdir)
-
- # Add original sender to the To
- newto = fix_dests([(jsondata['fromname'], jsondata['fromemail'])], excludes[2:], gitdir)
+ excludes = b4.get_excluded_addrs()
+ newto = b4.cleanup_email_addrs([(jsondata['fromname'], jsondata['fromemail'])], excludes, gitdir)
+
+ # Exclude ourselves and original sender from allto or allcc
+ excludes.add(jsondata['myemail'])
+ excludes.add(jsondata['fromemail'])
+ allto = b4.cleanup_email_addrs(utils.getaddresses([jsondata['to']]), excludes, gitdir)
+ allcc = b4.cleanup_email_addrs(utils.getaddresses([jsondata['cc']]), excludes, gitdir)
+
if newto:
allto += newto
@@ -428,6 +384,7 @@ def send_messages(listing, branch, cmdargs):
gitdir = cmdargs.gitdir
datadir = b4.get_data_dir()
fromaddr = None
+ smtp = None
if cmdargs.sendemail:
# See if we have sendemail-identity set
config = b4.get_main_config()
@@ -445,13 +402,7 @@ def send_messages(listing, branch, cmdargs):
os.mkdir(cmdargs.outdir)
usercfg = b4.get_user_config()
- # Do we have a .signature file?
- sigfile = os.path.join(str(Path.home()), '.signature')
- if os.path.exists(sigfile):
- with open(sigfile, 'r', encoding='utf-8') as fh:
- signature = fh.read()
- else:
- signature = '%s <%s>' % (usercfg['name'], usercfg['email'])
+ signature = b4.get_email_signature()
outgoing = 0
msgids = list()
@@ -477,29 +428,10 @@ def send_messages(listing, branch, cmdargs):
msg.set_charset('utf-8')
msg.replace_header('Content-Transfer-Encoding', '8bit')
if cmdargs.sendemail:
- msg.policy = email.policy.EmailPolicy(utf8=True, cte_type='8bit')
- emldata = msg.as_string()
if not fromaddr:
fromaddr = jsondata['myemail']
- if cmdargs.dryrun:
- logger.info('--- DRYRUN: message follows ---')
- logger.info('\t' + emldata.replace('\n', '\n\t'))
- logger.info('--- DRYRUN: message ends ---')
- else:
- alldests = email.utils.getaddresses([str(x) for x in msg.get_all('to', [])])
- alldests += email.utils.getaddresses([str(x) for x in msg.get_all('cc', [])])
- sendto = {x[1] for x in alldests}
- logger.info(' Sending: %s', msg.get('subject'))
- # Python's sendmail implementation seems to have some logic problems where 8-bit messages are involved.
- # As far as I understand the difference between 8BITMIME (supported by nearly all smtp servers) and
- # SMTPUTF8 (supported by very few), SMTPUTF8 is only required when the addresses specified in either
- # "MAIL FROM" or "RCPT TO" lines of the _protocol exchange_ themselves have 8bit characters, not
- # anything in the From: header of the DATA payload. Python's smtplib seems to always try to encode
- # strings as ascii regardless of what was policy was specified.
- # Work around this by getting the payload as string and then encoding to bytes ourselves.
- # Force compliant eols
- emldata = re.sub(r'(?:\r\n|\n|\r(?!\n))', '\r\n', emldata) # noqa
- smtp.sendmail(fromaddr, sendto, emldata.encode()) # noqa
+ logger.info(' Sending: %s', msg.get('subject'))
+ b4.send_smtp(smtp, msg, fromaddr, dryrun=cmdargs.dryrun)
else:
slug_from = re.sub(r'\W', '_', jsondata['fromemail'])
slug_subj = re.sub(r'\W', '_', jsondata['subject'])
diff --git a/misc/send-receive.py b/misc/send-receive.py
new file mode 100644
index 0000000..7b47798
--- /dev/null
+++ b/misc/send-receive.py
@@ -0,0 +1,170 @@
+#!/usr/bin/env python3
+# noinspection PyUnresolvedReferences
+import falcon
+import os
+import logging
+import json
+import uuid
+import sqlalchemy as sa
+
+from nacl.signing import VerifyKey
+from nacl.encoding import Base64Encoder
+from nacl.exceptions import BadSignatureError
+
+DB_VERSION = 1
+
+logger = logging.getLogger('b4-send-receive')
+
+
+# noinspection PyBroadException, PyMethodMayBeStatic
+class SendReceiveListener(object):
+
+ def __init__(self, _engine):
+ self._engine = _engine
+        # The sqlite backend auto-creates its tables and is only meant for local testing
+ if self._engine.driver == 'pysqlite':
+ self._init_sa_db()
+
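+    # Bootstrap the schema: a meta table holding the schema version, an auth
+    # table of verified (name, email, pubkey) records, and a challenge table
+    # of pending enrollment challenges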
+ def _init_sa_db(self):
+ logger.info('Creating tables')
+ conn = self._engine.connect()
+ md = sa.MetaData()
+ meta = sa.Table('meta', md,
+ sa.Column('version', sa.Integer())
+ )
+ auth = sa.Table('auth', md,
+ sa.Column('auth_id', sa.Integer(), primary_key=True),
+ sa.Column('created', sa.DateTime(), nullable=False, server_default=sa.sql.func.now()),
+ sa.Column('email', sa.Text(), nullable=False),
+ sa.Column('name', sa.Text(), nullable=False),
+ sa.Column('pubkey', sa.Text(), nullable=False),
+ )
+ sa.Index('idx_email_pubkey', auth.c.pubkey, auth.c.email, unique=True)
+ challenge = sa.Table('challenge', md,
+ sa.Column('challenge_id', sa.Integer(), primary_key=True),
+ sa.Column('created', sa.DateTime(), nullable=False, server_default=sa.sql.func.now()),
+ sa.Column('pubkey', sa.Text(), nullable=False),
+ sa.Column('email', sa.Text(), nullable=False),
+ sa.Column('challenge', sa.Text(), nullable=False),
+ )
+ sa.Index('idx_uniq_challenge', challenge.c.pubkey, challenge.c.email, challenge.c.challenge, unique=True)
+ md.create_all(self._engine)
+ q = sa.insert(meta).values(version=DB_VERSION)
+ conn.execute(q)
+
+ def on_get(self, req, resp): # noqa
+ resp.status = falcon.HTTP_200
+ resp.content_type = falcon.MEDIA_TEXT
+ resp.text = "We don't serve GETs here\n"
+
+ def send_error(self, resp, message):
+ resp.status = falcon.HTTP_500
+ resp.text = json.dumps({'result': 'error', 'message': message})
+
+ def send_success(self, resp, message):
+ resp.status = falcon.HTTP_200
+ resp.text = json.dumps({'result': 'success', 'message': message})
+
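+    # Enrollment step 1: record a fresh challenge for the (email, pubkey) pair;
+    # the challenge is meant to be mailed to the requester (see the TODO below)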
+ def auth_new(self, jdata, resp):
+ # Is it already authorized?
+ conn = self._engine.connect()
+ md = sa.MetaData()
+ t_auth = sa.Table('auth', md, autoload=True, autoload_with=self._engine)
+ email = jdata.get('email')
+ pubkey = jdata.get('key')
+ q = sa.select([t_auth.c.auth_id]).where(t_auth.c.email == email, t_auth.c.pubkey == pubkey)
+ rp = conn.execute(q)
+ if len(rp.fetchall()):
+ self.send_error(resp, message='%s:%s is already authorized' % (email, pubkey))
+ return
+        # Delete any existing challenges for this email/pubkey pair and create a new one
+ t_challenge = sa.Table('challenge', md, autoload=True, autoload_with=self._engine)
+ q = sa.delete(t_challenge).where(t_challenge.c.email == email, t_challenge.c.pubkey == pubkey)
+ conn.execute(q)
+ # create new challenge
+        cstr = str(uuid.uuid4())
+ q = sa.insert(t_challenge).values(pubkey=pubkey, email=email, challenge=cstr)
+ conn.execute(q)
+ # TODO: Actual mail sending
+ logger.info('Challenge: %s', cstr)
+ self.send_success(resp, message='Challenge generated')
+
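+    # Enrollment step 2: check that sigdata carries a valid signature from the
+    # pubkey recorded with this challenge; if so, store the key in the auth
+    # table and drop the pending challenge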
+ def auth_verify(self, jdata, resp):
+ # Do we have a record for this email/challenge?
+ conn = self._engine.connect()
+ md = sa.MetaData()
+ t_challenge = sa.Table('challenge', md, autoload=True, autoload_with=self._engine)
+ email = jdata.get('email', '')
+ challenge = jdata.get('challenge', '')
+ sigdata = jdata.get('sigdata', '')
+ q = sa.select([t_challenge.c.pubkey]).where(t_challenge.c.email == email, t_challenge.c.challenge == challenge)
+ rp = conn.execute(q)
+ qres = rp.fetchall()
+ if not len(qres):
+ self.send_error(resp, message='No such challenge for %s' % email)
+ return
+ pubkey = qres[0][0]
+ vk = VerifyKey(pubkey.encode(), encoder=Base64Encoder)
+ try:
+ vk.verify(sigdata.encode(), encoder=Base64Encoder)
+ except BadSignatureError:
+ self.send_error(resp, message='Could not validate signature for %s' % email)
+ return
+ # validated at this point, so record this as valid auth
+ name = jdata.get('name')
+ t_auth = sa.Table('auth', md, autoload=True, autoload_with=self._engine)
+ q = sa.insert(t_auth).values(pubkey=pubkey, name=name, email=email)
+ conn.execute(q)
+ q = sa.delete(t_challenge).where(t_challenge.c.email == email, t_challenge.c.challenge == challenge)
+ conn.execute(q)
+ self.send_success(resp, message='Challenge verified')
+
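+    # All requests arrive as JSON POSTs; the 'action' field selects the handler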
+ def on_post(self, req, resp):
+ if not req.content_length:
+ resp.status = falcon.HTTP_500
+ resp.content_type = falcon.MEDIA_TEXT
+ resp.text = 'Payload required\n'
+ return
+ raw = req.bounded_stream.read()
+ try:
+ jdata = json.loads(raw)
+        except Exception:
+ resp.status = falcon.HTTP_500
+ resp.content_type = falcon.MEDIA_TEXT
+ resp.text = 'Failed to parse the request\n'
+ return
+ logger.info(jdata)
+ action = jdata.get('action')
+ if action == 'auth-new':
+ self.auth_new(jdata, resp)
+        elif action == 'auth-verify':
+ self.auth_verify(jdata, resp)
+ else:
+ resp.status = falcon.HTTP_500
+ resp.content_type = falcon.MEDIA_TEXT
+ resp.text = 'Unknown action: %s\n' % action
+ return
+
+
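+# WSGI entry point; the database URL and the mount point come from the DB_URL
+# and MOUNTPOINT environment variables (in-memory sqlite and /_b4_submit by
+# default)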
+app = falcon.App()
+dburl = os.getenv('DB_URL', 'sqlite:///:memory:')
+engine = sa.create_engine(dburl)
+srl = SendReceiveListener(engine)
+mp = os.getenv('MOUNTPOINT', '/_b4_submit')
+app.add_route(mp, srl)
+
+
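+# For local testing, the module can be run directly; it serves the endpoint on
+# port 8000 via wsgiref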
+if __name__ == '__main__':
+ from wsgiref.simple_server import make_server
+ logger.setLevel(logging.DEBUG)
+ ch = logging.StreamHandler()
+ formatter = logging.Formatter('%(message)s')
+ ch.setFormatter(formatter)
+ ch.setLevel(logging.DEBUG)
+ logger.addHandler(ch)
+
+ with make_server('', 8000, app) as httpd:
+ logger.info('Serving on port 8000...')
+
+ # Serve until process is killed
+ httpd.serve_forever()
diff --git a/misc/test.sqlite b/misc/test.sqlite
new file mode 100644
index 0000000..fc7a99b
--- /dev/null
+++ b/misc/test.sqlite
Binary files differ
diff --git a/patatt b/patatt
-Subproject d1279c4ca7f29b6f98d784c7bf053c3b2ff1157
+Subproject 0eb41be65707a1e156a59fd25ea9824c1a9e95c
diff --git a/requirements.txt b/requirements.txt
index 166914b..c52cbed 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -3,3 +3,4 @@ requests>=2.24,<3.0
dnspython>=2.1,<3.0
dkimpy>=1.0,<2.0
patatt>=0.5,<2.0
+git-filter-repo>=2.30,<3.0