[devscripts] make_changelog: Fix changelog grouping and add networking group (#8124)

Authored by: Grub4K

commit 30ba233d4c
parent 836e06d246
@@ -68,6 +68,25 @@
     {
         "action": "change",
         "when": "b03fa7834579a01cc5fba48c0e73488a16683d48",
-        "short": "[ie/twitter] Revert 92315c03774cfabb3a921884326beb4b981f786b"
+        "short": "[ie/twitter] Revert 92315c03774cfabb3a921884326beb4b981f786b",
+        "authors": ["pukkandan"]
+    },
+    {
+        "action": "change",
+        "when": "fcd6a76adc49d5cd8783985c7ce35384b72e545f",
+        "short": "[test] Add tests for socks proxies (#7908)",
+        "authors": ["coletdjnz"]
+    },
+    {
+        "action": "change",
+        "when": "4bf912282a34b58b6b35d8f7e6be535770c89c76",
+        "short": "[rh:urllib] Remove dot segments during URL normalization (#7662)",
+        "authors": ["coletdjnz"]
+    },
+    {
+        "action": "change",
+        "when": "59e92b1f1833440bb2190f847eb735cf0f90bc85",
+        "short": "[rh:urllib] Simplify gzip decoding (#7611)",
+        "authors": ["Grub4K"]
     }
 ]
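Note (not part of the diff): each "change" entry pairs the full commit hash in "when" with a replacement "short" description and "authors" list. A minimal, self-contained sketch of applying such an entry to an in-memory commit table; the table layout and loop are assumptions for illustration only, the real consumer is apply_overrides() further down:

import json

# Hypothetical commit table keyed by full hash (illustration only).
commits = {
    'b03fa7834579a01cc5fba48c0e73488a16683d48': {'short': 'old summary', 'authors': []},
}

overrides = json.loads('''[
    {
        "action": "change",
        "when": "b03fa7834579a01cc5fba48c0e73488a16683d48",
        "short": "[ie/twitter] Revert 92315c03774cfabb3a921884326beb4b981f786b",
        "authors": ["pukkandan"]
    }
]''')

for override in overrides:
    when = override.get('when')
    if override.get('action') == 'change' and when in commits:
        # Swap in the overridden summary and author list for that commit.
        commits[when]['short'] = override.get('short', commits[when]['short'])
        commits[when]['authors'] = override.get('authors', commits[when]['authors'])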
@@ -31,35 +31,27 @@ class CommitGroup(enum.Enum):
     EXTRACTOR = 'Extractor'
     DOWNLOADER = 'Downloader'
     POSTPROCESSOR = 'Postprocessor'
+    NETWORKING = 'Networking'
     MISC = 'Misc.'
 
-    @classmethod
-    @property
-    def ignorable_prefixes(cls):
-        return ('core', 'downloader', 'extractor', 'misc', 'postprocessor', 'upstream')
-
     @classmethod
     @lru_cache
-    def commit_lookup(cls):
+    def subgroup_lookup(cls):
         return {
             name: group
             for group, names in {
-                cls.PRIORITY: {'priority'},
                 cls.CORE: {
                     'aes',
                     'cache',
                     'compat_utils',
                     'compat',
                     'cookies',
-                    'core',
                     'dependencies',
                     'formats',
                     'jsinterp',
-                    'networking',
                     'outtmpl',
                     'plugins',
                     'update',
-                    'upstream',
                     'utils',
                 },
                 cls.MISC: {
@@ -67,23 +59,40 @@ def commit_lookup(cls):
                     'cleanup',
                     'devscripts',
                     'docs',
-                    'misc',
                     'test',
                 },
-                cls.EXTRACTOR: {'extractor', 'ie'},
-                cls.DOWNLOADER: {'downloader', 'fd'},
-                cls.POSTPROCESSOR: {'postprocessor', 'pp'},
+                cls.NETWORKING: {
+                    'rh',
+                },
             }.items()
             for name in names
         }
 
     @classmethod
-    def get(cls, value):
-        result = cls.commit_lookup().get(value)
-        if result:
-            logger.debug(f'Mapped {value!r} => {result.name}')
+    @lru_cache
+    def group_lookup(cls):
+        result = {
+            'fd': cls.DOWNLOADER,
+            'ie': cls.EXTRACTOR,
+            'pp': cls.POSTPROCESSOR,
+            'upstream': cls.CORE,
+        }
+        result.update({item.name.lower(): item for item in iter(cls)})
         return result
 
+    @classmethod
+    def get(cls, value: str) -> tuple[CommitGroup | None, str | None]:
+        group, _, subgroup = (group.strip().lower() for group in value.partition('/'))
+
+        result = cls.group_lookup().get(group)
+        if not result:
+            if subgroup:
+                return None, value
+            subgroup = group
+            result = cls.subgroup_lookup().get(subgroup)
+
+        return result, subgroup or None
+
 
 @dataclass
 class Commit:
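To make the new two-stage lookup concrete: group_lookup() resolves the part before a '/' (aliases such as 'fd', 'ie', 'pp', 'upstream' plus the lower-cased member names), while subgroup_lookup() is only consulted when no explicit subgroup was given. A minimal sketch, assuming the script is importable as a module from the repository root:

# Run from the repository root so that devscripts/make_changelog.py is importable (assumption).
from devscripts.make_changelog import CommitGroup

print(CommitGroup.get('ie/twitter'))  # EXTRACTOR group, subgroup 'twitter'
print(CommitGroup.get('rh'))          # no group alias, falls through to subgroup_lookup(): NETWORKING, 'rh'
print(CommitGroup.get('upstream'))    # alias mapped by group_lookup(): CORE, no subgroup
print(CommitGroup.get('foo/bar'))     # unknown group with an explicit subgroup: (None, 'foo/bar')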
@@ -198,19 +207,23 @@ def _prepare_cleanup_misc_items(self, items):
         for commit_infos in cleanup_misc_items.values():
             sorted_items.append(CommitInfo(
                 'cleanup', ('Miscellaneous',), ', '.join(
-                    self._format_message_link(None, info.commit.hash).strip()
+                    self._format_message_link(None, info.commit.hash)
                     for info in sorted(commit_infos, key=lambda item: item.commit.hash or '')),
                 [], Commit(None, '', commit_infos[0].commit.authors), []))
 
         return sorted_items
 
-    def format_single_change(self, info):
-        message = self._format_message_link(info.message, info.commit.hash)
+    def format_single_change(self, info: CommitInfo):
+        message, sep, rest = info.message.partition('\n')
+        if '[' not in message:
+            # If the message doesn't already contain markdown links, try to add a link to the commit
+            message = self._format_message_link(message, info.commit.hash)
+
         if info.issues:
-            message = message.replace('\n', f' ({self._format_issues(info.issues)})\n', 1)
+            message = f'{message} ({self._format_issues(info.issues)})'
 
         if info.commit.authors:
-            message = message.replace('\n', f' by {self._format_authors(info.commit.authors)}\n', 1)
+            message = f'{message} by {self._format_authors(info.commit.authors)}'
 
         if info.fixes:
             fix_message = ', '.join(f'{self._format_message_link(None, fix.hash)}' for fix in info.fixes)
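The reworked format_single_change() splits a multi-line message at the first newline, linkifies and annotates only the first line, then re-attaches the remainder. A standalone sketch of that first-line handling (the repo URL, hash and helper name are illustrative; this is not the actual Changelog class):

REPO_URL = 'https://github.com/yt-dlp/yt-dlp'  # assumption for illustration

def format_single_change_sketch(message, commit_hash, issues=(), authors=()):
    first, sep, rest = message.partition('\n')
    if '[' not in first and commit_hash:
        # Only linkify when the first line does not already contain markdown links
        first = f'[{first}]({REPO_URL}/commit/{commit_hash})'
    if issues:
        issue_links = ', '.join(f'[#{issue}]({REPO_URL}/issues/{issue})' for issue in issues)
        first = f'{first} ({issue_links})'
    if authors:
        first = f'{first} by {", ".join(authors)}'
    return first if not sep else f'{first}{sep}{rest}'

print(format_single_change_sketch(
    'Fix grouping\nFollow-up note stays on its own line',
    'deadbeef', issues=[8124], authors=['Grub4K']))
# -> "[Fix grouping](https://github.com/yt-dlp/yt-dlp/commit/deadbeef) ([#8124](...)) by Grub4K"
#    followed by the untouched second line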
@@ -219,16 +232,14 @@ def format_single_change(self, info):
             if authors != info.commit.authors:
                 fix_message = f'{fix_message} by {self._format_authors(authors)}'
 
-            message = message.replace('\n', f' (With fixes in {fix_message})\n', 1)
+            message = f'{message} (With fixes in {fix_message})'
 
-        return message[:-1]
+        return message if not sep else f'{message}{sep}{rest}'
 
     def _format_message_link(self, message, hash):
         assert message or hash, 'Improperly defined commit message or override'
         message = message if message else hash[:HASH_LENGTH]
-        if not hash:
-            return f'{message}\n'
-        return f'[{message}\n'.replace('\n', f']({self.repo_url}/commit/{hash})\n', 1)
+        return f'[{message}]({self.repo_url}/commit/{hash})' if hash else message
 
     def _format_issues(self, issues):
         return ', '.join(f'[#{issue}]({self.repo_url}/issues/{issue})' for issue in issues)
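With the collapsed _format_message_link(), the result is now a plain markdown commit link, or the bare message when there is no hash. Roughly (standalone sketch; HASH_LENGTH and the repo URL are stated here only for illustration):

REPO_URL = 'https://github.com/yt-dlp/yt-dlp'  # assumption for illustration
HASH_LENGTH = 7  # illustrative; the real constant is defined in make_changelog.py

def format_message_link_sketch(message, commit_hash):
    message = message if message else commit_hash[:HASH_LENGTH]
    return f'[{message}]({REPO_URL}/commit/{commit_hash})' if commit_hash else message

print(format_message_link_sketch(None, 'b03fa7834579a01cc5fba48c0e73488a16683d48'))
# -> [b03fa78](https://github.com/yt-dlp/yt-dlp/commit/b03fa7834579a01cc5fba48c0e73488a16683d48)
print(format_message_link_sketch('Fix grouping', ''))
# -> Fix grouping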
@@ -318,7 +329,7 @@ def _get_commits_and_fixes(self, default_author):
         for commitish, revert_commit in reverts.items():
             reverted = commits.pop(commitish, None)
             if reverted:
-                logger.debug(f'{commit} fully reverted {reverted}')
+                logger.debug(f'{commitish} fully reverted {reverted}')
             else:
                 commits[revert_commit.hash] = revert_commit
 
@@ -337,7 +348,7 @@ def apply_overrides(self, overrides):
         for override in overrides:
             when = override.get('when')
             if when and when not in self and when != self._start:
-                logger.debug(f'Ignored {when!r}, not in commits {self._start!r}')
+                logger.debug(f'Ignored {when!r} override')
                 continue
 
             override_hash = override.get('hash') or when
@@ -365,7 +376,7 @@ def groups(self):
         for commit in self:
             upstream_re = self.UPSTREAM_MERGE_RE.search(commit.short)
             if upstream_re:
-                commit.short = f'[core/upstream] Merged with youtube-dl {upstream_re.group(1)}'
+                commit.short = f'[upstream] Merged with youtube-dl {upstream_re.group(1)}'
 
             match = self.MESSAGE_RE.fullmatch(commit.short)
             if not match:
@@ -410,25 +421,20 @@ def details_from_prefix(prefix):
         if not prefix:
             return CommitGroup.CORE, None, ()
 
-        prefix, _, details = prefix.partition('/')
-        prefix = prefix.strip()
-        details = details.strip()
+        prefix, *sub_details = prefix.split(':')
 
-        group = CommitGroup.get(prefix.lower())
-        if group is CommitGroup.PRIORITY:
-            prefix, _, details = details.partition('/')
+        group, details = CommitGroup.get(prefix)
+        if group is CommitGroup.PRIORITY and details:
+            details = details.partition('/')[2].strip()
 
-        if not details and prefix and prefix not in CommitGroup.ignorable_prefixes:
-            logger.debug(f'Replaced details with {prefix!r}')
-            details = prefix or None
+        if details and '/' in details:
+            logger.error(f'Prefix is overnested, using first part: {prefix}')
+            details = details.partition('/')[0].strip()
 
         if details == 'common':
             details = None
-
-        if details:
-            details, *sub_details = details.split(':')
-        else:
-            sub_details = []
+        elif group is CommitGroup.NETWORKING and details == 'rh':
+            details = 'Request Handler'
 
         return group, details, sub_details
 
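Putting the new prefix parsing together: a commit short like '[rh:urllib] ...' first splits on ':', resolves 'rh' through CommitGroup.get(), and then rewrites the 'rh' subgroup into the 'Request Handler' detail. A standalone re-statement of the logic shown above (illustrative; in the script this lives in the details_from_prefix() helper, and the error logging for overnested prefixes is omitted here):

from devscripts.make_changelog import CommitGroup  # assumes the repository root is on sys.path

def parse_prefix(prefix):
    if not prefix:
        return CommitGroup.CORE, None, ()
    prefix, *sub_details = prefix.split(':')
    group, details = CommitGroup.get(prefix)
    if group is CommitGroup.PRIORITY and details:
        details = details.partition('/')[2].strip()
    if details and '/' in details:
        details = details.partition('/')[0].strip()
    if details == 'common':
        details = None
    elif group is CommitGroup.NETWORKING and details == 'rh':
        details = 'Request Handler'
    return group, details, sub_details

print(parse_prefix('rh:urllib'))   # NETWORKING group, 'Request Handler', sub_details ['urllib']
print(parse_prefix('ie/twitter'))  # EXTRACTOR group, 'twitter', no sub_details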