Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Refactored some logging statements to use lazy % formatting #3326

Merged
merged 7 commits into the base branch from the contributor's branch
Mar 7, 2023
27 changes: 14 additions & 13 deletions scripts/regressor_finder.py
Original file line number Diff line number Diff line change
Expand Up @@ -64,14 +64,16 @@ def __init__(

with ThreadPoolExecutorResult(max_workers=3) as executor:
if self.git_repo_url is not None:
logger.info(f"Cloning {self.git_repo_url} to {self.git_repo_dir}...")
logger.info("Cloning %s to %s...", self.git_repo_url, self.git_repo_dir)
executor.submit(
self.clone_git_repo, self.git_repo_url, self.git_repo_dir
)

if self.tokenized_git_repo_url is not None:
logger.info(
f"Cloning {self.tokenized_git_repo_url} to {self.tokenized_git_repo_dir}..."
"Cloning %s to %s...",
self.tokenized_git_repo_url,
self.tokenized_git_repo_dir,
)
executor.submit(
self.clone_git_repo,
Expand All @@ -93,9 +95,9 @@ def clone_git_repo(self, repo_url, repo_dir):
)
)()

logger.info(f"{repo_dir} cloned")
logger.info("%s cloned", repo_dir)

logger.info(f"Fetching {repo_dir}")
logger.info("Fetching %s", repo_dir)

tenacity.retry(
wait=tenacity.wait_exponential(multiplier=1, min=16, max=64),
Expand All @@ -109,7 +111,7 @@ def clone_git_repo(self, repo_url, repo_dir):
)
)()

logger.info(f"{repo_dir} fetched")
logger.info("%s fetched", repo_dir)

def init_mapping(self):
if self.tokenized_git_repo_url is not None:
Expand Down Expand Up @@ -158,20 +160,19 @@ def get_commits_to_ignore(self) -> None:

commits_to_ignore.append({"rev": backedout, "type": "backedout"})

logger.info(f"{len(commits_to_ignore)} commits to ignore...")
logger.info("%d commits to ignore...", len(commits_to_ignore))

# Skip backed-out commits which aren't in the repository (commits which landed *before* the Mercurial history
# started, and backouts which mentioned a bad hash in their message).
commits_to_ignore = [
c for c in commits_to_ignore if c["rev"][:12] in all_commits
]

logger.info(f"{len(commits_to_ignore)} commits to ignore...")
logger.info("%d commits to ignore...", len(commits_to_ignore))

logger.info(
"...of which {} are backed-out".format(
sum(1 for commit in commits_to_ignore if commit["type"] == "backedout")
)
"...of which %d are backed-out",
sum(1 for commit in commits_to_ignore if commit["type"] == "backedout"),
)

db.write(IGNORED_COMMITS_DB, commits_to_ignore)
Expand All @@ -194,7 +195,7 @@ def find_bug_fixing_commits(self) -> None:
for bug_fixing_commit in db.read(BUG_FIXING_COMMITS_DB)
)
logger.info(
f"Already classified {len(prev_bug_fixing_commits_nodes)} commits..."
"Already classified %d commits...", len(prev_bug_fixing_commits_nodes)
)

# TODO: Switch to the pure Defect model, as it's better in this case.
Expand Down Expand Up @@ -352,7 +353,7 @@ def mercurial_to_git(revs):
)
f.writelines("{}\n".format(git_hash) for git_hash in git_hashes)

logger.info(f"{len(bug_fixing_commits)} commits to analyze")
logger.info("%d commits to analyze", len(bug_fixing_commits))

# Skip already found bug-introducing commits.
bug_fixing_commits = [
Expand Down Expand Up @@ -393,7 +394,7 @@ def _init(git_repo_dir):
thread_local.git.get_head()

def find_bic(bug_fixing_commit):
logger.info("Analyzing {}...".format(bug_fixing_commit["rev"]))
logger.info("Analyzing %s...", bug_fixing_commit["rev"])

git_fix_revision = tuple(mercurial_to_git([bug_fixing_commit["rev"]]))[0]

Expand Down