Migrate to logger.warning usage #1643

Open · wants to merge 1 commit into base: main
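
For context: in Python's standard logging module, Logger.warn is a deprecated alias for Logger.warning. Both emit a WARNING-level record, but the warn spelling also triggers a DeprecationWarning on current Python versions, which is what this rename avoids. A minimal, self-contained sketch of the difference, assuming the module-level logger pattern these files already use (the basicConfig call is illustrative only and not part of the PR):

import logging

logging.basicConfig(level=logging.WARNING)  # illustrative setup so the snippet prints something
logger = logging.getLogger(__name__)

# Deprecated spelling: still logs at WARNING level, but current Python
# versions also emit a DeprecationWarning for Logger.warn.
logger.warn("old spelling, being removed in this PR")

# Preferred spelling, adopted throughout this change.
logger.warning("new spelling")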
8 changes: 4 additions & 4 deletions patchwork/common/client/scm.py
@@ -521,7 +521,7 @@ def find_issue_by_id(self, slug: str, issue_id: int) -> IssueText | None:
                 comments=[issue_comment.body for issue_comment in issue.get_comments()],
             )
         except GithubException as e:
-            logger.warn(f"Failed to get issue: {e}")
+            logger.warning(f"Failed to get issue: {e}")
             return None

def get_pr_by_url(self, url: str) -> GithubPullRequest | None:
@@ -534,7 +534,7 @@ def find_pr_by_id(self, slug: str, pr_id: int) -> GithubPullRequest | None:
             pr = repo.get_pull(pr_id)
             return GithubPullRequest(pr)
         except GithubException as e:
-            logger.warn(f"Failed to get PR: {e}")
+            logger.warning(f"Failed to get PR: {e}")
             return None

def find_prs(
@@ -650,7 +650,7 @@ def find_issue_by_id(self, slug: str, issue_id: int) -> IssueText | None:
                 comments=[note["body"] for note in issue.notes.list()],
             )
         except GitlabError as e:
-            logger.warn(f"Failed to get issue: {e}")
+            logger.warning(f"Failed to get issue: {e}")
             return None

def get_pr_by_url(self, url: str) -> GitlabMergeRequest | None:
@@ -663,7 +663,7 @@ def find_pr_by_id(self, slug: str, pr_id: int) -> GitlabMergeRequest | None:
             mr = project.mergerequests.get(pr_id)
             return GitlabMergeRequest(mr)
         except GitlabError as e:
-            logger.warn(f"Failed to get MR: {e}")
+            logger.warning(f"Failed to get MR: {e}")
             return None

def find_prs(
4 changes: 2 additions & 2 deletions patchwork/steps/CommitChanges/CommitChanges.py
@@ -116,7 +116,7 @@ def __init__(self, inputs: dict):

         self.modified_code_files = inputs["modified_code_files"]
         if len(self.modified_code_files) < 1:
-            logger.warn("No modified files to commit changes for.")
+            logger.warning("No modified files to commit changes for.")
             self.enabled = False

         self.force = inputs.get("force_branch_creation", True)
@@ -134,7 +134,7 @@ def __get_repo_tracked_modified_files(self, repo: Repo) -> set[Path]:
             repo_changed_file = Path(item.a_path)
             possible_ignored_grok = path_filter.get_grok_ignored(repo_changed_file)
             if possible_ignored_grok is not None:
-                logger.warn(f'Ignoring file: {item.a_path} because of "{possible_ignored_grok}" in .gitignore file.')
+                logger.warning(f'Ignoring file: {item.a_path} because of "{possible_ignored_grok}" in .gitignore file.')
                 continue
             repo_changed_files.add(repo_dir_path / repo_changed_file)

4 changes: 2 additions & 2 deletions patchwork/steps/CreatePR/CreatePR.py
@@ -55,11 +55,11 @@ def __init__(self, inputs: dict):
         self.force = bool(inputs.get("force_pr_creation", False))
         self.base_branch = inputs.get("base_branch")
         if self.enabled and self.base_branch is None:
-            logger.warn("Base branch not provided. Skipping PR creation.")
+            logger.warning("Base branch not provided. Skipping PR creation.")
             self.enabled = False
         self.target_branch = inputs["target_branch"]
         if self.enabled and self.base_branch == self.target_branch:
-            logger.warn("Base branch and target branch are the same. Skipping PR creation.")
+            logger.warning("Base branch and target branch are the same. Skipping PR creation.")
             self.enabled = False

def __push(self, repo):
10 changes: 5 additions & 5 deletions patchwork/steps/ExtractCode/ExtractCode.py
@@ -31,8 +31,8 @@ def get_source_code_context(
     if count_openai_tokens(context) <= context_token_length:
         return position.start, position.end
     else:
-        logger.warn(
-            f"The selected context is larger than the contex_size limit of {context_token_length}. You can increase the context_size and try again. "
+        logger.warning(
+            f"The selected context is larger than the context_size limit of {context_token_length}. You can increase the context_size and try again. "
         )

     return None, None
@@ -77,7 +77,7 @@ def read_and_get_source_code_context(file_path: str, start_line: int, end_line:
 def parse_sarif_location(base_path: Path, location_str: str) -> Path | None:
     uri = urlparse(location_str)
     if uri.scheme != "file" and uri.scheme != "":
-        logger.warn(f'Unsupported URI scheme "{uri.scheme}" for location: "{location_str}"')
+        logger.warning(f'Unsupported URI scheme "{uri.scheme}" for location: "{location_str}"')
         return None

     path = Path(uri.path)
@@ -118,7 +118,7 @@ def resolve_artifact_location(
     if location is not None:
         return location

-    logger.warn(f"Unable to find file for artifact index: {artifact_index}")
+    logger.warning(f"Unable to find file for artifact index: {artifact_index}")

     uri = artifact_location.get("uri")
     if uri is not None:
@@ -240,7 +240,7 @@ def transform_sarif_results(
                 artifact_location = physical_location.get("artifactLocation", {})
                 uri = resolve_artifact_location(base_path, artifact_location, artifact_locations)
                 if uri is None:
-                    logger.warn(
+                    logger.warning(
                         f'Unable to find file for ".runs[{run_idx}].results[{result_idx}].locations[{location_idx}]"'
                     )
                     continue
@@ -30,7 +30,7 @@ def run(self) -> dict:

         extracted_response_func = self.response_partitioned_dict
         if len(self.partitions) == 0:
-            logger.warn("No partitions specified for model response, will default to using the entire response.")
+            logger.warning("No partitions specified for model response, will default to using the entire response.")
             extracted_response_func = self.auto_pass_dict

         outputs = []