This repository was archived by the owner on Apr 3, 2024. It is now read-only.

Commit 8c42cf1

Use PyUpgrade to use Python 3.6 features (apache#11447)
Use features like `f-strings` instead of `format` across the code base. More details: https://github.com/asottile/pyupgrade
1 parent 8000ab7 · commit 8c42cf1

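For a sense of what the tool does, here is the kind of rewrite pyupgrade's `--py36-plus` mode applies throughout the diff below, mirroring the kerberos_auth.py change (a standalone illustrative snippet with hypothetical values, not a line quoted from the commit):

# Before: a str.format() call with positional placeholders
service, hostname = 'airflow', 'host.example.com'  # hypothetical values
service_name = "{}@{}".format(service, hostname)

# After: pyupgrade rewrites the simple substitution to an f-string (Python 3.6+)
service_name = f"{service}@{hostname}"
assert service_name == "airflow@host.example.com"
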
File tree: 323 files changed (+1046 / -1099 lines)


.pre-commit-config.yaml

Lines changed: 5 additions & 0 deletions
@@ -177,6 +177,11 @@ repos:
       - id: fix-encoding-pragma
         args:
           - --remove
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.7.3
+    hooks:
+      - id: pyupgrade
+        args: ["--py36-plus"]
   - repo: https://github.com/pre-commit/pygrep-hooks
     rev: v1.6.0
     hooks:
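
With the hook registered above, pre-commit runs pyupgrade against the Python files staged in each commit; the whole tree can also be rewritten in one pass with `pre-commit run pyupgrade --all-files` (standard pre-commit usage, not part of this change).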

BREEZE.rst

Lines changed: 2 additions & 2 deletions
@@ -2005,8 +2005,8 @@ This is the current syntax for `./breeze <./breeze>`_:
 helm-lint incorrect-use-of-LoggingMixin insert-license isort language-matters
 lint-dockerfile lint-openapi mermaid mixed-line-ending mypy mypy-helm
 no-relative-imports pre-commit-descriptions provide-create-sessions pydevd
-pydocstyle pylint pylint-tests python-no-log-warn restrict-start_date rst-backticks
-setup-order setup-installation shellcheck sort-in-the-wild stylelint
+pydocstyle pylint pylint-tests python-no-log-warn pyupgrade restrict-start_date
+rst-backticks setup-order setup-installation shellcheck sort-in-the-wild stylelint
 trailing-whitespace update-breeze-file update-extras update-local-yml-file
 update-setup-cfg-file yamllint

STATIC_CODE_CHECKS.rst

Lines changed: 2 additions & 0 deletions
@@ -94,6 +94,8 @@ require Breeze Docker images to be installed locally:
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``fix-encoding-pragma``             Removes encoding header from python files.
 ----------------------------------- ---------------------------------------------------------------- ------------
+``pyupgrade``                       Runs PyUpgrade
+----------------------------------- ---------------------------------------------------------------- ------------
 ``flake8``                          Runs flake8. *
 ----------------------------------- ---------------------------------------------------------------- ------------
 ``forbid-tabs``                     Fails if tabs are used in the project.

airflow/api/auth/backend/kerberos_auth.py

Lines changed: 1 addition & 1 deletion
@@ -79,7 +79,7 @@ def init_app(app):
 
     service = 'airflow'
 
-    _KERBEROS_SERVICE.service_name = "{}@{}".format(service, hostname)
+    _KERBEROS_SERVICE.service_name = f"{service}@{hostname}"
 
     if 'KRB5_KTNAME' not in os.environ:
         os.environ['KRB5_KTNAME'] = conf.get('kerberos', 'keytab')

airflow/api/client/json_client.py

Lines changed: 4 additions & 4 deletions
@@ -43,7 +43,7 @@ def _request(self, url, method='GET', json=None):
         return resp.json()
 
     def trigger_dag(self, dag_id, run_id=None, conf=None, execution_date=None):
-        endpoint = '/api/experimental/dags/{}/dag_runs'.format(dag_id)
+        endpoint = f'/api/experimental/dags/{dag_id}/dag_runs'
         url = urljoin(self._api_base_url, endpoint)
         data = self._request(url, method='POST',
                              json={
@@ -54,13 +54,13 @@ def trigger_dag(self, dag_id, run_id=None, conf=None, execution_date=None):
         return data['message']
 
     def delete_dag(self, dag_id):
-        endpoint = '/api/experimental/dags/{}/delete_dag'.format(dag_id)
+        endpoint = f'/api/experimental/dags/{dag_id}/delete_dag'
         url = urljoin(self._api_base_url, endpoint)
         data = self._request(url, method='DELETE')
         return data['message']
 
     def get_pool(self, name):
-        endpoint = '/api/experimental/pools/{}'.format(name)
+        endpoint = f'/api/experimental/pools/{name}'
         url = urljoin(self._api_base_url, endpoint)
         pool = self._request(url)
         return pool['pool'], pool['slots'], pool['description']
@@ -83,7 +83,7 @@ def create_pool(self, name, slots, description):
         return pool['pool'], pool['slots'], pool['description']
 
     def delete_pool(self, name):
-        endpoint = '/api/experimental/pools/{}'.format(name)
+        endpoint = f'/api/experimental/pools/{name}'
         url = urljoin(self._api_base_url, endpoint)
         pool = self._request(url, method='DELETE')
         return pool['pool'], pool['slots'], pool['description']

airflow/api/client/local_client.py

Lines changed: 2 additions & 2 deletions
@@ -30,11 +30,11 @@ def trigger_dag(self, dag_id, run_id=None, conf=None, execution_date=None):
                                           run_id=run_id,
                                           conf=conf,
                                           execution_date=execution_date)
-        return "Created {}".format(dag_run)
+        return f"Created {dag_run}"
 
     def delete_dag(self, dag_id):
         count = delete_dag.delete_dag(dag_id)
-        return "Removed {} record(s)".format(count)
+        return f"Removed {count} record(s)"
 
     def get_pool(self, name):
         the_pool = pool.get_pool(name=name)

airflow/api/common/experimental/__init__.py

Lines changed: 3 additions & 3 deletions
@@ -27,18 +27,18 @@ def check_and_get_dag(dag_id: str, task_id: Optional[str] = None) -> DagModel:
     """Checks that DAG exists and in case it is specified that Task exist"""
     dag_model = DagModel.get_current(dag_id)
     if dag_model is None:
-        raise DagNotFound("Dag id {} not found in DagModel".format(dag_id))
+        raise DagNotFound(f"Dag id {dag_id} not found in DagModel")
 
     dagbag = DagBag(
         dag_folder=dag_model.fileloc,
         read_dags_from_db=True
     )
     dag = dagbag.get_dag(dag_id)
     if not dag:
-        error_message = "Dag id {} not found".format(dag_id)
+        error_message = f"Dag id {dag_id} not found"
         raise DagNotFound(error_message)
     if task_id and not dag.has_task(task_id):
-        error_message = 'Task {} not found in dag {}'.format(task_id, dag_id)
+        error_message = f'Task {task_id} not found in dag {dag_id}'
         raise TaskNotFound(error_message)
     return dag

airflow/api/common/experimental/delete_dag.py

Lines changed: 1 addition & 1 deletion
@@ -42,7 +42,7 @@ def delete_dag(dag_id: str, keep_records_in_log: bool = True, session=None) -> i
     log.info("Deleting DAG: %s", dag_id)
     dag = session.query(DagModel).filter(DagModel.dag_id == dag_id).first()
     if dag is None:
-        raise DagNotFound("Dag id {} not found".format(dag_id))
+        raise DagNotFound(f"Dag id {dag_id} not found")
 
     # Scheduler removes DAGs without files from serialized_dag table every dag_dir_list_interval.
     # There may be a lag, so explicitly removes serialized DAG here.

airflow/api/common/experimental/mark_tasks.py

Lines changed: 3 additions & 3 deletions
@@ -94,11 +94,11 @@ def set_state(
         return []
 
     if not timezone.is_localized(execution_date):
-        raise ValueError("Received non-localized date {}".format(execution_date))
+        raise ValueError(f"Received non-localized date {execution_date}")
 
     task_dags = {task.dag for task in tasks}
     if len(task_dags) > 1:
-        raise ValueError("Received tasks from multiple DAGs: {}".format(task_dags))
+        raise ValueError(f"Received tasks from multiple DAGs: {task_dags}")
     dag = next(iter(task_dags))
     if dag is None:
         raise ValueError("Received tasks with no DAG")
@@ -247,7 +247,7 @@ def get_execution_dates(dag, execution_date, future, past):
     """Returns dates of DAG execution"""
     latest_execution_date = dag.get_latest_execution_date()
     if latest_execution_date is None:
-        raise ValueError("Received non-localized date {}".format(execution_date))
+        raise ValueError(f"Received non-localized date {execution_date}")
     # determine date range of dag runs and tasks to consider
     end_date = latest_execution_date if future else execution_date
     if 'start_date' in dag.default_args:

airflow/api/common/experimental/trigger_dag.py

Lines changed: 3 additions & 3 deletions
@@ -48,7 +48,7 @@ def _trigger_dag(
     dag = dag_bag.get_dag(dag_id)  # prefetch dag if it is stored serialized
 
     if dag_id not in dag_bag.dags:
-        raise DagNotFound("Dag id {} not found".format(dag_id))
+        raise DagNotFound(f"Dag id {dag_id} not found")
 
     execution_date = execution_date if execution_date else timezone.utcnow()
 
@@ -62,7 +62,7 @@ def _trigger_dag(
         min_dag_start_date = dag.default_args["start_date"]
         if min_dag_start_date and execution_date < min_dag_start_date:
             raise ValueError(
-                "The execution_date [{0}] should be >= start_date [{1}] from DAG's default_args".format(
+                "The execution_date [{}] should be >= start_date [{}] from DAG's default_args".format(
                     execution_date.isoformat(),
                     min_dag_start_date.isoformat()))
 
@@ -112,7 +112,7 @@ def trigger_dag(
     """
     dag_model = DagModel.get_current(dag_id)
     if dag_model is None:
-        raise DagNotFound("Dag id {} not found in DagModel".format(dag_id))
+        raise DagNotFound(f"Dag id {dag_id} not found in DagModel")
 
     dagbag = DagBag(dag_folder=dag_model.fileloc, read_dags_from_db=True)
     triggers = _trigger_dag(
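
Note from the second hunk above that pyupgrade is deliberately conservative: it rewrites a `.format()` call to an f-string only when the arguments are plain names or attribute lookups. Because the arguments here are `.isoformat()` calls, it merely drops the redundant explicit indices `{0}`/`{1}` and keeps the `.format()` call. A minimal sketch of both behaviours (hypothetical values, not code from the commit):

from datetime import datetime

dag_id = "example_dag"                     # hypothetical values for illustration
execution_date = datetime(2020, 10, 13)
min_dag_start_date = datetime(2021, 1, 1)

# Simple name substitution: converted to an f-string
msg = f"Dag id {dag_id} not found"

# Arguments are method calls, so only the explicit indices are removed
# and the .format() call itself is kept
msg = "The execution_date [{}] should be >= start_date [{}] from DAG's default_args".format(
    execution_date.isoformat(), min_dag_start_date.isoformat())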

airflow/api_connexion/endpoints/task_instance_endpoint.py

Lines changed: 3 additions & 3 deletions
@@ -245,7 +245,7 @@ def post_clear_task_instances(dag_id: str, session=None):
 
     dag = current_app.dag_bag.get_dag(dag_id)
     if not dag:
-        error_message = "Dag id {} not found".format(dag_id)
+        error_message = f"Dag id {dag_id} not found"
         raise NotFound(error_message)
     reset_dag_runs = data.pop('reset_dag_runs')
     task_instances = dag.clear(get_tis=True, **data)
@@ -287,7 +287,7 @@ def post_set_task_instances_state(dag_id, session):
     except ValidationError as err:
         raise BadRequest(detail=str(err.messages))
 
-    error_message = "Dag ID {} not found".format(dag_id)
+    error_message = f"Dag ID {dag_id} not found"
     try:
         dag = current_app.dag_bag.get_dag(dag_id)
         if not dag:
@@ -300,7 +300,7 @@ def post_set_task_instances_state(dag_id, session):
         task = dag.task_dict.get(task_id)
 
         if not task:
-            error_message = "Task ID {} not found".format(task_id)
+            error_message = f"Task ID {task_id} not found"
             raise NotFound(error_message)
 
         tis = set_state(

airflow/api_connexion/schemas/common_schema.py

Lines changed: 1 addition & 1 deletion
@@ -124,7 +124,7 @@ def get_obj_type(self, obj):
         elif isinstance(obj, CronExpression):
             return "CronExpression"
         else:
-            raise Exception("Unknown object type: {}".format(obj.__class__.__name__))
+            raise Exception(f"Unknown object type: {obj.__class__.__name__}")
 
 
 class ColorField(fields.String):

airflow/cli/commands/connection_command.py

Lines changed: 1 addition & 1 deletion
@@ -182,7 +182,7 @@ def connections_add(args):
         missing_args.append('conn-uri or conn-type')
     if missing_args:
         msg = ('The following args are required to add a connection:' +
-               ' {missing!r}'.format(missing=missing_args))
+               f' {missing_args!r}')
         raise SystemExit(msg)
     if invalid_args:
         msg = ('The following args are not compatible with the
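
As the hunk above shows, keyword arguments to `.format()` are inlined as well, and conversion flags such as `!r` carry over unchanged into the f-string. A quick equivalence check (hypothetical value for illustration):

missing_args = ['conn-uri or conn-type']  # hypothetical value
# The keyword argument is inlined and the !r conversion flag is preserved
assert f' {missing_args!r}' == ' {missing!r}'.format(missing=missing_args)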

airflow/cli/commands/dag_command.py

Lines changed: 7 additions & 7 deletions
@@ -110,10 +110,10 @@ def dag_backfill(args, dag=None):
         run_conf = json.loads(args.conf)
 
     if args.dry_run:
-        print("Dry run of DAG {0} on {1}".format(args.dag_id,
-                                                 args.start_date))
+        print("Dry run of DAG {} on {}".format(args.dag_id,
+                                               args.start_date))
         for task in dag.tasks:
-            print("Task {0}".format(task.task_id))
+            print(f"Task {task.task_id}")
             ti = TaskInstance(task, args.start_date)
             ti.dry_run()
     else:
@@ -239,7 +239,7 @@ def _display_dot_via_imgcat(dot: Dot):
 def _save_dot_to_file(dot: Dot, filename: str):
     filename_without_ext, _, ext = filename.rpartition('.')
     dot.render(filename=filename_without_ext, format=ext, cleanup=True)
-    print("File {} saved".format(filename))
+    print(f"File {filename} saved")
 
 
 @cli_utils.action_logging
@@ -319,7 +319,7 @@ def dag_list_jobs(args, dag=None):
     dagbag = DagBag()
 
     if args.dag_id not in dagbag.dags:
-        error_message = "Dag id {} not found".format(args.dag_id)
+        error_message = f"Dag id {args.dag_id} not found"
         raise AirflowException(error_message)
     queries.append(BaseJob.dag_id == args.dag_id)
 
@@ -350,7 +350,7 @@ def dag_list_dag_runs(args, dag=None):
     dagbag = DagBag()
 
     if args.dag_id is not None and args.dag_id not in dagbag.dags:
-        error_message = "Dag id {} not found".format(args.dag_id)
+        error_message = f"Dag id {args.dag_id} not found"
         raise AirflowException(error_message)
 
     state = args.state.lower() if args.state else None
@@ -363,7 +363,7 @@ def dag_list_dag_runs(args, dag=None):
     )
 
     if not dag_runs:
-        print('No dag runs for {dag_id}'.format(dag_id=args.dag_id))
+        print(f'No dag runs for {args.dag_id}')
         return
 
     dag_runs.sort(key=lambda x: x.execution_date, reverse=True)

airflow/cli/commands/pool_command.py

Lines changed: 1 addition & 1 deletion
@@ -85,7 +85,7 @@ def pool_import_helper(filepath):
     """Helps import pools from the json file"""
     api_client = get_current_api_client()
 
-    with open(filepath, 'r') as poolfile:
+    with open(filepath) as poolfile:
         data = poolfile.read()
     try:  # pylint: disable=too-many-nested-blocks
         pools_json = json.loads(data)
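
Dropping the explicit `'r'` is behaviour-preserving: read-only text mode is already the default for the built-in `open()`, which is why pyupgrade flags the mode argument as redundant.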

airflow/cli/commands/task_command.py

Lines changed: 2 additions & 2 deletions
@@ -143,7 +143,7 @@ def task_run(args, dag=None):
     """Runs a single task instance"""
     # Load custom airflow config
     if args.cfg_path:
-        with open(args.cfg_path, 'r') as conf_file:
+        with open(args.cfg_path) as conf_file:
             conf_dict = json.load(conf_file)
 
         if os.path.exists(args.cfg_path):
@@ -238,7 +238,7 @@ def task_failed_deps(args):
     if failed_deps:
         print("Task instance dependencies not met:")
         for dep in failed_deps:
-            print("{}: {}".format(dep.dep_name, dep.reason))
+            print(f"{dep.dep_name}: {dep.reason}")
     else:
         print("Task instance dependencies are all met.")

airflow/cli/commands/user_command.py

Lines changed: 9 additions & 9 deletions
@@ -48,7 +48,7 @@ def users_create(args):
     role = appbuilder.sm.find_role(args.role)
     if not role:
         valid_roles = appbuilder.sm.get_all_roles()
-        raise SystemExit('{} is not a valid role. Valid roles are: {}'.format(args.role, valid_roles))
+        raise SystemExit(f'{args.role} is not a valid role. Valid roles are: {valid_roles}')
 
     if args.use_random_password:
         password = ''.join(random.choice(string.printable) for _ in range(16))
@@ -61,12 +61,12 @@ def users_create(args):
         raise SystemExit('Passwords did not match!')
 
     if appbuilder.sm.find_user(args.username):
-        print('{} already exist in the db'.format(args.username))
+        print(f'{args.username} already exist in the db')
         return
     user = appbuilder.sm.add_user(args.username, args.firstname, args.lastname,
                                   args.email, role, password)
     if user:
-        print('{} user {} created.'.format(args.role, args.username))
+        print(f'{args.role} user {args.username} created.')
     else:
         raise SystemExit('Failed to create user.')
 
@@ -80,10 +80,10 @@ def users_delete(args):
         user = next(u for u in appbuilder.sm.get_all_users()
                     if u.username == args.username)
     except StopIteration:
-        raise SystemExit('{} is not a valid user.'.format(args.username))
+        raise SystemExit(f'{args.username} is not a valid user.')
 
     if appbuilder.sm.del_register_user(user):
-        print('User {} deleted.'.format(args.username))
+        print(f'User {args.username} deleted.')
     else:
         raise SystemExit('Failed to delete user.')
 
@@ -108,7 +108,7 @@ def users_manage_role(args, remove=False):
     role = appbuilder.sm.find_role(args.role)
     if not role:
         valid_roles = appbuilder.sm.get_all_roles()
-        raise SystemExit('{} is not a valid role. Valid roles are: {}'.format(args.role, valid_roles))
+        raise SystemExit(f'{args.role} is not a valid role. Valid roles are: {valid_roles}')
 
     if remove:
         if role in user.roles:
@@ -167,10 +167,10 @@ def users_import(args):
 
     users_list = None  # pylint: disable=redefined-outer-name
     try:
-        with open(json_file, 'r') as file:
+        with open(json_file) as file:
             users_list = json.loads(file.read())
     except ValueError as e:
-        print("File '{}' is not valid JSON. Error: {}".format(json_file, e))
+        print(f"File '{json_file}' is not valid JSON. Error: {e}")
         sys.exit(1)
 
     users_created, users_updated = _import_users(users_list)
@@ -194,7 +194,7 @@ def _import_users(users_list):  # pylint: disable=redefined-outer-name
             role = appbuilder.sm.find_role(rolename)
             if not role:
                 valid_roles = appbuilder.sm.get_all_roles()
-                print("Error: '{}' is not a valid role. Valid roles are: {}".format(rolename, valid_roles))
+                print(f"Error: '{rolename}' is not a valid role. Valid roles are: {valid_roles}")
                 sys.exit(1)
             else:
                 roles.append(role)

airflow/cli/commands/variable_command.py

Lines changed: 2 additions & 2 deletions
@@ -82,7 +82,7 @@ def variables_export(args):
 
 def _import_helper(filepath):
     """Helps import variables from the file"""
-    with open(filepath, 'r') as varfile:
+    with open(filepath) as varfile:
         data = varfile.read()
 
     try:
@@ -101,7 +101,7 @@ def _import_helper(filepath):
             suc_count += 1
     print("{} of {} variables successfully updated.".format(suc_count, len(var_json)))
     if fail_count:
-        print("{} variable(s) failed to be updated.".format(fail_count))
+        print(f"{fail_count} variable(s) failed to be updated.")
 
 
 def _variable_export_helper(filepath):

airflow/cli/commands/webserver_command.py

Lines changed: 2 additions & 2 deletions
@@ -153,7 +153,7 @@ def _wait_until_true(self, fn, timeout: int = 0) -> None:
         while not fn():
             if 0 < timeout <= time.time() - start_time:
                 raise AirflowWebServerTimeout(
-                    "No response from gunicorn master within {0} seconds".format(timeout)
+                    f"No response from gunicorn master within {timeout} seconds"
                 )
             sleep(0.1)
 
@@ -328,7 +328,7 @@ def webserver(args):
 
     if args.debug:
         print(
-            "Starting the web server on port {0} and host {1}.".format(
+            "Starting the web server on port {} and host {}.".format(
                 args.port, args.hostname))
         app = create_app(testing=conf.getboolean('core', 'unit_test_mode'))
         app.run(debug=True, use_reloader=not app.config['TESTING'],
