diff --git a/airflow/api/__init__.py b/airflow/api/__init__.py
index da07429869877..e192549846dc1 100644
--- a/airflow/api/__init__.py
+++ b/airflow/api/__init__.py
@@ -40,6 +40,6 @@ def load_auth():
             log.info("Loaded API auth backend: %s", backend)
             backends.append(auth)
     except ImportError as err:
-        log.critical("Cannot import %s for API authentication due to: %s", backend, err)
+        log.critical("Cannot import %s for API authentication.", backend)
         raise AirflowException(err)
     return backends
diff --git a/airflow/api/auth/backend/kerberos_auth.py b/airflow/api/auth/backend/kerberos_auth.py
index fb76e8a1aa0fa..dd2fab5889b6c 100644
--- a/airflow/api/auth/backend/kerberos_auth.py
+++ b/airflow/api/auth/backend/kerberos_auth.py
@@ -86,8 +86,8 @@ def init_app(app):
    try:
        log.info("Kerberos init: %s %s", service, hostname)
        principal = kerberos.getServerPrincipalDetails(service, hostname)
-    except kerberos.KrbError as err:
-        log.warning("Kerberos: %s", err)
+    except kerberos.KrbError:
+        log.warning("A Kerberos error has occurred.", exc_info=True)
    else:
        log.info("Kerberos API: server is %s", principal)
diff --git a/airflow/cli/commands/info_command.py b/airflow/cli/commands/info_command.py
index fc03615210af3..bf504ea87ac72 100644
--- a/airflow/cli/commands/info_command.py
+++ b/airflow/cli/commands/info_command.py
@@ -356,8 +356,8 @@ def _upload_text_to_fileio(content):
     try:
         return resp.json()["link"]
     except ValueError as e:
-        log.debug(e)
-        raise FileIoException("Failed to send report to file.io service.")
+        log.debug("Failed to send report to file.io service.")
+        raise FileIoException(e)
 
 
 def _send_report_to_fileio(info):
diff --git a/airflow/cli/commands/webserver_command.py b/airflow/cli/commands/webserver_command.py
index c74513d23e2b9..627376945b0ac 100644
--- a/airflow/cli/commands/webserver_command.py
+++ b/airflow/cli/commands/webserver_command.py
@@ -213,9 +213,8 @@ def start(self) -> NoReturn:
                 # Throttle loop
                 sleep(1)
-        except (AirflowWebServerTimeout, OSError) as err:
-            self.log.error(err)
-            self.log.error("Shutting down webserver")
+        except (AirflowWebServerTimeout, OSError):
+            self.log.exception("Shutting down webserver")
            try:
                self.gunicorn_master_proc.terminate()
                self.gunicorn_master_proc.wait()
diff --git a/airflow/configuration.py b/airflow/configuration.py
index e8d592dacb100..bd0312ec6c764 100644
--- a/airflow/configuration.py
+++ b/airflow/configuration.py
@@ -674,11 +674,14 @@ def getimport(self, section: str, key: str, **kwargs) -> Any:
         try:
             return import_string(full_qualified_path)
         except ImportError as e:
-            log.error(e)
-            raise AirflowConfigException(
-                f'The object could not be loaded. Please check "{key}" key in "{section}" section. '
-                f'Current value: "{full_qualified_path}".'
+            log.error(
+                'The object could not be loaded. Please check %r key in %r section. '
+                'Current value: %r.',
+                key,
+                section,
+                full_qualified_path,
             )
+            raise AirflowConfigException(e)
 
     def getjson(
         self, section: str, key: str, fallback=_UNSET, **kwargs
diff --git a/airflow/dag_processing/manager.py b/airflow/dag_processing/manager.py
index b1d23034bf460..89f61b4742354 100644
--- a/airflow/dag_processing/manager.py
+++ b/airflow/dag_processing/manager.py
@@ -674,8 +674,8 @@ def _fetch_callbacks(self, max_callbacks: int, session: Session = NEW_SESSION):
                 try:
                     self._add_callback_to_queue(callback.get_callback_request())
                     session.delete(callback)
-                except Exception as e:
-                    self.log.warning("Error adding callback for execution: %s, %s", callback, e)
+                except Exception:
+                    self.log.warning("Error adding %r callback for execution.", callback, exc_info=True)
             guard.commit()
 
     def _add_callback_to_queue(self, request: CallbackRequest):
diff --git a/airflow/executors/celery_executor.py b/airflow/executors/celery_executor.py
index 3aeedc76c9912..6822184997135 100644
--- a/airflow/executors/celery_executor.py
+++ b/airflow/executors/celery_executor.py
@@ -127,8 +127,8 @@ def _execute_in_fork(command_to_exec: CommandType, celery_task_id: Optional[str]
         args.func(args)
         ret = 0
-    except Exception as e:
-        log.exception("[%s] Failed to execute task %s.", celery_task_id, str(e))
+    except Exception:
+        log.exception("[%s] Failed to execute task.", celery_task_id)
         ret = 1
     finally:
         Sentry.flush()
@@ -291,7 +291,7 @@ def _process_tasks(self, task_tuples: List[TaskTuple]) -> None:
             self.queued_tasks.pop(key)
             self.task_publish_retries.pop(key, None)
             if isinstance(result, ExceptionWithTraceback):
-                self.log.error(CELERY_SEND_ERR_MSG_HEADER + ": %s\n%s\n", result.exception, result.traceback)
+                self.log.error("%s: %s\n%s\n", CELERY_SEND_ERR_MSG_HEADER, result.exception, result.traceback)
                 self.event_buffer[key] = (State.FAILED, None)
             elif result is not None:
                 result.backend = cached_celery_backend
@@ -416,8 +416,8 @@ def _send_stalled_tis_back_to_scheduler(
             if celery_async_result:
                 try:
                     app.control.revoke(celery_async_result.task_id)
-                except Exception as ex:
-                    self.log.error("Error revoking task instance %s from celery: %s", key, ex)
+                except Exception:
+                    self.log.exception("Error revoking task instance %s from celery.", key)
 
     def debug_dump(self) -> None:
         """Called in response to SIGUSR2 by the scheduler"""
@@ -650,7 +650,8 @@ def _get_many_using_multiprocessing(self, async_results) -> Mapping[str, EventBu
         for task_id, state_or_exception, info in task_id_to_states_and_info:
             if isinstance(state_or_exception, ExceptionWithTraceback):
                 self.log.error(
-                    CELERY_FETCH_ERR_MSG_HEADER + ":%s\n%s\n",
+                    "%s:%s\n%s\n",
+                    CELERY_FETCH_ERR_MSG_HEADER,
                     state_or_exception.exception,
                     state_or_exception.traceback,
                 )
diff --git a/airflow/executors/debug_executor.py b/airflow/executors/debug_executor.py
index 0177602b19615..be3198d182e56 100644
--- a/airflow/executors/debug_executor.py
+++ b/airflow/executors/debug_executor.py
@@ -78,10 +78,10 @@ def _run_task(self, ti: TaskInstance) -> bool:
             ti.run(job_id=ti.job_id, **params)
             self.change_state(key, State.SUCCESS)
             return True
-        except Exception as e:
+        except Exception:
             ti.set_state(State.FAILED)
             self.change_state(key, State.FAILED)
-            self.log.exception("Failed to execute task: %s.", str(e))
+            self.log.exception("Failed to execute task.")
             return False
 
     def queue_task_instance(
diff --git a/airflow/executors/executor_loader.py b/airflow/executors/executor_loader.py
index 723060db0f179..30d3307650ff3 100644
--- a/airflow/executors/executor_loader.py
+++ b/airflow/executors/executor_loader.py
@@ -97,11 +97,12 @@ def load_executor(cls, executor_name: str) -> "BaseExecutor":
             executor_cls, import_source = cls.import_executor_cls(executor_name)
             log.debug("Loading executor %s from %s", executor_name, import_source.value)
         except ImportError as e:
-            log.error(e)
-            raise AirflowConfigException(
-                f'The module/attribute could not be loaded. Please check "executor" key in "core" section. '
-                f'Current value: "{executor_name}".'
+            log.error(
+                'The module/attribute could not be loaded. Please check "executor" key in "core" section. '
+                'Current value: %r.',
+                executor_name
             )
+            raise AirflowConfigException(e)
 
         log.info("Loaded executor: %s", executor_name)
         return executor_cls()
diff --git a/airflow/executors/kubernetes_executor.py b/airflow/executors/kubernetes_executor.py
index 7a9e0552a0870..78a26a70c75ff 100644
--- a/airflow/executors/kubernetes_executor.py
+++ b/airflow/executors/kubernetes_executor.py
@@ -597,10 +597,9 @@ def sync(self) -> None:
                     self.log.info('Changing state of %s to %s', results, state)
                     try:
                         self._change_state(key, state, pod_id, namespace)
-                    except Exception as e:
+                    except Exception:
                         self.log.exception(
-                            "Exception: %s when attempting to change state of %s to %s, re-queueing.",
-                            e,
+                            "Exception when attempting to change state of %s to %s, re-queueing.",
                             results,
                             state,
                         )
@@ -619,10 +618,9 @@ def sync(self) -> None:
                 try:
                     self.kube_scheduler.run_next(task)
                 except PodReconciliationError as e:
-                    self.log.error(
+                    self.log.exception(
                         "Pod reconciliation failed, likely due to kubernetes library upgrade. "
                         "Try clearing the task to re-run.",
-                        exc_info=True,
                     )
                     self.fail(task[0], e)
                 except ApiException as e:
@@ -741,8 +739,8 @@ def adopt_launched_task(
                 )
                 pod_ids.pop(pod_id)
                 self.running.add(pod_id)
-            except ApiException as e:
-                self.log.info("Failed to adopt pod %s. Reason: %s", pod.metadata.name, e)
+            except ApiException:
+                self.log.info("Failed to adopt pod %s.", pod.metadata.name, exc_info=True)
 
     def _adopt_completed_pods(self, kube_client: client.CoreV1Api) -> None:
         """
@@ -767,8 +765,8 @@ def _adopt_completed_pods(self, kube_client: client.CoreV1Api) -> None:
                     namespace=pod.metadata.namespace,
                     body=PodGenerator.serialize_pod(pod),
                 )
-            except ApiException as e:
-                self.log.info("Failed to adopt pod %s. Reason: %s", pod.metadata.name, e)
+            except ApiException:
+                self.log.info("Failed to adopt pod %s.", pod.metadata.name, exc_info=True)
 
     def _flush_task_queue(self) -> None:
         if not self.task_queue:
@@ -798,10 +796,9 @@ def _flush_result_queue(self) -> None:
                     )
                     try:
                         self._change_state(key, state, pod_id, namespace)
-                    except Exception as e:
+                    except Exception:
                         self.log.exception(
-                            'Ignoring exception: %s when attempting to change state of %s to %s.',
-                            e,
+                            'Ignoring the following exception when attempting to change state of %s to %s.',
                             results,
                             state,
                         )
diff --git a/airflow/executors/local_executor.py b/airflow/executors/local_executor.py
index 431add69bcfbb..5f315272abf81 100644
--- a/airflow/executors/local_executor.py
+++ b/airflow/executors/local_executor.py
@@ -91,8 +91,8 @@ def _execute_work_in_subprocess(self, command: CommandType) -> str:
         try:
             subprocess.check_call(command, close_fds=True)
             return State.SUCCESS
-        except subprocess.CalledProcessError as e:
-            self.log.error("Failed to execute task %s.", str(e))
+        except subprocess.CalledProcessError:
+            self.log.exception("Failed to execute task.")
             return State.FAILED
 
     def _execute_work_in_fork(self, command: CommandType) -> str:
@@ -124,8 +124,8 @@ def _execute_work_in_fork(self, command: CommandType) -> str:
             args.func(args)
             ret = 0
             return State.SUCCESS
-        except Exception as e:
-            self.log.exception("Failed to execute task %s.", e)
+        except Exception:
+            self.log.exception("Failed to execute task.")
             return State.FAILED
         finally:
             Sentry.flush()
diff --git a/airflow/executors/sequential_executor.py b/airflow/executors/sequential_executor.py
index 456e3e9893e8b..0c64f83acf924 100644
--- a/airflow/executors/sequential_executor.py
+++ b/airflow/executors/sequential_executor.py
@@ -61,9 +61,9 @@ def sync(self) -> None:
             try:
                 subprocess.check_call(command, close_fds=True)
                 self.change_state(key, State.SUCCESS)
-            except subprocess.CalledProcessError as e:
+            except subprocess.CalledProcessError:
                 self.change_state(key, State.FAILED)
-                self.log.error("Failed to execute task %s.", str(e))
+                self.log.exception("Failed to execute task.")
 
         self.commands_to_run = []
diff --git a/airflow/jobs/backfill_job.py b/airflow/jobs/backfill_job.py
index c5b98c2a8df2a..b4a0c1eb68ea9 100644
--- a/airflow/jobs/backfill_job.py
+++ b/airflow/jobs/backfill_job.py
@@ -589,8 +589,8 @@ def _per_task_process(key, ti: TaskInstance, session=None):
                         _per_task_process(key, ti, session)
                     session.commit()
-                except (NoAvailablePoolSlot, DagConcurrencyLimitReached, TaskConcurrencyLimitReached) as e:
-                    self.log.debug(e)
+                except (NoAvailablePoolSlot, DagConcurrencyLimitReached, TaskConcurrencyLimitReached):
+                    self.log.debug("Unable to schedule Task Instance.", exc_info=True)
 
                 self.heartbeat(only_if_necessary=is_unit_test)
                 # execute the tasks in the queue
diff --git a/airflow/jobs/base_job.py b/airflow/jobs/base_job.py
index 7befccf956c41..0758be77d84e8 100644
--- a/airflow/jobs/base_job.py
+++ b/airflow/jobs/base_job.py
@@ -152,8 +152,8 @@ def kill(self, session=None):
         job.end_date = timezone.utcnow()
         try:
             self.on_kill()
-        except Exception as e:
-            self.log.error('on_kill() method failed: %s', str(e))
+        except Exception:
+            self.log.exception('on_kill() method failed.')
         session.merge(job)
         session.commit()
         raise AirflowException("Job shut down externally.")
diff --git a/airflow/jobs/scheduler_job.py b/airflow/jobs/scheduler_job.py
index 338ceedf8a4b4..e196e683cbe2d 100644
--- a/airflow/jobs/scheduler_job.py
+++ b/airflow/jobs/scheduler_job.py
@@ -797,8 +797,8 @@ def _update_dag_run_state_for_paused_dags(self):
                 if callback_to_run:
                     self._send_dag_callbacks_to_processor(dag, callback_to_run)
                 self._paused_dag_without_running_dagruns.add(dag_id)
-        except Exception as e:  # should not fail the scheduler
-            self.log.exception('Failed to update dag run state for paused dags due to %s', str(e))
+        except Exception:  # should not fail the scheduler
+            self.log.exception('Failed to update dag run state for paused dags.')
 
     def _run_scheduler_loop(self) -> None:
         """
diff --git a/airflow/jobs/triggerer_job.py b/airflow/jobs/triggerer_job.py
index ac7d22a6b1da9..29962386db24f 100644
--- a/airflow/jobs/triggerer_job.py
+++ b/airflow/jobs/triggerer_job.py
@@ -302,7 +302,7 @@ async def cleanup_finished_triggers(self):
                     continue
                 except BaseException as e:
                     # This is potentially bad, so log it.
-                    self.log.exception("Trigger %s exited with error %s", details["name"], e)
+                    self.log.exception("Trigger %s exited with error.", details["name"])
                     saved_exc = e
                 else:
                     # See if they foolishly returned a TriggerEvent
diff --git a/airflow/models/baseoperator.py b/airflow/models/baseoperator.py
index ab9543c127f20..eaa7c81d43169 100644
--- a/airflow/models/baseoperator.py
+++ b/airflow/models/baseoperator.py
@@ -350,7 +350,7 @@ def __dir__(self):
                 return dir(self.warnings)
 
             def warn(self, message, category=None, stacklevel=1, source=None):
-                self.warnings.warn(message, category, stacklevel + 2, source)
+                self.warnings.warn(message, category, stacklevel + 2, source)  # noqa: G010
 
         if func.__globals__.get('warnings') is sys.modules['warnings']:
             # Yes, this is slightly hacky, but it _automatically_ sets the right
diff --git a/airflow/models/dag.py b/airflow/models/dag.py
index 4cef709b16ff2..25b7d4c252544 100644
--- a/airflow/models/dag.py
+++ b/airflow/models/dag.py
@@ -2137,8 +2137,8 @@ def pickle_info(self):
             pickled = pickle.dumps(self)
             d['pickle_len'] = len(pickled)
             d['pickling_duration'] = str(timezone.utcnow() - dttm)
-        except Exception as e:
-            self.log.debug(e)
+        except Exception:
+            self.log.debug("Error while getting pickle info.", exc_info=True)
             d['is_picklable'] = False
             d['stacktrace'] = traceback.format_exc()
         return d
diff --git a/airflow/models/dagbag.py b/airflow/models/dagbag.py
index 929842fd0da4c..cee06c7a9aa7a 100644
--- a/airflow/models/dagbag.py
+++ b/airflow/models/dagbag.py
@@ -279,8 +279,8 @@ def process_file(self, filepath, only_if_updated=True, safe_mode=True):
                 and file_last_changed_on_disk == self.file_last_changed[filepath]
             ):
                 return []
-        except Exception as e:
-            self.log.exception(e)
+        except Exception:
+            self.log.exception("Exception occurred when processing filepath %r.", filepath)
             return []
 
         if filepath.endswith(".py") or not zipfile.is_zipfile(filepath):
@@ -532,8 +532,8 @@ def collect_dags(
                             dags=str([dag.dag_id for dag in found_dags]),
                         )
                     )
-                except Exception as e:
-                    self.log.exception(e)
+                except Exception:
+                    self.log.exception("Exception occurred when collecting DAGs from %s", dag_folder)
 
         self.dagbag_stats = sorted(stats, key=lambda x: x.duration, reverse=True)
diff --git a/airflow/models/taskinstance.py b/airflow/models/taskinstance.py
index c198c6f09df28..89459b595c9c9 100644
--- a/airflow/models/taskinstance.py
+++ b/airflow/models/taskinstance.py
@@ -1382,7 +1382,8 @@ def _log_state(self, lead_msg: str = ''):
             params.append(self.map_index)
             message += 'map_index=%d, '
         self.log.info(
-            message + 'execution_date=%s, start_date=%s, end_date=%s',
+            '%sexecution_date=%s, start_date=%s, end_date=%s',
+            message,
             *params,
             self._date_or_empty('execution_date'),
             self._date_or_empty('start_date'),
@@ -1457,14 +1458,14 @@ def _run_raw_task(
                 session.merge(self)
                 session.commit()
             return
-        except AirflowSmartSensorException as e:
-            self.log.info(e)
+        except AirflowSmartSensorException:
+            self.log.info("Task successfully registered in smart sensor.", exc_info=True)
             return
         except AirflowSkipException as e:
             # Recording SKIP
             # log only if exception has any arguments to prevent log flooding
             if e.args:
-                self.log.info(e)
+                self.log.info("Skipping task.", exc_info=True)
             if not test_mode:
                 self.refresh_from_db(lock_for_update=True, session=session)
             self.state = State.SKIPPED
@@ -1638,7 +1639,7 @@ def _run_finished_callback(self, callback, context, callback_type):
             if callback:
                 callback(context)
         except Exception:  # pylint: disable=broad-except
-            self.log.exception(f"Error when executing {callback_type} callback")
+            self.log.exception("Error when executing %s callback", callback_type)
 
     def _execute_task(self, context, task_orig):
         """Executes Task (optionally with a Timeout) and pushes Xcom results"""
@@ -1865,7 +1866,7 @@ def handle_failure(self, error, test_mode=None, context=None, force_fail=False,
         if error:
             if isinstance(error, BaseException):
                 tb = self.get_truncated_error_traceback(error, truncate_to=self._execute_task)
-                self.log.error("Task failed with exception", exc_info=(type(error), error, tb))
+                self.log.error("Task failed with exception", exc_info=(type(error), error, tb))  # noqa: G201
             else:
                 self.log.error("%s", error)
         if not test_mode:
diff --git a/airflow/models/variable.py b/airflow/models/variable.py
index 82253eddbc58b..09591eaa74ea6 100644
--- a/airflow/models/variable.py
+++ b/airflow/models/variable.py
@@ -239,10 +239,13 @@ def check_for_write_conflict(key: str) -> None:
                 var_val = secrets_backend.get_variable(key=key)
                 if var_val is not None:
                     log.warning(
-                        "The variable {key} is defined in the {cls} secrets backend, which takes "
+                        "The variable %s is defined in the %s secrets backend, which takes "
                         "precedence over reading from the database. The value in the database will be "
                         "updated, but to read it you have to delete the conflicting variable "
-                        "from {cls}".format(key=key, cls=secrets_backend.__class__.__name__)
+                        "from %s",
+                        key,
+                        secrets_backend.__class__.__name__,
+                        secrets_backend.__class__.__name__,
                     )
                     return
         except Exception:
diff --git a/airflow/providers_manager.py b/airflow/providers_manager.py
index eb7bd9676a7bc..74842867f9a1c 100644
--- a/airflow/providers_manager.py
+++ b/airflow/providers_manager.py
@@ -471,8 +471,8 @@ def _discover_all_airflow_builtin_providers_from_local_sources(self) -> None:
         try:
             for path in airflow.providers.__path__:  # type: ignore[attr-defined]
                 self._add_provider_info_from_local_source_files_on_path(path)
-        except Exception as e:
-            log.warning("Error when loading 'provider.yaml' files from airflow sources: %s", e)
+        except Exception:
+            log.warning("Error when loading 'provider.yaml' files from airflow sources.", exc_info=True)
 
     def _add_provider_info_from_local_source_files_on_path(self, path) -> None:
         """
@@ -761,12 +761,12 @@ def _import_hook(
                 field_behaviours = hook_class.get_ui_field_behaviour()
                 if field_behaviours:
                     self._add_customized_fields(package_name, hook_class, field_behaviours)
-        except Exception as e:
+        except Exception:
             log.warning(
-                "Exception when importing '%s' from '%s' package: %s",
+                "Exception when importing '%s' from '%s' package.",
                 hook_class_name,
                 package_name,
-                e,
+                exc_info=True,
             )
             return None
         hook_connection_type = self._get_attr(hook_class, 'conn_type')
@@ -837,12 +837,12 @@ def _add_customized_fields(self, package_name: str, hook_class: type, customized
                 )
                 return
             self._field_behaviours[connection_type] = customized_fields
-        except Exception as e:
+        except Exception:
             log.warning(
-                "Error when loading customized fields from package '%s' hook class '%s': %s",
+                "Error when loading customized fields from package %r hook class %r.",
                 package_name,
                 hook_class.__name__,
-                e,
+                exc_info=True,
             )
 
     def _discover_extra_links(self) -> None:
diff --git a/airflow/settings.py b/airflow/settings.py
index bea68ec8cfc10..fe9df1751e888 100644
--- a/airflow/settings.py
+++ b/airflow/settings.py
@@ -450,8 +450,8 @@ def validate_session():
         try:
             session.execute("select 1")
             conn_status = True
-        except exc.DBAPIError as err:
-            log.error(err)
+        except exc.DBAPIError:
+            log.exception("A database operation has failed.")
             conn_status = False
         session.close()
     return conn_status
diff --git a/airflow/stats.py b/airflow/stats.py
index ddcb307c3ef2d..de8648d15a5ce 100644
--- a/airflow/stats.py
+++ b/airflow/stats.py
@@ -219,7 +219,7 @@ def wrapper(_self, stat=None, *args, **kwargs):
                 stat = handler_stat_name_func(stat)
             return fn(_self, stat, *args, **kwargs)
         except InvalidStatsNameException:
-            log.error('Invalid stat name: %s.', stat, exc_info=True)
+            log.exception('Invalid stat name: %s.', stat)
             return None
 
     return cast(T, wrapper)
@@ -344,8 +344,8 @@ def __getattr__(cls, name):
         if not cls.instance:
             try:
                 cls.instance = cls.factory()
-            except (socket.gaierror, ImportError) as e:
-                log.error("Could not configure StatsClient: %s, using DummyStatsLogger instead.", e)
+            except (socket.gaierror, ImportError):
+                log.exception("Could not configure StatsClient; using DummyStatsLogger instead.")
                 cls.instance = DummyStatsLogger()
         return getattr(cls.instance, name)
diff --git a/airflow/task/task_runner/standard_task_runner.py b/airflow/task/task_runner/standard_task_runner.py
index 7aaf05fba3bf4..fe07ddb9d4802 100644
--- a/airflow/task/task_runner/standard_task_runner.py
+++ b/airflow/task/task_runner/standard_task_runner.py
@@ -91,14 +91,13 @@ def _start_by_fork(self):
                 dag = get_dag(args.subdir, args.dag_id)
                 args.func(args, dag=dag)
                 return_code = 0
-            except Exception as exc:
+            except Exception:
                 return_code = 1
 
-                self.log.error(
-                    "Failed to execute job %s for task %s (%s; %r)",
+                self.log.exception(
+                    "Failed to execute job %s for task %s (%r)",
                     job_id,
                     self._task_instance.task_id,
-                    exc,
                     os.getpid(),
                 )
             except SystemExit as sys_ex:
diff --git a/airflow/utils/cli_action_loggers.py b/airflow/utils/cli_action_loggers.py
index 3d40018305214..012b36a217f06 100644
--- a/airflow/utils/cli_action_loggers.py
+++ b/airflow/utils/cli_action_loggers.py
@@ -101,8 +101,8 @@ def default_action_log(log, **_):
     try:
         with create_session() as session:
             session.add(log)
-    except Exception as error:
-        logging.warning("Failed to log action with %s", error)
+    except Exception:
+        logging.warning("Failed to log action.", exc_info=True)
 
 
 __pre_exec_callbacks = []  # type: List[Callable]
diff --git a/airflow/utils/file.py b/airflow/utils/file.py
index db786a5d88fa6..415025cc2c13b 100644
--- a/airflow/utils/file.py
+++ b/airflow/utils/file.py
@@ -61,8 +61,8 @@ def compile(pattern: str, base_dir: Path, definition_file: Path) -> Optional[_Ig
         """Build an ignore rule from the supplied regexp pattern and log a useful warning if it is invalid"""
         try:
             return _RegexpIgnoreRule(re.compile(pattern), base_dir)
-        except re.error as e:
-            log.warning("Ignoring invalid regex '%s' from %s: %s", pattern, definition_file, e)
+        except re.error:
+            log.warning("Ignoring invalid regex '%s' from %s.", pattern, definition_file, exc_info=True)
             return None
 
     @staticmethod
diff --git a/airflow/utils/log/secrets_masker.py b/airflow/utils/log/secrets_masker.py
index bde5141719bec..53dc690df6fc8 100644
--- a/airflow/utils/log/secrets_masker.py
+++ b/airflow/utils/log/secrets_masker.py
@@ -219,13 +219,11 @@ def _redact(self, item: Redactable, name: Optional[str], depth: int) -> Redacted
         # I think this should never happen, but it does not hurt to leave it just in case
         # Well. It happened (see https://github.com/apache/airflow/issues/19816#issuecomment-983311373)
         # but it caused infinite recursion, so we need to cast it to str first.
-        except Exception as e:
+        except Exception:
             log.warning(
-                "Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>. "
-                "Error was: %s: %s",
+                "Unable to redact %s, please report this via <https://github.com/apache/airflow/issues>.",
                 repr(item),
-                type(e).__name__,
-                str(e),
+                exc_info=True,
             )
             return item
diff --git a/airflow/utils/platform.py b/airflow/utils/platform.py
index 3697f8705b45a..74512d4085890 100644
--- a/airflow/utils/platform.py
+++ b/airflow/utils/platform.py
@@ -59,8 +59,8 @@ def get_airflow_git_version():
     git_version = None
    try:
        git_version = str(pkgutil.get_data('airflow', 'git_version'), encoding="UTF-8")
-    except Exception as e:
-        log.debug(e)
+    except Exception:
+        log.debug("Exception occurred retrieving git version.", exc_info=True)
 
     return git_version
diff --git a/airflow/www/api/experimental/endpoints.py b/airflow/www/api/experimental/endpoints.py
index 75256f13736fd..735756bbb6b1c 100644
--- a/airflow/www/api/experimental/endpoints.py
+++ b/airflow/www/api/experimental/endpoints.py
@@ -122,7 +122,7 @@ def trigger_dag(dag_id):
     try:
         dr = trigger.trigger_dag(dag_id, run_id, conf, execution_date, replace_microseconds)
     except AirflowException as err:
-        log.error(err)
+        log.exception("Exception occurred when triggering DAG %r for run_id %r.", dag_id, run_id)
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -143,7 +143,7 @@ def delete_dag(dag_id):
     try:
         count = delete.delete_dag(dag_id)
     except AirflowException as err:
-        log.error(err)
+        log.exception("Exception occurred when deleting DAG %r.", dag_id)
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -165,7 +165,9 @@ def dag_runs(dag_id):
         state = request.args.get('state')
         dagruns = get_dag_runs(dag_id, state)
     except AirflowException as err:
-        log.info(err)
+        log.info(
+            "Exception occurred when listing DAG Runs for DAG %r with state %s.", dag_id, state, exc_info=True
+        )
         response = jsonify(error=f"{err}")
         response.status_code = 400
         return response
@@ -194,7 +196,7 @@ def get_dag_code(dag_id):
     try:
         return get_code(dag_id)
     except AirflowException as err:
-        log.info(err)
+        log.info("Exception occurred when retrieving DAG code for DAG %r.", dag_id, exc_info=True)
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -207,7 +209,7 @@ def task_info(dag_id, task_id):
     try:
         t_info = get_task(dag_id, task_id)
     except AirflowException as err:
-        log.info(err)
+        log.info("Exception occurred when retrieving task %r in DAG %r.", task_id, dag_id, exc_info=True)
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -268,7 +270,13 @@ def task_instance_info(dag_id, execution_date, task_id):
     try:
         ti_info = get_task_instance(dag_id, task_id, execution_date)
     except AirflowException as err:
-        log.info(err)
+        log.info(
+            "Exception occurred when retrieving Task Instance for task %r in DAG %r for execution_date %s.",
+            task_id,
+            dag_id,
+            execution_date,
+            exc_info=True,
+        )
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -304,7 +312,12 @@ def dag_run_status(dag_id, execution_date):
     try:
         dr_info = get_dag_run_state(dag_id, execution_date)
     except AirflowException as err:
-        log.info(err)
+        log.info(
+            "Exception occurred when retrieving DAG Run state for DAG %r with execution_date %s.",
+            dag_id,
+            execution_date,
+            exc_info=True,
+        )
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -342,7 +355,7 @@ def get_pool(name):
     try:
         pool = pool_api.get_pool(name=name)
     except AirflowException as err:
-        log.error(err)
+        log.exception("Exception occurred when retrieving pool %r.", name)
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -357,7 +370,7 @@ def get_pools():
     try:
         pools = pool_api.get_pools()
     except AirflowException as err:
-        log.error(err)
+        log.exception("Exception occurred when retrieving all pools.")
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -373,7 +386,7 @@ def create_pool():
     try:
         pool = pool_api.create_pool(**params)
     except AirflowException as err:
-        log.error(err)
+        log.exception("Exception occurred when creating pool.")
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -388,7 +401,7 @@ def delete_pool(name):
     try:
         pool = pool_api.delete_pool(name=name)
     except AirflowException as err:
-        log.error(err)
+        log.exception("Exception occurred when deleting pool %r.", name)
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
@@ -417,7 +430,11 @@ def get_lineage(dag_id: str, execution_date: str):
     try:
         lineage = get_lineage_api(dag_id=dag_id, execution_date=execution_dt)
     except AirflowException as err:
-        log.error(err)
+        log.exception(
+            "Exception occurred when retrieving lineage details for DAG %r and execution_date %s.",
+            dag_id,
+            execution_date,
+        )
         response = jsonify(error=f"{err}")
         response.status_code = err.status_code
         return response
diff --git a/airflow/www/extensions/init_appbuilder.py b/airflow/www/extensions/init_appbuilder.py
index 55003cde3ffed..7b9fe4276378b 100644
--- a/airflow/www/extensions/init_appbuilder.py
+++ b/airflow/www/extensions/init_appbuilder.py
@@ -62,8 +62,8 @@ def dynamic_class_import(class_path):
         package = __import__(module_path)
         return reduce(getattr, tmp[1:], package)
     except Exception as e:
-        log.exception(e)
-        log.error(LOGMSG_ERR_FAB_ADDON_IMPORT.format(class_path, e))
+        log.exception("Dynamic import of %r has failed.", module_path)
+        log.error(LOGMSG_ERR_FAB_ADDON_IMPORT.format(class_path, e))  # noqa: G001
 
 
 class AirflowAppBuilder:
@@ -323,10 +323,10 @@ def _add_addon_views(self):
                 addon_class.register_views()
                 addon_class.post_process()
                 self.addon_managers[addon] = addon_class
-                log.info(LOGMSG_INF_FAB_ADDON_ADDED.format(str(addon)))
+                log.info(LOGMSG_INF_FAB_ADDON_ADDED.format(str(addon)))  # noqa: G001
             except Exception as e:
-                log.exception(e)
-                log.error(LOGMSG_ERR_FAB_ADDON_PROCESS.format(addon, e))
+                log.exception("Failed to register addons.")
+                log.error(LOGMSG_ERR_FAB_ADDON_PROCESS.format(addon, e))  # noqa: G001
 
     def _check_and_init(self, baseview):
         if hasattr(baseview, 'datamodel'):
@@ -412,7 +412,7 @@ def add_view(
             appbuilder.add_link("google", href="www.google.com", icon = "fa-google-plus")
         """
         baseview = self._check_and_init(baseview)
-        log.info(LOGMSG_INF_FAB_ADD_VIEW.format(baseview.__class__.__name__, name))
+        log.info(LOGMSG_INF_FAB_ADD_VIEW.format(baseview.__class__.__name__, name))  # noqa: G001
 
         if not self._view_exists(baseview):
             baseview.appbuilder = self
@@ -512,7 +512,7 @@ def add_view_no_menu(self, baseview, endpoint=None, static_folder=None):
            A BaseView type class instantiated.
         """
         baseview = self._check_and_init(baseview)
-        log.info(LOGMSG_INF_FAB_ADD_VIEW.format(baseview.__class__.__name__, ""))
+        log.info(LOGMSG_INF_FAB_ADD_VIEW.format(baseview.__class__.__name__, ""))  # noqa: G001
 
         if not self._view_exists(baseview):
             baseview.appbuilder = self
@@ -522,7 +522,7 @@ def add_view_no_menu(self, baseview, endpoint=None, static_folder=None):
             self.register_blueprint(baseview, endpoint=endpoint, static_folder=static_folder)
             self._add_permission(baseview)
         else:
-            log.warning(LOGMSG_WAR_FAB_VIEW_EXISTS.format(baseview.__class__.__name__))
+            log.warning(LOGMSG_WAR_FAB_VIEW_EXISTS.format(baseview.__class__.__name__))  # noqa: G001
         return baseview
 
     def security_cleanup(self):
@@ -584,16 +584,16 @@ def _add_permission(self, baseview, update_perms=False):
             try:
                 self.sm.add_permissions_view(baseview.base_permissions, baseview.class_permission_name)
             except Exception as e:
-                log.exception(e)
-                log.error(LOGMSG_ERR_FAB_ADD_PERMISSION_VIEW.format(str(e)))
+                log.exception("Adding permission(s) to a view failed.")
+                log.error(LOGMSG_ERR_FAB_ADD_PERMISSION_VIEW.format(str(e)))  # noqa: G001
 
     def _add_permissions_menu(self, name, update_perms=False):
         if self.update_perms or update_perms:
             try:
                 self.sm.add_permissions_menu(name)
             except Exception as e:
-                log.exception(e)
-                log.error(LOGMSG_ERR_FAB_ADD_PERMISSION_MENU.format(str(e)))
+                log.exception("Adding permission(s) to a menu failed.")
+                log.error(LOGMSG_ERR_FAB_ADD_PERMISSION_MENU.format(str(e)))  # noqa: G001
 
     def _add_menu_permissions(self, update_perms=False):
         if self.update_perms or update_perms:
diff --git a/airflow/www/extensions/init_jinja_globals.py b/airflow/www/extensions/init_jinja_globals.py
index ca78ab4047a25..f8e78e4988103 100644
--- a/airflow/www/extensions/init_jinja_globals.py
+++ b/airflow/www/extensions/init_jinja_globals.py
@@ -47,9 +47,9 @@ def init_jinja_globals(app):
     try:
         airflow_version = airflow.__version__
-    except Exception as e:
+    except Exception:
         airflow_version = None
-        logging.error(e)
+        logging.exception("Exception occurred when retrieving Airflow version.")
 
     git_version = get_airflow_git_version()
diff --git a/airflow/www/extensions/init_security.py b/airflow/www/extensions/init_security.py
index c1018ee7e6304..73efd00b5eb7a 100644
--- a/airflow/www/extensions/init_security.py
+++ b/airflow/www/extensions/init_security.py
@@ -56,5 +56,5 @@ def init_api_experimental_auth(app):
             auth.init_app(app)
             app.api_auth.append(auth)
     except ImportError as err:
-        log.critical("Cannot import %s for API authentication due to: %s", backend, err)
+        log.critical("Cannot import %s for API authentication.", backend, exc_info=True)
         raise AirflowException(err)
diff --git a/airflow/www/fab_security/manager.py b/airflow/www/fab_security/manager.py
index 8399f11df367e..f1e7d4b4c3de4 100644
--- a/airflow/www/fab_security/manager.py
+++ b/airflow/www/fab_security/manager.py
@@ -875,14 +875,14 @@ def auth_user_db(self, username, password):
                 "c0976a03d2f18f680bfff877c9a965db9eedc51bc0be87c",
                 "password",
             )
-            log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(username))
+            log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(username))  # noqa: G001
             return None
         elif check_password_hash(user.password, password):
             self.update_user_auth_stat(user, True)
             return user
         else:
             self.update_user_auth_stat(user, False)
-            log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(username))
+            log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(username))  # noqa: G001
             return None
 
     def _search_ldap(self, ldap, con, username):
@@ -1062,7 +1062,7 @@ def auth_user_ldap(self, username, password):
                 try:
                     con.start_tls_s()
                 except Exception:
-                    log.error(LOGMSG_ERR_SEC_AUTH_LDAP_TLS.format(self.auth_ldap_server))
+                    log.error(LOGMSG_ERR_SEC_AUTH_LDAP_TLS.format(self.auth_ldap_server))  # noqa: G001
                     return None
 
             # Define variables, so we can check if they are set in later steps
@@ -1090,7 +1090,7 @@ def auth_user_ldap(self, username, password):
 
                 # If search failed, go away
                 if user_dn is None:
-                    log.info(LOGMSG_WAR_SEC_NOLDAP_OBJ.format(username))
+                    log.info(LOGMSG_WAR_SEC_NOLDAP_OBJ.format(username))  # noqa: G001
                     return None
 
                 # Bind with user_dn/password (validates credentials)
@@ -1099,7 +1099,7 @@ def auth_user_ldap(self, username, password):
                     self.update_user_auth_stat(user, False)
 
                     # Invalid credentials, go away
-                    log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(username))
+                    log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(username))  # noqa: G001
                     return None
 
             # Flow 2 - (Direct Search Bind):
@@ -1130,7 +1130,7 @@ def auth_user_ldap(self, username, password):
                     self.update_user_auth_stat(user, False)
 
                     # Invalid credentials, go away
-                    log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(bind_username))
+                    log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(bind_username))  # noqa: G001
                     return None
 
                 # Search for `username` (if AUTH_LDAP_SEARCH is set)
@@ -1144,7 +1144,7 @@ def auth_user_ldap(self, username, password):
 
                     # If search failed, go away
                     if user_dn is None:
-                        log.info(LOGMSG_WAR_SEC_NOLDAP_OBJ.format(username))
+                        log.info(LOGMSG_WAR_SEC_NOLDAP_OBJ.format(username))  # noqa: G001
                         return None
 
             # Sync the user's roles
@@ -1169,7 +1169,7 @@ def auth_user_ldap(self, username, password):
 
                 # If user registration failed, go away
                 if not user:
-                    log.info(LOGMSG_ERR_SEC_ADD_REGISTER_USER.format(username))
+                    log.info(LOGMSG_ERR_SEC_ADD_REGISTER_USER.format(username))  # noqa: G001
                     return None
 
             # LOGIN SUCCESS (only if user is now registered)
@@ -1184,10 +1184,10 @@ def auth_user_ldap(self, username, password):
             if isinstance(e, dict):
                 msg = getattr(e, "message", None)
                 if (msg is not None) and ("desc" in msg):
-                    log.error(LOGMSG_ERR_SEC_AUTH_LDAP.format(e.message["desc"]))
+                    log.error(LOGMSG_ERR_SEC_AUTH_LDAP.format(e.message["desc"]))  # noqa: G001
                     return None
             else:
-                log.error(e)
+                log.exception("An LDAP exception occurred.")
                 return None
 
     def auth_user_oid(self, email):
@@ -1198,7 +1198,7 @@ def auth_user_oid(self, email):
         """
         user = self.find_user(email=email)
         if user is None or (not user.is_active):
-            log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(email))
+            log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(email))  # noqa: G001
             return None
         else:
             self.update_user_auth_stat(user)
@@ -1227,7 +1227,7 @@ def auth_user_remote_user(self, username):
         # If user does not exist on the DB and not auto user registration,
         # or user is inactive, go away.
         elif user is None or (not user.is_active):
-            log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(username))
+            log.info(LOGMSG_WAR_SEC_LOGIN_FAILED.format(username))  # noqa: G001
             return None
 
         self.update_user_auth_stat(user)
diff --git a/airflow/www/fab_security/sqla/manager.py b/airflow/www/fab_security/sqla/manager.py
index 8ee1d900f6ce0..49c0a5e21c3a0 100644
--- a/airflow/www/fab_security/sqla/manager.py
+++ b/airflow/www/fab_security/sqla/manager.py
@@ -106,7 +106,7 @@ def create_db(self):
             log.info(c.LOGMSG_INF_SEC_ADD_DB)
             super().create_db()
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_CREATE_DB.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_CREATE_DB.format(str(e)))  # noqa: G001
             exit(1)
 
     def find_register_user(self, registration_hash):
@@ -137,7 +137,7 @@ def add_register_user(self, username, first_name, last_name, email, password="",
             self.get_session.commit()
             return register_user
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_ADD_REGISTER_USER.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_ADD_REGISTER_USER.format(str(e)))  # noqa: G001
             self.appbuilder.get_session.rollback()
             return None
@@ -152,7 +152,7 @@ def del_register_user(self, register_user):
             self.get_session.commit()
             return True
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_DEL_REGISTER_USER.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_DEL_REGISTER_USER.format(str(e)))  # noqa: G001
             self.get_session.rollback()
             return False
@@ -210,10 +210,10 @@ def add_user(
             user.password = generate_password_hash(password)
             self.get_session.add(user)
             self.get_session.commit()
-            log.info(c.LOGMSG_INF_SEC_ADD_USER.format(username))
+            log.info(c.LOGMSG_INF_SEC_ADD_USER.format(username))  # noqa: G001
             return user
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_ADD_USER.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_ADD_USER.format(str(e)))  # noqa: G001
             self.get_session.rollback()
             return False
@@ -224,9 +224,9 @@ def update_user(self, user):
         try:
             self.get_session.merge(user)
             self.get_session.commit()
-            log.info(c.LOGMSG_INF_SEC_UPD_USER.format(user))
+            log.info(c.LOGMSG_INF_SEC_UPD_USER.format(user))  # noqa: G001
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_UPD_USER.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_UPD_USER.format(str(e)))  # noqa: G001
             self.get_session.rollback()
             return False
@@ -241,10 +241,10 @@ def add_role(self, name: str) -> Optional[Role]:
             role.name = name
             self.get_session.add(role)
             self.get_session.commit()
-            log.info(c.LOGMSG_INF_SEC_ADD_ROLE.format(name))
+            log.info(c.LOGMSG_INF_SEC_ADD_ROLE.format(name))  # noqa: G001
             return role
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_ADD_ROLE.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_ADD_ROLE.format(str(e)))  # noqa: G001
             self.get_session.rollback()
         return role
@@ -256,9 +256,9 @@ def update_role(self, role_id, name: str) -> Optional[Role]:
             role.name = name
             self.get_session.merge(role)
             self.get_session.commit()
-            log.info(c.LOGMSG_INF_SEC_UPD_ROLE.format(role))
+            log.info(c.LOGMSG_INF_SEC_UPD_ROLE.format(role))  # noqa: G001
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_UPD_ROLE.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_UPD_ROLE.format(str(e)))  # noqa: G001
             self.get_session.rollback()
             return None
         return role
@@ -348,7 +348,7 @@ def create_action(self, name):
             self.get_session.commit()
             return action
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_ADD_PERMISSION.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_ADD_PERMISSION.format(str(e)))  # noqa: G001
             self.get_session.rollback()
         return action
@@ -362,7 +362,7 @@ def delete_action(self, name: str) -> bool:
         """
         action = self.get_action(name)
         if not action:
-            log.warning(c.LOGMSG_WAR_SEC_DEL_PERMISSION.format(name))
+            log.warning(c.LOGMSG_WAR_SEC_DEL_PERMISSION.format(name))  # noqa: G001
             return False
         try:
             perms = (
@@ -371,13 +371,13 @@ def delete_action(self, name: str) -> bool:
                 .all()
             )
             if perms:
-                log.warning(c.LOGMSG_WAR_SEC_DEL_PERM_PVM.format(action, perms))
+                log.warning(c.LOGMSG_WAR_SEC_DEL_PERM_PVM.format(action, perms))  # noqa: G001
                 return False
             self.get_session.delete(action)
             self.get_session.commit()
             return True
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_DEL_PERMISSION.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_DEL_PERMISSION.format(str(e)))  # noqa: G001
             self.get_session.rollback()
             return False
@@ -417,7 +417,7 @@ def create_resource(self, name) -> Resource:
             self.get_session.commit()
             return resource
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_ADD_VIEWMENU.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_ADD_VIEWMENU.format(str(e)))  # noqa: G001
             self.get_session.rollback()
         return resource
@@ -430,7 +430,7 @@ def delete_resource(self, name: str) -> bool:
         """
         resource = self.get_resource(name)
         if not resource:
-            log.warning(c.LOGMSG_WAR_SEC_DEL_VIEWMENU.format(name))
+            log.warning(c.LOGMSG_WAR_SEC_DEL_VIEWMENU.format(name))  # noqa: G001
             return False
         try:
             perms = (
@@ -439,13 +439,13 @@ def delete_resource(self, name: str) -> bool:
                 .all()
             )
             if perms:
-                log.warning(c.LOGMSG_WAR_SEC_DEL_VIEWMENU_PVM.format(resource, perms))
+                log.warning(c.LOGMSG_WAR_SEC_DEL_VIEWMENU_PVM.format(resource, perms))  # noqa: G001
                 return False
             self.get_session.delete(resource)
             self.get_session.commit()
             return True
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_DEL_PERMISSION.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_DEL_PERMISSION.format(str(e)))  # noqa: G001
             self.get_session.rollback()
             return False
@@ -505,10 +505,10 @@ def create_permission(self, action_name, resource_name) -> Optional[Permission]:
         try:
             self.get_session.add(perm)
             self.get_session.commit()
-            log.info(c.LOGMSG_INF_SEC_ADD_PERMVIEW.format(str(perm)))
+            log.info(c.LOGMSG_INF_SEC_ADD_PERMVIEW.format(str(perm)))  # noqa: G001
             return perm
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_ADD_PERMVIEW.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_ADD_PERMVIEW.format(str(e)))  # noqa: G001
             self.get_session.rollback()
             return None
@@ -531,7 +531,7 @@ def delete_permission(self, action_name: str, resource_name: str) -> None:
             self.get_session.query(self.role_model).filter(self.role_model.permissions.contains(perm)).first()
         )
         if roles:
-            log.warning(c.LOGMSG_WAR_SEC_DEL_PERMVIEW.format(resource_name, action_name, roles))
+            log.warning(c.LOGMSG_WAR_SEC_DEL_PERMVIEW.format(resource_name, action_name, roles))  # noqa: G001
             return
         try:
             # delete permission on resource
@@ -540,9 +540,9 @@ def delete_permission(self, action_name: str, resource_name: str) -> None:
             # if no more permission on permission view, delete permission
             if not self.get_session.query(self.permission_model).filter_by(action=perm.action).all():
                 self.delete_action(perm.action.name)
-            log.info(c.LOGMSG_INF_SEC_DEL_PERMVIEW.format(action_name, resource_name))
+            log.info(c.LOGMSG_INF_SEC_DEL_PERMVIEW.format(action_name, resource_name))  # noqa: G001
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_DEL_PERMVIEW.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_DEL_PERMVIEW.format(str(e)))  # noqa: G001
             self.get_session.rollback()
 
     def perms_include_action(self, perms, action_name):
@@ -565,9 +565,9 @@ def add_permission_to_role(self, role: Role, permission: Permission) -> None:
             role.permissions.append(permission)
             self.get_session.merge(role)
             self.get_session.commit()
-            log.info(c.LOGMSG_INF_SEC_ADD_PERMROLE.format(str(permission), role.name))
+            log.info(c.LOGMSG_INF_SEC_ADD_PERMROLE.format(str(permission), role.name))  # noqa: G001
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_ADD_PERMROLE.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_ADD_PERMROLE.format(str(e)))  # noqa: G001
             self.get_session.rollback()
 
     def remove_permission_from_role(self, role: Role, permission: Permission) -> None:
@@ -582,7 +582,7 @@ def remove_permission_from_role(self, role: Role, permission: Permission) -> Non
             role.permissions.remove(permission)
             self.get_session.merge(role)
             self.get_session.commit()
-            log.info(c.LOGMSG_INF_SEC_DEL_PERMROLE.format(str(permission), role.name))
+            log.info(c.LOGMSG_INF_SEC_DEL_PERMROLE.format(str(permission), role.name))  # noqa: G001
         except Exception as e:
-            log.error(c.LOGMSG_ERR_SEC_DEL_PERMROLE.format(str(e)))
+            log.error(c.LOGMSG_ERR_SEC_DEL_PERMROLE.format(str(e)))  # noqa: G001
             self.get_session.rollback()
diff --git a/docs/exts/docroles.py b/docs/exts/docroles.py
index 618857d70ba74..028a5f909efce 100644
--- a/docs/exts/docroles.py
+++ b/docs/exts/docroles.py
@@ -93,7 +93,7 @@ def template_field_role(
         template_fields = get_template_field(app.env, text)
     except RoleException as e:
         msg = inliner.reporter.error(
-            f"invalid class name {text} \n{e}",
+            f"invalid class name {text} \n{e}",  # noqa: G004
             line=lineno,
         )
         prb = inliner.problematic(rawtext, rawtext, msg)
diff --git a/setup.py b/setup.py
index 2435ac046e5aa..0e39de5c67d99 100644
--- a/setup.py
+++ b/setup.py
@@ -103,8 +103,8 @@ def rm_all_files(files: List[str]) -> None:
        for file in files:
            try:
                os.remove(file)
-            except Exception as e:
-                logger.warning("Error when removing %s: %s", file, e)
+            except Exception:
+                logger.warning("Error when removing %s", file, exc_info=True)
 
     def run(self) -> None:
         """Remove temporary files and directories."""