
Commit 607c703

Merge pull request #441 from LAAC-LSCP/logging_scripts_micha
Logging scripts micha
2 parents 8551b71 + 9eb90f3 commit 607c703

12 files changed, +239 −187 lines changed

.gitignore

+4
@@ -1,5 +1,6 @@
 /output
 .DS_Store
+._*
 
 # pycharm project files
 /.idea
@@ -83,6 +84,9 @@ instance/
 # Sphinx documentation
 docs/_build/
 
+# PyCharm
+/.idea/
+
 # PyBuilder
 target/
 
CHANGELOG.md

+1
@@ -10,6 +10,7 @@ All notable changes to this project will be documented in this file.
 
 ### Changed
 
+- the output of the CLI in the terminal is now handled by the logger module and not by print statements
 - validating a dataset now results in warnings for broken symlinks and no errors anymore (#425)
 - validation with recordings existing but for which mediainfo can't read the sample rate no longer fail but outputs a warning.
 
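Since terminal output now goes through the logging module instead of print(), a downstream script can tune or silence it with the standard logging API. A minimal sketch follows, assuming the package-level logger is named "ChildProject" (which is what the in-code comments in the diff below indicate); the format string is purely illustrative.

import logging

# attach a basic handler to the root logger; propagated records are emitted through it
logging.basicConfig(format="%(levelname)s: %(message)s")

# keep warnings and errors from ChildProject, hide informational messages
logging.getLogger("ChildProject").setLevel(logging.WARNING)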

ChildProject/annotations.py

+21 −23

@@ -8,6 +8,7 @@
 import sys
 import traceback
 from typing import Callable, Dict, Iterable, List, Optional, Set, Tuple, Union
+import logging
 
 from . import __version__
 from .projects import ChildProject
@@ -16,6 +17,11 @@
 from .utils import Segment, intersect_ranges, path_is_parent, TimeInterval, series_to_datetime, find_lines_involved_in_overlap
 
 
+# Create a logger for the module (file)
+logger_annotations = logging.getLogger(__name__)
+# messages are propagated to the higher level logger (ChildProject), used in cmdline.py
+logger_annotations.propagate = True
+
 class AnnotationManager:
     INDEX_COLUMNS = [
         IndexColumn(
@@ -353,12 +359,7 @@ def read(self) -> Tuple[List[str], List[str]]:
         return errors, warnings
 
     def validate_annotation(self, annotation: dict) -> Tuple[List[str], List[str]]:
-        print(
-            "validating {} from {}...".format(
-                annotation["annotation_filename"], annotation["set"]
-            )
-        )
-
+        logger_annotations.info("Validating %s from %s...", annotation["annotation_filename"], annotation["set"])
         segments = IndexTable(
             "segments",
             path=os.path.join(
@@ -490,11 +491,12 @@ def _import_annotation(
         if self.annotations[(self.annotations['set'] == annotation['set']) &
                             (self.annotations['annotation_filename'] == annotation_filename)].shape[0] > 0:
             if overwrite_existing:
-                print(f"Warning: annotation file {output_filename} will be overwritten")
+                logger_annotations.warning("Annotation file %s will be overwritten", output_filename)
+
             else:
                 error_filename = output_filename.replace('\\','/')
                 annotation["error"] = f"annotation file {error_filename} already exists, to reimport it, use the overwrite_existing flag"
-                print(f"Error: {annotation['error']}")
+                logger_annotations.error("Error: %s", annotation['error'])
                 annotation["imported_at"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
                 return annotation
 
@@ -509,7 +511,8 @@ def _import_annotation(
         if ovl_annots.shape[0] > 0:
             array_tup = list(ovl_annots[['set','recording_filename','range_onset', 'range_offset']].itertuples(index=False, name=None))
             annotation["error"] = f"importation for set <{annotation['set']}> recording <{annotation['recording_filename']}> from {annotation['range_onset']} to {annotation['range_offset']} cannot continue because it overlaps with these existing annotation lines: {array_tup}"
-            print(f"Error: {annotation['error']}")
+            logger_annotations.error("Error: %s", annotation['error'])
+            #(f"Error: {annotation['error']}")
             annotation["imported_at"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
             return annotation
 
@@ -541,10 +544,7 @@ def _import_annotation(
             )
         except:
             annotation["error"] = traceback.format_exc()
-            print(
-                "an error occured while processing '{}'".format(path), file=sys.stderr
-            )
-            print(traceback.format_exc(), file=sys.stderr)
+            logger_annotations.error("An error occurred while processing '%s'", path, exc_info=True)
 
         if df is None or not isinstance(df, pd.DataFrame):
             annotation["imported_at"] = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
@@ -654,9 +654,7 @@ def import_annotations(
 
         builtin = input_processed[input_processed["format"].isin(converters.keys())]
         if not builtin["format"].map(lambda f: converters[f].THREAD_SAFE).all():
-            print(
-                "warning: some of the converters do not support multithread importation; running on 1 thread"
-            )
+            logger_annotations.warning("warning: some of the converters do not support multithread importation; running on 1 thread")
             threads = 1
 
         #if the input to import has overlaps in it, raise an error immediately, nothing will be imported
@@ -688,14 +686,15 @@ def import_annotations(
             axis=1,
             inplace=True,
         )
+
         if 'error' in imported.columns:
             errors = imported[~imported["error"].isnull()]
             imported = imported[imported["error"].isnull()]
             #when errors occur, separate them in a different csv in extra
             if errors.shape[0] > 0:
                 output = os.path.join(self.project.path, "extra","errors_import_{}.csv".format(datetime.datetime.now().strftime("%Y%m%d-%H%M%S")))
                 errors.to_csv(output, index=False)
-                print(f"Errors summary exported to {output}")
+                logger_annotations.info("Errors summary exported to %s", output)
         else:
             errors = None
 
@@ -709,7 +708,7 @@ def import_annotations(
         sets = set(input_processed['set'].unique())
         outdated_sets = self._check_for_outdated_merged_sets(sets= sets)
         for warning in outdated_sets:
-            print("warning: {}".format(warning))
+            logger_annotations.warning("warning: %s", warning)
 
         return (imported, errors)
 
@@ -763,19 +762,18 @@ def remove_set(self, annotation_set: str, recursive: bool = False):
         path = os.path.join(
             self.project.path, "annotations", annotation_set, "converted"
         )
-
         try:
             rmtree(path)
         except:
-            print("could not delete '{}', as it does not exist (yet?)".format(path))
+            logger_annotations.info("could not delete '%s', as it does not exist (yet?)", path)
             pass
 
         self.annotations = self.annotations[self.annotations["set"] != annotation_set]
         self.write()
-
+
         outdated_sets = self._check_for_outdated_merged_sets(sets= {annotation_set})
        for warning in outdated_sets:
-            print("warning: {}".format(warning))
+            logger_annotations.warning("warning: %s", warning)
 
     def rename_set(
         self,
@@ -1419,7 +1417,7 @@ def get_within_ranges(
         )
 
         if missing_data == "warn":
-            print(f"warning: {error_message}")
+            logger_annotations.warning("warning: %s", error_message)
         else:
             raise Exception(error_message)
 