Commit 1edfd9c

Version [1.2.0]
1 parent f1f72ce commit 1edfd9c

5 files changed (+162 −57): CHANGELOG.md, README.md, base/__init__.py, main.py, utils/logger.py

CHANGELOG.md

+7

@@ -7,6 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
 
 
+## Version [1.2.0] - 2024-19-07
+
+### Added
+
+- [Issue #18](https://github.com/I-am-PUID-0/DMB/issues/18): Added DMB_LOG_SIZE environment variable to set the maximum size of the log file; Default is 10MB
+
+
 ## Version [1.1.0] - 2024-17-07
 
 ### Changed

README.md

+2

@@ -98,6 +98,7 @@ services:
       # - CLEANUP_INTERVAL=1
       # - DMB_LOG_LEVEL=DEBUG # Master log level for all program logs in DMB
       # - DMB_LOG_COUNT=2
+      # - DMB_LOG_SIZE=10M
       # Example to attach to gluetun vpn container if realdebrid blocks IP address
       # network_mode: container:gluetun
    ports:
@@ -171,6 +172,7 @@ of this parameter has the format `<VARIABLE_NAME>=<VALUE>`.
 |`CLEANUP_INTERVAL`| Interval between duplicate cleanup in hours. Values can be any positive [whole](https://www.oxfordlearnersdictionaries.com/us/definition/english/whole-number) or [decimal](https://www.oxfordreference.com/display/10.1093/oi/authority.20110803095705740;jsessionid=3FDC96CC0D79CCE69702661D025B9E9B#:~:text=The%20separator%20used%20between%20the,number%20expressed%20in%20decimal%20representation.) point based number. Ex. a value of .5 would yield thirty minutes and 1.5 would yield one and a half hours | `24` || :heavy_check_mark: | :heavy_check_mark:|
 |`DMB_LOG_LEVEL`| The level at which logs should be captured. See the python [Logging Levels](https://docs.python.org/3/library/logging.html#logging-levels) documentation for more details | `INFO` |
 |`DMB_LOG_COUNT`| The number logs to retain. Result will be value + current log | `2` |
+|`DMB_LOG_SIZE`| The size of the log file before it is rotated. Valid options are 'K' (kilobytes), 'M' (megabytes), and 'G' (gigabytes) | `10M` |
 |`ZURG_ENABLED`| Set the value "true" to enable the Zurg process | `false ` | | | :heavy_check_mark:|
 |`GITHUB_TOKEN`| GitHub Personal Token for use with Zurg private repo. Requires Zurg [sponsorship](https://github.com/sponsors/debridmediamanager) | `false ` | | | :heavy_check_mark:|
 |`ZURG_VERSION`| The version of Zurg to use. If enabled, the value should contain v0.9.x or v0.9.x-hotfix.x format | `latest` | | | :heavy_check_mark: |
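
Note (not part of this commit): as a quick illustration of the value formats the new DMB_LOG_SIZE variable accepts, the parse_size() helper added to utils/logger.py in this commit treats a trailing 'K', 'M', or 'G' as a binary (1024-based) multiplier and a bare integer as a byte count. The import path below is assumed from this repo's layout.

    # Illustrative sketch only; utils.logger import path is assumed.
    from utils.logger import parse_size

    parse_size("10M")    # 10485760 bytes (10 * 1024 * 1024, the documented default)
    parse_size("512K")   # 524288 bytes
    parse_size("1G")     # 1073741824 bytes
    parse_size("2048")   # 2048 bytes (no suffix is read as a plain byte count)
    # In get_logger(), an unset or unparsable DMB_LOG_SIZE falls back to 10M.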

base/__init__.py

+1 −1

@@ -3,7 +3,7 @@
 from dotenv import load_dotenv, find_dotenv
 from datetime import datetime, timedelta
 import logging
-from logging.handlers import TimedRotatingFileHandler
+from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler, BaseRotatingHandler
 from packaging.version import Version, parse as parse_version
 import time
 import os

main.py

+1 −1

@@ -10,7 +10,7 @@
 def main():
     logger = get_logger()
 
-    version = '1.1.0'
+    version = '1.2.0'
 
     ascii_art = f'''
 

utils/logger.py

+151 −55

@@ -124,63 +124,155 @@ def time_to_complete(start_time):
         time_string += f"{seconds} second(s)"
     return time_string
 
-class CustomTimedRotatingFileHandler(TimedRotatingFileHandler):
-    def __init__(self, filename, when='h', interval=1, backupCount=0, encoding=None, delay=False, utc=False, atTime=None):
-        self.rollover_filename = filename
-        TimedRotatingFileHandler.__init__(self, self.rollover_filename, when, interval, backupCount, encoding, delay, utc, atTime)
-
+class CustomRotatingFileHandler(BaseRotatingHandler):
+    def __init__(self, filename, when='midnight', interval=1, backupCount=0, maxBytes=0, encoding=None, delay=False, utc=False, atTime=None):
+        self.when = when
+        self.backupCount = backupCount
+        self.maxBytes = maxBytes
+        self.utc = utc
+        self.atTime = atTime
+        self.interval = self.computeInterval(when, interval)
+        self.rolloverAt = self.computeRollover(time.time())
+        self.logger = logging.getLogger('CustomRotatingFileHandler')
+        self.logger.setLevel(logging.DEBUG)
+        formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%b %e, %Y %H:%M:%S')
+        stream_handler = logging.StreamHandler()
+        stream_handler.setFormatter(formatter)
+        if not self.logger.hasHandlers():
+            self.logger.addHandler(stream_handler)
+        super().__init__(filename, 'a', encoding, delay)
+
+    def computeInterval(self, when, interval):
+        if when == 'S':
+            return interval
+        elif when == 'M':
+            return interval * 60
+        elif when == 'H':
+            return interval * 60 * 60
+        elif when == 'D' or when == 'midnight':
+            return interval * 60 * 60 * 24
+        elif when.startswith('W'):
+            day = int(when[1])
+            current_day = time.localtime().tm_wday
+            days_to_wait = (day - current_day) % 7
+            return interval * 60 * 60 * 24 * 7 + days_to_wait * 60 * 60 * 24
+        else:
+            raise ValueError("Invalid rollover interval specified: %s" % when)
+
+    def computeRollover(self, currentTime):
+        if self.when == 'midnight':
+            t = time.localtime(currentTime)
+            current_hour = t.tm_hour
+            current_minute = t.tm_min
+            current_second = t.tm_sec
+            seconds_until_midnight = ((24 - current_hour - 1) * 3600) + ((60 - current_minute - 1) * 60) + (60 - current_second)
+            rollover_time = currentTime + seconds_until_midnight + 1
+        else:
+            rollover_time = currentTime + self.interval
+        return rollover_time
+
+    def shouldRollover(self, record):
+        if self.stream is None:
+            self.stream = self._open()
+        if self.maxBytes > 0:
+            self.stream.seek(0, 2)
+            if self.stream.tell() + len(self.format(record)) >= self.maxBytes:
+                return 1
+        t = int(time.time())
+        if t >= self.rolloverAt:
+            return 1
+        return 0
+
     def doRollover(self):
+        self.logger.debug("Performing rollover")
         if self.stream:
             self.stream.close()
-            self.stream = None
-
-        base_file_name_without_date = self.baseFilename.rsplit('-', 3)[0]
-        current_date = time.strftime("%Y-%m-%d")
-        correct_filename = base_file_name_without_date + '-' + current_date + '.log'
-
-        if self.rollover_filename != correct_filename:
-            new_filename = correct_filename
+        current_time = int(time.time())
+        base_filename_with_path, ext = os.path.splitext(self.baseFilename)
+        base_filename = os.path.basename(base_filename_with_path)
+        dir_name = os.path.dirname(base_filename_with_path)
+        match = re.search(r'(\d{4}-\d{2}-\d{2})', base_filename)
+        if match:
+            base_date = match.group(1)
         else:
-            new_filename = self.rollover_filename
-
-        filenames_to_delete = self.getFilesToDelete()
-        for filename in filenames_to_delete:
-            os.remove(filename)
-
-        self.rollover_filename = new_filename
-        self.baseFilename = self.rollover_filename
-        self.stream = self._open()
-
-        new_rollover_at = self.computeRollover(self.rolloverAt)
-        while new_rollover_at <= time.time():
-            new_rollover_at = new_rollover_at + self.interval
-        if self.utc:
-            dst_at_rollover = time.localtime(new_rollover_at)[-1]
+            base_date = None
+        current_date = time.strftime("%Y-%m-%d", time.localtime(current_time))
+        if base_date:
+            base_filename_without_date = base_filename.replace(f"-{base_date}", "")
         else:
-            dst_at_rollover = time.gmtime(new_rollover_at)[-1]
+            base_filename_without_date = base_filename
+        for i in range(self.backupCount - 1, 0, -1):
+            sfn = os.path.join(dir_name, f"{base_filename_without_date}-{base_date}_{i}.log" if base_date else f"{base_filename_without_date}_{i}.log")
+            dfn = os.path.join(dir_name, f"{base_filename_without_date}-{base_date}_{i + 1}.log" if base_date else f"{base_filename_without_date}_{i + 1}.log")
+            if os.path.exists(sfn):
+                self.logger.debug(f"Renaming {sfn} to {dfn}")
+                if os.path.exists(dfn):
+                    os.remove(dfn)
+                os.rename(sfn, dfn)
+        dfn = os.path.join(dir_name, f"{base_filename_without_date}-{base_date}_1.log" if base_date else f"{base_filename_without_date}_1.log")
+        self.logger.debug(f"Renaming {self.baseFilename} to {dfn}")
+        if os.path.exists(dfn):
+            os.remove(dfn)
+        os.rename(self.baseFilename, dfn)
+        if self.backupCount > 0:
+            files_to_delete = self.getFilesToDelete(base_filename_without_date)
+            for s in files_to_delete:
+                self.logger.debug(f"Deleting old log file {s}")
+                os.remove(s)
+        new_log_filename = os.path.join(dir_name, f"{base_filename_without_date}-{current_date}.log")
+        self.baseFilename = new_log_filename
+        if not self.delay:
+            self.stream = self._open()
+        self.rolloverAt = self.computeRollover(current_time)
 
-        if time.localtime(time.time())[-1] != dst_at_rollover:
-            addend = -3600 if time.localtime(time.time())[-1] else 3600
-            new_rollover_at += addend
-        self.rolloverAt = new_rollover_at
-
-    def getFilesToDelete(self):
-        dirName, baseName = os.path.split(self.baseFilename)
-        fileNames = os.listdir(dirName)
+    def getFilesToDelete(self, base_filename):
+        dir_name = os.path.dirname(self.baseFilename)
+        file_names = os.listdir(dir_name)
         result = []
-        prefix = baseName.split('-', 1)[0] + "-"
-        plen = len(prefix)
-        for fileName in fileNames:
-            if fileName[:plen] == prefix:
-                suffix = fileName[plen:]
-                if re.compile(r"^\d{4}-\d{2}-\d{2}.log$").match(suffix):
-                    result.append(os.path.join(dirName, fileName))
-        result.sort()
-        if len(result) < self.backupCount:
-            result = []
+        base_filename_without_date = re.sub(r'-\d{4}-\d{2}-\d{2}', '', os.path.basename(base_filename))
+        base_filename_pattern = re.escape(base_filename_without_date) + r"-\d{4}-\d{2}-\d{2}(_\d+)?\.log$"
+        self.logger.debug(f"Base filename pattern: {base_filename_pattern}")
+        pattern = re.compile(base_filename_pattern)
+        for file_name in file_names:
+            self.logger.debug(f"Checking file: {file_name}")
+            if pattern.match(file_name):
+                self.logger.debug(f"Matched file: {file_name}")
+                result.append(os.path.join(dir_name, file_name))
+        result.sort(key=lambda x: (self.extract_date(x), self.extract_index(x)))
+        self.logger.debug(f"Files considered for deletion: {result}")
+        if len(result) <= self.backupCount:
+            return []
         else:
-            result = result[:len(result) - self.backupCount]
-        return result
+            files_to_delete = result[:len(result) - self.backupCount]
+            self.logger.debug(f"Files to delete: {files_to_delete}")
+            return files_to_delete
+
+    @staticmethod
+    def extract_date(file_path):
+        file_name = os.path.basename(file_path)
+        match = re.search(r"(\d{4}-\d{2}-\d{2})", file_name)
+        if match:
+            return match.group(1)
+        return "9999-99-99"
+
+    @staticmethod
+    def extract_index(file_path):
+        file_name = os.path.basename(file_path)
+        match = re.search(r"_(\d+)\.log$", file_name)
+        if match:
+            return int(match.group(1))
+        return 0
+
+def parse_size(size_str):
+    size_str = size_str.strip().upper()
+    if size_str.endswith('K'):
+        return int(size_str[:-1]) * 1024
+    elif size_str.endswith('M'):
+        return int(size_str[:-1]) * 1024 * 1024
+    elif size_str.endswith('G'):
+        return int(size_str[:-1]) * 1024 * 1024 * 1024
+    else:
+        return int(size_str)
 
 def get_logger(log_name='DMB', log_dir='./log'):
     current_date = time.strftime("%Y-%m-%d")
@@ -194,20 +286,24 @@ def get_logger(log_name='DMB', log_dir='./log'):
     log_level_env = os.getenv('DMB_LOG_LEVEL')
     if log_level_env:
         log_level = log_level_env.upper()
-        os.environ['LOG_LEVEL'] = log_level
-        os.environ['RCLONE_LOG_LEVEL'] = log_level
     else:
         log_level = 'INFO'
-        numeric_level = getattr(logging, log_level, logging.INFO)
-        logger.setLevel(numeric_level)
+    numeric_level = getattr(logging, log_level, logging.INFO)
+    logger.setLevel(numeric_level)
+    max_log_size_env = os.getenv('DMB_LOG_SIZE')
+    try:
+        max_log_size = parse_size(max_log_size_env) if max_log_size_env else 10 * 1024 * 1024
+    except (ValueError, TypeError):
+        max_log_size = 10 * 1024 * 1024
+
     log_path = os.path.join(log_dir, log_filename)
-    handler = CustomTimedRotatingFileHandler(log_path, when="midnight", interval=1, backupCount=backupCount)
+    handler = CustomRotatingFileHandler(log_path, when="midnight", interval=1, backupCount=backupCount, maxBytes=max_log_size)
    os.chmod(log_path, 0o666)
     formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(message)s', datefmt='%b %e, %Y %H:%M:%S')
     handler.setFormatter(formatter)
     stdout_handler = logging.StreamHandler(sys.stdout)
     stdout_handler.setFormatter(formatter)
-
+
     for hdlr in logger.handlers[:]:
         logger.removeHandler(hdlr)
     logger.addHandler(handler)
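
Note (not part of this commit): a minimal sketch of the rotation behaviour the new handler implements, assuming the default log name "DMB", a log file created on 2024-07-19, and an existing ./log directory; the import path and the resulting filenames are inferred from doRollover() and getFilesToDelete() above.

    # Illustrative sketch only; utils.logger import path is assumed.
    from utils.logger import CustomRotatingFileHandler

    handler = CustomRotatingFileHandler(
        "./log/DMB-2024-07-19.log",   # current log file; the ./log directory must already exist
        when="midnight",              # time-based trigger; size is also checked in shouldRollover()
        backupCount=2,                # rotated files kept alongside the live log (cf. DMB_LOG_COUNT in the README)
        maxBytes=10 * 1024 * 1024,    # 10M, the DMB_LOG_SIZE default
    )

    # When either trigger fires, doRollover() renames and reopens:
    #   ./log/DMB-2024-07-19.log  -> ./log/DMB-2024-07-19_1.log
    #   an existing ..._1.log     -> ..._2.log, and so on up to backupCount
    #   a new ./log/DMB-<today>.log is opened for subsequent records
    # Matching files beyond backupCount are then removed via getFilesToDelete().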
