Skip to content

Commit

Permalink
SDK新增滚动日志功能 (#706)
Browse files Browse the repository at this point in the history
* SDK新增滚动日志功能

* update-error-filename

* update

* update

* update

* 增加多进程日志支持

---------

Co-authored-by: yinjiaqi <[email protected]>
Co-authored-by: zhouhao10 <[email protected]>
  • Loading branch information
3 people authored Jan 2, 2025
1 parent be92f8c commit 90585ac
Show file tree
Hide file tree
Showing 5 changed files with 465 additions and 25 deletions.
35 changes: 35 additions & 0 deletions docs/BasisModule/Trace/Debug.md
Original file line number Diff line number Diff line change
Expand Up @@ -51,4 +51,39 @@ System.setProperty("APPBUILDER_LOGLFILE", "/tmp/appbuilder.log");
```golang
// golang
os.Setenv("APPBUILDER_LOGFILE", "/tmp/appbuilder.log")
```

## `setLogConfig`功能

Appbuilder-SDK新增滚动日志功能

主要参数:
- console_output: 数据类型bool,默认值True,LOG日志是否在控制台输出
- loglevel: 数据类型str,默认值"DEBUG",LOG日志级别
- log_path: 数据类型str,默认值"/tmp",默认日志存放路径。
- file_name: 数据类型str,默认值为进程id,日志名前缀
- rotate_frequency: 数据类型str,默认值"MIDNIGHT",LOG日志滚动更新时间单位
- "S": 以秒为单位
- "M": 以分钟为单位
- "H": 以小时为单位
- "D": 以天为时间单位
- "MIDNIGHT": 每日凌晨更新
- rotate_interval: 数据类型int,默认值1,LOG日志按时间滚动的间隔,与rotate_frequency参数联合使用
- max_file_size: 数据类型Optional[int],默认值None,传入`None`或负数会自动更新为系统最大整数`sys.maxsize`,单个滚动的LOG日志文件的最大大小,单位为B,例:最大10MB即传入 10\*1024\*1024
- total_log_size: 数据类型Optional[int],默认值None,传入`None`或负数会自动更新为系统最大整数`sys.maxsize`,当前目录下可储存的LOG日志文件的最大大小,例:10M即为10\*1024\*1024 # 以B为单位
- max_log_files: 数据类型Optional[int],默认值None,传入`None`或负数会自动更新为系统最大整数`sys.maxsize`,当前目录下可储存的LOG日志文件的最大数量

**注意:`setLogConfig`会自动生成error.file_name日志与file_name日志文件分别储存`error`级别日志和`loglevel`级别的日志,且两种日志文件的滚动逻辑是独立的,不相互影响。**
```python
# python
appbuilder.logger.setLogConfig(
    console_output=False,
    loglevel="DEBUG",
    log_path="/tmp",
rotate_frequency="MIDNIGHT", # 每日凌晨更新
rotate_interval=1,
max_file_size=100 * 1024 *1024, # 最大日志大小为100MB
total_log_size=1024 * 1024 *1024, # 最大储存1GB的日志
max_log_files=10, # 当前目录储存的最大LOG日志数
)
```
3 changes: 3 additions & 0 deletions python/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -203,8 +203,11 @@ def get_default_header():

from appbuilder.utils.trace.tracer import AppBuilderTracer, AppbuilderInstrumentor

from .utils.logger_file_headler import SizeAndTimeRotatingFileHandler

__all__ = [
"logger",
"SizeAndTimeRotatingFileHandler",
"BadRequestException",
"ForbiddenException",
"NotFoundException",
Expand Down
138 changes: 138 additions & 0 deletions python/tests/test_log_set_log_config.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,138 @@
# Copyright (c) 2024 Baidu, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import logging
import unittest


from appbuilder import SizeAndTimeRotatingFileHandler
from appbuilder.utils.logger_util import LoggerWithLoggerId

class TestLogSetLogConfig(unittest.TestCase):
    """Tests for the rolling-log configuration API and the rotating file handler."""

    @staticmethod
    def _new_wrapper():
        """Build a LoggerWithLoggerId in a known initial state."""
        return LoggerWithLoggerId(logger='test_logger', extra={'logid': 'test_logid'}, loglevel='INFO')

    def _attach_handler(self, **rotation_kwargs):
        """Attach a fresh SizeAndTimeRotatingFileHandler to 'CustomLogger' and return the logger."""
        log = logging.getLogger('CustomLogger')
        log.setLevel(logging.DEBUG)
        handler = SizeAndTimeRotatingFileHandler(file_name='test.log', **rotation_kwargs)
        handler.setFormatter(logging.Formatter('%(asctime)s - %(levelname)s - %(message)s'))
        log.addHandler(handler)
        return log

    def test_set_log_config(self):
        wrapper = self._new_wrapper()
        # rotate_interval < 1 is coerced to 1; None sizes/counts become sys.maxsize.
        wrapper.setLogConfig(
            console_output=True,
            loglevel='DEBUG',
            file_name='test.log',
            rotate_frequency='D',
            rotate_interval=0,
            max_file_size=None,
            total_log_size=None,
            max_log_files=None,
        )

    def test_set_log_config_log_path(self):
        os.environ["APPBUILDER_LOGPATH"] = "/tmp"
        wrapper = self._new_wrapper()
        wrapper.setLogConfig(
            console_output=True,
            loglevel='DEBUG',
            log_path='/tmp',
            file_name='test.log',
            rotate_frequency='D',
            rotate_interval=0,
            max_file_size=None,
            total_log_size=None,
            max_log_files=None,
        )

    def test_set_log_config_raise_error(self):
        wrapper = self._new_wrapper()
        # An unrecognized frequency keyword must be rejected.
        with self.assertRaises(ValueError):
            wrapper.setLogConfig(
                console_output=True,
                loglevel='DEBUG',
                file_name='test.log',
                rotate_frequency='ERROR-FREQUENCY',
                rotate_interval=0,
                max_file_size=None,
                total_log_size=None,
                max_log_files=None,
            )

        # An unrecognized log level must be rejected as well.
        with self.assertRaises(ValueError):
            wrapper.setLogConfig(
                console_output=True,
                loglevel='ERROR-LEVEL',
                file_name='test.log',
                rotate_frequency='D',
                rotate_interval=0,
                max_file_size=0,
                total_log_size=None,
                max_log_files=None,
            )

    def test_rolling_with_time(self):
        # Exercise every supported time unit; sizes are large so only time matters.
        for unit in ('S', 'M', 'H', 'D', 'MIDNIGHT'):
            log = self._attach_handler(
                rotate_frequency=unit,
                rotate_interval=1,
                max_file_size=1024 * 100 * 1024,
                max_log_files=10,
                total_log_size=1024 * 300 * 1024,
            )
            for _ in range(2):
                log.info("This is a test log message.")
                time.sleep(0.1)

    def test_rolling_with_size(self):
        # Tiny max_file_size forces size-based rollovers well before the time window.
        log = self._attach_handler(
            rotate_frequency='S',
            rotate_interval=10,
            max_file_size=1 * 1024,
            max_log_files=2,
            total_log_size=1024 * 300 * 1024,
        )
        for _ in range(100):
            log.info("This is a test log message." * 100)
            time.sleep(0.001)

    def test_rolling_to_total_max_size(self):
        # total_log_size is the binding limit here; old rotated files get pruned.
        log = self._attach_handler(
            rotate_frequency='S',
            rotate_interval=100,
            max_file_size=10 * 1024,
            max_log_files=10000,
            total_log_size=20 * 1024,
        )
        for _ in range(100):
            log.info("This is a test log message." * 100)
            time.sleep(0.001)
# Allow running this test module directly (outside a pytest/unittest runner).
if __name__ == '__main__':
    unittest.main()
98 changes: 98 additions & 0 deletions python/utils/logger_file_headler.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,98 @@
# Copyright (c) 2024 Baidu, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import time
import glob
import logging
from datetime import datetime, timedelta

class SizeAndTimeRotatingFileHandler(logging.Handler):
    """Rotate a log file when EITHER a time interval elapses OR it grows past a size cap.

    After each rollover the handler prunes old rotated files so that at most
    ``max_log_files`` rotated files remain and their combined size stays within
    ``total_log_size``.

    NOTE(review): callers are expected to pass positive limits (the SDK's
    ``setLogConfig`` converts ``None``/negative values to ``sys.maxsize`` before
    constructing this handler); passing ``max_log_files=0`` directly deletes every
    rotated file, which is preserved here for backward compatibility.
    """

    # Seconds per unit for the time-based rotation frequencies.
    _FREQUENCY_SECONDS = {'S': 1, 'M': 60, 'H': 3600, 'D': 86400}

    def __init__(self,
                 file_name,
                 rotate_frequency='MIDNIGHT',
                 rotate_interval=1,
                 max_file_size=0,
                 max_log_files=0,
                 total_log_size=0
                 ):
        """
        Args:
            file_name (str): path of the active log file (its directory must already exist).
            rotate_frequency (str): 'S', 'M', 'H', 'D' or 'MIDNIGHT' (case-insensitive).
            rotate_interval (int): number of frequency units between time-based rollovers.
            max_file_size (int): size in bytes that triggers a rollover; <= 0 disables size checks.
            max_log_files (int): maximum number of rotated files kept after a rollover.
            total_log_size (int): maximum combined size in bytes of rotated files.
        """
        super().__init__()
        self.file_name = file_name
        self.rotate_frequency = rotate_frequency.upper()
        self.rotate_interval = rotate_interval
        self.max_file_size = max_file_size
        self.max_log_files = max_log_files
        self.total_log_size = total_log_size
        self.current_time = datetime.now()
        self.current_file = self.file_name
        self.stream = open(self.current_file, 'a')
        self.last_rollover = time.time()

    def _get_new_filename(self):
        """Name for the file being rotated out, based on the rollover timestamp."""
        suffix = self.current_time.strftime("%Y-%m-%d_%H-%M-%S")
        return f"{self.file_name}.{suffix}"

    def emit(self, record):
        """Write *record*, rolling the file over first when a limit has been reached.

        ``Handler.handle`` already serializes calls through the handler lock, so no
        extra locking is needed here.  Failures are routed through ``handleError``
        (the logging convention) instead of propagating into application code.
        """
        try:
            if self.shouldRollover(record):
                self.doRollover()
            self.stream.write(self.format(record) + '\n')
            self.stream.flush()
        except Exception:
            self.handleError(record)

    def shouldRollover(self, record):
        """Return True when the time window has elapsed or the file is too big."""
        now = time.time()
        if self.rotate_frequency == 'MIDNIGHT':
            # Roll once the calendar date changes, regardless of rotate_interval.
            time_rollover = (datetime.fromtimestamp(now).date()
                             != datetime.fromtimestamp(self.last_rollover).date())
        else:
            unit = self._FREQUENCY_SECONDS.get(self.rotate_frequency)
            time_rollover = (unit is not None and
                             now >= self.last_rollover + self.rotate_interval * unit)

        # The active file may have been removed externally (e.g. by another
        # process's pruning); treat a missing file as size 0 instead of crashing.
        try:
            current_size = os.path.getsize(self.current_file)
        except OSError:
            current_size = 0
        size_rollover = self.max_file_size > 0 and current_size >= self.max_file_size

        return time_rollover or size_rollover

    def doRollover(self):
        """Rotate the active file out under a timestamped name and reopen it."""
        self.stream.close()
        self.current_time = datetime.now()
        new_filename = self._get_new_filename()
        # The timestamp suffix has 1-second resolution, so two size-triggered
        # rollovers within the same second would collide: os.rename silently
        # overwrites the earlier rotated file on POSIX and raises on Windows.
        # Append a counter to keep every rotated file.
        candidate = new_filename
        counter = 0
        while os.path.exists(candidate):
            counter += 1
            candidate = f"{new_filename}.{counter}"
        try:
            os.rename(self.current_file, candidate)
        except OSError:
            pass  # active file vanished underneath us; nothing to rotate
        self.current_file = self.file_name
        self.stream = open(self.current_file, 'a')
        self.last_rollover = time.time()
        self.manage_log_files()

    def manage_log_files(self):
        """Delete the oldest rotated files until count and total size fit the caps."""
        log_files = sorted(glob.glob(f"{self.file_name}.*"), key=os.path.getmtime)

        while len(log_files) > self.max_log_files:
            self._remove_quietly(log_files.pop(0))

        # Guard on log_files so an over-limit total with an empty list can never loop.
        while log_files and self._total_size(log_files) > self.total_log_size:
            self._remove_quietly(log_files.pop(0))

    @staticmethod
    def _remove_quietly(path):
        """Remove *path*, ignoring races where another process already deleted it."""
        try:
            os.remove(path)
        except OSError:
            pass

    def _total_size(self, files):
        """Combined size in bytes of the files that still exist."""
        return sum(os.path.getsize(f) for f in files if os.path.exists(f))

    def close(self):
        """Close the underlying stream (safe to call more than once) and deregister."""
        if not self.stream.closed:
            self.stream.close()
        super().close()
Loading

0 comments on commit 90585ac

Please sign in to comment.