diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml new file mode 100644 index 0000000..9ea5d9d --- /dev/null +++ b/.github/workflows/build.yml @@ -0,0 +1,43 @@ +name: Test python-json-logger + +on: + push: + branches: + - master + workflow_dispatch: + inputs: + logLevel: + description: 'Log level' + required: true + default: 'warning' + type: choice + options: + - info + - warning + - debug + pull_request: + types: [opened, reopened] + +jobs: + test: + runs-on: "ubuntu-20.04" #Moving down to 20.04 (latest is 22.04) because of python3.6 support + strategy: + fail-fast: false + matrix: + python-version: ["pypy-3.8", "pypy-3.9", "3.6", "3.7", "3.8", "3.9", "3.10", "3.11"] + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python ${{ matrix.python-version }} + uses: actions/setup-python@v4 + with: + python-version: ${{ matrix.python-version }} + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install tox tox-gh-actions + + - name: Test with tox + run: tox diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..ee269e8 --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,32 @@ +name: Release python-json-logger build + +on: + release: + types: [ created ] + + workflow_dispatch: + +jobs: + publish: + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v3 + + - name: Set up Python 3.11 + uses: actions/setup-python@v4 + with: + python-version: 3.11 + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install setuptools wheel twine typing_extensions + + - name: Build and Upload to PyPi + run: | + python setup.py sdist bdist_wheel + python -m twine upload dist/* + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} diff --git a/.gitignore b/.gitignore index 62fd0cc..051f709 100644 --- a/.gitignore +++ b/.gitignore @@ -3,4 +3,16 @@ build dist *.egg-info + +# Tests and validation .tox/ +.mypy_cache + +# Python's venv +.env +.venv +env + +# IDE +.vscode +.idea diff --git a/.travis.yml b/.travis.yml deleted file mode 100644 index 6df6b36..0000000 --- a/.travis.yml +++ /dev/null @@ -1,9 +0,0 @@ -sudo: false -language: python -python: - - "2.7" - - "3.4" - - "3.5" - - "3.6" -install: pip install tox-travis -script: tox diff --git a/CHANGELOG.md b/CHANGELOG.md new file mode 100644 index 0000000..208e7f1 --- /dev/null +++ b/CHANGELOG.md @@ -0,0 +1,85 @@ +# Changelog +All notable changes to this project will be documented in this file. + +The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/), +and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). + +## [2.0.7] - 2023-02-21 +### Changed +- Fix inclusion of py.typed in pip packages - @sth +- Added pytest support with test file rename. 
Migrated to assertEqual + +## [2.0.6] - 2023-02-14 +### Changed +- Parameter `rename_fields` in merge_record_extra is now optional - @afallou + +## [2.0.5] - 2023-02-12 +### Added +- Allow reserved attrs to be renamed - @henkhogan +- Support added for Python 3.11 +- Now verifying builds in Pypy 3.9 as well +- Type annotations are now in the package - @louis-jaris +### Changed +- Fix rename_fields for exc_info - @guilhermeferrari +- Cleaned up test file for PEP8 - @lopagela +- Cleaned up old Python 2 artifacts - @louis-jaris +- Dropped Python 3.5 support - @idomozes +- Moved type check via tox into 3.11 run only +- Added test run in Python3.6 (will keep for a little while longer, but it's EOL so upgrade) + +## [2.0.4] - 2022-07-11 +### Changed +- Fix too strict regex for percentage style logging - @aberres +- + +## [2.0.3] - 2022-07-08 +### Added +- Add PEP 561 marker/basic mypy configuration. - @bringhurst +- Workaround logging.LogRecord.msg type of string. - @bringhurst +### Changed +- Changed a link archive of the reference page in case it's down. - @ahonnecke +- Removed unnecessary try-except around OrderedDict usage - @sozofaan +- Update documentation link to json module + use https - @deronnax +- Dropped 3.5 support. - @bringhurst + +## [2.0.2] - 2021-07-27 +### Added +- Officially supporting 3.9 - @felixonmars. +- You can now add static fields to log objects - @cosimomeli. +### Changed +- Dropped 3.4 support. +- Dropped Travis CI for Github Actions. +- Wheel should build for python 3 instead of just 3.4 now. + +## [2.0.1] - 2020-10-12 +### Added +- Support Pypi long descripton - @ereli-cb +### Changed +- You can now rename output fields - @schlitzered + +## [2.0.0] - 2020-09-26 +### Added +- New Changelog +- Added timezone support to timestamps - @lalten +- Refactored log record to function - @georgysavva +- Add python 3.8 support - @tommilligan +### Removed +- Support for Python 2.7 +- Debian directory + +## [0.1.11] - 2019-03-29 +### Added +- Support for Python 3.7 +### Changed +- 'stack_info' flag in logging calls is now respected in JsonFormatter by [@ghShu](https://github.com/ghShu) + + +[2.0.7]: https://github.com/madzak/python-json-logger/compare/v2.0.6...v2.0.7 +[2.0.6]: https://github.com/madzak/python-json-logger/compare/v2.0.5...v2.0.6 +[2.0.5]: https://github.com/madzak/python-json-logger/compare/v2.0.4...v2.0.5 +[2.0.4]: https://github.com/madzak/python-json-logger/compare/v2.0.3...v2.0.4 +[2.0.3]: https://github.com/madzak/python-json-logger/compare/v2.0.2...v2.0.3 +[2.0.2]: https://github.com/madzak/python-json-logger/compare/v2.0.1...v2.0.2 +[2.0.1]: https://github.com/madzak/python-json-logger/compare/v2.0.0...v2.0.1 +[2.0.0]: https://github.com/madzak/python-json-logger/compare/v0.1.11...v2.0.0 +[0.1.11]: https://github.com/madzak/python-json-logger/compare/v0.1.10...v0.1.11 diff --git a/MANIFEST.in b/MANIFEST.in index 90b3902..e35cf17 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,2 +1,3 @@ include LICENSE -recursive-include tests *.py \ No newline at end of file +include README.md +recursive-include tests *.py diff --git a/README.markdown b/README.md similarity index 80% rename from README.markdown rename to README.md index db641d6..fa864df 100644 --- a/README.markdown +++ b/README.md @@ -1,4 +1,6 @@ -[![Build Status](https://travis-ci.org/madzak/python-json-logger.svg?branch=master)](https://travis-ci.org/madzak/python-json-logger) +# This project has been retired and is no longer actively maintained. 
We recommend transitioning to [nhairs/python-json-logger](https://github.com/nhairs/python-json-logger) for continued development, updates, and community support. Thank you for your understanding and for supporting this project over the years! + +![Build Status](https://github.com/madzak/python-json-logger/actions/workflows/build.yml/badge.svg) [![License](https://img.shields.io/pypi/l/python-json-logger.svg)](https://pypi.python.org/pypi/python-json-logger/) [![Version](https://img.shields.io/pypi/v/python-json-logger.svg)](https://pypi.python.org/pypi/python-json-logger/) @@ -6,6 +8,10 @@ Overview ======= This library is provided to allow standard python logging to output log data as json objects. With JSON we can make our logs more readable by machines and we can stop writing custom parsers for syslog type records. +News +======= +Hi, I see this package is quiet alive and I am sorry for ignoring it so long. I will be stepping up my maintenance of this package so please allow me a week to get things back in order (and most likely a new minor version) and I'll post and update here once I am caught up. + Installing ========== Pip: @@ -56,7 +62,7 @@ formatter = CustomJsonFormatter('one;two') # is equivalent to: -formatter = jsonlogger.JsonFormatter('(one) (two)') +formatter = jsonlogger.JsonFormatter('%(one)s %(two)s') ``` You can also add extra fields to your json output by specifying a dict in place of message, as well as by specifying an `extra={}` argument. @@ -78,7 +84,7 @@ class CustomJsonFormatter(jsonlogger.JsonFormatter): else: log_record['level'] = record.levelname -formatter = CustomJsonFormatter('(timestamp) (level) (name) (message)') +formatter = CustomJsonFormatter('%(timestamp)s %(level)s %(name)s %(message)s') ``` Items added to the log record will be included in *every* log message, no matter what the format requires. @@ -93,7 +99,7 @@ def json_translate(obj): return {"special": obj.special} formatter = jsonlogger.JsonFormatter(json_default=json_translate, - json_encoder=json.JSONEncoder()) + json_encoder=json.JSONEncoder) logHandler.setFormatter(formatter) logger.info({"special": "value", "run": 12}) @@ -161,3 +167,10 @@ Sample JSON with a full formatter (basically the log message from the unit test) "run": 12 } ``` + +External Examples +================= + +- [Wesley Tanaka - Structured log files in Python using python-json-logger](http://web.archive.org/web/20201130054012/https://wtanaka.com/node/8201) + +- [Archive](https://web.archive.org/web/20201130054012/https://wtanaka.com/node/8201) diff --git a/debian/changelog b/debian/changelog deleted file mode 100644 index db566dd..0000000 --- a/debian/changelog +++ /dev/null @@ -1,5 +0,0 @@ -python3-json-logger (0.1.8-1) stable; urgency=medium - - * Initial release. 
- - -- Maximilian Wilhelm Wed, 07 Feb 2018 22:12:07 +0100 diff --git a/debian/compat b/debian/compat deleted file mode 100644 index ec63514..0000000 --- a/debian/compat +++ /dev/null @@ -1 +0,0 @@ -9 diff --git a/debian/control b/debian/control deleted file mode 100644 index 049a7f1..0000000 --- a/debian/control +++ /dev/null @@ -1,19 +0,0 @@ -Source: python3-json-logger -Section: python -Priority: optional -Maintainer: Maximilian Wilhelm -Build-Depends: debhelper (>= 9), dh-python, dh-systemd, - python3-all, - python3-setuptools, - python3-pbr -Standards-Version: 3.9.5 -Homepage: https://github.com/madzak/python-json-logger -X-Python3-Version: >= 3.2 - -Package: python3-json-logger -Architecture: all -Depends: ${python3:Depends}, ${misc:Depends} -Description: JSON library for Python logging framework - This library is provided to allow standard python logging to output log data - as json objects. With JSON we can make our logs more readable by machines - and we can stop writing custom parsers for syslog type records. diff --git a/debian/copyright b/debian/copyright deleted file mode 100644 index 46393c6..0000000 --- a/debian/copyright +++ /dev/null @@ -1,33 +0,0 @@ -Format: https://www.debian.org/doc/packaging-manuals/copyright-format/1.0/ -Upstream-Name: python-json-logger -Source: https://github.com/madzak/python-json-logger - -Files: * -Copyright: 2011-2018 Zakaria Zajac -License: BSD-2-Clause - -Files: debian/* -Copyright: 2018 Maximilian Wilhelm -License: BSD-2-Clause - -License: BSD-2-Clause - Redistribution and use in source and binary forms, with or without - modification, are permitted provided that the following conditions - are met: - 1. Redistributions of source code must retain the above copyright - notice, this list of conditions and the following disclaimer. - 2. Redistributions in binary form must reproduce the above copyright - notice, this list of conditions and the following disclaimer in the - documentation and/or other materials provided with the distribution. - . - THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - ``AS IS'' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE HOLDERS OR - CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, - EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, - PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR - PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF - LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING - NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS - SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. diff --git a/debian/rules b/debian/rules deleted file mode 100755 index 8de9aa8..0000000 --- a/debian/rules +++ /dev/null @@ -1,17 +0,0 @@ -#!/usr/bin/make -f -# See debhelper(7) (uncomment to enable) -# output every command that modifies files on the build system. 
-#DH_VERBOSE = 1 - -# see EXAMPLES in dpkg-buildflags(1) and read /usr/share/dpkg/* -DPKG_EXPORT_BUILDFLAGS = 1 -include /usr/share/dpkg/default.mk - -export PBR_VERSION=0.5.5 - -# main packaging script based on dh7 syntax -%: - dh $@ --with python3 --buildsystem=pybuild - -override_dh_auto_install: - dh_auto_install -O--buildsystem=pybuild diff --git a/debian/source/format b/debian/source/format deleted file mode 100644 index 163aaf8..0000000 --- a/debian/source/format +++ /dev/null @@ -1 +0,0 @@ -3.0 (quilt) diff --git a/requirements/ci.txt b/requirements/ci.txt index e5e5161..13b8983 100644 --- a/requirements/ci.txt +++ b/requirements/ci.txt @@ -1,3 +1,6 @@ coverage pylint unittest-xml-reporting +wheel +setuptools +tox diff --git a/setup.cfg b/setup.cfg index 79bc678..8c4bed7 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,5 +1,48 @@ -[bdist_wheel] -# This flag says that the code is written to work on both Python 2 and Python -# 3. If at all possible, it is good practice to do this. If you cannot, you -# will need to generate wheels for each Python version that you support. -universal=1 +[mypy] + +# For details on each flag, please see the mypy documentation at: +# https://mypy.readthedocs.io/en/stable/config_file.html#config-file + +# Import Discovery +mypy_path = src +namespace_packages = true + +# Disallow dynamic typing +disallow_any_unimported = true +disallow_any_expr = false +disallow_any_decorated = true +disallow_any_explicit = false +disallow_any_generics = false +disallow_subclassing_any = true + +# Untyped definitions and calls +disallow_untyped_calls = false +disallow_untyped_defs = false +disallow_incomplete_defs = true +check_untyped_defs = true +disallow_untyped_decorators = true + +# None and Optional handling +no_implicit_optional = true + +# Configuring warnings +warn_redundant_casts = true +warn_unused_ignores = true +warn_no_return = true +warn_return_any = true +warn_unreachable = true + +# Miscellaneous strictness flags +implicit_reexport = true +strict_equality = true + +# Configuring error messages +show_error_context = true +show_column_numbers = true +show_error_codes = true +pretty = true +show_absolute_path = true + +# Miscellaneous +warn_unused_configs = true +verbosity = 0 diff --git a/setup.py b/setup.py index ab3530c..74320b3 100644 --- a/setup.py +++ b/setup.py @@ -1,31 +1,41 @@ +from os import path from setuptools import setup, find_packages +# read the contents of your README file +this_directory = path.abspath(path.dirname(__file__)) +with open(path.join(this_directory, 'README.md'), encoding='utf-8') as f: + long_description = f.read() setup( name="python-json-logger", - version="0.1.10", + version="2.0.7", url="http://github.com/madzak/python-json-logger", license="BSD", + include_package_data=True, description="A python library adding a json log formatter", + long_description=long_description, + long_description_content_type='text/markdown', author="Zakaria Zajac", author_email="zak@madzak.com", package_dir={'': 'src'}, + package_data={"pythonjsonlogger": ["py.typed"]}, packages=find_packages("src", exclude="tests"), # https://packaging.python.org/guides/distributing-packages-using-setuptools/#python-requires - python_requires='>=2.7', + python_requires=">=3.6", test_suite="tests.tests", classifiers=[ - 'Development Status :: 3 - Alpha', + 'Development Status :: 6 - Mature', 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Operating System :: OS Independent', 'Programming Language :: Python', - 'Programming Language :: 
Python :: 2', - 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', + 'Programming Language :: Python :: 3.7', + 'Programming Language :: Python :: 3.8', + 'Programming Language :: Python :: 3.9', + 'Programming Language :: Python :: 3.10', + 'Programming Language :: Python :: 3.11', 'Topic :: System :: Logging', ] ) diff --git a/src/pythonjsonlogger/jsonlogger.py b/src/pythonjsonlogger/jsonlogger.py index 5a10e34..519a64d 100644 --- a/src/pythonjsonlogger/jsonlogger.py +++ b/src/pythonjsonlogger/jsonlogger.py @@ -1,45 +1,72 @@ -''' +""" This library is provided to allow standard python logging to output log data as JSON formatted strings -''' +""" import logging import json import re -from datetime import date, datetime, time import traceback import importlib +from datetime import date, datetime, time, timezone +from typing import Any, Callable, Dict, List, Optional, Tuple, Union from inspect import istraceback -# Support order in python 2.7 and 3 -try: - from collections import OrderedDict -except ImportError: - pass +from collections import OrderedDict # skip natural LogRecord attributes # http://docs.python.org/library/logging.html#logrecord-attributes -RESERVED_ATTRS = ( - 'args', 'asctime', 'created', 'exc_info', 'exc_text', 'filename', - 'funcName', 'levelname', 'levelno', 'lineno', 'module', - 'msecs', 'message', 'msg', 'name', 'pathname', 'process', - 'processName', 'relativeCreated', 'stack_info', 'thread', 'threadName') - - -def merge_record_extra(record, target, reserved): +RESERVED_ATTRS: Tuple[str, ...] = ( + "args", + "asctime", + "created", + "exc_info", + "exc_text", + "filename", + "funcName", + "levelname", + "levelno", + "lineno", + "module", + "msecs", + "message", + "msg", + "name", + "pathname", + "process", + "processName", + "relativeCreated", + "stack_info", + "thread", + "threadName", +) + +OptionalCallableOrStr = Optional[Union[Callable, str]] + + +def merge_record_extra( + record: logging.LogRecord, + target: Dict, + reserved: Union[Dict, List], + rename_fields: Optional[Dict[str, str]] = None, +) -> Dict: """ Merges extra attributes from LogRecord object into target dictionary :param record: logging.LogRecord :param target: dict to update :param reserved: dict or list with reserved keys to skip + :param rename_fields: an optional dict, used to rename field names in the output. + Rename levelname to log.level: {'levelname': 'log.level'} """ + if rename_fields is None: + rename_fields = {} for key, value in record.__dict__.items(): # this allows to have numeric keys - if (key not in reserved - and not (hasattr(key, "startswith") - and key.startswith('_'))): - target[key] = value + if key not in reserved and not ( + hasattr(key, "startswith") and key.startswith("_") + ): + target[rename_fields.get(key, key)] = value return target @@ -53,11 +80,9 @@ def default(self, obj): return self.format_datetime_obj(obj) elif istraceback(obj): - return ''.join(traceback.format_tb(obj)).strip() + return "".join(traceback.format_tb(obj)).strip() - elif type(obj) == Exception \ - or isinstance(obj, Exception) \ - or type(obj) == type: + elif type(obj) == Exception or isinstance(obj, Exception) or type(obj) == type: return str(obj) try: @@ -77,23 +102,38 @@ def format_datetime_obj(self, obj): class JsonFormatter(logging.Formatter): """ A custom formatter to format logging records as json strings. 
- extra values will be formatted as str() if nor supported by + Extra values will be formatted as str() if not supported by json default encoder """ - def __init__(self, *args, **kwargs): + def __init__( + self, + *args: Any, + json_default: OptionalCallableOrStr = None, + json_encoder: OptionalCallableOrStr = None, + json_serialiser: Union[Callable, str] = json.dumps, + json_indent: Optional[Union[int, str]] = None, + json_ensure_ascii: bool = True, + prefix: str = "", + rename_fields: Optional[dict] = None, + static_fields: Optional[dict] = None, + reserved_attrs: Tuple[str, ...] = RESERVED_ATTRS, + timestamp: Union[bool, str] = False, + **kwargs: Any + ): """ :param json_default: a function for encoding non-standard objects - as outlined in http://docs.python.org/2/library/json.html + as outlined in https://docs.python.org/3/library/json.html :param json_encoder: optional custom encoder :param json_serializer: a :meth:`json.dumps`-compatible callable that will be used to serialize the log record. - :param json_indent: an optional :meth:`json.dumps`-compatible numeric value - that will be used to customize the indent of the output json. - :param prefix: an optional string prefix added at the beginning of - the formatted string :param json_indent: indent parameter for json.dumps :param json_ensure_ascii: ensure_ascii parameter for json.dumps + :param prefix: an optional string prefix added at the beginning of + the formatted string + :param rename_fields: an optional dict, used to rename field names in the output. + Rename message to @message: {'message': '@message'} + :param static_fields: an optional dict, used to add fields with static values to all logs :param reserved_attrs: an optional list of fields that will be skipped when outputting json log record. Defaults to all log record attributes: http://docs.python.org/library/logging.html#logrecord-attributes @@ -102,15 +142,16 @@ def __init__(self, *args, **kwargs): to log record using string as key. If True boolean is passed, timestamp key will be "timestamp". Defaults to False/off. 
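        Example (illustrative only; the renamed key and the ``service`` value are
        placeholders, not defaults of this library)::

            formatter = JsonFormatter(
                "%(levelname)s %(name)s %(message)s",
                rename_fields={"levelname": "log.level"},
                static_fields={"service": "orders-api"},
                timestamp=True,
            )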
""" - self.json_default = self._str_to_fn(kwargs.pop("json_default", None)) - self.json_encoder = self._str_to_fn(kwargs.pop("json_encoder", None)) - self.json_serializer = self._str_to_fn(kwargs.pop("json_serializer", json.dumps)) - self.json_indent = kwargs.pop("json_indent", None) - self.json_ensure_ascii = kwargs.pop("json_ensure_ascii", True) - self.prefix = kwargs.pop("prefix", "") - reserved_attrs = kwargs.pop("reserved_attrs", RESERVED_ATTRS) + self.json_default = self._str_to_fn(json_default) + self.json_encoder = self._str_to_fn(json_encoder) + self.json_serializer = self._str_to_fn(json_serialiser) + self.json_indent = json_indent + self.json_ensure_ascii = json_ensure_ascii + self.prefix = prefix + self.rename_fields = rename_fields or {} + self.static_fields = static_fields or {} self.reserved_attrs = dict(zip(reserved_attrs, reserved_attrs)) - self.timestamp = kwargs.pop("timestamp", False) + self.timestamp = timestamp # super(JsonFormatter, self).__init__(*args, **kwargs) logging.Formatter.__init__(self, *args, **kwargs) @@ -118,8 +159,7 @@ def __init__(self, *args, **kwargs): self.json_encoder = JsonEncoder self._required_fields = self.parse() - self._skip_fields = dict(zip(self._required_fields, - self._required_fields)) + self._skip_fields = dict(zip(self._required_fields, self._required_fields)) self._skip_fields.update(self.reserved_attrs) def _str_to_fn(self, fn_as_str): @@ -133,32 +173,64 @@ def _str_to_fn(self, fn_as_str): if not isinstance(fn_as_str, str): return fn_as_str - path, _, function = fn_as_str.rpartition('.') + path, _, function = fn_as_str.rpartition(".") module = importlib.import_module(path) return getattr(module, function) - def parse(self): + def parse(self) -> List[str]: """ Parses format string looking for substitutions This method is responsible for returning a list of fields (as strings) to include in all log messages. """ - standard_formatters = re.compile(r'\((.+?)\)', re.IGNORECASE) - return standard_formatters.findall(self._fmt) + if isinstance(self._style, logging.StringTemplateStyle): + formatter_style_pattern = re.compile(r"\$\{(.+?)\}", re.IGNORECASE) + elif isinstance(self._style, logging.StrFormatStyle): + formatter_style_pattern = re.compile(r"\{(.+?)\}", re.IGNORECASE) + # PercentStyle is parent class of StringTemplateStyle and StrFormatStyle so + # it needs to be checked last. + elif isinstance(self._style, logging.PercentStyle): + formatter_style_pattern = re.compile(r"%\((.+?)\)", re.IGNORECASE) + else: + raise ValueError("Invalid format: %s" % self._fmt) - def add_fields(self, log_record, record, message_dict): + if self._fmt: + return formatter_style_pattern.findall(self._fmt) + else: + return [] + + def add_fields( + self, + log_record: Dict[str, Any], + record: logging.LogRecord, + message_dict: Dict[str, Any], + ) -> None: """ Override this method to implement custom logic for adding fields. 
""" for field in self._required_fields: log_record[field] = record.__dict__.get(field) + + log_record.update(self.static_fields) log_record.update(message_dict) - merge_record_extra(record, log_record, reserved=self._skip_fields) + merge_record_extra( + record, + log_record, + reserved=self._skip_fields, + rename_fields=self.rename_fields, + ) if self.timestamp: - key = self.timestamp if type(self.timestamp) == str else 'timestamp' - log_record[key] = datetime.utcnow() + key = self.timestamp if type(self.timestamp) == str else "timestamp" + log_record[key] = datetime.fromtimestamp(record.created, tz=timezone.utc) + + self._perform_rename_log_fields(log_record) + + def _perform_rename_log_fields(self, log_record): + for old_field_name, new_field_name in self.rename_fields.items(): + log_record[new_field_name] = log_record[old_field_name] + del log_record[old_field_name] def process_log_record(self, log_record): """ @@ -169,18 +241,26 @@ def process_log_record(self, log_record): def jsonify_log_record(self, log_record): """Returns a json string of the log record.""" - return self.json_serializer(log_record, - default=self.json_default, - cls=self.json_encoder, - indent=self.json_indent, - ensure_ascii=self.json_ensure_ascii) + return self.json_serializer( + log_record, + default=self.json_default, + cls=self.json_encoder, + indent=self.json_indent, + ensure_ascii=self.json_ensure_ascii, + ) + + def serialize_log_record(self, log_record: Dict[str, Any]) -> str: + """Returns the final representation of the log record.""" + return "%s%s" % (self.prefix, self.jsonify_log_record(log_record)) - def format(self, record): + def format(self, record: logging.LogRecord) -> str: """Formats a log record and serializes to json""" - message_dict = {} + message_dict: Dict[str, Any] = {} + # FIXME: logging.LogRecord.msg and logging.LogRecord.message in typeshed + # are always type of str. We shouldn't need to override that. if isinstance(record.msg, dict): message_dict = record.msg - record.message = None + record.message = "" else: record.message = record.getMessage() # only format time if needed @@ -189,17 +269,17 @@ def format(self, record): # Display formatted exception, but allow overriding it in the # user-supplied dict. - if record.exc_info and not message_dict.get('exc_info'): - message_dict['exc_info'] = self.formatException(record.exc_info) - if not message_dict.get('exc_info') and record.exc_text: - message_dict['exc_info'] = record.exc_text - - try: - log_record = OrderedDict() - except NameError: - log_record = {} - + if record.exc_info and not message_dict.get("exc_info"): + message_dict["exc_info"] = self.formatException(record.exc_info) + if not message_dict.get("exc_info") and record.exc_text: + message_dict["exc_info"] = record.exc_text + # Display formatted record of stack frames + # default format is a string returned from :func:`traceback.print_stack` + if record.stack_info and not message_dict.get("stack_info"): + message_dict["stack_info"] = self.formatStack(record.stack_info) + + log_record: Dict[str, Any] = OrderedDict() self.add_fields(log_record, record, message_dict) log_record = self.process_log_record(log_record) - return "%s%s" % (self.prefix, self.jsonify_log_record(log_record)) + return self.serialize_log_record(log_record) diff --git a/src/pythonjsonlogger/py.typed b/src/pythonjsonlogger/py.typed new file mode 100644 index 0000000..89afa56 --- /dev/null +++ b/src/pythonjsonlogger/py.typed @@ -0,0 +1 @@ +# PEP-561 marker. 
https://mypy.readthedocs.io/en/latest/installed_packages.html diff --git a/tests/test_jsonlogger.py b/tests/test_jsonlogger.py new file mode 100644 index 0000000..af369d2 --- /dev/null +++ b/tests/test_jsonlogger.py @@ -0,0 +1,317 @@ +# -*- coding: utf-8 -*- +import unittest +import unittest.mock +import logging +import json +import sys +import traceback +import random + +try: + import xmlrunner # noqa +except ImportError: + pass + +from io import StringIO + +sys.path.append('src/python-json-logger') +from pythonjsonlogger import jsonlogger +import datetime + + +class TestJsonLogger(unittest.TestCase): + def setUp(self): + self.log = logging.getLogger("logging-test-{}".format(random.randint(1, 101))) + self.log.setLevel(logging.DEBUG) + self.buffer = StringIO() + + self.log_handler = logging.StreamHandler(self.buffer) + self.log.addHandler(self.log_handler) + + def test_default_format(self): + fr = jsonlogger.JsonFormatter() + self.log_handler.setFormatter(fr) + + msg = "testing logging format" + self.log.info(msg) + log_json = json.loads(self.buffer.getvalue()) + + self.assertEqual(log_json["message"], msg) + + def test_percentage_format(self): + fr = jsonlogger.JsonFormatter( + # All kind of different styles to check the regex + '[%(levelname)8s] %(message)s %(filename)s:%(lineno)d %(asctime)' + ) + self.log_handler.setFormatter(fr) + + msg = "testing logging format" + self.log.info(msg) + log_json = json.loads(self.buffer.getvalue()) + + self.assertEqual(log_json["message"], msg) + self.assertEqual(log_json.keys(), {'levelname', 'message', 'filename', 'lineno', 'asctime'}) + + def test_rename_base_field(self): + fr = jsonlogger.JsonFormatter(rename_fields={'message': '@message'}) + self.log_handler.setFormatter(fr) + + msg = "testing logging format" + self.log.info(msg) + log_json = json.loads(self.buffer.getvalue()) + + self.assertEqual(log_json["@message"], msg) + + def test_rename_nonexistent_field(self): + fr = jsonlogger.JsonFormatter(rename_fields={'nonexistent_key': 'new_name'}) + self.log_handler.setFormatter(fr) + + stderr_watcher = StringIO() + sys.stderr = stderr_watcher + self.log.info("testing logging rename") + + self.assertTrue("KeyError: 'nonexistent_key'" in stderr_watcher.getvalue()) + + def test_add_static_fields(self): + fr = jsonlogger.JsonFormatter(static_fields={'log_stream': 'kafka'}) + + self.log_handler.setFormatter(fr) + + msg = "testing static fields" + self.log.info(msg) + log_json = json.loads(self.buffer.getvalue()) + + self.assertEqual(log_json["log_stream"], "kafka") + self.assertEqual(log_json["message"], msg) + + def test_format_keys(self): + supported_keys = [ + 'asctime', + 'created', + 'filename', + 'funcName', + 'levelname', + 'levelno', + 'lineno', + 'module', + 'msecs', + 'message', + 'name', + 'pathname', + 'process', + 'processName', + 'relativeCreated', + 'thread', + 'threadName' + ] + + log_format = lambda x: ['%({0:s})s'.format(i) for i in x] + custom_format = ' '.join(log_format(supported_keys)) + + fr = jsonlogger.JsonFormatter(custom_format) + self.log_handler.setFormatter(fr) + + msg = "testing logging format" + self.log.info(msg) + log_msg = self.buffer.getvalue() + log_json = json.loads(log_msg) + + for supported_key in supported_keys: + if supported_key in log_json: + self.assertTrue(True) + + def test_unknown_format_key(self): + fr = jsonlogger.JsonFormatter('%(unknown_key)s %(message)s') + + self.log_handler.setFormatter(fr) + msg = "testing unknown logging format" + try: + self.log.info(msg) + except Exception: + 
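            # An unknown key in the format string must not raise: add_fields()
            # looks it up with dict.get(), so it is emitted as null instead.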
self.assertTrue(False, "Should succeed") + + def test_log_adict(self): + fr = jsonlogger.JsonFormatter() + self.log_handler.setFormatter(fr) + + msg = {"text": "testing logging", "num": 1, 5: "9", + "nested": {"more": "data"}} + + self.log.info(msg) + log_json = json.loads(self.buffer.getvalue()) + self.assertEqual(log_json.get("text"), msg["text"]) + self.assertEqual(log_json.get("num"), msg["num"]) + self.assertEqual(log_json.get("5"), msg[5]) + self.assertEqual(log_json.get("nested"), msg["nested"]) + self.assertEqual(log_json["message"], "") + + def test_log_extra(self): + fr = jsonlogger.JsonFormatter() + self.log_handler.setFormatter(fr) + + extra = {"text": "testing logging", "num": 1, 5: "9", + "nested": {"more": "data"}} + self.log.info("hello", extra=extra) + log_json = json.loads(self.buffer.getvalue()) + self.assertEqual(log_json.get("text"), extra["text"]) + self.assertEqual(log_json.get("num"), extra["num"]) + self.assertEqual(log_json.get("5"), extra[5]) + self.assertEqual(log_json.get("nested"), extra["nested"]) + self.assertEqual(log_json["message"], "hello") + + def test_json_default_encoder(self): + fr = jsonlogger.JsonFormatter() + self.log_handler.setFormatter(fr) + + msg = {"adate": datetime.datetime(1999, 12, 31, 23, 59), + "otherdate": datetime.date(1789, 7, 14), + "otherdatetime": datetime.datetime(1789, 7, 14, 23, 59), + "otherdatetimeagain": datetime.datetime(1900, 1, 1)} + self.log.info(msg) + log_json = json.loads(self.buffer.getvalue()) + self.assertEqual(log_json.get("adate"), "1999-12-31T23:59:00") + self.assertEqual(log_json.get("otherdate"), "1789-07-14") + self.assertEqual(log_json.get("otherdatetime"), "1789-07-14T23:59:00") + self.assertEqual(log_json.get("otherdatetimeagain"), + "1900-01-01T00:00:00") + + @unittest.mock.patch('time.time', return_value=1500000000.0) + def test_json_default_encoder_with_timestamp(self, time_mock): + fr = jsonlogger.JsonFormatter(timestamp=True) + self.log_handler.setFormatter(fr) + + self.log.info("Hello") + + self.assertTrue(time_mock.called) + log_json = json.loads(self.buffer.getvalue()) + self.assertEqual(log_json.get("timestamp"), "2017-07-14T02:40:00+00:00") + + def test_json_custom_default(self): + def custom(o): + return "very custom" + fr = jsonlogger.JsonFormatter(json_default=custom) + self.log_handler.setFormatter(fr) + + msg = {"adate": datetime.datetime(1999, 12, 31, 23, 59), + "normal": "value"} + self.log.info(msg) + log_json = json.loads(self.buffer.getvalue()) + self.assertEqual(log_json.get("adate"), "very custom") + self.assertEqual(log_json.get("normal"), "value") + + def test_json_custom_logic_adds_field(self): + class CustomJsonFormatter(jsonlogger.JsonFormatter): + + def process_log_record(self, log_record): + log_record["custom"] = "value" + # Old Style "super" since Python 2.6's logging.Formatter is old + # style + return jsonlogger.JsonFormatter.process_log_record(self, log_record) + + self.log_handler.setFormatter(CustomJsonFormatter()) + self.log.info("message") + log_json = json.loads(self.buffer.getvalue()) + self.assertEqual(log_json.get("custom"), "value") + + def get_traceback_from_exception_followed_by_log_call(self) -> str: + try: + raise Exception('test') + except Exception: + self.log.exception("hello") + str_traceback = traceback.format_exc() + # Formatter removes trailing new line + if str_traceback.endswith('\n'): + str_traceback = str_traceback[:-1] + + return str_traceback + + def test_exc_info(self): + fr = jsonlogger.JsonFormatter() + self.log_handler.setFormatter(fr) + 
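        # The helper raises a test exception, logs it with log.exception(), and
        # returns the formatted traceback for comparison with the "exc_info" field.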
expected_value = self.get_traceback_from_exception_followed_by_log_call() + + log_json = json.loads(self.buffer.getvalue()) + self.assertEqual(log_json.get("exc_info"), expected_value) + + def test_exc_info_renamed(self): + fr = jsonlogger.JsonFormatter("%(exc_info)s", rename_fields={"exc_info": "stack_trace"}) + self.log_handler.setFormatter(fr) + expected_value = self.get_traceback_from_exception_followed_by_log_call() + + log_json = json.loads(self.buffer.getvalue()) + self.assertEqual(log_json.get("stack_trace"), expected_value) + self.assertEqual(log_json.get("exc_info"), None) + + def test_ensure_ascii_true(self): + fr = jsonlogger.JsonFormatter() + self.log_handler.setFormatter(fr) + self.log.info('Привет') + msg = self.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0] + self.assertEqual(msg, r"\u041f\u0440\u0438\u0432\u0435\u0442") + + def test_ensure_ascii_false(self): + fr = jsonlogger.JsonFormatter(json_ensure_ascii=False) + self.log_handler.setFormatter(fr) + self.log.info('Привет') + msg = self.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0] + self.assertEqual(msg, "Привет") + + def test_custom_object_serialization(self): + def encode_complex(z): + if isinstance(z, complex): + return (z.real, z.imag) + else: + type_name = z.__class__.__name__ + raise TypeError("Object of type '{}' is no JSON serializable".format(type_name)) + + formatter = jsonlogger.JsonFormatter(json_default=encode_complex, + json_encoder=json.JSONEncoder) + self.log_handler.setFormatter(formatter) + + value = { + "special": complex(3, 8), + } + + self.log.info(" message", extra=value) + msg = self.buffer.getvalue() + self.assertEqual(msg, "{\"message\": \" message\", \"special\": [3.0, 8.0]}\n") + + def test_rename_reserved_attrs(self): + log_format = lambda x: ['%({0:s})s'.format(i) for i in x] + reserved_attrs_map = { + 'exc_info': 'error.type', + 'exc_text': 'error.message', + 'funcName': 'log.origin.function', + 'levelname': 'log.level', + 'module': 'log.origin.file.name', + 'processName': 'process.name', + 'threadName': 'process.thread.name', + 'msg': 'log.message' + } + + custom_format = ' '.join(log_format(reserved_attrs_map.keys())) + reserved_attrs = [_ for _ in jsonlogger.RESERVED_ATTRS if _ not in list(reserved_attrs_map.keys())] + formatter = jsonlogger.JsonFormatter(custom_format, reserved_attrs=reserved_attrs, rename_fields=reserved_attrs_map) + self.log_handler.setFormatter(formatter) + self.log.info("message") + + msg = self.buffer.getvalue() + self.assertEqual(msg, '{"error.type": null, "error.message": null, "log.origin.function": "test_rename_reserved_attrs", "log.level": "INFO", "log.origin.file.name": "test_jsonlogger", "process.name": "MainProcess", "process.thread.name": "MainThread", "log.message": "message"}\n') + + def test_merge_record_extra(self): + record = logging.LogRecord("name", level=1, pathname="", lineno=1, msg="Some message", args=None, exc_info=None) + output = jsonlogger.merge_record_extra(record, target=dict(foo="bar"), reserved=[]) + self.assertIn("foo", output) + self.assertIn("msg", output) + self.assertEqual(output["foo"], "bar") + self.assertEqual(output["msg"], "Some message") + + +if __name__ == '__main__': + if len(sys.argv[1:]) > 0: + if sys.argv[1] == 'xml': + testSuite = unittest.TestLoader().loadTestsFromTestCase( + TestJsonLogger) + xmlrunner.XMLTestRunner(output='reports').run(testSuite) + else: + unittest.main() diff --git a/tests/tests.py b/tests/tests.py deleted file mode 100644 index dfbecd4..0000000 --- a/tests/tests.py 
+++ /dev/null @@ -1,200 +0,0 @@ -# -*- coding: utf-8 -*- -import unittest -import logging -import json -import sys -import traceback - -try: - import xmlrunner -except ImportError: - pass - -try: - from StringIO import StringIO -except ImportError: - # Python 3 Support - from io import StringIO - -sys.path.append('src/python-json-logger') -from pythonjsonlogger import jsonlogger -import datetime - - -class TestJsonLogger(unittest.TestCase): - def setUp(self): - self.logger = logging.getLogger('logging-test') - self.logger.setLevel(logging.DEBUG) - self.buffer = StringIO() - - self.logHandler = logging.StreamHandler(self.buffer) - self.logger.addHandler(self.logHandler) - - def testDefaultFormat(self): - fr = jsonlogger.JsonFormatter() - self.logHandler.setFormatter(fr) - - msg = "testing logging format" - self.logger.info(msg) - logJson = json.loads(self.buffer.getvalue()) - - self.assertEqual(logJson["message"], msg) - - def testFormatKeys(self): - supported_keys = [ - 'asctime', - 'created', - 'filename', - 'funcName', - 'levelname', - 'levelno', - 'lineno', - 'module', - 'msecs', - 'message', - 'name', - 'pathname', - 'process', - 'processName', - 'relativeCreated', - 'thread', - 'threadName' - ] - - log_format = lambda x: ['%({0:s})'.format(i) for i in x] - custom_format = ' '.join(log_format(supported_keys)) - - fr = jsonlogger.JsonFormatter(custom_format) - self.logHandler.setFormatter(fr) - - msg = "testing logging format" - self.logger.info(msg) - log_msg = self.buffer.getvalue() - log_json = json.loads(log_msg) - - for supported_key in supported_keys: - if supported_key in log_json: - self.assertTrue(True) - - def testUnknownFormatKey(self): - fr = jsonlogger.JsonFormatter('%(unknown_key)s %(message)s') - - self.logHandler.setFormatter(fr) - msg = "testing unknown logging format" - try: - self.logger.info(msg) - except: - self.assertTrue(False, "Should succeed") - - def testLogADict(self): - fr = jsonlogger.JsonFormatter() - self.logHandler.setFormatter(fr) - - msg = {"text": "testing logging", "num": 1, 5: "9", - "nested": {"more": "data"}} - self.logger.info(msg) - logJson = json.loads(self.buffer.getvalue()) - self.assertEqual(logJson.get("text"), msg["text"]) - self.assertEqual(logJson.get("num"), msg["num"]) - self.assertEqual(logJson.get("5"), msg[5]) - self.assertEqual(logJson.get("nested"), msg["nested"]) - self.assertEqual(logJson["message"], None) - - def testLogExtra(self): - fr = jsonlogger.JsonFormatter() - self.logHandler.setFormatter(fr) - - extra = {"text": "testing logging", "num": 1, 5: "9", - "nested": {"more": "data"}} - self.logger.info("hello", extra=extra) - logJson = json.loads(self.buffer.getvalue()) - self.assertEqual(logJson.get("text"), extra["text"]) - self.assertEqual(logJson.get("num"), extra["num"]) - self.assertEqual(logJson.get("5"), extra[5]) - self.assertEqual(logJson.get("nested"), extra["nested"]) - self.assertEqual(logJson["message"], "hello") - - def testJsonDefaultEncoder(self): - fr = jsonlogger.JsonFormatter() - self.logHandler.setFormatter(fr) - - msg = {"adate": datetime.datetime(1999, 12, 31, 23, 59), - "otherdate": datetime.date(1789, 7, 14), - "otherdatetime": datetime.datetime(1789, 7, 14, 23, 59), - "otherdatetimeagain": datetime.datetime(1900, 1, 1)} - self.logger.info(msg) - logJson = json.loads(self.buffer.getvalue()) - self.assertEqual(logJson.get("adate"), "1999-12-31T23:59:00") - self.assertEqual(logJson.get("otherdate"), "1789-07-14") - self.assertEqual(logJson.get("otherdatetime"), "1789-07-14T23:59:00") - 
self.assertEqual(logJson.get("otherdatetimeagain"), - "1900-01-01T00:00:00") - - def testJsonCustomDefault(self): - def custom(o): - return "very custom" - fr = jsonlogger.JsonFormatter(json_default=custom) - self.logHandler.setFormatter(fr) - - msg = {"adate": datetime.datetime(1999, 12, 31, 23, 59), - "normal": "value"} - self.logger.info(msg) - logJson = json.loads(self.buffer.getvalue()) - self.assertEqual(logJson.get("adate"), "very custom") - self.assertEqual(logJson.get("normal"), "value") - - def testJsonCustomLogicAddsField(self): - class CustomJsonFormatter(jsonlogger.JsonFormatter): - - def process_log_record(self, log_record): - log_record["custom"] = "value" - # Old Style "super" since Python 2.6's logging.Formatter is old - # style - return jsonlogger.JsonFormatter.process_log_record(self, log_record) - - self.logHandler.setFormatter(CustomJsonFormatter()) - self.logger.info("message") - logJson = json.loads(self.buffer.getvalue()) - self.assertEqual(logJson.get("custom"), "value") - - def testExcInfo(self): - fr = jsonlogger.JsonFormatter() - self.logHandler.setFormatter(fr) - try: - raise Exception('test') - except Exception: - - self.logger.exception("hello") - - expected_value = traceback.format_exc() - # Formatter removes trailing new line - if expected_value.endswith('\n'): - expected_value = expected_value[:-1] - - logJson = json.loads(self.buffer.getvalue()) - self.assertEqual(logJson.get("exc_info"), expected_value) - - def testEnsureAsciiTrue(self): - fr = jsonlogger.JsonFormatter() - self.logHandler.setFormatter(fr) - self.logger.info('Привет') - msg = self.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0] - self.assertEqual(msg, r"\u041f\u0440\u0438\u0432\u0435\u0442") - - def testEnsureAsciiFalse(self): - fr = jsonlogger.JsonFormatter(json_ensure_ascii=False) - self.logHandler.setFormatter(fr) - self.logger.info('Привет') - msg = self.buffer.getvalue().split('"message": "', 1)[1].split('"', 1)[0] - self.assertEqual(msg, "Привет") - - - -if __name__ == '__main__': - if len(sys.argv[1:]) > 0: - if sys.argv[1] == 'xml': - testSuite = unittest.TestLoader().loadTestsFromTestCase( - TestJsonLogger) - xmlrunner.XMLTestRunner(output='reports').run(testSuite) - else: - unittest.main() diff --git a/tox.ini b/tox.ini index 574809d..8eafd27 100644 --- a/tox.ini +++ b/tox.ini @@ -1,5 +1,34 @@ [tox] -envlist = py27, py34, py35, py36 +requires = tox>=3 +envlist = lint, type, pypy{38,39}, py{36,37,38,39,310,311} + +[gh-actions] +python = + pypy-3.8: pypy38 + pypy-3.9: pypy39 + 3.6: py36 + 3.7: py37 + 3.8: py38 + 3.9: py39 + 3.10: py310 + 3.11: py311, type [testenv] -commands = python -m unittest discover +description = run unit tests +commands = + python -m unittest discover + +[testenv:lint] +description = run linters +skip_install = true +deps = + black>=22.12 +commands = + black src + +[testenv:type] +description = run type checks +deps = + mypy>=1.0 +commands = + mypy src