diff --git a/.gitignore b/.gitignore index 96acb31a4..b0f71da4d 100644 --- a/.gitignore +++ b/.gitignore @@ -1,4 +1,8 @@ -# https://github.com/github/gitignore/blob/da00310ccba9de9a988cc973ef5238ad2c1460e9/Python.gitignore +test/__tempdir__/ +.pytest_cache/ + +# ------------------------- +# below: https://github.com/github/gitignore/blob/da00310ccba9de9a988cc973ef5238ad2c1460e9/Python.gitignore # Byte-compiled / optimized / DLL files __pycache__/ diff --git a/can/io/__init__.py b/can/io/__init__.py index 1dc412d52..1d5269912 100644 --- a/can/io/__init__.py +++ b/can/io/__init__.py @@ -18,4 +18,4 @@ from .canutils import CanutilsLogReader, CanutilsLogWriter from .csv import CSVWriter, CSVReader from .sqlite import SqliteReader, SqliteWriter -from .stdout import Printer +from .printer import Printer diff --git a/can/io/asc.py b/can/io/asc.py index 4fcb856b8..bbc8807b7 100644 --- a/can/io/asc.py +++ b/can/io/asc.py @@ -4,7 +4,9 @@ """ Contains handling of ASC logging files. -Example .asc file: https://bitbucket.org/tobylorenz/vector_asc/src/47556e1a6d32c859224ca62d075e1efcc67fa690/src/Vector/ASC/tests/unittests/data/CAN_Log_Trigger_3_2.asc?at=master&fileviewer=file-view-default +Example .asc files: + - https://bitbucket.org/tobylorenz/vector_asc/src/47556e1a6d32c859224ca62d075e1efcc67fa690/src/Vector/ASC/tests/unittests/data/CAN_Log_Trigger_3_2.asc?at=master&fileviewer=file-view-default + - under `test/data/logfile.asc` """ from __future__ import absolute_import @@ -16,6 +18,7 @@ from ..message import Message from ..listener import Listener from ..util import channel2int +from .generic import BaseIOHandler CAN_MSG_EXT = 0x80000000 CAN_ID_MASK = 0x1FFFFFFF @@ -23,15 +26,20 @@ logger = logging.getLogger('can.io.asc') -class ASCReader(object): +class ASCReader(BaseIOHandler): """ Iterator of CAN messages from a ASC logging file. TODO: turn relative timestamps back to absolute form """ - def __init__(self, filename): - self.file = open(filename, 'r') + def __init__(self, file): + """ + :param file: a path-like object or as file-like object to read from + If this is a file-like object, is has to opened in text + read mode, not binary read mode. 
+ """ + super(ASCReader, self).__init__(file, mode='r') @staticmethod def _extract_can_id(str_can_id): @@ -41,19 +49,19 @@ def _extract_can_id(str_can_id): else: is_extended = False can_id = int(str_can_id, 16) - logging.debug('ASCReader: _extract_can_id("%s") -> %x, %r', str_can_id, can_id, is_extended) - return (can_id, is_extended) + #logging.debug('ASCReader: _extract_can_id("%s") -> %x, %r', str_can_id, can_id, is_extended) + return can_id, is_extended def __iter__(self): for line in self.file: - logger.debug("ASCReader: parsing line: '%s'", line.splitlines()[0]) + #logger.debug("ASCReader: parsing line: '%s'", line.splitlines()[0]) temp = line.strip() if not temp or not temp[0].isdigit(): continue try: - (timestamp, channel, dummy) = temp.split(None, 2) # , frameType, dlc, frameData + timestamp, channel, dummy = temp.split(None, 2) # , frameType, dlc, frameData except ValueError: # we parsed an empty comment continue @@ -74,8 +82,8 @@ def __iter__(self): pass elif dummy[-1:].lower() == 'r': - (can_id_str, _) = dummy.split(None, 1) - (can_id_num, is_extended_id) = self._extract_can_id(can_id_str) + can_id_str, _ = dummy.split(None, 1) + can_id_num, is_extended_id = self._extract_can_id(can_id_str) msg = Message(timestamp=timestamp, arbitration_id=can_id_num & CAN_ID_MASK, extended_id=is_extended_id, @@ -86,10 +94,10 @@ def __iter__(self): else: try: # this only works if dlc > 0 and thus data is availabe - (can_id_str, _, _, dlc, data) = dummy.split(None, 4) + can_id_str, _, _, dlc, data = dummy.split(None, 4) except ValueError: # but if not, we only want to get the stuff up to the dlc - (can_id_str, _, _, dlc ) = dummy.split(None, 3) + can_id_str, _, _, dlc = dummy.split(None, 3) # and we set data to an empty sequence manually data = '' @@ -99,24 +107,26 @@ def __iter__(self): for byte in data[0:dlc]: frame.append(int(byte, 16)) - (can_id_num, is_extended_id) = self._extract_can_id(can_id_str) + can_id_num, is_extended_id = self._extract_can_id(can_id_str) - msg = Message( - timestamp=timestamp, - arbitration_id=can_id_num & CAN_ID_MASK, - extended_id=is_extended_id, - is_remote_frame=False, - dlc=dlc, - data=frame, - channel=channel) - yield msg + yield Message( + timestamp=timestamp, + arbitration_id=can_id_num & CAN_ID_MASK, + extended_id=is_extended_id, + is_remote_frame=False, + dlc=dlc, + data=frame, + channel=channel + ) + + self.stop() -class ASCWriter(Listener): +class ASCWriter(BaseIOHandler, Listener): """Logs CAN data to an ASCII log file (.asc). The measurement starts with the timestamp of the first registered message. - If a message has a timestamp smaller than the previous one (or 0 or None), + If a message has a timestamp smaller than the previous one or None, it gets assigned the timestamp that was written for the last message. It the first message does not have a timestamp, it is set to zero. """ @@ -125,16 +135,22 @@ class ASCWriter(Listener): FORMAT_DATE = "%a %b %m %I:%M:%S %p %Y" FORMAT_EVENT = "{timestamp: 9.4f} {message}\n" - def __init__(self, filename, channel=1): - # setup + def __init__(self, file, channel=1): + """ + :param file: a path-like object or as file-like object to write to + If this is a file-like object, is has to opened in text + write mode, not binary write mode. 
+ :param channel: a default channel to use when the message does not + have a channel set + """ + super(ASCWriter, self).__init__(file, mode='w') self.channel = channel - self.log_file = open(filename, 'w') # write start of file header now = datetime.now().strftime("%a %b %m %I:%M:%S %p %Y") - self.log_file.write("date %s\n" % now) - self.log_file.write("base hex timestamps absolute\n") - self.log_file.write("internal events logged\n") + self.file.write("date %s\n" % now) + self.file.write("base hex timestamps absolute\n") + self.file.write("internal events logged\n") # the last part is written with the timestamp of the first message self.header_written = False @@ -142,10 +158,9 @@ def __init__(self, filename, channel=1): self.started = None def stop(self): - """Stops logging and closes the file.""" - if not self.log_file.closed: - self.log_file.write("End TriggerBlock\n") - self.log_file.close() + if not self.file.closed: + self.file.write("End TriggerBlock\n") + super(ASCWriter, self).stop() def log_event(self, message, timestamp=None): """Add a message to the log file. @@ -163,10 +178,9 @@ def log_event(self, message, timestamp=None): self.last_timestamp = (timestamp or 0.0) self.started = self.last_timestamp formatted_date = time.strftime(self.FORMAT_DATE, time.localtime(self.last_timestamp)) - self.log_file.write("base hex timestamps absolute\n") - self.log_file.write("Begin Triggerblock %s\n" % formatted_date) + self.file.write("Begin Triggerblock %s\n" % formatted_date) self.header_written = True - self.log_event("Start of measurement") # recursive call + self.log_event("Start of measurement") # caution: this is a recursive call! # figure out the correct timestamp if timestamp is None or timestamp < self.last_timestamp: @@ -177,11 +191,7 @@ def log_event(self, message, timestamp=None): timestamp -= self.started line = self.FORMAT_EVENT.format(timestamp=timestamp, message=message) - - if self.log_file.closed: - logger.warn("ASCWriter: ignoring write call to closed file") - else: - self.log_file.write(line) + self.file.write(line) def on_message_received(self, msg): diff --git a/can/io/blf.py b/can/io/blf.py index 8115e04e2..9de76aec4 100644 --- a/can/io/blf.py +++ b/can/io/blf.py @@ -15,6 +15,8 @@ objects types. """ +from __future__ import absolute_import + import struct import zlib import datetime @@ -24,6 +26,7 @@ from can.message import Message from can.listener import Listener from can.util import len2dlc, dlc2len, channel2int +from .generic import BaseIOHandler class BLFParseError(Exception): @@ -112,7 +115,7 @@ def systemtime_to_timestamp(systemtime): return 0 -class BLFReader(object): +class BLFReader(BaseIOHandler): """ Iterator of CAN messages from a Binary Logging File. @@ -120,11 +123,15 @@ class BLFReader(object): silently ignored. """ - def __init__(self, filename): - self.fp = open(filename, "rb") - data = self.fp.read(FILE_HEADER_STRUCT.size) + def __init__(self, file): + """ + :param file: a path-like object or as file-like object to read from + If this is a file-like object, is has to opened in binary + read mode, not text read mode. 
+ """ + super(BLFReader, self).__init__(file, mode='rb') + data = self.file.read(FILE_HEADER_STRUCT.size) header = FILE_HEADER_STRUCT.unpack(data) - #print(header) if header[0] != b"LOGG": raise BLFParseError("Unexpected file format") self.file_size = header[10] @@ -133,25 +140,24 @@ def __init__(self, filename): self.start_timestamp = systemtime_to_timestamp(header[14:22]) self.stop_timestamp = systemtime_to_timestamp(header[22:30]) # Read rest of header - self.fp.read(header[1] - FILE_HEADER_STRUCT.size) + self.file.read(header[1] - FILE_HEADER_STRUCT.size) def __iter__(self): tail = b"" while True: - data = self.fp.read(OBJ_HEADER_BASE_STRUCT.size) + data = self.file.read(OBJ_HEADER_BASE_STRUCT.size) if not data: # EOF break header = OBJ_HEADER_BASE_STRUCT.unpack(data) - #print(header) if header[0] != b"LOBJ": raise BLFParseError() obj_type = header[4] obj_data_size = header[3] - OBJ_HEADER_BASE_STRUCT.size - obj_data = self.fp.read(obj_data_size) + obj_data = self.file.read(obj_data_size) # Read padding bytes - self.fp.read(obj_data_size % 4) + self.file.read(obj_data_size % 4) if obj_type == LOG_CONTAINER: method, uncompressed_size = LOG_CONTAINER_STRUCT.unpack_from( @@ -245,13 +251,13 @@ def __iter__(self): pos = next_pos - # Save remaing data that could not be processed + # save the remaining data that could not be processed tail = data[pos:] - self.fp.close() + self.stop() -class BLFWriter(Listener): +class BLFWriter(BaseIOHandler, Listener): """ Logs CAN data to a Binary Logging File compatible with Vector's tools. """ @@ -262,11 +268,16 @@ class BLFWriter(Listener): #: ZLIB compression level COMPRESSION_LEVEL = 9 - def __init__(self, filename, channel=1): - self.fp = open(filename, "wb") + def __init__(self, file, channel=1): + """ + :param file: a path-like object or as file-like object to write to + If this is a file-like object, is has to opened in binary + write mode, not text write mode. 
+ """ + super(BLFWriter, self).__init__(file, mode='wb') self.channel = channel # Header will be written after log is done - self.fp.write(b"\x00" * FILE_HEADER_SIZE) + self.file.write(b"\x00" * FILE_HEADER_SIZE) self.cache = [] self.cache_size = 0 self.count_of_objects = 0 @@ -360,7 +371,7 @@ def _add_object(self, obj_type, data, timestamp=None): def _flush(self): """Compresses and writes data in the cache to file.""" - if self.fp.closed: + if self.file.closed: return cache = b"".join(self.cache) if not cache: @@ -379,21 +390,19 @@ def _flush(self): b"LOBJ", OBJ_HEADER_BASE_STRUCT.size, 1, obj_size, LOG_CONTAINER) container_header = LOG_CONTAINER_STRUCT.pack( ZLIB_DEFLATE, len(uncompressed_data)) - self.fp.write(base_header) - self.fp.write(container_header) - self.fp.write(compressed_data) + self.file.write(base_header) + self.file.write(container_header) + self.file.write(compressed_data) # Write padding bytes - self.fp.write(b"\x00" * (obj_size % 4)) + self.file.write(b"\x00" * (obj_size % 4)) self.uncompressed_size += OBJ_HEADER_V1_STRUCT.size + LOG_CONTAINER_STRUCT.size self.uncompressed_size += len(uncompressed_data) def stop(self): """Stops logging and closes the file.""" - if self.fp.closed: - return self._flush() - filesize = self.fp.tell() - self.fp.close() + filesize = self.file.tell() + super(BLFWriter, self).stop() # Write header in the beginning of the file header = [b"LOGG", FILE_HEADER_SIZE, @@ -403,5 +412,5 @@ def stop(self): self.count_of_objects, 0]) header.extend(timestamp_to_systemtime(self.start_timestamp)) header.extend(timestamp_to_systemtime(self.stop_timestamp)) - with open(self.fp.name, "r+b") as f: + with open(self.file.name, "r+b") as f: f.write(FILE_HEADER_STRUCT.pack(*header)) diff --git a/can/io/canutils.py b/can/io/canutils.py index 564b386e1..f3b436b13 100644 --- a/can/io/canutils.py +++ b/can/io/canutils.py @@ -7,12 +7,15 @@ (https://github.com/linux-can/can-utils). """ +from __future__ import absolute_import, division + import time import datetime import logging from can.message import Message from can.listener import Listener +from .generic import BaseIOHandler log = logging.getLogger('can.io.canutils') @@ -23,7 +26,7 @@ CAN_ERR_DLC = 8 -class CanutilsLogReader(object): +class CanutilsLogReader(BaseIOHandler): """ Iterator over CAN messages from a .log Logging File (candump -L). @@ -33,51 +36,60 @@ class CanutilsLogReader(object): ``(0.0) vcan0 001#8d00100100820100`` """ - def __init__(self, filename): - self.fp = open(filename, 'r') + def __init__(self, file): + """ + :param file: a path-like object or as file-like object to read from + If this is a file-like object, is has to opened in text + read mode, not binary read mode. 
+ """ + super(CanutilsLogReader, self).__init__(file, mode='r') def __iter__(self): - for line in self.fp: + for line in self.file: + + # skip empty lines temp = line.strip() + if not temp: + continue - if temp: + timestamp, channel, frame = temp.split() + timestamp = float(timestamp[1:-1]) + canId, data = frame.split('#') + if channel.isdigit(): + channel = int(channel) - (timestamp, channel, frame) = temp.split() - timestamp = float(timestamp[1:-1]) - (canId, data) = frame.split('#') - if channel.isdigit(): - channel = int(channel) + if len(canId) > 3: + isExtended = True + else: + isExtended = False + canId = int(canId, 16) - if len(canId) > 3: - isExtended = True - else: - isExtended = False - canId = int(canId, 16) - - if data and data[0].lower() == 'r': - isRemoteFrame = True - if len(data) > 1: - dlc = int(data[1:]) - else: - dlc = 0 + if data and data[0].lower() == 'r': + isRemoteFrame = True + if len(data) > 1: + dlc = int(data[1:]) else: - isRemoteFrame = False + dlc = 0 + else: + isRemoteFrame = False - dlc = int(len(data) / 2) - dataBin = bytearray() - for i in range(0, 2 * dlc, 2): - dataBin.append(int(data[i:(i + 2)], 16)) + dlc = len(data) // 2 + dataBin = bytearray() + for i in range(0, len(data), 2): + dataBin.append(int(data[i:(i + 2)], 16)) - if canId & CAN_ERR_FLAG and canId & CAN_ERR_BUSERROR: - msg = Message(timestamp=timestamp, is_error_frame=True) - else: - msg = Message(timestamp=timestamp, arbitration_id=canId & 0x1FFFFFFF, - extended_id=isExtended, is_remote_frame=isRemoteFrame, - dlc=dlc, data=dataBin, channel=channel) - yield msg + if canId & CAN_ERR_FLAG and canId & CAN_ERR_BUSERROR: + msg = Message(timestamp=timestamp, is_error_frame=True) + else: + msg = Message(timestamp=timestamp, arbitration_id=canId & 0x1FFFFFFF, + extended_id=isExtended, is_remote_frame=isRemoteFrame, + dlc=dlc, data=dataBin, channel=channel) + yield msg + self.stop() -class CanutilsLogWriter(Listener): + +class CanutilsLogWriter(BaseIOHandler, Listener): """Logs CAN data to an ASCII log file (.log). This class is is compatible with "candump -L". @@ -86,24 +98,23 @@ class CanutilsLogWriter(Listener): It the first message does not have a timestamp, it is set to zero. """ - def __init__(self, filename, channel="vcan0"): + def __init__(self, file, channel="vcan0", append=False): + """ + :param file: a path-like object or as file-like object to write to + If this is a file-like object, is has to opened in text + write mode, not binary write mode. 
+ :param channel: a default channel to use when the message does not + have a channel set + :param bool append: if set to `True` messages are appended to + the file, else the file is truncated + """ + mode = 'a' if append else 'w' + super(CanutilsLogWriter, self).__init__(file, mode=mode) + self.channel = channel - self.log_file = open(filename, 'w') self.last_timestamp = None - def stop(self): - """Stops logging and closes the file.""" - if self.log_file is not None: - self.log_file.close() - self.log_file = None - else: - log.warn("ignoring attempt to colse a already closed file") - def on_message_received(self, msg): - if self.log_file is None: - log.warn("ignoring write attempt to closed file") - return - # this is the case for the very first message: if self.last_timestamp is None: self.last_timestamp = (msg.timestamp or 0.0) @@ -113,22 +124,21 @@ def on_message_received(self, msg): timestamp = self.last_timestamp else: timestamp = msg.timestamp - + channel = msg.channel if msg.channel is not None else self.channel if msg.is_error_frame: - self.log_file.write("(%f) %s %08X#0000000000000000\n" % (timestamp, channel, CAN_ERR_FLAG | CAN_ERR_BUSERROR)) + self.file.write("(%f) %s %08X#0000000000000000\n" % (timestamp, channel, CAN_ERR_FLAG | CAN_ERR_BUSERROR)) elif msg.is_remote_frame: - data = [] if msg.is_extended_id: - self.log_file.write("(%f) %s %08X#R\n" % (timestamp, channel, msg.arbitration_id)) + self.file.write("(%f) %s %08X#R\n" % (timestamp, channel, msg.arbitration_id)) else: - self.log_file.write("(%f) %s %03X#R\n" % (timestamp, channel, msg.arbitration_id)) + self.file.write("(%f) %s %03X#R\n" % (timestamp, channel, msg.arbitration_id)) else: data = ["{:02X}".format(byte) for byte in msg.data] if msg.is_extended_id: - self.log_file.write("(%f) %s %08X#%s\n" % (timestamp, channel, msg.arbitration_id, ''.join(data))) + self.file.write("(%f) %s %08X#%s\n" % (timestamp, channel, msg.arbitration_id, ''.join(data))) else: - self.log_file.write("(%f) %s %03X#%s\n" % (timestamp, channel, msg.arbitration_id, ''.join(data))) + self.file.write("(%f) %s %03X#%s\n" % (timestamp, channel, msg.arbitration_id, ''.join(data))) diff --git a/can/io/csv.py b/can/io/csv.py index 1933648ac..e108679b8 100644 --- a/can/io/csv.py +++ b/can/io/csv.py @@ -5,21 +5,24 @@ This module contains handling for CSV (comma seperated values) files. TODO: CAN FD messages are not yet supported. + TODO: This module could use https://docs.python.org/2/library/csv.html#module-csv to allow different delimiters for writing, special escape chars to circumvent the base64 encoding and use csv.Sniffer to automatically deduce the delimiters of a CSV file. """ +from __future__ import absolute_import + from base64 import b64encode, b64decode from can.message import Message from can.listener import Listener +from .generic import BaseIOHandler - -class CSVWriter(Listener): +class CSVWriter(BaseIOHandler, Listener): """Writes a comma separated text file with a line for - each message. + each message. Includes a header line. The columns are as follows: @@ -38,11 +41,22 @@ class CSVWriter(Listener): Each line is terminated with a platform specific line seperator. """ - def __init__(self, filename): - self.csv_file = open(filename, 'wt') + def __init__(self, file, append=False): + """ + :param file: a path-like object or as file-like object to write to + If this is a file-like object, is has to opened in text + write mode, not binary write mode. 
+ :param bool append: if set to `True` messages are appended to + the file and no header line is written, else + the file is truncated and starts with a newly + written header line + """ + mode = 'a' if append else 'w' + super(CSVWriter, self).__init__(file, mode=mode) # Write a header row - self.csv_file.write("timestamp,arbitration_id,extended,remote,error,dlc,data\n") + if not append: + self.file.write("timestamp,arbitration_id,extended,remote,error,dlc,data\n") def on_message_received(self, msg): row = ','.join([ @@ -54,26 +68,32 @@ def on_message_received(self, msg): str(msg.dlc), b64encode(msg.data).decode('utf8') ]) - self.csv_file.write(row + '\n') + self.file.write(row) + self.file.write('\n') - def stop(self): - self.csv_file.flush() - self.csv_file.close() -class CSVReader(): +class CSVReader(BaseIOHandler): """Iterator over CAN messages from a .csv file that was generated by :class:`~can.CSVWriter` or that uses the same - format that is described there. + format as described there. Assumes that there is a header + and thus skips the first line. + + Any line seperator is accepted. """ - def __init__(self, filename): - self.csv_file = open(filename, 'rt') + def __init__(self, file): + """ + :param file: a path-like object or as file-like object to read from + If this is a file-like object, is has to opened in text + read mode, not binary read mode. + """ + super(CSVReader, self).__init__(file, mode='r') + def __iter__(self): # skip the header line - self.header_line = next(self.csv_file).split(',') + next(self.file) - def __iter__(self): - for line in self.csv_file: + for line in self.file: timestamp, arbitration_id, extended, remote, error, dlc, data = line.split(',') @@ -87,4 +107,4 @@ def __iter__(self): data=b64decode(data), ) - self.csv_file.close() + self.stop() diff --git a/can/io/generic.py b/can/io/generic.py new file mode 100644 index 000000000..4f278d223 --- /dev/null +++ b/can/io/generic.py @@ -0,0 +1,51 @@ +#!/usr/bin/env python +# coding: utf-8 + +""" +Contains a generic class for file IO. +""" + +from abc import ABCMeta, abstractmethod + +from can import Listener + + +class BaseIOHandler(object): + """A generic file handler that can be used for reading and writing. + + Can be used as a context manager. + + :attr file-like file: + the file-like object that is kept internally, or None if none + was opened + """ + + __metaclass__ = ABCMeta + + def __init__(self, file, mode='rt'): + """ + :param file: a path-like object to open a file, a file-like object + to be used as a file or `None` to not use a file at all + :param str mode: the mode that should be used to open the file, see + :func:`builtin.open`, ignored if *file* is `None` + """ + if file is None or (hasattr(file, 'read') and hasattr(file, 'write')): + # file is None or some file-like object + self.file = file + else: + # file is some path-like object + self.file = open(file, mode) + + # for multiple inheritance + super(BaseIOHandler, self).__init__() + + def __enter__(self): + return self + + def __exit__(self, *args): + self.stop() + + def stop(self): + if self.file is not None: + # this also implies a flush() + self.file.close() diff --git a/can/io/logger.py b/can/io/logger.py index c4b27815e..9095c5898 100755 --- a/can/io/logger.py +++ b/can/io/logger.py @@ -5,19 +5,23 @@ See the :class:`Logger` class. 
""" +from __future__ import absolute_import + import logging +from ..listener import Listener +from .generic import BaseIOHandler from .asc import ASCWriter from .blf import BLFWriter from .canutils import CanutilsLogWriter from .csv import CSVWriter from .sqlite import SqliteWriter -from .stdout import Printer +from .printer import Printer log = logging.getLogger("can.io.logger") -class Logger(object): +class Logger(BaseIOHandler, Listener): """ Logs CAN messages to a file. @@ -29,25 +33,32 @@ class Logger(object): * .log :class:`can.CanutilsLogWriter` * other: :class:`can.Printer` - Note this class itself is just a dispatcher, - an object that inherits from Listener will - be created when instantiating this class. + .. note:: + This class itself is just a dispatcher, and any positional an keyword + arguments are passed on to the returned instance. """ @staticmethod - def __new__(cls, filename): - if not filename: - return Printer() - elif filename.endswith(".asc"): - return ASCWriter(filename) - elif filename.endswith(".blf"): - return BLFWriter(filename) - elif filename.endswith(".csv"): - return CSVWriter(filename) - elif filename.endswith(".db"): - return SqliteWriter(filename) - elif filename.endswith(".log"): - return CanutilsLogWriter(filename) - else: - log.info('unknown file type "%s", falling pack to can.Printer', filename) - return Printer(filename) + def __new__(cls, filename, *args, **kwargs): + """ + :type filename: str or None or path-like + :param filename: the filename/path the file to write to, + may be a path-like object if the target logger supports + it, and may be None to instantiate a :class:`~can.Printer` + + """ + if filename: + if filename.endswith(".asc"): + return ASCWriter(filename, *args, **kwargs) + elif filename.endswith(".blf"): + return BLFWriter(filename, *args, **kwargs) + elif filename.endswith(".csv"): + return CSVWriter(filename, *args, **kwargs) + elif filename.endswith(".db"): + return SqliteWriter(filename, *args, **kwargs) + elif filename.endswith(".log"): + return CanutilsLogWriter(filename, *args, **kwargs) + + # else: + log.info('unknown file type "%s", falling pack to can.Printer', filename) + return Printer(filename, *args, **kwargs) diff --git a/can/io/player.py b/can/io/player.py index 958f6a8dd..4af42c479 100755 --- a/can/io/player.py +++ b/can/io/player.py @@ -7,11 +7,12 @@ in the recorded order an time intervals. """ -from __future__ import absolute_import, print_function +from __future__ import absolute_import import time import logging +from .generic import BaseIOHandler from .asc import ASCReader from .blf import BLFReader from .canutils import CanutilsLogReader @@ -21,7 +22,7 @@ log = logging.getLogger('can.io.player') -class LogReader(object): +class LogReader(BaseIOHandler): """ Replay logged CAN messages from a file. @@ -34,29 +35,33 @@ class LogReader(object): Exposes a simple iterator interface, to use simply: - >>> for m in LogReader(my_file): - ... print(m) + >>> for msg in LogReader("some/path/to/my_file.log"): + ... print(msg) .. note:: - There are no time delays, if you want to reproduce - the measured delays between messages look at the - :class:`can.util.MessageSync` class. + There are no time delays, if you want to reproduce the measured + delays between messages look at the :class:`can.MessageSync` class. + + .. note:: + This class itself is just a dispatcher, and any positional an keyword + arguments are passed on to the returned instance. 
""" @staticmethod - def __new__(cls, filename): - if not filename: - raise TypeError("a filename must be given") - elif filename.endswith(".asc"): - return ASCReader(filename) + def __new__(cls, filename, *args, **kwargs): + """ + :param str filename: the filename/path the file to read from + """ + if filename.endswith(".asc"): + return ASCReader(filename, *args, **kwargs) elif filename.endswith(".blf"): - return BLFReader(filename) + return BLFReader(filename, *args, **kwargs) elif filename.endswith(".csv"): - return CSVReader(filename) + return CSVReader(filename, *args, **kwargs) elif filename.endswith(".db"): - return SqliteReader(filename) + return SqliteReader(filename, *args, **kwargs) elif filename.endswith(".log"): - return CanutilsLogReader(filename) + return CanutilsLogReader(filename, *args, **kwargs) else: raise NotImplementedError("No read support for this log format: {}".format(filename)) @@ -67,12 +72,12 @@ class MessageSync(object): """ def __init__(self, messages, timestamps=True, gap=0.0001, skip=60): - """Creates an new `MessageSync` instance. + """Creates an new **MessageSync** instance. :param messages: An iterable of :class:`can.Message` instances. - :param timestamps: Use the messages' timestamps. - :param gap: Minimum time between sent messages - :param skip: Skip periods of inactivity greater than this. + :param bool timestamps: Use the messages' timestamps. + :param float gap: Minimum time between sent messages in seconds + :param float skip: Skip periods of inactivity greater than this (in seconds). """ self.raw_messages = messages self.timestamps = timestamps diff --git a/can/io/printer.py b/can/io/printer.py new file mode 100644 index 000000000..4e9333fa2 --- /dev/null +++ b/can/io/printer.py @@ -0,0 +1,42 @@ +#!/usr/bin/env python +# coding: utf-8 + +""" +This Listener simply prints to stdout / the terminal or a file. +""" + +from __future__ import print_function, absolute_import + +import logging + +from can.listener import Listener +from .generic import BaseIOHandler + +log = logging.getLogger('can.io.printer') + + +class Printer(BaseIOHandler, Listener): + """ + The Printer class is a subclass of :class:`~can.Listener` which simply prints + any messages it receives to the terminal (stdout). A message is tunred into a + string using :meth:`~can.Message.__str__`. + + :attr bool write_to_file: `True` iff this instance prints to a file instead of + standard out + """ + + def __init__(self, file=None): + """ + :param file: an optional path-like object or as file-like object to "print" + to instead of writing to standard out (stdout) + If this is a file-like object, is has to opened in text + write mode, not binary write mode. + """ + self.write_to_file = file is not None + super(Printer, self).__init__(file, mode='w') + + def on_message_received(self, msg): + if self.write_to_file: + self.file.write(str(msg) + '\n') + else: + print(msg) diff --git a/can/io/sqlite.py b/can/io/sqlite.py index 5f3255729..23be2b8f5 100644 --- a/can/io/sqlite.py +++ b/can/io/sqlite.py @@ -4,9 +4,11 @@ """ Implements an SQL database writer and reader for storing CAN messages. -The database schema is given in the documentation of the loggers. +.. note:: The database schema is given in the documentation of the loggers. 
""" +from __future__ import absolute_import + import sys import time import threading @@ -15,111 +17,157 @@ from can.listener import BufferedReader from can.message import Message +from .generic import BaseIOHandler -log = logging.getLogger('can.io.sql') +log = logging.getLogger('can.io.sqlite') -# TODO comment on this -if sys.version_info > (3,): - buffer = memoryview +if sys.version_info.major < 3: + # legacy fallback for Python 2 + memoryview = buffer -class SqliteReader: +class SqliteReader(BaseIOHandler): """ Reads recorded CAN messages from a simple SQL database. This class can be iterated over or used to fetch all messages in the database with :meth:`~SqliteReader.read_all`. - Calling len() on this object might not run in constant time. + Calling :func:`~builtin.len` on this object might not run in constant time. + + :attr str table_name: the name of the database table used for storing the messages + + .. note:: The database schema is given in the documentation of the loggers. """ - _SELECT_ALL_COMMAND = "SELECT * FROM messages" + def __init__(self, file, table_name="messages"): + """ + :param file: a `str` or since Python 3.7 a path like object that points + to the database file to use + :param str table_name: the name of the table to look for the messages + + .. warning:: In contrary to all other readers/writers the Sqlite handlers + do not accept file-like objects as the `file` parameter. + It also runs in ``append=True`` mode all the time. + """ + super(SqliteReader, self).__init__(file=None) + self._conn = sqlite3.connect(file) + self._cursor = self._conn.cursor() + self.table_name = table_name - def __init__(self, filename): - log.debug("Starting SqliteReader with %s", filename) - self.conn = sqlite3.connect(filename) - self.cursor = self.conn.cursor() + def __iter__(self): + for frame_data in self._cursor.execute("SELECT * FROM {}".format(self.table_name)): + yield SqliteReader._assemble_message(frame_data) - @staticmethod - def _create_frame_from_db_tuple(frame_data): + @staticmethod + def _assemble_message(frame_data): timestamp, can_id, is_extended, is_remote, is_error, dlc, data = frame_data return Message( - timestamp, is_remote, is_extended, is_error, can_id, dlc, data + timestamp=timestamp, + is_remote_frame=bool(is_remote), + extended_id=bool(is_extended), + is_error_frame=bool(is_error), + arbitration_id=can_id, + dlc=dlc, + data=data ) - def __iter__(self): - log.debug("Iterating through messages from sql db") - for frame_data in self.cursor.execute(self._SELECT_ALL_COMMAND): - yield self._create_frame_from_db_tuple(frame_data) - def __len__(self): # this might not run in constant time - result = self.cursor.execute("SELECT COUNT(*) FROM messages") + result = self._cursor.execute("SELECT COUNT(*) FROM {}".format(self.table_name)) return int(result.fetchone()[0]) def read_all(self): - """Fetches all messages in the database.""" - result = self.cursor.execute(self._SELECT_ALL_COMMAND) - return result.fetchall() + """Fetches all messages in the database. + + :rtype: Generator[can.Message] + """ + result = self._cursor.execute("SELECT * FROM {}".format(self.table_name)).fetchall() + return (SqliteReader._assemble_message(frame) for frame in result) - def close(self): - """Closes the connection to the database.""" - self.conn.close() + def stop(self): + """Closes the connection to the database. 
+ """ + super(SqliteReader, self).stop() + self._conn.close() -class SqliteWriter(BufferedReader): +class SqliteWriter(BaseIOHandler, BufferedReader): """Logs received CAN data to a simple SQL database. The sqlite database may already exist, otherwise it will be created when the first message arrives. Messages are internally buffered and written to the SQL file in a background - thread. + thread. Ensures that all messages that are added before calling :meth:`~can.SqliteWriter.stop()` + are actually written to the database after that call returns. Thus, calling + :meth:`~can.SqliteWriter.stop()` may take a while. + + :attr str table_name: the name of the database table used for storing the messages + :attr int num_frames: the number of frames actally writtem to the database, this + excludes messages that are still buffered + :attr float last_write: the last time a message war actually written to the database, + as given by ``time.time()`` .. note:: When the listener's :meth:`~SqliteWriter.stop` method is called the - thread writing to the sql file will continue to receive and internally + thread writing to the database will continue to receive and internally buffer messages if they continue to arrive before the :attr:`~SqliteWriter.GET_MESSAGE_TIMEOUT`. If the :attr:`~SqliteWriter.GET_MESSAGE_TIMEOUT` expires before a message - is received, the internal buffer is written out to the sql file. + is received, the internal buffer is written out to the database file. However if the bus is still saturated with messages, the Listener - will continue receiving until the :attr:`~SqliteWriter.MAX_TIME_BETWEEN_WRITES` - timeout is reached. + will continue receiving until the :attr:`~can.SqliteWriter.MAX_TIME_BETWEEN_WRITES` + timeout is reached or more than + :attr:`~can.SqliteWriter.MAX_BUFFER_SIZE_BEFORE_WRITES` messages are buffered. - """ + .. note:: The database schema is given in the documentation of the loggers. - _INSERT_MSG_TEMPLATE = ''' - INSERT INTO messages VALUES - (?, ?, ?, ?, ?, ?, ?) - ''' + """ GET_MESSAGE_TIMEOUT = 0.25 """Number of seconds to wait for messages from internal queue""" - MAX_TIME_BETWEEN_WRITES = 5 + MAX_TIME_BETWEEN_WRITES = 5.0 """Maximum number of seconds to wait between writes to the database""" - def __init__(self, filename): - super(SqliteWriter, self).__init__() - self.db_fn = filename - self.stop_running_event = threading.Event() - self.writer_thread = threading.Thread(target=self._db_writer_thread) - self.writer_thread.start() + MAX_BUFFER_SIZE_BEFORE_WRITES = 500 + """Maximum number of messages to buffer before writing to the database""" + + def __init__(self, file, table_name="messages"): + """ + :param file: a `str` or since Python 3.7 a path like object that points + to the database file to use + :param str table_name: the name of the table to store messages in + + .. warning:: In contrary to all other readers/writers the Sqlite handlers + do not accept file-like objects as the `file` parameter. + """ + super(SqliteWriter, self).__init__(file=None) + self.table_name = table_name + self._db_filename = file + self._stop_running_event = threading.Event() + self._writer_thread = threading.Thread(target=self._db_writer_thread) + self._writer_thread.start() + self.num_frames = 0 + self.last_write = time.time() def _create_db(self): - # Note: you can't share sqlite3 connections between threads - # hence we setup the db here. 
- log.info("Creating sqlite database") - self.conn = sqlite3.connect(self.db_fn) - cursor = self.conn.cursor() + """Creates a new databae or opens a connection to an existing one. + + .. note:: + You can't share sqlite3 connections between threads (by default) + hence we setup the db here. It has the upside of running async. + """ + log.debug("Creating sqlite database") + self._conn = sqlite3.connect(self._db_filename) # create table structure - cursor.execute(''' - CREATE TABLE IF NOT EXISTS messages + self._conn.cursor().execute(""" + CREATE TABLE IF NOT EXISTS {} ( ts REAL, arbitration_id INTEGER, @@ -129,52 +177,61 @@ def _create_db(self): dlc INTEGER, data BLOB ) - ''') - self.conn.commit() + """.format(self.table_name)) + self._conn.commit() + + self._insert_template = "INSERT INTO {} VALUES (?, ?, ?, ?, ?, ?, ?)".format(self.table_name) def _db_writer_thread(self): - num_frames = 0 - last_write = time.time() self._create_db() - while not self.stop_running_event.is_set(): - messages = [] - - msg = self.get_message(self.GET_MESSAGE_TIMEOUT) - while msg is not None: - log.debug("SqliteWriter: buffering message") - - messages.append(( - msg.timestamp, - msg.arbitration_id, - msg.id_type, - msg.is_remote_frame, - msg.is_error_frame, - msg.dlc, - buffer(msg.data) - )) - - if time.time() - last_write > self.MAX_TIME_BETWEEN_WRITES: - log.debug("Max timeout between writes reached") - break + try: + while True: + messages = [] # reset buffer msg = self.get_message(self.GET_MESSAGE_TIMEOUT) + while msg is not None: + #log.debug("SqliteWriter: buffering message") + + messages.append(( + msg.timestamp, + msg.arbitration_id, + msg.id_type, + msg.is_remote_frame, + msg.is_error_frame, + msg.dlc, + memoryview(msg.data) + )) + + if time.time() - self.last_write > self.MAX_TIME_BETWEEN_WRITES or \ + len(messages) > self.MAX_BUFFER_SIZE_BEFORE_WRITES: + break + else: + # just go on + msg = self.get_message(self.GET_MESSAGE_TIMEOUT) + + count = len(messages) + if count > 0: + with self._conn: + #log.debug("Writing %d frames to db", count) + self._conn.executemany(self._insert_template, messages) + self._conn.commit() # make the changes visible to the entire database + self.num_frames += count + self.last_write = time.time() + + # check if we are still supposed to run and go back up if yes + if self._stop_running_event.is_set(): + break - count = len(messages) - if count > 0: - with self.conn: - log.debug("Writing %s frames to db", count) - self.conn.executemany(SqliteWriter._INSERT_MSG_TEMPLATE, messages) - self.conn.commit() # make the changes visible to the entire database - num_frames += count - last_write = time.time() - - # go back up and check if we are still supposed to run - - self.conn.close() - log.info("Stopped sqlite writer after writing %s messages", num_frames) + finally: + self._conn.close() + log.info("Stopped sqlite writer after writing %d messages", self.num_frames) def stop(self): - self.stop_running_event.set() - log.debug("Stopping sqlite writer") - self.writer_thread.join() + """Stops the reader an writes all remaining messages to the database. Thus, this + might take a while an block. + """ + BufferedReader.stop(self) + self._stop_running_event.set() + self._writer_thread.join() + BaseIOHandler.stop(self) diff --git a/can/io/stdout.py b/can/io/stdout.py deleted file mode 100644 index c0a82ab5a..000000000 --- a/can/io/stdout.py +++ /dev/null @@ -1,40 +0,0 @@ -#!/usr/bin/env python -# coding: utf-8 - -""" -This Listener simply prints to stdout / the terminal or a file. 
-""" - -from __future__ import print_function - -import logging - -from can.listener import Listener - -log = logging.getLogger('can.io.stdout') - - -class Printer(Listener): - """ - The Printer class is a subclass of :class:`~can.Listener` which simply prints - any messages it receives to the terminal (stdout). - - :param output_file: An optional file to "print" to. - """ - - def __init__(self, output_file=None): - if output_file is not None: - log.info('Creating log file "{}"'.format(output_file)) - output_file = open(output_file, 'wt') - self.output_file = output_file - - def on_message_received(self, msg): - if self.output_file is not None: - self.output_file.write(str(msg) + '\n') - else: - print(msg) - - def stop(self): - if self.output_file: - self.output_file.write('\n') - self.output_file.close() diff --git a/can/listener.py b/can/listener.py index d7e8e7ced..1388bfedc 100644 --- a/can/listener.py +++ b/can/listener.py @@ -5,21 +5,44 @@ This module contains the implementation of `can.Listener` and some readers. """ +from abc import ABCMeta, abstractmethod + try: - # Python 3 - import queue + # Python 3.7 + from queue import SimpleQueue, Empty except ImportError: - # Python 2 - import Queue as queue + try: + # Python 3.0 - 3.6 + from queue import Queue as SimpleQueue, Empty + except ImportError: + # Python 2 + from Queue import Queue as SimpleQueue, Empty class Listener(object): + """The basic listener that can be called directly to handle some + CAN message:: + + listener = SomeListener() + msg = my_bus.recv() + + # now either call + listener(msg) + # or + listener.on_message_received(msg) + + """ + + __metaclass__ = ABCMeta + @abstractmethod def on_message_received(self, msg): - raise NotImplementedError( - "{} has not implemented on_message_received".format( - self.__class__.__name__) - ) + """This method is called to handle the given message. + + :param can.Message msg: the delivered message + + """ + pass def __call__(self, msg): return self.on_message_received(msg) @@ -32,8 +55,7 @@ def stop(self): class RedirectReader(Listener): """ - A RedirectReader sends all received messages - to another Bus. + A RedirectReader sends all received messages to another Bus. """ @@ -49,25 +71,48 @@ class BufferedReader(Listener): A BufferedReader is a subclass of :class:`~can.Listener` which implements a **message buffer**: that is, when the :class:`can.BufferedReader` instance is notified of a new message it pushes it into a queue of messages waiting to - be serviced. + be serviced. The messages can then be fetched with + :meth:`~can.BufferedReader.get_message`. + + Putting in messages after :meth:`~can.BufferedReader.stop` has be called will raise + an exception, see :meth:`~can.BufferedReader.on_message_received`. + + :attr bool is_stopped: ``True`` iff the reader has been stopped """ def __init__(self): - self.buffer = queue.Queue(0) + # set to "infinite" size + self.buffer = SimpleQueue() + self.is_stopped = False def on_message_received(self, msg): - self.buffer.put(msg) + """Append a message to the buffer. + + :raises: BufferError + if the reader has already been stopped + """ + if self.is_stopped: + raise RuntimeError("reader has already been stopped") + else: + self.buffer.put(msg) def get_message(self, timeout=0.5): """ Attempts to retrieve the latest message received by the instance. 
If no message is - available it blocks for given timeout or until a message is received (whichever - is shorter), + available it blocks for given timeout or until a message is received, or else + returns None (whichever is shorter). This method does not block after + :meth:`can.BufferedReader.stop` has been called. :param float timeout: The number of seconds to wait for a new message. - :return: the :class:`~can.Message` if there is one, or None if there is not. + :rytpe: can.Message or None + :return: the message if there is one, or None if there is not. """ try: - return self.buffer.get(block=True, timeout=timeout) - except queue.Empty: + return self.buffer.get(block=not self.is_stopped, timeout=timeout) + except Empty: return None + + def stop(self): + """Prohibits any more additions to this reader. + """ + self.is_stopped = True diff --git a/can/message.py b/can/message.py index 9154bc0b5..9bbf82a0e 100644 --- a/can/message.py +++ b/can/message.py @@ -2,7 +2,7 @@ # coding: utf-8 """ -This module contains the implementation of `can.Message`. +This module contains the implementation of :class:`can.Message`. """ import logging @@ -69,7 +69,7 @@ def __init__(self, timestamp=0.0, is_remote_frame=False, extended_id=True, logger.warning("data link count was %d but it should be less than or equal to 8", self.dlc) def __str__(self): - field_strings = ["Timestamp: {0:15.6f}".format(self.timestamp)] + field_strings = ["Timestamp: {0:>15.6f}".format(self.timestamp)] if self.id_type: # Extended arbitrationID arbitration_id_string = "ID: {0:08x}".format(self.arbitration_id) @@ -131,7 +131,8 @@ def __repr__(self): return "can.Message({})".format(", ".join(args)) def __eq__(self, other): - return (isinstance(other, self.__class__) and + if isinstance(other, self.__class__): + return ( self.arbitration_id == other.arbitration_id and #self.timestamp == other.timestamp and # allow the timestamp to differ self.id_type == other.id_type and @@ -140,7 +141,16 @@ def __eq__(self, other): self.is_remote_frame == other.is_remote_frame and self.is_error_frame == other.is_error_frame and self.is_fd == other.is_fd and - self.bitrate_switch == other.bitrate_switch) + self.bitrate_switch == other.bitrate_switch + ) + else: + return NotImplemented + + def __ne__(self, other): + if isinstance(other, self.__class__): + return not self.__eq__(other) + else: + return NotImplemented def __hash__(self): return hash(( diff --git a/can/thread_safe_bus.py b/can/thread_safe_bus.py index ce2e6e040..ac1a85a83 100644 --- a/can/thread_safe_bus.py +++ b/can/thread_safe_bus.py @@ -2,6 +2,7 @@ # coding: utf-8 from __future__ import print_function, absolute_import + from threading import RLock try: diff --git a/doc/history.rst b/doc/history.rst index dfc7ad532..70d0460c0 100644 --- a/doc/history.rst +++ b/doc/history.rst @@ -22,24 +22,36 @@ who wrote a leaf-socketcan driver for Linux. The pcan interface was contributed by Albert Bloomfield in 2013. -The usb2can interface was contributed by Joshua Villyard in 2015 +The usb2can interface was contributed by Joshua Villyard in 2015. The IXXAT VCI interface was contributed by Giuseppe Corbelli and funded -by `Weightpack `__ in 2016 +by `Weightpack `__ in 2016. The NI-CAN and virtual interfaces plus the ASCII and BLF loggers were contributed by Christian Sandberg in 2016 and 2017. The BLF format is based on a C++ library by Toby Lorenz. -The slcan interface, ASCII listener and log logger and listener were contributed by Eduard Bröcker in 2017. 
+The slcan interface, ASCII listener and log logger and listener were contributed
+by Eduard Bröcker in 2017.
 
 The NeoVi interface for ICS (Intrepid Control Systems) devices was contributed
 by Pierre-Luc Tessier Gagné in 2017.
 
+Many improvements all over the library, cleanups and unifications, as well as more
+comprehensive documentation and CI testing were contributed by Felix Divo in 2017
+and 2018.
+
 
 Support for CAN within Python
 -----------------------------
 
-The 'socket' module contains support for SocketCAN from Python 3.3.
+Python natively supports the CAN protocol from version 3.3 onwards, when running on Linux:
 
-From Python 3.4 broadcast management commands are natively supported.
+============== ============================================================== ====
+Python version Feature                                                        Link
+============== ============================================================== ====
+3.3            Initial SocketCAN support                                      `Docs `__
+3.4            Broadcast Management (BCM) commands are natively supported     `Docs `__
+3.5            CAN FD support                                                 `Docs `__
+3.7            Support for CAN ISO-TP                                         `Docs `__
+============== ============================================================== ====
diff --git a/doc/listeners.rst b/doc/listeners.rst
index 0b69b631b..4c4ec7a2b 100644
--- a/doc/listeners.rst
+++ b/doc/listeners.rst
@@ -67,8 +67,8 @@ SqliteWriter
 Database table format
 ~~~~~~~~~~~~~~~~~~~~~
 
-The messages are written to the table ``messages`` in the sqlite database.
-The table is created if it does not already exist.
+The messages are written to the table ``messages`` in the sqlite database
+by default. The table is created if it does not already exist.
 
 The entries are as follows:
 
@@ -102,7 +102,7 @@ engineered from existing log files. One description of the format can be found `
 
 .. autoclass:: can.ASCWriter
     :members:
 
-ASCReader reads CAN data from ASCII log files .asc
+ASCReader reads CAN data from ASCII log files (.asc); the can-utils tools `asc2log `_ and `log2asc `_ can serve as further references.
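The documentation hunks above describe the file-based readers and writers; with the new `BaseIOHandler` introduced by this patch they accept both path-like and file-like objects and work as context managers. The following is a minimal sketch of that usage, not part of the patch itself: it assumes python-can with this change applied, and the file name `example.csv` is a made-up illustration.

```python
import io
import can

# path-like argument: the handler opens the file itself and closes it
# again when the context manager exits (stop() is called implicitly)
with can.CSVWriter("example.csv") as writer:  # hypothetical file name
    writer(can.Message(arbitration_id=0x123, data=[1, 2, 3], extended_id=False))

# file-like argument: an already-open text-mode stream is used as-is
stream = io.StringIO()
printer = can.Printer(stream)
printer(can.Message(arbitration_id=0x456, extended_id=False))
print(stream.getvalue())  # the message rendered via str()
printer.stop()            # closes the underlying stream
```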
diff --git a/test/back2back_test.py b/test/back2back_test.py index a93855dd2..5d0034330 100644 --- a/test/back2back_test.py +++ b/test/back2back_test.py @@ -17,7 +17,6 @@ import can from .config import * -from .data.example_data import generate_message class Back2BackTestCase(unittest.TestCase): diff --git a/test/contextmanager_test.py b/test/contextmanager_test.py index a69dfd5e4..ea9321502 100644 --- a/test/contextmanager_test.py +++ b/test/contextmanager_test.py @@ -16,7 +16,7 @@ def setUp(self): self.msg_send = can.Message(extended_id=False, arbitration_id=0x100, data=data) def test_open_buses(self): - with can.interface.Bus(bustype='virtual') as bus_send, can.interface.Bus(bustype='virtual') as bus_recv: + with can.Bus(interface='virtual') as bus_send, can.Bus(interface='virtual') as bus_recv: bus_send.send(self.msg_send) msg_recv = bus_recv.recv() @@ -24,7 +24,7 @@ def test_open_buses(self): self.assertTrue(msg_recv) def test_use_closed_bus(self): - with can.interface.Bus(bustype='virtual') as bus_send, can.interface.Bus(bustype='virtual') as bus_recv: + with can.Bus(interface='virtual') as bus_send, can.Bus(interface='virtual') as bus_recv: bus_send.send(self.msg_send) # Receiving a frame after bus has been closed should raise a CanException diff --git a/test/data/example_data.py b/test/data/example_data.py index c3290683a..e1a446384 100644 --- a/test/data/example_data.py +++ b/test/data/example_data.py @@ -7,15 +7,28 @@ """ import random +from operator import attrgetter from can import Message # make tests more reproducible random.seed(13339115) + +def sort_messages(messages): + """ + Sorts the given messages by timestamps (ascending). + + :param Iterable[can.Message] messages: a sequence of messages to sort + :rtype: list + """ + return list(sorted(messages, key=attrgetter('timestamp'))) + + # some random number TEST_TIME = 1483389946.197 + # List of messages of different types that can be used in tests TEST_MESSAGES_BASE = [ Message( @@ -70,6 +83,8 @@ timestamp=TEST_TIME + 3.165 ), ] +TEST_MESSAGES_BASE = sort_messages(TEST_MESSAGES_BASE) + TEST_MESSAGES_REMOTE_FRAMES = [ Message( @@ -91,6 +106,8 @@ timestamp=TEST_TIME + 7858.67 ), ] +TEST_MESSAGES_REMOTE_FRAMES = sort_messages(TEST_MESSAGES_REMOTE_FRAMES) + TEST_MESSAGES_ERROR_FRAMES = [ Message( @@ -105,8 +122,12 @@ timestamp=TEST_TIME + 17.157 ) ] +TEST_MESSAGES_ERROR_FRAMES = sort_messages(TEST_MESSAGES_ERROR_FRAMES) + + +TEST_ALL_MESSAGES = sort_messages(TEST_MESSAGES_BASE + TEST_MESSAGES_REMOTE_FRAMES + \ + TEST_MESSAGES_ERROR_FRAMES) -TEST_ALL_MESSAGES = TEST_MESSAGES_BASE + TEST_MESSAGES_REMOTE_FRAMES + TEST_MESSAGES_ERROR_FRAMES TEST_COMMENTS = [ "This is the first comment", @@ -127,4 +148,4 @@ def generate_message(arbitration_id): and a non-extended ID. 
""" data = bytearray([random.randrange(0, 2 ** 8 - 1) for _ in range(8)]) - return Message(arbitration_id=arbitration_id, data=data, extended_id=False) + return Message(arbitration_id=arbitration_id, data=data, extended_id=False, timestamp=TEST_TIME) diff --git a/test/listener_test.py b/test/listener_test.py index b2a80382c..c25a6fb56 100644 --- a/test/listener_test.py +++ b/test/listener_test.py @@ -11,8 +11,9 @@ import random import logging import tempfile -import os.path import sqlite3 +import os +from os.path import join, dirname import can @@ -21,7 +22,7 @@ channel = 'virtual_channel_0' can.rc['interface'] = 'virtual' -logging.getLogger('').setLevel(logging.DEBUG) +logging.basicConfig(level=logging.DEBUG) # makes the random number generator deterministic random.seed(13339115) @@ -69,13 +70,13 @@ def tearDown(self): class ListenerTest(BusTest): def testBasicListenerCanBeAddedToNotifier(self): - a_listener = can.Listener() + a_listener = can.Printer() notifier = can.Notifier(self.bus, [a_listener], 0.1) notifier.stop() self.assertIn(a_listener, notifier.listeners) - + def testAddListenerToNotifier(self): - a_listener = can.Listener() + a_listener = can.Printer() notifier = can.Notifier(self.bus, [], 0.1) notifier.stop() self.assertNotIn(a_listener, notifier.listeners) @@ -83,7 +84,7 @@ def testAddListenerToNotifier(self): self.assertIn(a_listener, notifier.listeners) def testRemoveListenerFromNotifier(self): - a_listener = can.Listener() + a_listener = can.Printer() notifier = can.Notifier(self.bus, [a_listener], 0.1) notifier.stop() self.assertIn(a_listener, notifier.listeners) @@ -92,45 +93,65 @@ def testRemoveListenerFromNotifier(self): def testPlayerTypeResolution(self): def test_filetype_to_instance(extension, klass): - can_player = can.LogReader("test.{}".format(extension)) - self.assertIsInstance(can_player, klass) - if hasattr(can_player, "stop"): - can_player.stop() - - test_filetype_to_instance("asc", can.ASCReader) - test_filetype_to_instance("blf", can.BLFReader) - test_filetype_to_instance("csv", can.CSVReader) - test_filetype_to_instance("db" , can.SqliteReader) - test_filetype_to_instance("log", can.CanutilsLogReader) + print("testing: {}".format(extension)) + try: + if extension == ".blf": + delete = False + file_handler = open(join(dirname(__file__), "data/logfile.blf")) + else: + delete = True + file_handler = tempfile.NamedTemporaryFile(suffix=extension, delete=False) + + with file_handler as my_file: + filename = my_file.name + with can.LogReader(filename) as reader: + self.assertIsInstance(reader, klass) + finally: + if delete: + os.remove(filename) + + test_filetype_to_instance(".asc", can.ASCReader) + test_filetype_to_instance(".blf", can.BLFReader) + test_filetype_to_instance(".csv", can.CSVReader) + test_filetype_to_instance(".db" , can.SqliteReader) + test_filetype_to_instance(".log", can.CanutilsLogReader) # test file extensions that are not supported - with self.assertRaisesRegexp(NotImplementedError, "xyz_42"): - test_filetype_to_instance("xyz_42", can.Printer) - with self.assertRaises(Exception): - test_filetype_to_instance(None, can.Printer) + with self.assertRaisesRegexp(NotImplementedError, ".xyz_42"): + test_filetype_to_instance(".xyz_42", can.Printer) def testLoggerTypeResolution(self): def test_filetype_to_instance(extension, klass): - can_logger = can.Logger("test.{}".format(extension)) - self.assertIsInstance(can_logger, klass) - can_logger.stop() - - test_filetype_to_instance("asc", can.ASCWriter) - test_filetype_to_instance("blf", can.BLFWriter) - 
test_filetype_to_instance("csv", can.CSVWriter) - test_filetype_to_instance("db" , can.SqliteWriter) - test_filetype_to_instance("log", can.CanutilsLogWriter) - test_filetype_to_instance("txt", can.Printer) - - # test file extensions that should usa a fallback - test_filetype_to_instance(None, can.Printer) - test_filetype_to_instance("some_unknown_extention_42", can.Printer) + print("testing: {}".format(extension)) + try: + with tempfile.NamedTemporaryFile(suffix=extension, delete=False) as my_file: + filename = my_file.name + with can.Logger(filename) as writer: + self.assertIsInstance(writer, klass) + finally: + os.remove(filename) + + test_filetype_to_instance(".asc", can.ASCWriter) + test_filetype_to_instance(".blf", can.BLFWriter) + test_filetype_to_instance(".csv", can.CSVWriter) + test_filetype_to_instance(".db" , can.SqliteWriter) + test_filetype_to_instance(".log", can.CanutilsLogWriter) + test_filetype_to_instance(".txt", can.Printer) + + # test file extensions that should use a fallback + test_filetype_to_instance("", can.Printer) + test_filetype_to_instance(".", can.Printer) + test_filetype_to_instance(".some_unknown_extention_42", can.Printer) + with can.Logger(None) as logger: + self.assertIsInstance(logger, can.Printer) def testBufferedListenerReceives(self): a_listener = can.BufferedReader() a_listener(generate_message(0xDADADA)) - m = a_listener.get_message(0.1) - self.assertIsNotNone(m) + a_listener(generate_message(0xDADADA)) + self.assertIsNotNone(a_listener.get_message(0.1)) + a_listener.stop() + self.assertIsNotNone(a_listener.get_message(0.1)) if __name__ == '__main__': diff --git a/test/logformats_test.py b/test/logformats_test.py index e7d8b4bf7..2a315352d 100644 --- a/test/logformats_test.py +++ b/test/logformats_test.py @@ -13,14 +13,13 @@ TODO: implement CAN FD support testing """ -from __future__ import print_function -from __future__ import absolute_import +from __future__ import print_function, absolute_import, division +import logging import unittest import tempfile -from time import sleep -import sqlite3 import os +from abc import abstractmethod, ABCMeta try: # Python 3 @@ -33,207 +32,409 @@ from .data.example_data import TEST_MESSAGES_BASE, TEST_MESSAGES_REMOTE_FRAMES, \ TEST_MESSAGES_ERROR_FRAMES, TEST_COMMENTS, \ - generate_message + sort_messages +logging.basicConfig(level=logging.DEBUG) -def _test_writer_and_reader(test_case, writer_constructor, reader_constructor, sleep_time=None, - check_remote_frames=True, check_error_frames=True, - check_comments=False, round_timestamps=False): + +class ReaderWriterTest(unittest.TestCase): """Tests a pair of writer and reader by writing all data first and then reading all data and checking if they could be reconstructed - correctly. - - :param unittest.TestCase test_case: the test case the use the assert methods on - :param Callable writer_constructor: the constructor of the writer class - :param Callable reader_constructor: the constructor of the reader class - - :param float sleep_time: specifies the time to sleep after writing all messages. - gets ignored when set to None - :param bool check_remote_frames: if True, also tests remote frames - :param bool check_error_frames: if True, also tests error frames - :param bool check_comments: if True, also inserts comments at some - locations and checks if they are contained anywhere literally - in the resulting file. The locations as selected randomly - but deterministically, which makes the test reproducible. 
-    :param bool round_timestamps: if True, rounds timestamps using :meth:`~builtin.round`
-        before comparing the read messages/events
+    correctly. Optionally writes some comments as well.
+    """
-    assert isinstance(test_case, unittest.TestCase), \
-        "test_case has to be a subclass of unittest.TestCase"
-
-    if check_comments:
-        # we check this because of the lack of a common base class
-        # we filter for not starts with '__' so we do not get all the builtin
-        # methods when logging to the console
-        test_case.assertIn('log_event', [d for d in dir(writer_constructor) if not d.startswith('__')],
-                           "cannot check comments with this writer: {}".format(writer_constructor))
-
-    # create a temporary file
-    temp = tempfile.NamedTemporaryFile('w', delete=False)
-    temp.close()
-    filename = temp.name
-
-    # get all test messages
-    original_messages = TEST_MESSAGES_BASE
-    if check_remote_frames:
-        original_messages += TEST_MESSAGES_REMOTE_FRAMES
-    if check_error_frames:
-        original_messages += TEST_MESSAGES_ERROR_FRAMES
-
-    # get all test comments
-    original_comments = TEST_COMMENTS
-
-    # create writer
-    writer = writer_constructor(filename)
-
-    # write
-    if check_comments:
-        # write messages and insert comments here and there
+    __test__ = False
+
+    __metaclass__ = ABCMeta
+
+    def __init__(self, *args, **kwargs):
+        super(ReaderWriterTest, self).__init__(*args, **kwargs)
+        self._setup_instance()
+
+    @abstractmethod
+    def _setup_instance(self):
+        """Hook for subclasses."""
+        raise NotImplementedError()
+
+    def _setup_instance_helper(self,
+                               writer_constructor, reader_constructor, binary_file=False,
+                               check_remote_frames=True, check_error_frames=True, check_comments=False,
+                               test_append=False, round_timestamps=False):
+        """
+        :param Callable writer_constructor: the constructor of the writer class
+        :param Callable reader_constructor: the constructor of the reader class
+
+        :param bool check_remote_frames: if True, also tests remote frames
+        :param bool check_error_frames: if True, also tests error frames
+        :param bool check_comments: if True, also inserts comments at some
+            locations and checks if they are contained anywhere literally
+            in the resulting file. The locations are selected randomly
+            but deterministically, which makes the test reproducible.
+        :param bool test_append: tests the writer in append mode as well
+        :param bool round_timestamps: if True, rounds timestamps using :meth:`~builtin.round`
+            before comparing the read messages/events
+
+        """
+        # get all test messages
+        self.original_messages = TEST_MESSAGES_BASE
+        if check_remote_frames:
+            self.original_messages += TEST_MESSAGES_REMOTE_FRAMES
+        if check_error_frames:
+            self.original_messages += TEST_MESSAGES_ERROR_FRAMES
+
+        # sort them so that for example ASCWriter does not "fix" any messages with timestamp 0.0
+        self.original_messages = sort_messages(self.original_messages)
+
+        if check_comments:
+            # we check this because of the lack of a common base class
+            # we filter for not starts with '__' so we do not get all the builtin
+            # methods when logging to the console
+            attrs = [attr for attr in dir(writer_constructor) if not attr.startswith('__')]
+            assert 'log_event' in attrs, \
+                "cannot check comments with this writer: {}".format(writer_constructor)
+
+        # get all test comments
+        self.original_comments = TEST_COMMENTS if check_comments else ()
+
+        self.writer_constructor = writer_constructor
+        self.reader_constructor = reader_constructor
+        self.binary_file = binary_file
+        self.test_append_enabled = test_append
+        self.round_timestamps = round_timestamps
+
+    def setUp(self):
+        with tempfile.NamedTemporaryFile('w+', delete=False) as test_file:
+            self.test_file_name = test_file.name
+
+    def tearDown(self):
+        os.remove(self.test_file_name)
+        del self.test_file_name
+
+    def test_path_like_explicit_stop(self):
+        """testing with path-like and explicit stop() call"""
+
+        # create writer
+        print("writing all messages/comments")
+        writer = self.writer_constructor(self.test_file_name)
+        self._write_all(writer)
+        self._ensure_fsync(writer)
+        writer.stop()
+        if hasattr(writer.file, 'closed'):
+            self.assertTrue(writer.file.closed)
+
+        print("reading all messages")
+        reader = self.reader_constructor(self.test_file_name)
+        read_messages = list(reader)
+        # redundant, but this checks if stop() can be called multiple times
+        reader.stop()
+        if hasattr(reader.file, 'closed'):
+            self.assertTrue(reader.file.closed)
+
+        # check if at least the number of messages matches
+        # could use assertCountEqual in later versions of Python and in the other methods
+        self.assertEqual(len(read_messages), len(self.original_messages),
+                         "the number of written messages does not match the number of read messages")
+
+        self.assertMessagesEqual(read_messages)
+        self.assertIncludesComments(self.test_file_name)
+
+    def test_path_like_context_manager(self):
+        """testing with path-like object and context manager"""
+
+        # create writer
+        print("writing all messages/comments")
+        with self.writer_constructor(self.test_file_name) as writer:
+            self._write_all(writer)
+            self._ensure_fsync(writer)
+            w = writer
+        if hasattr(w.file, 'closed'):
+            self.assertTrue(w.file.closed)
+
+        # read all written messages
+        print("reading all messages")
+        with self.reader_constructor(self.test_file_name) as reader:
+            read_messages = list(reader)
+            r = reader
+        if hasattr(r.file, 'closed'):
+            self.assertTrue(r.file.closed)
+
+        # check if at least the number of messages matches;
+        self.assertEqual(len(read_messages), len(self.original_messages),
+                         "the number of written messages does not match the number of read messages")
+
+        self.assertMessagesEqual(read_messages)
+        self.assertIncludesComments(self.test_file_name)
+
+    def test_file_like_explicit_stop(self):
+        """testing with file-like object and explicit stop() call"""
+
+        # create writer
print("writing all messages/comments") + my_file = open(self.test_file_name, 'wb' if self.binary_file else 'w') + writer = self.writer_constructor(my_file) + self._write_all(writer) + self._ensure_fsync(writer) + writer.stop() + if hasattr(my_file, 'closed'): + self.assertTrue(my_file.closed) + + print("reading all messages") + my_file = open(self.test_file_name, 'rb' if self.binary_file else 'r') + reader = self.reader_constructor(my_file) + read_messages = list(reader) + # redundant, but this checks if stop() can be called multiple times + reader.stop() + if hasattr(my_file, 'closed'): + self.assertTrue(my_file.closed) + + # check if at least the number of messages matches + # could use assertCountEqual in later versions of Python and in the other methods + self.assertEqual(len(read_messages), len(self.original_messages), + "the number of written messages does not match the number of read messages") + + self.assertMessagesEqual(read_messages) + self.assertIncludesComments(self.test_file_name) + + def test_file_like_context_manager(self): + """testing with file-like object and context manager""" + + # create writer + print("writing all messages/comments") + my_file = open(self.test_file_name, 'wb' if self.binary_file else 'w') + with self.writer_constructor(my_file) as writer: + self._write_all(writer) + self._ensure_fsync(writer) + w = writer + if hasattr(my_file, 'closed'): + self.assertTrue(my_file.closed) + + # read all written messages + print("reading all messages") + my_file = open(self.test_file_name, 'rb' if self.binary_file else 'r') + with self.reader_constructor(my_file) as reader: + read_messages = list(reader) + r = reader + if hasattr(my_file, 'closed'): + self.assertTrue(my_file.closed) + + # check if at least the number of messages matches; + self.assertEqual(len(read_messages), len(self.original_messages), + "the number of written messages does not match the number of read messages") + + self.assertMessagesEqual(read_messages) + self.assertIncludesComments(self.test_file_name) + + def test_append_mode(self): + """ + testing append mode with context manager and path-like object + """ + if not self.test_append_enabled: + raise unittest.SkipTest("do not test append mode") + + count = len(self.original_messages) + first_part = self.original_messages[:count // 2] + second_part = self.original_messages[count // 2:] + + # write first half + with self.writer_constructor(self.test_file_name) as writer: + for message in first_part: + writer(message) + self._ensure_fsync(writer) + + # use append mode for second half + try: + writer = self.writer_constructor(self.test_file_name, append=True) + except TypeError as e: + # maybe "append" is not a formal parameter (this is the case for SqliteWriter) + try: + writer = self.writer_constructor(self.test_file_name) + except TypeError: + # is the is still a problem, raise the initial error + raise e + with writer: + for message in second_part: + writer(message) + self._ensure_fsync(writer) + with self.reader_constructor(self.test_file_name) as reader: + read_messages = list(reader) + + self.assertMessagesEqual(read_messages) + + def _write_all(self, writer): + """Writes messages and insert comments here and there.""" # Note: we make no assumptions about the length of original_messages and original_comments - for msg, comment in zip_longest(original_messages, original_comments, fillvalue=None): + for msg, comment in zip_longest(self.original_messages, self.original_comments, fillvalue=None): # msg and comment might be None if comment is not 
None: print("writing comment: ", comment) writer.log_event(comment) # we already know that this method exists - print("writing comment: ", comment) if msg is not None: print("writing message: ", msg) writer(msg) - print("writing message: ", msg) - else: - # ony write messages - for msg in original_messages: - print("writing message: ", msg) - writer(msg) - print("writing message: ", msg) - # sleep and close the writer - if sleep_time is not None: - sleep(sleep_time) + def _ensure_fsync(self, io_handler): + if hasattr(io_handler.file, 'fileno'): + io_handler.file.flush() + os.fsync(io_handler.file.fileno()) + + def assertMessagesEqual(self, read_messages): + """ + Checks the order and content of the individual messages. + """ + for index, (original, read) in enumerate(zip(self.original_messages, read_messages)): + try: + # check everything except the timestamp + self.assertEqual(original, read, "messages are not equal at index #{}".format(index)) + # check the timestamp + if self.round_timestamps: + original.timestamp = round(original.timestamp) + read.timestamp = round(read.timestamp) + self.assertAlmostEqual(read.timestamp, original.timestamp, places=6, + msg="message timestamps are not almost_equal at index #{} ({!r} !~= {!r})" + .format(index, original.timestamp, read.timestamp)) + except: + print("Comparing: original message: {!r}".format(original)) + print(" read message: {!r}".format(read)) + raise + + def assertIncludesComments(self, filename): + """ + Ensures that all comments are literally contained in the given file. + + :param filename: the path-like object to use + """ + if self.original_comments: + # read the entire outout file + with open(filename, 'rb' if self.binary_file else 'r') as file: + output_contents = file.read() + # check each, if they can be found in there literally + for comment in self.original_comments: + self.assertIn(comment, output_contents) + + +class TestAscFileFormat(ReaderWriterTest): + """Tests can.ASCWriter and can.ASCReader""" - writer.stop() + __test__ = True - # read all written messages - read_messages = list(reader_constructor(filename)) + def _setup_instance(self): + super(TestAscFileFormat, self)._setup_instance_helper( + can.ASCWriter, can.ASCReader, + check_comments=True, round_timestamps=True + ) - # check if at least the number of messages matches - test_case.assertEqual(len(read_messages), len(original_messages), - "the number of written messages does not match the number of read messages") - # check the order and content of the individual messages - for i, (read, original) in enumerate(zip(read_messages, original_messages)): - try: - # check everything except the timestamp - test_case.assertEqual(read, original) - # check the timestamp - if round_timestamps: - original.timestamp = round(original.timestamp) - read.timestamp = round(read.timestamp) - test_case.assertAlmostEqual(read.timestamp, original.timestamp, places=6) - except Exception as exception: - # attach the index - exception.args += ("test failed at index #{}".format(i), ) - raise exception - - # check if the comments are contained in the file - if check_comments: - # read the entire outout file - with open(filename, 'r') as file: - output_contents = file.read() - # check each, if they can be found in there literally - for comment in original_comments: - test_case.assertTrue(comment in output_contents) - - -class TestCanutilsLog(unittest.TestCase): - """Tests can.CanutilsLogWriter and can.CanutilsLogReader""" +class TestBlfFileFormat(ReaderWriterTest): + """Tests can.BLFWriter 
and can.BLFReader""" - def test_writer_and_reader(self): - _test_writer_and_reader(self, can.CanutilsLogWriter, can.CanutilsLogReader, - check_comments=False) + __test__ = True + def _setup_instance(self): + super(TestBlfFileFormat, self)._setup_instance_helper( + can.BLFWriter, can.BLFReader, + binary_file=True, + check_comments=False + ) -class TestAscFileFormat(unittest.TestCase): - """Tests can.ASCWriter and can.ASCReader""" + def test_read_known_file(self): + logfile = os.path.join(os.path.dirname(__file__), "data", "logfile.blf") + with can.BLFReader(logfile) as reader: + messages = list(reader) + self.assertEqual(len(messages), 2) + self.assertEqual(messages[0], + can.Message( + extended_id=False, + arbitration_id=0x64, + data=[0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8])) + self.assertEqual(messages[0].channel, 0) + self.assertEqual(messages[1], + can.Message( + is_error_frame=True, + extended_id=True, + arbitration_id=0x1FFFFFFF)) + self.assertEqual(messages[1].channel, 0) - def test_writer_and_reader(self): - _test_writer_and_reader(self, can.ASCWriter, can.ASCReader, - check_comments=True, round_timestamps=True) +class TestCanutilsFileFormat(ReaderWriterTest): + """Tests can.CanutilsLogWriter and can.CanutilsLogReader""" -class TestCsvFileFormat(unittest.TestCase): - """Tests can.ASCWriter and can.ASCReader""" + __test__ = True - def test_writer_and_reader(self): - _test_writer_and_reader(self, can.CSVWriter, can.CSVReader, - check_comments=False) + def _setup_instance(self): + super(TestCanutilsFileFormat, self)._setup_instance_helper( + can.CanutilsLogWriter, can.CanutilsLogReader, + test_append=True, check_comments=False + ) -class TestSqliteDatabaseFormat(unittest.TestCase): - """Tests can.SqliteWriter and can.SqliteReader""" +class TestCsvFileFormat(ReaderWriterTest): + """Tests can.ASCWriter and can.ASCReader""" - def test_writer_and_reader(self): - _test_writer_and_reader(self, can.SqliteWriter, can.SqliteReader, - sleep_time=can.SqliteWriter.MAX_TIME_BETWEEN_WRITES + 0.5, - check_comments=False) + __test__ = True - def testSQLWriterWritesToSameFile(self): - f = tempfile.NamedTemporaryFile('w', delete=False) - f.close() + def _setup_instance(self): + super(TestCsvFileFormat, self)._setup_instance_helper( + can.CSVWriter, can.CSVReader, + test_append=True, check_comments=False + ) - first_listener = can.SqliteWriter(f.name) - first_listener(generate_message(0x01)) - sleep(first_listener.MAX_TIME_BETWEEN_WRITES) - first_listener.stop() +class TestSqliteDatabaseFormat(ReaderWriterTest): + """Tests can.SqliteWriter and can.SqliteReader""" - second_listener = can.SqliteWriter(f.name) - second_listener(generate_message(0x02)) + __test__ = True - sleep(second_listener.MAX_TIME_BETWEEN_WRITES) + def _setup_instance(self): + super(TestSqliteDatabaseFormat, self)._setup_instance_helper( + can.SqliteWriter, can.SqliteReader, + test_append=True, check_comments=False + ) - second_listener.stop() + @unittest.skip("not implemented") + def test_file_like_explicit_stop(self): + pass - con = sqlite3.connect(f.name) + @unittest.skip("not implemented") + def test_file_like_context_manager(self): + pass - with con: - c = con.cursor() + def test_read_all(self): + """ + testing :meth:`can.SqliteReader.read_all` with context manager and path-like object + """ + # create writer + print("writing all messages/comments") + with self.writer_constructor(self.test_file_name) as writer: + self._write_all(writer) - c.execute("select COUNT() from messages") - self.assertEqual(2, c.fetchone()[0]) + # read all 
written messages + print("reading all messages") + with self.reader_constructor(self.test_file_name) as reader: + read_messages = list(reader.read_all()) - c.execute("select * from messages") - msg1 = c.fetchone() - msg2 = c.fetchone() + # check if at least the number of messages matches; + self.assertEqual(len(read_messages), len(self.original_messages), + "the number of written messages does not match the number of read messages") - self.assertEqual(msg1[1], 0x01) - self.assertEqual(msg2[1], 0x02) + self.assertMessagesEqual(read_messages) -class TestBlfFileFormat(unittest.TestCase): - """Tests can.BLFWriter and can.BLFReader""" +class TestPrinter(unittest.TestCase): + """Tests that can.Printer does not crash""" - def test_writer_and_reader(self): - _test_writer_and_reader(self, can.BLFWriter, can.BLFReader, - check_comments=False) + messages = TEST_MESSAGES_BASE + TEST_MESSAGES_REMOTE_FRAMES + TEST_MESSAGES_ERROR_FRAMES - def test_reader(self): - logfile = os.path.join(os.path.dirname(__file__), "data", "logfile.blf") - messages = list(can.BLFReader(logfile)) - self.assertEqual(len(messages), 2) - self.assertEqual(messages[0], - can.Message( - extended_id=False, - arbitration_id=0x64, - data=[0x1, 0x2, 0x3, 0x4, 0x5, 0x6, 0x7, 0x8])) - self.assertEqual(messages[0].channel, 0) - self.assertEqual(messages[1], - can.Message( - is_error_frame=True, - extended_id=True, - arbitration_id=0x1FFFFFFF)) - self.assertEqual(messages[1].channel, 0) + def test_not_crashes_with_stdout(self): + with can.Printer() as printer: + for message in self.messages: + printer(message) + + def test_not_crashes_with_file(self): + with tempfile.NamedTemporaryFile('w', delete=False) as temp_file: + with can.Printer(temp_file) as printer: + for message in self.messages: + printer(message) if __name__ == '__main__':