diff --git a/nptdms/base_segment.py b/nptdms/base_segment.py
index f3b5738..3f40642 100644
--- a/nptdms/base_segment.py
+++ b/nptdms/base_segment.py
@@ -56,14 +56,14 @@ def _read_data_chunk(self, file, data_objects, chunk_index):
         """
         raise NotImplementedError("Data chunk reading must be implemented in base classes")
 
-    def read_channel_data_chunks(self, file, data_objects, channel_path, chunk_offset, stop_chunk, chunk_size):
+    def read_channel_data_chunks(self, file, data_objects, channel_path, chunk_offset, stop_chunk):
         """ Read multiple data chunks for a single channel at once
         In the base case we read each chunk individually but subclasses can override this
         """
         for chunk_index in range(chunk_offset, stop_chunk):
-            yield self._read_channel_data_chunk(file, data_objects, chunk_index, channel_path, chunk_size)
+            yield self._read_channel_data_chunk(file, data_objects, chunk_index, channel_path)
 
-    def _read_channel_data_chunk(self, file, data_objects, chunk_index, channel_path, chunk_size):
+    def _read_channel_data_chunk(self, file, data_objects, chunk_index, channel_path):
         """ Read data from a chunk for a single channel
         """
         # In the base case we can read data for all channels
diff --git a/nptdms/tdms_segment.py b/nptdms/tdms_segment.py
index 61dba55..cacde39 100644
--- a/nptdms/tdms_segment.py
+++ b/nptdms/tdms_segment.py
@@ -383,7 +383,7 @@ def _read_channel_data_chunks(self, file, data_objects, channel_path, chunk_offs
         reader = self._get_data_reader()
         initial_position = file.tell()
         for i, chunk in enumerate(reader.read_channel_data_chunks(
-                file, data_objects, channel_path, chunk_offset, stop_chunk, chunk_size
+                file, data_objects, channel_path, chunk_offset, stop_chunk
         )):
             yield chunk
             file.seek(initial_position + (i + 1) * chunk_size)
@@ -461,7 +461,7 @@ def read_data_chunks(self, file, data_objects, num_chunks):
             raise ValueError("Cannot read interleaved data with different chunk sizes")
         return [self._read_interleaved_chunks(file, data_objects, num_chunks)]
 
-    def read_channel_data_chunks(self, file, data_objects, channel_path, chunk_offset, stop_chunk, chunk_size):
+    def read_channel_data_chunks(self, file, data_objects, channel_path, chunk_offset, stop_chunk):
         """ Read multiple data chunks for a single channel at once
         """
         num_chunks = stop_chunk - chunk_offset
@@ -513,7 +513,7 @@ def _read_data_chunk(self, file, data_objects, chunk_index):
             object_data[obj.path] = obj.read_values(file, number_values, self.endianness)
         return RawDataChunk.channel_data(object_data)
 
-    def _read_channel_data_chunk(self, file, data_objects, chunk_index, channel_path, chunk_size):
+    def _read_channel_data_chunk(self, file, data_objects, chunk_index, channel_path):
         """ Read data from a chunk for a single channel
         """
         channel_data = RawChannelDataChunk.empty()
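
Note (not part of the diff): chunk_size is dropped from the reader methods because the caller now owns file positioning between chunks, as _read_channel_data_chunks in tdms_segment.py does above with file.seek(initial_position + (i + 1) * chunk_size). A minimal sketch of that calling pattern, assuming a reader exposing the new read_channel_data_chunks signature (the helper name below is hypothetical):

    def read_channel_chunks(reader, file, data_objects, channel_path,
                            chunk_offset, stop_chunk, chunk_size):
        # Yield each chunk for one channel, then seek to the start of the
        # next chunk; chunk_size is tracked only by the caller.
        initial_position = file.tell()
        for i, chunk in enumerate(reader.read_channel_data_chunks(
                file, data_objects, channel_path, chunk_offset, stop_chunk)):
            yield chunk
            file.seek(initial_position + (i + 1) * chunk_size)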