diff --git a/src/BaseIO.cpp b/src/BaseIO.cpp index 62067e02..f5165ff4 100644 --- a/src/BaseIO.cpp +++ b/src/BaseIO.cpp @@ -93,9 +93,11 @@ BaseRecordingData::BaseRecordingData() {} BaseRecordingData::~BaseRecordingData() {} -Status BaseRecordingData::writeDataBlock(const SizeType& xDataSize, +// Overload that uses the member variable position (works for simple data +// extension) +Status BaseRecordingData::writeDataBlock(const std::vector& dataShape, const BaseDataType& type, const void* data) { - return writeDataBlock(xDataSize, size[1], type, data); + return writeDataBlock(dataShape, position, type, data); } diff --git a/src/BaseIO.hpp b/src/BaseIO.hpp index 150bfe6b..77c3213e 100644 --- a/src/BaseIO.hpp +++ b/src/BaseIO.hpp @@ -246,10 +246,10 @@ class BaseIO * @param path The location in the file of the new dataset. * @return A pointer to the created dataset. */ - virtual BaseRecordingData* createDataSet(const BaseDataType& type, - const SizeArray& size, - const SizeArray& chunking, - const std::string& path) = 0; + virtual BaseRecordingData* createArrayDataSet(const BaseDataType& type, + const SizeArray& size, + const SizeArray& chunking, + const std::string& path) = 0; /** * @brief Returns a pointer to a dataset at a given path. @@ -357,43 +357,31 @@ class BaseRecordingData virtual ~BaseRecordingData(); /** - * @brief Writes a 1D block of data (samples). - * @param xDataSize The size of the data block in the x dimension (samples). + * @brief Writes a block of data using the stored position information. + * This is not intended to be overwritten by derived classes, but is a + * convenience function for writing data using the last recorded position. + * @param dataShape The size of the data block. * @param type The data type of the elements in the data block. * @param data A pointer to the data block. * @return The status of the write operation. */ - Status writeDataBlock(const SizeType& xDataSize, + Status writeDataBlock(const std::vector& dataShape, const BaseDataType& type, const void* data); /** - * @brief Writes a 2D block of data (samples x channels). - * @param xDataSize The size of the data block in the x dimension (samples). - * @param yDataSize The size of the data block in the y dimension (channels). + * @brief Writes a block of data (any number of dimensions). + * @param dataShape The size of the data block. + * @param positionOffset The position of the data block to write to. * @param type The data type of the elements in the data block. * @param data A pointer to the data block. * @return The status of the write operation. */ - virtual Status writeDataBlock(const SizeType& xDataSize, - const SizeType& yDataSize, + virtual Status writeDataBlock(const std::vector& dataShape, + const std::vector& positionOffset, const BaseDataType& type, const void* data) = 0; - /** - * @brief Writes a row of data in a 2D block. - * @param xDataSize The size of the data row in the x dimension (samples). - * @param yPosition The position of the data row in the y dimension - * (channels). - * @param type The data type of the elements in the data block. - * @param data A pointer to the data block. - * @return The status of the write operation. - */ - virtual Status writeDataRow(const SizeType& xDataSize, - const SizeType& yPos, - const BaseDataType& type, - const void* data) = 0; - protected: /** * @brief The current position in the x dimension. @@ -401,20 +389,19 @@ class BaseRecordingData SizeType xPos; /** - * @brief The size of the data block in each dimension. 
+ * @brief The size of the dataset in each dimension. */ - SizeType size[3]; + std::vector size; /** - * @brief The number of dimensions in the data block. + * @brief The current position in the dataset. */ - SizeType dimension; /**< The number of dimensions in the data block. */ + std::vector position; /** - * @brief The position in the x dimension of samples written for each row - * (channel). + * @brief The number of dimensions in the data block. */ - std::vector rowXPos; + SizeType nDimensions; }; } // namespace AQNWB diff --git a/src/Channel.cpp b/src/Channel.cpp index 815a1a4a..8b2dd936 100644 --- a/src/Channel.cpp +++ b/src/Channel.cpp @@ -11,7 +11,8 @@ Channel::Channel(const std::string name, const float conversion, const float samplingRate, const float bitVolts, - const std::array position) + const std::array position, + const std::string comments) : name(name) , groupName(groupName) , localIndex(localIndex) @@ -20,6 +21,7 @@ Channel::Channel(const std::string name, , conversion(conversion) , samplingRate(samplingRate) , bitVolts(bitVolts) + , comments(comments) { } diff --git a/src/Channel.hpp b/src/Channel.hpp index 99c22fc3..fe835272 100644 --- a/src/Channel.hpp +++ b/src/Channel.hpp @@ -27,7 +27,8 @@ class Channel const float bitVolts = 0.000002f, // least significant bit needed to // convert 16-bit int to volts // currently a placeholder - const std::array position = {0.f, 0.f, 0.f}); + const std::array position = {0.f, 0.f, 0.f}, + const std::string comments = "no comments"); /** * @brief Destructor @@ -45,6 +46,7 @@ class Channel * @return The samplingRate value. */ float getSamplingRate() const; + /** * @brief Getter for bitVolts * @return The bitVolts value. @@ -76,6 +78,11 @@ class Channel */ std::array position; + /** + * @brief Comments about the channel. + */ + std::string comments; + private: /** * @brief Conversion factor. diff --git a/src/Types.hpp b/src/Types.hpp index 99bc1588..37bbd8a3 100644 --- a/src/Types.hpp +++ b/src/Types.hpp @@ -35,8 +35,8 @@ class Types using SizeArray = std::vector; /** - * @brief Alias for a group of channels. + * @brief Alias for a vector of channels. 
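
For reference, a short sketch of how callers are expected to assemble the renamed `ChannelVector` groups. This is a usage illustration, not part of the change; it assumes the rendering dropped template arguments (i.e. `ChannelVector` is `std::vector<Channel>` and the recording arrays are a `std::vector<Types::ChannelVector>`), and the helper name is made up. It mirrors the four-argument `Channel` constructor used in the tests; the new `comments` member keeps its "no comments" default here.

```cpp
#include <vector>

#include "Channel.hpp"
#include "Types.hpp"

using namespace AQNWB;

// Build one ChannelVector per recorded array ("array0" with two channels),
// in the shape expected by NWBFile::createElectricalSeries / NWBRecording::openFile.
std::vector<Types::ChannelVector> makeRecordingArrays()
{
  Types::ChannelVector array0 = {
      Channel("ch0", "array0", 0, 0),  // name, groupName, localIndex, globalIndex
      Channel("ch1", "array0", 1, 1),
  };
  return {array0};
}
```
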
*/ - using ChannelGroup = std::vector; + using ChannelVector = std::vector; }; } // namespace AQNWB diff --git a/src/Utils.hpp b/src/Utils.hpp index 69dcafe3..b1a85813 100644 --- a/src/Utils.hpp +++ b/src/Utils.hpp @@ -67,4 +67,29 @@ inline std::unique_ptr createIO(const std::string& type, throw std::invalid_argument("Invalid IO type"); } } + +inline std::unique_ptr transformToInt16(SizeType numSamples, + float conversion_factor, + const float* data) +{ + std::unique_ptr scaledData = std::make_unique(numSamples); + std::unique_ptr intData = std::make_unique(numSamples); + + // copy data and multiply by scaling factor + double multFactor = 1 / (32767.0f * conversion_factor); + std::transform(data, + data + numSamples, + scaledData.get(), + [multFactor](float value) { return value * multFactor; }); + + // convert float to int16 + std::transform( + scaledData.get(), + scaledData.get() + numSamples, + intData.get(), + [](float value) + { return static_cast(std::clamp(value, -32768.0f, 32767.0f)); }); + + return intData; +} } // namespace AQNWB diff --git a/src/hdf5/HDF5IO.cpp b/src/hdf5/HDF5IO.cpp index fa1937f7..066c6f87 100644 --- a/src/hdf5/HDF5IO.cpp +++ b/src/hdf5/HDF5IO.cpp @@ -94,14 +94,19 @@ Status HDF5IO::createAttribute(const BaseDataType& type, if (!opened) return Status::Failure; - try { - gloc = file->openGroup(path); - loc = &gloc; - } catch (FileIException - error) // If there is no group with that path, try a dataset - { - dloc = file->openDataSet(path); - loc = &dloc; + // open the group or dataset + H5O_type_t objectType = getObjectType(path); + switch (objectType) { + case H5O_TYPE_GROUP: + gloc = file->openGroup(path); + loc = &gloc; + break; + case H5O_TYPE_DATASET: + dloc = file->openDataSet(path); + loc = &dloc; + break; + default: + return Status::Failure; // not a valid dataset or group type } H5type = getH5Type(type); @@ -167,14 +172,19 @@ Status HDF5IO::createAttribute(const std::vector& data, StrType H5type(PredType::C_S1, maxSize); H5type.setSize(H5T_VARIABLE); - try { - gloc = file->openGroup(path); - loc = &gloc; - } catch (FileIException - error) // If there is no group with that path, try a dataset - { - dloc = file->openDataSet(path); - loc = &dloc; + // open the group or dataset + H5O_type_t objectType = getObjectType(path); + switch (objectType) { + case H5O_TYPE_GROUP: + gloc = file->openGroup(path); + loc = &gloc; + break; + case H5O_TYPE_DATASET: + dloc = file->openDataSet(path); + loc = &dloc; + break; + default: + return Status::Failure; // not a valid dataset or group type } try { @@ -216,14 +226,19 @@ Status HDF5IO::createReferenceAttribute(const std::string& referencePath, if (!opened) return Status::Failure; - try { - gloc = file->openGroup(path); - loc = &gloc; - } catch (FileIException - error) // If there is no group with that path, try a dataset - { - dloc = file->openDataSet(path); - loc = &dloc; + // open the group or dataset + H5O_type_t objectType = getObjectType(path); + switch (objectType) { + case H5O_TYPE_GROUP: + gloc = file->openGroup(path); + loc = &gloc; + break; + case H5O_TYPE_DATASET: + dloc = file->openDataSet(path); + loc = &dloc; + break; + default: + return Status::Failure; // not a valid dataset or group type } try { @@ -354,9 +369,10 @@ Status HDF5IO::createStringDataSet(const std::string& path, } std::unique_ptr dataset; - dataset = std::unique_ptr(createDataSet( + dataset = std::unique_ptr(createArrayDataSet( BaseDataType::V_STR, SizeArray {values.size()}, SizeArray {1}, path)); - dataset->writeDataBlock(1, 
BaseDataType::V_STR, cStrs.data()); + dataset->writeDataBlock( + std::vector(1, 1), BaseDataType::V_STR, cStrs.data()); return Status::Success; } @@ -383,25 +399,25 @@ AQNWB::BaseRecordingData* HDF5IO::getDataSet(const std::string& path) } } -AQNWB::BaseRecordingData* HDF5IO::createDataSet(const BaseDataType& type, - const SizeArray& size, - const SizeArray& chunking, - const std::string& path) +AQNWB::BaseRecordingData* HDF5IO::createArrayDataSet(const BaseDataType& type, + const SizeArray& size, + const SizeArray& chunking, + const std::string& path) { std::unique_ptr data; DSetCreatPropList prop; + DataType H5type = getH5Type(type); + if (!opened) return nullptr; - // Right now this classes don't support datasets with rank > 3. - // If it's needed in the future we can extend them to be of generic rank SizeType dimension = size.size(); - if ((dimension > 3) || (dimension < 1)) + if (dimension < 1) // Check for at least one dimension return nullptr; - DataType H5type = getH5Type(type); - - hsize_t dims[3], chunk_dims[3], max_dims[3]; + // Use vectors to support an arbitrary number of dimensions + std::vector dims(dimension), chunk_dims(dimension), + max_dims(dimension); for (SizeType i = 0; i < dimension; i++) { dims[i] = static_cast(size[i]); @@ -414,14 +430,31 @@ AQNWB::BaseRecordingData* HDF5IO::createDataSet(const BaseDataType& type, } } - DataSpace dSpace(static_cast(dimension), dims, max_dims); - prop.setChunk(static_cast(dimension), chunk_dims); + DataSpace dSpace(static_cast(dimension), dims.data(), max_dims.data()); + prop.setChunk(static_cast(dimension), chunk_dims.data()); data = std::make_unique( file->createDataSet(path, H5type, dSpace, prop)); return new HDF5RecordingData(data.release()); } +H5O_type_t HDF5IO::getObjectType(const std::string& path) +{ +#if H5_VERSION_GE(1, 12, 0) + // get whether path is a dataset or group + H5O_info_t objInfo; // Structure to hold information about the object + H5Oget_info_by_name( + this->file->getId(), path.c_str(), &objInfo, H5O_INFO_BASIC, H5P_DEFAULT); +#else + // get whether path is a dataset or group + H5O_info_t objInfo; // Structure to hold information about the object + H5Oget_info_by_name(this->file->getId(), path.c_str(), &objInfo, H5P_DEFAULT); +#endif + H5O_type_t objectType = objInfo.type; + + return objectType; +} + H5::DataType HDF5IO::getNativeType(BaseDataType type) { H5::DataType baseType; @@ -527,32 +560,24 @@ H5::DataType HDF5IO::getH5Type(BaseDataType type) // HDF5RecordingData HDF5RecordingData::HDF5RecordingData(H5::DataSet* data) { - DataSpace dSpace; - DSetCreatPropList prop; - hsize_t dims[3], chunk[3]; + DataSpace dSpace = data->getSpace(); + DSetCreatPropList prop = data->getCreatePlist(); - dSpace = data->getSpace(); - prop = data->getCreatePlist(); + int nDimensions = dSpace.getSimpleExtentNdims(); + std::vector dims(nDimensions), chunk(nDimensions); - dimension = dSpace.getSimpleExtentDims(dims); - prop.getChunk(static_cast(dimension), chunk); + nDimensions = dSpace.getSimpleExtentDims( + dims.data()); // TODO -redefine here or use original? 
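
As an aside on the new arbitrary-rank creation API: the sketch below is illustrative only and assumes, as in the tests, that `SizeArray` is `std::vector<SizeType>` and that a dimension created with size 0 plus a nonzero chunk size can be extended later.

```cpp
#include <memory>
#include <string>

#include "BaseIO.hpp"

using namespace AQNWB;

// Request a chunked 3D dataset whose first (time) dimension starts empty and
// grows as data is appended. The rank is taken from size.size(), so callers
// are no longer limited to rank <= 3.
std::unique_ptr<BaseRecordingData> makeExtendable3D(BaseIO& io,
                                                    const std::string& path)
{
  return std::unique_ptr<BaseRecordingData>(
      io.createArrayDataSet(BaseDataType::F32,
                            SizeArray {0, 4, 32},     // initial size
                            SizeArray {1024, 4, 32},  // chunk shape
                            path));
}
```
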
+ prop.getChunk(static_cast(nDimensions), chunk.data()); - this->size[0] = dims[0]; - if (dimension > 1) - this->size[1] = dims[1]; - else - this->size[1] = 1; - if (dimension > 1) - this->size[2] = dims[2]; - else - this->size[2] = 1; - - this->xPos = 0; + this->size = std::vector(nDimensions); + for (int i = 0; i < nDimensions; ++i) { + this->size[i] = dims[i]; + } + this->nDimensions = nDimensions; + this->position = std::vector( + nDimensions, 0); // Initialize position with 0 for each dimension this->dSet = std::make_unique(*data); - ; - this->rowXPos.clear(); - this->rowXPos.insert( - this->rowXPos.end(), static_cast(this->size[1]), 0); } // HDF5RecordingData @@ -563,117 +588,64 @@ HDF5RecordingData::~HDF5RecordingData() dSet->flush(H5F_SCOPE_GLOBAL); } -Status HDF5RecordingData::writeDataBlock(const SizeType& xDataSize, - const SizeType& yDataSize, - const BaseDataType& type, - const void* data) +Status HDF5RecordingData::writeDataBlock( + const std::vector& dataShape, + const std::vector& positionOffset, + const BaseDataType& type, + const void* data) { try { - hsize_t dim[3], offset[3]; - DataSpace fSpace; - DataType nativeType; - - dim[2] = static_cast(size[2]); - // only modify y size if new required size is larger than what we had. - if (yDataSize > size[1]) - dim[1] = static_cast(yDataSize); - else - dim[1] = static_cast(size[1]); - dim[0] = static_cast(xPos) + xDataSize; - - // First be sure that we have enough space - dSet->extend(dim); - - fSpace = dSet->getSpace(); - fSpace.getSimpleExtentDims(dim); - size[0] = dim[0]; - if (dimension > 1) - size[1] = dim[1]; - - // Create memory space - dim[0] = static_cast(xDataSize); - dim[1] = static_cast(yDataSize); - dim[2] = static_cast(size[2]); - - DataSpace mSpace(static_cast(dimension), dim); - // select where to write - offset[0] = static_cast(xPos); - offset[1] = 0; - offset[2] = 0; - - fSpace.selectHyperslab(H5S_SELECT_SET, dim, offset); - - nativeType = HDF5IO::getNativeType(type); - - dSet->write(data, nativeType, mSpace, fSpace); - xPos += xDataSize; - } catch (DataSetIException error) { - error.printErrorStack(); - } catch (DataSpaceIException error) { - error.printErrorStack(); - } catch (FileIException error) { - error.printErrorStack(); - } - return Status::Success; -} - -void HDF5RecordingData::readDataBlock(const BaseDataType& type, void* buffer) -{ - DataSpace fSpace = dSet->getSpace(); - DataType nativeType = HDF5IO::getNativeType(type); - dSet->read(buffer, nativeType, fSpace, fSpace); -} - -Status HDF5RecordingData::writeDataRow(const SizeType& xDataSize, - const SizeType& yPos, - const BaseDataType& type, - const void* data) -{ - hsize_t dim[2], offset[2]; - DataSpace fSpace; - DataType nativeType; + // check dataShape and positionOffset inputs match the dimensions of the + // dataset + if (dataShape.size() != nDimensions || positionOffset.size() != nDimensions) + { + return Status::Failure; + } - if (dimension > 2) - return Status::Failure; // not currently writing rows in datasets > 2d - if ((yPos < 0) || (yPos >= size[1])) - return Status::Failure; // yPosition out of bounds + // Ensure that we have enough space to accommodate new data + std::vector dSetDims(nDimensions), offset(nDimensions); + for (int i = 0; i < nDimensions; ++i) { + offset[i] = static_cast(positionOffset[i]); - try { - // Check dimensions - if (rowXPos[yPos] + xDataSize > size[0]) { - dim[1] = size[1]; - dim[0] = rowXPos[yPos] + xDataSize; - dSet->extend(dim); - - fSpace = dSet->getSpace(); - fSpace.getSimpleExtentDims(dim); - size[0] = 
static_cast(dim[0]); - } - if (rowXPos[yPos] + xDataSize > xPos) { - xPos = rowXPos[yPos] + xDataSize; + if (dataShape[i] + offset[i] > size[i]) // TODO - do I need offset here + dSetDims[i] = dataShape[i] + offset[i]; + else + dSetDims[i] = size[i]; } - // Create memory space - dim[0] = static_cast(xDataSize); - dim[1] = static_cast(1); - DataSpace mSpace(static_cast(dimension), dim); + // Adjust dataset dimensions if necessary + dSet->extend(dSetDims.data()); - // select where to write - fSpace = dSet->getSpace(); - offset[0] = rowXPos[yPos]; - offset[1] = yPos; + // Set size to new size based on updated dimensionality + DataSpace fSpace = dSet->getSpace(); + fSpace.getSimpleExtentDims(dSetDims.data()); + for (int i = 0; i < nDimensions; ++i) { + size[i] = dSetDims[i]; + } - fSpace.selectHyperslab(H5S_SELECT_SET, dim, offset); + // Create memory space with the shape of the data + // DataSpace mSpace(dimension, dSetDim.data()); + std::vector dataDims(nDimensions); + for (int i = 0; i < nDimensions; ++i) { + if (dataShape[i] == 0) { + dataDims[i] = 1; + } else { + dataDims[i] = static_cast(dataShape[i]); + } + } + DataSpace mSpace(static_cast(nDimensions), dataDims.data()); - nativeType = HDF5IO::getNativeType(type); + // Select hyperslab in the file space + fSpace.selectHyperslab(H5S_SELECT_SET, dataDims.data(), offset.data()); + // Write the data + DataType nativeType = HDF5IO::getNativeType(type); dSet->write(data, nativeType, mSpace, fSpace); - if (yPos < rowXPos.size()) { - rowXPos[yPos] += xDataSize; - } else { - rowXPos.push_back(xDataSize); - } + // Update position for simple extension + for (int i = 0; i < dataShape.size(); ++i) { + position[i] += dataShape[i]; + } } catch (DataSetIException error) { error.printErrorStack(); } catch (DataSpaceIException error) { @@ -681,6 +653,10 @@ Status HDF5RecordingData::writeDataRow(const SizeType& xDataSize, } catch (FileIException error) { error.printErrorStack(); } - return Status::Success; } + +const H5::DataSet* HDF5RecordingData::getDataSet() +{ + return dSet.get(); +}; diff --git a/src/hdf5/HDF5IO.hpp b/src/hdf5/HDF5IO.hpp index bea2aa27..51cbe966 100644 --- a/src/hdf5/HDF5IO.hpp +++ b/src/hdf5/HDF5IO.hpp @@ -4,8 +4,11 @@ #include #include +#include + #include "BaseIO.hpp" #include "Types.hpp" + namespace H5 { class DataSet; @@ -182,10 +185,10 @@ class HDF5IO : public BaseIO * @param path The location in the file of the new dataset. * @return A pointer to the created dataset. */ - BaseRecordingData* createDataSet(const BaseDataType& type, - const SizeArray& size, - const SizeArray& chunking, - const std::string& path) override; + BaseRecordingData* createArrayDataSet(const BaseDataType& type, + const SizeArray& size, + const SizeArray& chunking, + const std::string& path) override; /** * @brief Returns a pointer to a dataset at a given path. @@ -194,6 +197,13 @@ class HDF5IO : public BaseIO */ BaseRecordingData* getDataSet(const std::string& path) override; + /** + * @brief Returns the HDF5 type of object at a given path. + * @param path The location in the file of the object. + * @return The type of object at the given path. + */ + H5O_type_t getObjectType(const std::string& path); + /** * @brief Returns the HDF5 native data type for a given base data type. * @param type The base data type. @@ -226,8 +236,8 @@ class HDF5IO : public BaseIO * @brief Represents an HDF5 Dataset that can be extended indefinitely in blocks. 
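
A sketch of the append pattern the rewritten `writeDataBlock` supports (assuming the shape/offset parameters are `std::vector<SizeType>`; the surrounding function and its arguments are illustrative): the caller passes the block shape plus an explicit offset, and the dataset is extended before the hyperslab write when the block would exceed the current extent.

```cpp
#include <vector>

#include "BaseIO.hpp"
#include "Types.hpp"

using namespace AQNWB;

// Append numBlocks blocks of shape (samplesPerBlock x numChannels) to a 2D
// dataset, advancing the write offset along the time dimension each iteration.
Status appendBlocks(BaseRecordingData& dset,
                    const int16_t* samples,
                    SizeType numBlocks,
                    SizeType samplesPerBlock,
                    SizeType numChannels)
{
  std::vector<SizeType> dataShape = {samplesPerBlock, numChannels};
  for (SizeType b = 0; b < numBlocks; ++b) {
    std::vector<SizeType> positionOffset = {b * samplesPerBlock, 0};
    Status s = dset.writeDataBlock(dataShape,
                                   positionOffset,
                                   BaseDataType::I16,
                                   samples + b * samplesPerBlock * numChannels);
    if (s != Status::Success)
      return s;
  }
  return Status::Success;
}
```
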
* -* This class provides functionality for reading and writing 2D blocks of data -* (samples x channels) to an HDF5 dataset. +* This class provides functionality for reading and writing blocks of data +* to an HDF5 dataset. */ class HDF5RecordingData : public BaseRecordingData { @@ -254,37 +264,23 @@ class HDF5RecordingData : public BaseRecordingData ~HDF5RecordingData(); /** - * @brief Writes a 2D block of data to the HDF5 dataset. - * @param xDataSize The size of the data block in the x dimension (samples). - * @param yDataSize The size of the data block in the y dimension (channels). + * @brief Writes a block of data to the HDF5 dataset. + * @param dataShape The size of the data block. + * @param positionOffset The position of the data block to write to. * @param type The data type of the elements in the data block. * @param data A pointer to the data block. * @return The status of the write operation. */ - Status writeDataBlock(const SizeType& xDataSize, - const SizeType& yDataSize, + Status writeDataBlock(const std::vector& dataShape, + const std::vector& positionOffset, const BaseDataType& type, const void* data); /** - * @brief Reads a block of data from the HDF5 dataset. - * @param type The data type of the data block. - * @param buffer A pointer to the buffer to store the read data. - */ - void readDataBlock(const BaseDataType& type, void* buffer); - - /** - * @brief Writes a row of data in a 2D block. - * @param xDataSize The size of the data row in the x dimension (samples). - * @param yPos The position of the data row in the y dimension (channels). - * @param type The data type of the elements in the data block. - * @param data A pointer to the data block. - * @return The status of the write operation. + * @brief Gets a const pointer to the HDF5 dataset. + * @return A const pointer to the HDF5 dataset. 
*/ - Status writeDataRow(const SizeType& xDataSize, - const SizeType& yPos, - const BaseDataType& type, - const void* data); + const H5::DataSet* getDataSet(); private: /** diff --git a/src/nwb/NWBFile.cpp b/src/nwb/NWBFile.cpp index ab5b7a81..ea1d5ea6 100644 --- a/src/nwb/NWBFile.cpp +++ b/src/nwb/NWBFile.cpp @@ -77,23 +77,22 @@ Status NWBFile::createFileStructure() return Status::Success; } -Status NWBFile::startRecording(std::vector recordingArrays) +Status NWBFile::createElectricalSeries( + std::vector recordingArrays, + const BaseDataType& dataType) { // store all recorded data in the acquisition group std::string rootPath = "/acquisition/"; - timeseriesData.clear(); - timeseriesData.reserve(recordingArrays.size()); - // Setup electrode table std::string electrodeTablePath = "general/extracellular_ephys/electrodes/"; ElectrodeTable elecTable = ElectrodeTable(electrodeTablePath, io); elecTable.initialize(); // Create continuous datasets - for (const auto& channelGroup : recordingArrays) { + for (const auto& channelVector : recordingArrays) { // Setup electrodes and devices - std::string groupName = channelGroup[0].groupName; + std::string groupName = channelVector[0].groupName; std::string devicePath = "general/devices/" + groupName; std::string electrodePath = "general/extracellular_ephys/" + groupName; std::string electricalSeriesPath = rootPath + groupName; @@ -109,17 +108,20 @@ Status NWBFile::startRecording(std::vector recordingArrays) auto electricalSeries = std::make_unique( electricalSeriesPath, io, + dataType, + channelVector, + elecTable.getPath(), + "volts", "Stores continuously sampled voltage data from an " "extracellular ephys recording", - channelGroup, - CHUNK_XSIZE, - elecTable.getPath()); + SizeArray {0, channelVector.size()}, + SizeArray {CHUNK_XSIZE}); electricalSeries->initialize(); - timeseriesData.push_back(std::move(electricalSeries)); + recordingContainers->addData(std::move(electricalSeries)); // Add electrode information to electrode table (does not write to datasets // yet) - elecTable.addElectrodes(channelGroup); + elecTable.addElectrodes(channelVector); } // write electrode information to datasets @@ -130,31 +132,6 @@ Status NWBFile::startRecording(std::vector recordingArrays) void NWBFile::stopRecording() {} -Status NWBFile::writeTimeseriesTimestamps(SizeType datasetInd, - SizeType numSamples, - BaseDataType type, - const void* data) -{ - if (!timeseriesData[datasetInd]) - return Status::Failure; - - return timeseriesData[datasetInd]->timestamps->writeDataBlock( - numSamples, type, data); -} - -Status NWBFile::writeTimeseriesData(SizeType datasetInd, - SizeType rowInd, - SizeType numSamples, - BaseDataType type, - const void* data) -{ - if (!timeseriesData[datasetInd]) - return Status::Failure; - - return timeseriesData[datasetInd]->data->writeDataRow( - numSamples, rowInd, type, data); -} - void NWBFile::cacheSpecifications(const std::string& specPath, const std::string& versionNumber) { @@ -193,5 +170,28 @@ std::unique_ptr NWBFile::createRecordingData( const std::string& path) { return std::unique_ptr( - io->createDataSet(type, size, chunking, path)); + io->createArrayDataSet(type, size, chunking, path)); +} + +TimeSeries* NWBFile::getTimeSeries(const SizeType& timeseriesInd) +{ + if (timeseriesInd >= this->recordingContainers->containers.size()) { + return nullptr; + } else { + return this->recordingContainers->containers[timeseriesInd].get(); + } +} + +// Recording Container + +RecordingContainers::RecordingContainers(const std::string& name) + : 
name(name) +{ +} + +RecordingContainers::~RecordingContainers() {} + +void RecordingContainers::addData(std::unique_ptr data) +{ + this->containers.push_back(std::move(data)); } diff --git a/src/nwb/NWBFile.hpp b/src/nwb/NWBFile.hpp index d8c01a2c..31577b4e 100644 --- a/src/nwb/NWBFile.hpp +++ b/src/nwb/NWBFile.hpp @@ -1,6 +1,8 @@ #pragma once #include +#include +#include #include "BaseIO.hpp" #include "Types.hpp" @@ -9,7 +11,7 @@ namespace AQNWB::NWB { -using TimeSeriesData = std::vector>; +class RecordingContainers; // declare here because gets used in NWBFile class /** * @brief The NWBFile class provides an interface for setting up and managing @@ -52,42 +54,20 @@ class NWBFile void finalize(); /** - * @brief Starts a recording. - * @return Status The status of the recording operation. + * @brief Create ElectricalSeries objects to record data into. + * Created objects are stored in recordingContainers. + * @param dataType The data type of the elements in the data block. + * @return Status The status of the object creation operation. */ - Status startRecording(std::vector recordingArrays); + Status createElectricalSeries( + std::vector recordingArrays, + const BaseDataType& dataType = BaseDataType::I16); /** * @brief Closes the relevant datasets. */ void stopRecording(); - /** - * @brief Write timeseries timestamps to the NWB file. - * @param datasetID The index of the timeseries dataset. - * @param numSamples The number of samples to write. - * @param type The base data type. - * @param data The data to write. - */ - Status writeTimeseriesTimestamps(SizeType datasetInd, - SizeType numSamples, - BaseDataType type, - const void* data); - - /** - * @brief Write a row of timeseries data to the NWB file. - * @param datasetID The index of the timeseries dataset. - * @param rowID The index of the row to write. - * @param numSamples The number of samples to write. - * @param type The base data type. - * @param data The data to write. - */ - Status writeTimeseriesData(SizeType datasetInd, - SizeType rowInd, - SizeType numSamples, - BaseDataType type, - const void* data); - /** * @brief Indicates the NWB schema version. */ @@ -103,6 +83,13 @@ class NWBFile */ const std::string HDMFExperimentalVersion = "0.5.0"; + /** + * @brief Gets the TimeSeries object from the recording containers + * @param containerName The name of the timeseries group. + * @param timeseriesInd The index of the timeseries dataset within the group. + */ + TimeSeries* getTimeSeries(const SizeType& timeseriesInd); + protected: /** * @brief Creates the default file structure. @@ -134,18 +121,48 @@ class NWBFile void cacheSpecifications(const std::string& specPath, const std::string& versionNumber); + const std::string identifierText; + std::shared_ptr io; + std::unique_ptr recordingContainers = + std::make_unique("RecordingContainers"); +}; + +/** + * @brief The RecordingContainers class provides an interface for managing + * groups of TimeSeries acquired during a recording. + */ +class RecordingContainers +{ +public: + /** + * @brief Constructor for RecordingContainer class. + * @param name The name of the group of time series + */ + RecordingContainers(const std::string& name); + /** - * @brief Creates a new dataset to hold text data (messages). - * @param path The location in the file for the dataset. - * @param name The name of the dataset. - * @param text The text data to be stored in the dataset. + * @brief Deleted copy constructor to prevent construction-copying. 
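
For orientation, a hedged sketch of the container-based flow that replaces `startRecording` plus the per-dataset write calls. The function name is made up, `recordingArrays` is assumed to be a `std::vector<Types::ChannelVector>` (template arguments are dropped in this rendering), and the `NWBFile` is assumed to be constructed and `initialize()`d beforehand, as `NWBRecording::openFile` does.

```cpp
#include <vector>

#include "BaseIO.hpp"
#include "Types.hpp"
#include "nwb/NWBFile.hpp"
#include "nwb/base/TimeSeries.hpp"

using namespace AQNWB;

// Create one ElectricalSeries per channel group; the series are owned by the
// file's RecordingContainers and retrieved later by index for writing.
Status setupAcquisition(NWB::NWBFile& nwbfile,
                        std::vector<Types::ChannelVector> recordingArrays)
{
  Status s = nwbfile.createElectricalSeries(recordingArrays, BaseDataType::I16);
  if (s != Status::Success)
    return s;

  // Index 0 corresponds to the first channel group passed in above.
  NWB::TimeSeries* ts = nwbfile.getTimeSeries(0);
  return (ts != nullptr) ? Status::Success : Status::Failure;
}
```
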
*/ - void createTextDataSet(const std::string& path, - const std::string& name, - const std::string& text); + RecordingContainers(const RecordingContainers&) = delete; - const std::string identifierText; - std::shared_ptr io; - TimeSeriesData timeseriesData; + /** + * @brief Deleted copy assignment operator to prevent copying. + */ + RecordingContainers& operator=(const RecordingContainers&) = delete; + + /** + * @brief Destructor for RecordingContainer class. + */ + ~RecordingContainers(); + + /** + * @brief Adds a TimeSeries object to the container. + * @param data The TimeSeries object to add. + */ + void addData(std::unique_ptr data); + + std::vector> containers; + std::string name; }; + } // namespace AQNWB::NWB diff --git a/src/nwb/NWBRecording.cpp b/src/nwb/NWBRecording.cpp index 8f9c9806..a0b49460 100644 --- a/src/nwb/NWBRecording.cpp +++ b/src/nwb/NWBRecording.cpp @@ -4,17 +4,10 @@ #include "Utils.hpp" #include "hdf5/HDF5IO.hpp" -constexpr SizeType MAX_BUFFER_SIZE = 40960; - using namespace AQNWB::NWB; // NWBRecordingEngine -NWBRecording::NWBRecording() -{ - scaledBuffer = std::make_unique(MAX_BUFFER_SIZE); - intBuffer = std::make_unique(MAX_BUFFER_SIZE); - bufferSize = MAX_BUFFER_SIZE; -} +NWBRecording::NWBRecording() {} NWBRecording::~NWBRecording() { @@ -26,7 +19,7 @@ NWBRecording::~NWBRecording() Status NWBRecording::openFile(const std::string& rootFolder, const std::string& baseName, int experimentNumber, - std::vector recordingArrays, + std::vector recordingArrays, const std::string& IOType) { std::string filename = @@ -38,7 +31,7 @@ Status NWBRecording::openFile(const std::string& rootFolder, nwbfile->initialize(); // start the new recording - return nwbfile->startRecording(recordingArrays); + return nwbfile->createElectricalSeries(recordingArrays); } void NWBRecording::closeFile() @@ -47,42 +40,26 @@ void NWBRecording::closeFile() nwbfile->finalize(); } -void NWBRecording::writeTimeseriesData(SizeType timeseriesInd, - Channel channel, - const float* dataBuffer, - const double* timestampBuffer, - SizeType numSamples) +Status NWBRecording::writeTimeseriesData( + const std::string& containerName, + const SizeType& timeseriesInd, + const Channel& channel, + const std::vector& dataShape, + const std::vector& positionOffset, + const void* data, + const void* timestamps) { - // check if more samples than allocated buffer size - if (numSamples > bufferSize) { - bufferSize = numSamples; - scaledBuffer = std::make_unique(numSamples); - intBuffer = std::make_unique(numSamples); - } - - // copy data and multiply by scaling factor - double multFactor = 1 / (32767.0f * channel.getBitVolts()); - std::transform(dataBuffer, - dataBuffer + numSamples, - scaledBuffer.get(), - [multFactor](float value) { return value * multFactor; }); + TimeSeries* ts = nwbfile->getTimeSeries(timeseriesInd); - // convert float to int16 - std::transform( - scaledBuffer.get(), - scaledBuffer.get() + numSamples, - intBuffer.get(), - [](float value) - { return static_cast(std::clamp(value, -32768.0f, 32767.0f)); }); + if (ts == nullptr) + return Status::Failure; // write data and timestamps to datasets - nwbfile->writeTimeseriesData(timeseriesInd, - channel.localIndex, - numSamples, - BaseDataType::I16, - intBuffer.get()); if (channel.localIndex == 0) { - nwbfile->writeTimeseriesTimestamps( - timeseriesInd, numSamples, BaseDataType::F64, timestampBuffer); + // write with timestamps if it's the first channel + return ts->writeData(dataShape, positionOffset, data, timestamps); + } else { + // write without 
timestamps if its another channel in the same timeseries + return ts->writeData(dataShape, positionOffset, data); } } diff --git a/src/nwb/NWBRecording.hpp b/src/nwb/NWBRecording.hpp index e30d2ce1..94238933 100644 --- a/src/nwb/NWBRecording.hpp +++ b/src/nwb/NWBRecording.hpp @@ -43,7 +43,7 @@ class NWBRecording Status openFile(const std::string& rootFolder, const std::string& baseName, int experimentNumber, - std::vector recordingArrays, + std::vector recordingArrays, const std::string& IOType = "HDF5"); /** @@ -53,33 +53,31 @@ class NWBRecording void closeFile(); /** - * @brief Writes data for a timeseries. + * @brief Write timeseries to an NWB file. + * @param containerName The name of the timeseries group to write to. + * @param timeseriesInd The index of the timeseries dataset within the + * timeseries group. + * @param channel The channel index to use for writing timestamps. + * @param dataShape The size of the data block. + * @param positionOffset The position of the data block to write to. + * @param data A pointer to the data block. + * @param timestamps A pointer to the timestamps block. May be null if + * multidimensional TimeSeries and only need to write the timestamps once but + * write data multiple times. + * @return The status of the write operation. */ - void writeTimeseriesData(SizeType timeSeriesID, - Channel systemChannel, - const float* dataBuffer, - const double* timestampBuffer, - SizeType size); + Status writeTimeseriesData(const std::string& containerName, + const SizeType& timeseriesInd, + const Channel& channel, + const std::vector& dataShape, + const std::vector& positionOffset, + const void* data, + const void* timestamps); private: /** * @brief Pointer to the current NWB file. */ std::unique_ptr nwbfile; - - /** - * @brief Holds scaled samples for writing. - */ - std::unique_ptr scaledBuffer = nullptr; - - /** - * @brief Holds integer samples for writing. - */ - std::unique_ptr intBuffer = nullptr; - - /** - * @brief Maximum buffer size for writing data. 
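
Since the scaling buffers were removed from `NWBRecording`, the float-to-int16 conversion now happens on the caller side via the new `transformToInt16` helper. A sketch of that hand-off follows; it assumes `transformToInt16` returns a `std::unique_ptr<int16_t[]>` and that the shape/offset vectors are `std::vector<SizeType>` (both stripped of template arguments in this rendering). The function, the running-offset argument, and the "acquisition" container name are illustrative only.

```cpp
#include <memory>
#include <vector>

#include "Channel.hpp"
#include "Types.hpp"
#include "Utils.hpp"
#include "nwb/NWBRecording.hpp"

using namespace AQNWB;

// Scale one channel's float samples to int16 using the channel's bitVolts
// factor, then hand the block to the recording. Timestamps are only written
// for the group's first channel (localIndex == 0) inside writeTimeseriesData.
Status writeScaledChannel(NWB::NWBRecording& recording,
                          const Channel& channel,
                          SizeType timeseriesInd,
                          SizeType samplesWritten,
                          SizeType numSamples,
                          const float* dataBuffer,
                          const double* timestampBuffer)
{
  std::unique_ptr<int16_t[]> intData =
      transformToInt16(numSamples, channel.getBitVolts(), dataBuffer);

  std::vector<SizeType> dataShape = {numSamples, 1};
  std::vector<SizeType> positionOffset = {samplesWritten, channel.localIndex};

  return recording.writeTimeseriesData("acquisition",
                                       timeseriesInd,
                                       channel,
                                       dataShape,
                                       positionOffset,
                                       intData.get(),
                                       timestampBuffer);
}
```
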
- */ - SizeType bufferSize; }; } // namespace AQNWB::NWB diff --git a/src/nwb/base/TimeSeries.cpp b/src/nwb/base/TimeSeries.cpp index 2ba6cf29..a9c008b3 100644 --- a/src/nwb/base/TimeSeries.cpp +++ b/src/nwb/base/TimeSeries.cpp @@ -7,11 +7,25 @@ using namespace AQNWB::NWB; /** Constructor */ TimeSeries::TimeSeries(const std::string& path, std::shared_ptr io, + const BaseDataType& dataType, + const std::string& unit, const std::string& description, - const std::string& comments) + const std::string& comments, + const SizeArray& dsetSize, + const SizeArray& chunkSize, + const float& conversion, + const float& resolution, + const float& offset) : Container(path, io) + , dataType(dataType) + , unit(unit) , description(description) , comments(comments) + , dsetSize(dsetSize) + , chunkSize(chunkSize) + , conversion(conversion) + , resolution(resolution) + , offset(offset) { } @@ -22,6 +36,45 @@ void TimeSeries::initialize() { Container::initialize(); + // setup attributes io->createCommonNWBAttributes(path, "core", neurodataType, description); io->createAttribute(comments, path, "comments"); + + // setup datasets + this->data = std::unique_ptr(io->createArrayDataSet( + dataType, dsetSize, chunkSize, getPath() + "/data")); + io->createDataAttributes(getPath(), conversion, resolution, unit); + + SizeArray tsDsetSize = { + dsetSize[0]}; // timestamps match data along first dimension + this->timestamps = std::unique_ptr(io->createArrayDataSet( + this->timestampsType, tsDsetSize, chunkSize, getPath() + "/timestamps")); + io->createTimestampsAttributes(getPath()); +} + +Status TimeSeries::writeData(const std::vector& dataShape, + const std::vector& positionOffset, + const void* data, + const void* timestamps) +{ + Status tsStatus = Status::Success; + if (timestamps != nullptr) { + const std::vector timestampsShape = { + dataShape[0]}; // timestamps should match shape of the first data + // dimension + const std::vector timestampsPositionOffset = {positionOffset[0]}; + tsStatus = this->timestamps->writeDataBlock(timestampsShape, + timestampsPositionOffset, + this->timestampsType, + timestamps); + } + + Status dataStatus = this->data->writeDataBlock( + dataShape, positionOffset, this->dataType, data); + + if ((dataStatus != Status::Success) or (tsStatus != Status::Success)) { + return Status::Failure; + } else { + return Status::Success; + } } diff --git a/src/nwb/base/TimeSeries.hpp b/src/nwb/base/TimeSeries.hpp index 4705d195..3bd08793 100644 --- a/src/nwb/base/TimeSeries.hpp +++ b/src/nwb/base/TimeSeries.hpp @@ -22,14 +22,36 @@ class TimeSeries : public Container */ TimeSeries(const std::string& path, std::shared_ptr io, + const BaseDataType& dataType, + const std::string& unit, const std::string& description = "no description", - const std::string& comments = "no comments"); + const std::string& comments = "no comments", + const SizeArray& dsetSize = SizeArray {0}, + const SizeArray& chunkSize = SizeArray {1}, + const float& conversion = 1.0f, + const float& resolution = -1.0f, + const float& offset = 0.0f); /** * @brief Destructor */ ~TimeSeries(); + /** + * @brief Writes a timeseries data block to the file. + * @param dataShape The size of the data block. + * @param positionOffset The position of the data block to write to. + * @param data A pointer to the data block. + * @param timestamps A pointer to the timestamps block. May be null if + * multidimensional TimeSeries and only need to write the timestamps once but + * write data in separate blocks. + * @return The status of the write operation. 
+ */ + Status writeData(const std::vector& dataShape, + const std::vector& positionOffset, + const void* data, + const void* timestamps = nullptr); + /** * @brief Initializes the TimeSeries by creating NWB related attributes and * writing the description and comment metadata. @@ -46,7 +68,23 @@ class TimeSeries : public Container */ std::unique_ptr timestamps; -private: + /** + * @brief Data type of the data. + */ + BaseDataType dataType; + + /** + * @brief Data type of the timestamps (float64). + */ + BaseDataType timestampsType = BaseDataType::F64; + + /** + * @brief Base unit of measurement for working with the data. Actual stored + * values are not necessarily stored in these units. To access the data in + * these units, multiply ‘data’ by ‘conversion’ and add ‘offset’. + */ + std::string unit; + /** * @brief The description of the TimeSeries. */ @@ -57,11 +95,41 @@ class TimeSeries : public Container */ std::string comments; + /** + * @brief Size used in dataset creation. Can be expanded when writing if + * needed. + */ + SizeArray dsetSize; + + /** + * @brief Chunking size used in dataset creation. + */ + SizeArray chunkSize; + + /** + * @brief Scalar to multiply each element in data to convert it to the + * specified ‘unit’. + */ + float conversion; + + /** + * @brief Smallest meaningful difference between values in data, stored in the + * specified by unit. + */ + float resolution; + + /** + * @brief Scalar to add to the data after scaling by ‘conversion’ to finalize + * its coercion to the specified ‘unit’. + */ + float offset; + /** * @brief The starting time of the TimeSeries. */ float startingTime = 0.0; +private: /** * @brief The neurodataType of the TimeSeries. */ diff --git a/src/nwb/ecephys/ElectricalSeries.cpp b/src/nwb/ecephys/ElectricalSeries.cpp index aa7dd01f..f69d3050 100644 --- a/src/nwb/ecephys/ElectricalSeries.cpp +++ b/src/nwb/ecephys/ElectricalSeries.cpp @@ -7,13 +7,28 @@ using namespace AQNWB::NWB; /** Constructor */ ElectricalSeries::ElectricalSeries(const std::string& path, std::shared_ptr io, + const BaseDataType& dataType, + const Types::ChannelVector& channelVector, + const std::string& electrodesTablePath, + const std::string& unit, const std::string& description, - const Types::ChannelGroup& channelGroup, - const SizeType& chunkSize, - const std::string& electrodesTablePath) - : TimeSeries(path, io, description) - , channelGroup(channelGroup) - , chunkSize(chunkSize) + const SizeArray& dsetSize, + const SizeArray& chunkSize, + const float& conversion, + const float& resolution, + const float& offset) + : TimeSeries(path, + io, + dataType, + unit, + description, + channelVector[0].comments, + dsetSize, + chunkSize, + channelVector[0].getConversion(), + resolution, + offset) + , channelVector(channelVector) , electrodesTablePath(electrodesTablePath) { } @@ -26,49 +41,55 @@ void ElectricalSeries::initialize() { TimeSeries::initialize(); - std::vector electrodeInds(channelGroup.size()); - for (size_t i = 0; i < channelGroup.size(); ++i) { - electrodeInds[i] = channelGroup[i].globalIndex; + // setup variables based on number of channels + std::vector electrodeInds(channelVector.size()); + for (size_t i = 0; i < channelVector.size(); ++i) { + electrodeInds[i] = channelVector[i].globalIndex; } - - // make data dataset - data = std::unique_ptr( - io->createDataSet(BaseDataType::I16, - SizeArray {0, channelGroup.size()}, - SizeArray {chunkSize}, - getPath() + "/data")); - io->createDataAttributes( - getPath(), channelGroup[0].getConversion(), -1.0f, "volts"); - - 
// make timestamps dataset - timestamps = std::unique_ptr( - io->createDataSet(BaseDataType::F64, - SizeArray {0}, - SizeArray {chunkSize}, - getPath() + "/timestamps")); - io->createTimestampsAttributes(getPath()); + samplesRecorded = SizeArray(channelVector.size(), 0); // make channel conversion dataset channelConversion = std::unique_ptr( - io->createDataSet(BaseDataType::F32, - SizeArray {1}, - SizeArray {chunkSize}, - getPath() + "/channel_conversion")); + io->createArrayDataSet(BaseDataType::F32, + SizeArray {1}, + chunkSize, + getPath() + "/channel_conversion")); io->createCommonNWBAttributes(getPath() + "/channel_conversion", "hdmf-common", "", "Bit volts values for all channels"); // make electrodes dataset - electrodesDataset = std::unique_ptr( - io->createDataSet(BaseDataType::I32, - SizeArray {1}, - SizeArray {chunkSize}, - getPath() + "/electrodes")); + electrodesDataset = std::unique_ptr(io->createArrayDataSet( + BaseDataType::I32, SizeArray {1}, chunkSize, getPath() + "/electrodes")); electrodesDataset->writeDataBlock( - channelGroup.size(), BaseDataType::I32, &electrodeInds[0]); + std::vector(1, channelVector.size()), + BaseDataType::I32, + &electrodeInds[0]); io->createCommonNWBAttributes( getPath() + "/electrodes", "hdmf-common", "DynamicTableRegion", ""); io->createReferenceAttribute( electrodesTablePath, getPath() + "/electrodes", "table"); } + +Status ElectricalSeries::writeChannel(SizeType channelInd, + const SizeType& numSamples, + const void* data, + const void* timestamps) +{ + // get offsets and datashape + std::vector dataShape = { + numSamples, 1}; // Note: schema has 1D and 3D but planning to deprecate + std::vector positionOffset = {samplesRecorded[channelInd], + channelInd}; + + // track samples recorded per channel + samplesRecorded[channelInd] += numSamples; + + // write channel data + if (channelInd == 0) { + return writeData(dataShape, positionOffset, data, timestamps); + } else { + return writeData(dataShape, positionOffset, data); + } +} diff --git a/src/nwb/ecephys/ElectricalSeries.hpp b/src/nwb/ecephys/ElectricalSeries.hpp index 10e1c549..cfc52c39 100644 --- a/src/nwb/ecephys/ElectricalSeries.hpp +++ b/src/nwb/ecephys/ElectricalSeries.hpp @@ -24,10 +24,16 @@ class ElectricalSeries : public TimeSeries */ ElectricalSeries(const std::string& path, std::shared_ptr io, - const std::string& description, - const Types::ChannelGroup& channelGroup, - const SizeType& chunkSize, - const std::string& electrodesTablePath); + const BaseDataType& dataType, + const Types::ChannelVector& channelVector, + const std::string& electrodesTablePath, + const std::string& unit = "volts", + const std::string& description = "no description", + const SizeArray& dsetSize = SizeArray {0}, + const SizeArray& chunkSize = SizeArray {1}, + const float& conversion = 1.0f, + const float& resolution = -1.0f, + const float& offset = 0.0f); /** * @brief Destructor @@ -40,14 +46,22 @@ class ElectricalSeries : public TimeSeries void initialize(); /** - * @brief Channel group that this time series is associated with. + * @brief Writes a channel to an ElectricalSeries dataset. + * @param channelInd The channel index within the ElectricalSeries + * @param numSamples The number of samples to write (length in time). + * @param data A pointer to the data block. + * @param timestamps A pointer to the timestamps block. + * @return The status of the write operation. 
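
A minimal sketch of the per-channel write path described above (everything other than `writeChannel` is illustrative, and the buffers are assumed to be float32 to match the data type the series was created with, as in the tests): each channel passes its own sample count, `samplesRecorded` tracks the running time offset internally, and timestamps are only written along with channel 0.

```cpp
#include <vector>

#include "Types.hpp"
#include "nwb/ecephys/ElectricalSeries.hpp"

using namespace AQNWB;

// Write one buffer's worth of samples for every channel of an initialized
// ElectricalSeries; repeated calls append along the time dimension because
// the series advances its per-channel sample counters.
Status writeBuffer(NWB::ElectricalSeries& es,
                   const std::vector<std::vector<float>>& channelData,
                   const std::vector<double>& timestamps)
{
  for (SizeType ch = 0; ch < channelData.size(); ++ch) {
    Status s = es.writeChannel(
        ch, channelData[ch].size(), channelData[ch].data(), timestamps.data());
    if (s != Status::Success)
      return s;
  }
  return Status::Success;
}
```
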
*/ - Types::ChannelGroup channelGroup; + Status writeChannel(SizeType channelInd, + const SizeType& numSamples, + const void* data, + const void* timestamps); /** - * @brief Chunk size to use in dataset creation. + * @brief Channel group that this time series is associated with. */ - SizeType chunkSize; + Types::ChannelVector channelVector; /** * @brief Path to the electrodes table this time series references @@ -69,5 +83,10 @@ class ElectricalSeries : public TimeSeries * @brief The neurodataType of the TimeSeries. */ std::string neurodataType = "ElectricalSeries"; + + /** + * @brief The number of samples already written per channel. + */ + SizeArray samplesRecorded; }; } // namespace AQNWB::NWB diff --git a/src/nwb/file/ElectrodeTable.cpp b/src/nwb/file/ElectrodeTable.cpp index 998c374e..b3076d63 100644 --- a/src/nwb/file/ElectrodeTable.cpp +++ b/src/nwb/file/ElectrodeTable.cpp @@ -24,15 +24,15 @@ void ElectrodeTable::initialize() DynamicTable::initialize(); electrodeDataset->dataset = - std::unique_ptr(io->createDataSet( + std::unique_ptr(io->createArrayDataSet( BaseDataType::I32, SizeArray {1}, SizeArray {1}, path + "id")); groupNamesDataset->dataset = std::unique_ptr( - io->createDataSet(BaseDataType::STR(250), - SizeArray {0}, - SizeArray {1}, - path + "group_name")); + io->createArrayDataSet(BaseDataType::STR(250), + SizeArray {0}, + SizeArray {1}, + path + "group_name")); locationsDataset - ->dataset = std::unique_ptr(io->createDataSet( + ->dataset = std::unique_ptr(io->createArrayDataSet( BaseDataType::STR(250), SizeArray {0}, SizeArray {1}, path + "location")); } @@ -78,6 +78,6 @@ void ElectrodeTable::setColNames(const std::vector& newColNames) // Getter for groupPath std::string ElectrodeTable::getGroupPath() const { - return groupReferences[0]; // all channel in channelGroup should have the + return groupReferences[0]; // all channels in ChannelVector should have the // same groupName } diff --git a/src/nwb/hdmf/table/DynamicTable.cpp b/src/nwb/hdmf/table/DynamicTable.cpp index 47f3cd64..e3e1c961 100644 --- a/src/nwb/hdmf/table/DynamicTable.cpp +++ b/src/nwb/hdmf/table/DynamicTable.cpp @@ -35,9 +35,11 @@ void DynamicTable::addColumn(const std::string& name, if (vectorData->dataset == nullptr) { std::cerr << "VectorData dataset is not initialized" << std::endl; } else { + // write in loop because variable length string for (SizeType i = 0; i < values.size(); i++) - vectorData->dataset->writeDataBlock( - 1, BaseDataType::STR(values[i].size()), &values[i]); + vectorData->dataset->writeDataBlock(std::vector(1, 1), + BaseDataType::STR(values[i].size()), + &values[i]); io->createCommonNWBAttributes( path + name, "hdmf-common", "VectorData", colDescription); } @@ -50,7 +52,7 @@ void DynamicTable::setRowIDs(std::unique_ptr& elementIDs, std::cerr << "ElementIdentifiers dataset is not initialized" << std::endl; } else { elementIDs->dataset->writeDataBlock( - values.size(), BaseDataType::I32, &values[0]); + std::vector(1, values.size()), BaseDataType::I32, &values[0]); io->createCommonNWBAttributes( path + "id", "hdmf-common", "ElementIdentifiers"); } diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index 76874234..dbb5def8 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -12,7 +12,9 @@ include(Catch) # ---- Tests ---- add_executable(aq-nwb_test + testBase.cpp testEcephys.cpp + testFile.cpp testHDF5IO.cpp testNWBFile.cpp testNWBRecording.cpp) diff --git a/tests/testBase.cpp b/tests/testBase.cpp new file mode 100644 index 00000000..d296292d --- /dev/null +++ 
b/tests/testBase.cpp @@ -0,0 +1,84 @@ +#include +#include + +#include "BaseIO.hpp" +#include "Types.hpp" +#include "Utils.hpp" +#include "nwb/base/TimeSeries.hpp" +#include "testUtils.hpp" + +using namespace AQNWB; + +TEST_CASE("TimeSeries", "[base]") +{ + // Prepare test data + SizeType numSamples = 10; + std::string dataPath = "/tsdata"; + std::vector dataShape = {numSamples}; + std::vector positionOffset = {0}; + BaseDataType dataType = BaseDataType::F32; + std::vector data = getMockData1D(numSamples); + BaseDataType timestampsType = BaseDataType::F64; + std::vector timestamps = getMockTimestamps(numSamples, 1); + + SECTION("test writing timeseries data block") + { + // setup timeseries object + std::string path = getTestFilePath("testTimeseries.h5"); + std::shared_ptr io = createIO("HDF5", path); + io->open(); + NWB::TimeSeries ts = NWB::TimeSeries(dataPath, io, dataType, "unit"); + ts.initialize(); + + // Write data to file + Status writeStatus = + ts.writeData(dataShape, positionOffset, data.data(), timestamps.data()); + REQUIRE(writeStatus == Status::Success); + + // Read data back from file + double* tsBuffer = new double[numSamples]; + BaseRecordingData* tsDset = io->getDataSet(dataPath + "/timestamps"); + readH5DataBlock(static_cast(tsDset)->getDataSet(), + timestampsType, + tsBuffer); + std::vector tsRead(tsBuffer, tsBuffer + numSamples); + delete[] tsBuffer; + REQUIRE(tsRead == timestamps); + + // Read data back from file + float* dataBuffer = new float[numSamples]; + BaseRecordingData* dataDset = io->getDataSet(dataPath + "/data"); + readH5DataBlock( + static_cast(dataDset)->getDataSet(), + dataType, + dataBuffer); + std::vector dataRead(dataBuffer, dataBuffer + numSamples); + delete[] dataBuffer; + REQUIRE_THAT(dataRead, Catch::Matchers::Approx(data).margin(1)); + } + + SECTION("test writing timeseries without timestamps") + { + // setup timeseries object + std::string path = getTestFilePath("testTimeseriesNoTimestamps.h5"); + std::shared_ptr io = createIO("HDF5", path); + io->open(); + NWB::TimeSeries ts = NWB::TimeSeries(dataPath, io, dataType, "unit"); + ts.initialize(); + + // Write data to file + Status writeStatus = ts.writeData(dataShape, positionOffset, data.data()); + REQUIRE(writeStatus == Status::Success); + + // Read data back from file + double* tsBuffer = new double[numSamples]; + BaseRecordingData* tsDset = io->getDataSet(dataPath + "/timestamps"); + readH5DataBlock(static_cast(tsDset)->getDataSet(), + timestampsType, + tsBuffer); + std::vector tsRead(tsBuffer, tsBuffer + numSamples); + delete[] tsBuffer; + std::vector zeros(numSamples, 0.0); + REQUIRE(tsRead == zeros); + } +} diff --git a/tests/testEcephys.cpp b/tests/testEcephys.cpp index fe83bf4f..5c324e19 100644 --- a/tests/testEcephys.cpp +++ b/tests/testEcephys.cpp @@ -1,76 +1,166 @@ - +#include #include +#include #include "BaseIO.hpp" #include "Channel.hpp" #include "Types.hpp" +#include "Utils.hpp" #include "hdf5/HDF5IO.hpp" +#include "nwb/device/Device.hpp" +#include "nwb/ecephys/ElectricalSeries.hpp" +#include "nwb/file/ElectrodeGroup.hpp" #include "nwb/file/ElectrodeTable.hpp" #include "testUtils.hpp" using namespace AQNWB; -TEST_CASE("ElectrodeTable", "[ecephys]") +TEST_CASE("ElectricalSeries", "[ecephys]") { - std::string path = "/electrodes/"; + // setup recording info + SizeType numSamples = 100; + SizeType numChannels = 2; + SizeType bufferSize = numSamples / 5; + std::vector dataBuffer(bufferSize); + std::vector timestampsBuffer(bufferSize); + std::vector mockArrays = getMockChannelArrays(); + 
std::string dataPath = "/esdata"; + BaseDataType dataType = BaseDataType::F32; + std::vector> mockData = + getMockData2D(numSamples, numChannels); + std::vector mockTimestamps = getMockTimestamps(numSamples, 1); + std::string devicePath = "/device"; + std::string electrodePath = "/elecgroup/"; + SECTION("test initialization") { - std::string filename = getTestFilePath("electrodeTable.h5"); - std::shared_ptr io = std::make_unique(filename); - io->open(); - io->createGroup("/general"); - io->createGroup("/general/extracellular_ephys"); - io->createGroup("/general/extracellular_ephys/array0"); - - std::vector channelIDs = {0, 1, 2}; - std::vector channels = { - Channel("ch0", "array0", channelIDs[0], 0), - Channel("ch1", "array0", channelIDs[1], 1), - Channel("ch2", "array0", channelIDs[2], 2), - }; - - NWB::ElectrodeTable electrodeTable(path, io); - electrodeTable.initialize(); - electrodeTable.addElectrodes(channels); - electrodeTable.finalize(); - - // Check if id datasets are created correctly - SizeType numChannels = 3; - BaseRecordingData* id_data = io->getDataSet(path + "id"); - int* buffer = new int[numChannels]; - static_cast(id_data)->readDataBlock( - BaseDataType::I32, buffer); - std::vector read_channels(buffer, buffer + numChannels); - delete[] buffer; - REQUIRE(channelIDs == read_channels); + // TODO } - SECTION("test initialization with empty channels") + SECTION("test linking to electrode table region") { - std::vector channels = {}; + // TODO + } - std::string filename = getTestFilePath("electrodeTableNoData.h5"); - std::shared_ptr io = std::make_unique(filename); + SECTION("test writing channels") + { + // setup io object + std::string path = getTestFilePath("ElectricalSeries.h5"); + std::shared_ptr io = createIO("HDF5", path); io->open(); - NWB::ElectrodeTable electrodeTable(path, io); - electrodeTable.initialize(); + + // setup electrode table, device, and electrode group + std::string elecTablePath = "/electrodes/"; + NWB::ElectrodeTable elecTable = NWB::ElectrodeTable(elecTablePath, io); + elecTable.initialize(); + + // setup electrical series + NWB::ElectricalSeries es = + NWB::ElectricalSeries(dataPath, + io, + dataType, + mockArrays[0], + elecTable.getPath(), + "volts", + "no description", + SizeArray {0, mockArrays[0].size()}); + es.initialize(); + + // write channel data + for (SizeType ch = 0; ch < numChannels; ++ch) { + es.writeChannel( + ch, numSamples, mockData[ch].data(), mockTimestamps.data()); + } + io->close(); + + // Read data back from file + std::unique_ptr file = + std::make_unique(path, H5F_ACC_RDONLY); + std::unique_ptr dataset = + std::make_unique(file->openDataSet(dataPath + "/data")); + std::vector> dataOut(numChannels, + std::vector(numSamples)); + float* buffer = new float[numSamples * numChannels]; + + H5::DataSpace fSpace = dataset->getSpace(); + hsize_t dims[1] = {numSamples * numChannels}; + H5::DataSpace mSpace(1, dims); + dataset->read(buffer, H5::PredType::NATIVE_FLOAT, mSpace, fSpace); + + for (SizeType i = 0; i < numChannels; ++i) { + for (SizeType j = 0; j < numSamples; ++j) { + dataOut[i][j] = buffer[j * numChannels + i]; + } + } + delete[] buffer; + REQUIRE_THAT(dataOut[0], Catch::Matchers::Approx(mockData[0]).margin(1)); + REQUIRE_THAT(dataOut[1], Catch::Matchers::Approx(mockData[1]).margin(1)); } - SECTION("test table creation with multiple arrays") + SECTION("test samples recorded tracking") { - // TODO - } -} + // setup io object + std::string path = getTestFilePath("ElectricalSeriesSampleTracking.h5"); + std::shared_ptr io = 
createIO("HDF5", path); + io->open(); -TEST_CASE("ElectricalSeries", "[ecephys]") -{ - std::string filename = getTestFilePath("ElectricalSeries.h5"); + // setup electrode table + std::string elecTablePath = "/electrodes/"; + NWB::ElectrodeTable elecTable = NWB::ElectrodeTable(elecTablePath, io); + elecTable.initialize(); - // setup recording info - std::vector mockArrays = getMockChannelArrays(); + // setup electrical series + NWB::ElectricalSeries es = + NWB::ElectricalSeries(dataPath, + io, + dataType, + mockArrays[0], + elecTable.getPath(), + "volts", + "no description", + SizeArray {0, mockArrays[0].size()}); + es.initialize(); + + // write channel data in segments + for (SizeType ch = 0; ch < numChannels; ++ch) { + SizeType samplesRecorded = 0; + for (SizeType b = 0; b * bufferSize < numSamples; b += 1) { + // copy chunk of data + std::copy(mockData[ch].begin() + samplesRecorded, + mockData[ch].begin() + samplesRecorded + bufferSize, + dataBuffer.begin()); + std::copy(mockTimestamps.begin() + samplesRecorded, + mockTimestamps.begin() + samplesRecorded + bufferSize, + timestampsBuffer.begin()); + + es.writeChannel( + ch, dataBuffer.size(), dataBuffer.data(), timestampsBuffer.data()); + samplesRecorded += bufferSize; + } + } + io->close(); - std::string path = "/electrodes/"; - SECTION("test initialization") {} + // Read data back from file + std::unique_ptr file = + std::make_unique(path, H5F_ACC_RDONLY); + std::unique_ptr dataset = + std::make_unique(file->openDataSet(dataPath + "/data")); + std::vector> dataOut(numChannels, + std::vector(numSamples)); + float* buffer = new float[numSamples * numChannels]; - SECTION("test linking to electrode table region") {} + H5::DataSpace fSpace = dataset->getSpace(); + hsize_t dims[1] = {numSamples * numChannels}; + H5::DataSpace mSpace(1, dims); + dataset->read(buffer, H5::PredType::NATIVE_FLOAT, mSpace, fSpace); + + for (SizeType i = 0; i < numChannels; ++i) { + for (SizeType j = 0; j < numSamples; ++j) { + dataOut[i][j] = buffer[j * numChannels + i]; + } + } + delete[] buffer; + REQUIRE_THAT(dataOut[0], Catch::Matchers::Approx(mockData[0]).margin(1)); + REQUIRE_THAT(dataOut[1], Catch::Matchers::Approx(mockData[1]).margin(1)); + } } diff --git a/tests/testFile.cpp b/tests/testFile.cpp new file mode 100644 index 00000000..7c815102 --- /dev/null +++ b/tests/testFile.cpp @@ -0,0 +1,64 @@ +#include + +#include "BaseIO.hpp" +#include "Channel.hpp" +#include "Types.hpp" +#include "hdf5/HDF5IO.hpp" +#include "nwb/file/ElectrodeTable.hpp" +#include "testUtils.hpp" + +using namespace AQNWB; + +TEST_CASE("ElectrodeTable", "[ecephys]") +{ + std::string path = "/electrodes/"; + SECTION("test initialization") + { + std::string filename = getTestFilePath("electrodeTable.h5"); + std::shared_ptr io = std::make_unique(filename); + io->open(); + io->createGroup("/general"); + io->createGroup("/general/extracellular_ephys"); + io->createGroup("/general/extracellular_ephys/array0"); + + std::vector channelIDs = {0, 1, 2}; + std::vector channels = { + Channel("ch0", "array0", channelIDs[0], 0), + Channel("ch1", "array0", channelIDs[1], 1), + Channel("ch2", "array0", channelIDs[2], 2), + }; + + NWB::ElectrodeTable electrodeTable(path, io); + electrodeTable.initialize(); + electrodeTable.addElectrodes(channels); + electrodeTable.finalize(); + + // Check if id datasets are created correctly + SizeType numChannels = 3; + BaseRecordingData* id_data = io->getDataSet(path + "id"); + int* buffer = new int[numChannels]; + readH5DataBlock( + 
+    for (SizeType i = 0; i < numChannels; ++i) {
+      for (SizeType j = 0; j < numSamples; ++j) {
+        dataOut[i][j] = buffer[j * numChannels + i];
+      }
+    }
+    delete[] buffer;
+    REQUIRE_THAT(dataOut[0], Catch::Matchers::Approx(mockData[0]).margin(1));
+    REQUIRE_THAT(dataOut[1], Catch::Matchers::Approx(mockData[1]).margin(1));
+  }
 }
diff --git a/tests/testFile.cpp b/tests/testFile.cpp
new file mode 100644
index 00000000..7c815102
--- /dev/null
+++ b/tests/testFile.cpp
@@ -0,0 +1,64 @@
+#include <catch2/catch_test_macros.hpp>
+
+#include "BaseIO.hpp"
+#include "Channel.hpp"
+#include "Types.hpp"
+#include "hdf5/HDF5IO.hpp"
+#include "nwb/file/ElectrodeTable.hpp"
+#include "testUtils.hpp"
+
+using namespace AQNWB;
+
+TEST_CASE("ElectrodeTable", "[ecephys]")
+{
+  std::string path = "/electrodes/";
+  SECTION("test initialization")
+  {
+    std::string filename = getTestFilePath("electrodeTable.h5");
+    std::shared_ptr<BaseIO> io = std::make_unique<HDF5::HDF5IO>(filename);
+    io->open();
+    io->createGroup("/general");
+    io->createGroup("/general/extracellular_ephys");
+    io->createGroup("/general/extracellular_ephys/array0");
+
+    std::vector<SizeType> channelIDs = {0, 1, 2};
+    std::vector<Channel> channels = {
+        Channel("ch0", "array0", channelIDs[0], 0),
+        Channel("ch1", "array0", channelIDs[1], 1),
+        Channel("ch2", "array0", channelIDs[2], 2),
+    };
+
+    NWB::ElectrodeTable electrodeTable(path, io);
+    electrodeTable.initialize();
+    electrodeTable.addElectrodes(channels);
+    electrodeTable.finalize();
+
+    // Check if id datasets are created correctly
+    SizeType numChannels = 3;
+    BaseRecordingData* id_data = io->getDataSet(path + "id");
+    int* buffer = new int[numChannels];
+    readH5DataBlock(
+        static_cast<HDF5::HDF5RecordingData*>(id_data)->getDataSet(),
+        BaseDataType::I32,
+        buffer);
+    std::vector<SizeType> read_channels(buffer, buffer + numChannels);
+    delete[] buffer;
+    REQUIRE(channelIDs == read_channels);
+  }
+
+  SECTION("test initialization with empty channels")
+  {
+    std::vector<Channel> channels = {};
+
+    std::string filename = getTestFilePath("electrodeTableNoData.h5");
+    std::shared_ptr<BaseIO> io = std::make_unique<HDF5::HDF5IO>(filename);
+    io->open();
+    NWB::ElectrodeTable electrodeTable(path, io);
+    electrodeTable.initialize();
+  }
+
+  SECTION("test table creation with multiple arrays")
+  {
+    // TODO
+  }
+}
diff --git a/tests/testHDF5IO.cpp b/tests/testHDF5IO.cpp
index f7e1b020..e13e03cc 100644
--- a/tests/testHDF5IO.cpp
+++ b/tests/testHDF5IO.cpp
@@ -40,14 +40,193 @@ TEST_CASE("writeGroup", "[hdf5io]")
 
 TEST_CASE("writeDataset", "[hdf5io]")
 {
-  SECTION("create dataset and write data row")
+  std::vector<int> testData = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10};
+
+  SECTION("write 1D data block to 1D dataset")
   {
-    // TODO
+    // open file
+    std::string path = getTestFilePath("1DData1DDataset.h5");
+    std::unique_ptr<HDF5::HDF5IO> hdf5io = std::make_unique<HDF5::HDF5IO>(path);
+    hdf5io->open();
+
+    // Set up test data
+    std::string dataPath = "/1DData1DDataset";
+    SizeType numSamples = 10;
+
+    // Create HDF5RecordingData object and dataset
+    BaseRecordingData* dataset = hdf5io->createArrayDataSet(
+        BaseDataType::I32, SizeArray {0}, SizeArray {1}, dataPath);
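+    // The dataset is created empty (initial size {0}, chunk size {1}) and is
+    // extended as the data block below is written.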
+
+    // Write data block
+    std::vector<SizeType> dataShape = {numSamples};
+    std::vector<SizeType> positionOffset = {0};
+    static_cast<HDF5::HDF5RecordingData*>(dataset)->writeDataBlock(
+        dataShape, positionOffset, BaseDataType::I32, &testData[0]);
+
+    BaseRecordingData* dataRead = hdf5io->getDataSet(dataPath);
+    int* buffer = new int[numSamples];
+    readH5DataBlock(
+        static_cast<HDF5::HDF5RecordingData*>(dataRead)->getDataSet(),
+        BaseDataType::I32,
+        buffer);
+    std::vector<int> dataOut(buffer, buffer + numSamples);
+    delete[] buffer;
+
+    REQUIRE(dataOut == testData);
+    hdf5io->close();
   }
 
-  SECTION("create dataset and write data block")
+  SECTION("write 1D data block to 2D dataset")
   {
-    // TODO
+    // open file
+    std::string path = getTestFilePath("1DData2DDataset.h5");
+    std::unique_ptr<HDF5::HDF5IO> hdf5io = std::make_unique<HDF5::HDF5IO>(path);
+    hdf5io->open();
+
+    // Set up test data for 2D
+    SizeType numRows = 1, numCols = 10;
+    std::string dataPath = "/1DData2DDataset";
+    std::vector<SizeType> dataShape = {numRows, numCols};
+    std::vector<SizeType> positionOffset = {0, 0};
+
+    BaseRecordingData* dataset =
+        hdf5io->createArrayDataSet(BaseDataType::I32,
+                                   SizeArray {numRows, numCols},
+                                   SizeArray {0, 0},
+                                   dataPath);
+    Status status =
+        static_cast<HDF5::HDF5RecordingData*>(dataset)->writeDataBlock(
+            dataShape, positionOffset, BaseDataType::I32, testData.data());
+
+    // Read back the 1D data block from the 2D dataset
+    BaseRecordingData* dataRead1D = hdf5io->getDataSet(dataPath);
+    int* buffer1D = new int[numCols];
+    readH5DataBlock(
+        static_cast<HDF5::HDF5RecordingData*>(dataRead1D)->getDataSet(),
+        BaseDataType::I32,
+        buffer1D);
+    std::vector<int> dataOut1D(buffer1D, buffer1D + numCols);
+    delete[] buffer1D;
+
+    // Check if the written and read data match for 1D data block in 2D dataset
+    REQUIRE(dataOut1D == testData);
+    hdf5io->close();
+  }
+
+  SECTION("write 2D data block to 2D dataset")
+  {
+    // open file
+    std::string path = getTestFilePath("2DData2DDataset.h5");
+    std::unique_ptr<HDF5::HDF5IO> hdf5io = std::make_unique<HDF5::HDF5IO>(path);
+    hdf5io->open();
+
+    // Set up test data for 2D
+    SizeType numRows = 2, numCols = 5;
+    std::string dataPath = "/2DData2DDataset";
+    std::vector<SizeType> dataShape = {numRows, numCols};
+    std::vector<SizeType> positionOffset = {0, 0};
+
+    // Create HDF5RecordingData object and dataset for 2D data
+    BaseRecordingData* dataset = hdf5io->createArrayDataSet(
+        BaseDataType::I32,
+        SizeArray {numRows, numCols},  // Initial size
+        SizeArray {0, 0},  // chunking
+        dataPath);
+
+    // Write 2D data block
+    Status status =
+        static_cast<HDF5::HDF5RecordingData*>(dataset)->writeDataBlock(
+            dataShape, positionOffset, BaseDataType::I32, testData.data());
+
+    // Read back the 2D data block
+    BaseRecordingData* dataRead = hdf5io->getDataSet(dataPath);
+    int* buffer = new int[numRows * numCols];
+    readH5DataBlock(
+        static_cast<HDF5::HDF5RecordingData*>(dataRead)->getDataSet(),
+        BaseDataType::I32,
+        buffer);
+    std::vector<int> dataOut(buffer, buffer + numRows * numCols);
+    delete[] buffer;
+
+    // Check if the written and read data match
+    REQUIRE(dataOut == testData);
+    hdf5io->close();
+  }
+
+  SECTION("write 1D data block to 3D dataset")
+  {
+    // open file
+    std::string path = getTestFilePath("1DData3DDataset.h5");
+    std::unique_ptr<HDF5::HDF5IO> hdf5io = std::make_unique<HDF5::HDF5IO>(path);
+    hdf5io->open();
+
+    // Set up test data for 3D
+    SizeType depth = 1, height = 1, width = 10;
+    std::string dataPath = "1DData3DDataset";
+    std::vector<SizeType> dataShape = {depth, height, width};
+    std::vector<SizeType> positionOffset = {0, 0, 0};
+
+    BaseRecordingData* dataset =
+        hdf5io->createArrayDataSet(BaseDataType::I32,
+                                   SizeArray {depth, height, width},
+                                   SizeArray {0, 0, 0},
+                                   dataPath);
+    Status status =
+        static_cast<HDF5::HDF5RecordingData*>(dataset)->writeDataBlock(
+            dataShape, positionOffset, BaseDataType::I32, testData.data());
+
+    // Read back the 1D data block from 3D dataset
+    BaseRecordingData* dataRead1D = hdf5io->getDataSet(dataPath);
+    int* buffer1D =
+        new int[width];  // Assuming 'width' is the size of the 1D data block
+    readH5DataBlock(
+        static_cast<HDF5::HDF5RecordingData*>(dataRead1D)->getDataSet(),
+        BaseDataType::I32,
+        buffer1D);
+    std::vector<int> dataOut1D(buffer1D, buffer1D + width);
+    delete[] buffer1D;
+
+    // Check if the written and read data match for 1D data block in 3D dataset
+    REQUIRE(dataOut1D == testData);
+    hdf5io->close();
+  }
+
+  SECTION("write 2D data block to 3D dataset")
+  {
+    // open file
+    std::string path = getTestFilePath("2DData3DDataset.h5");
+    std::unique_ptr<HDF5::HDF5IO> hdf5io = std::make_unique<HDF5::HDF5IO>(path);
+    hdf5io->open();
+
+    SizeType depth = 1, height = 2, width = 5;
+    std::string dataPath = "2DData3DDataset";
+    std::vector<SizeType> dataShape = {depth, height, width};
+    std::vector<SizeType> positionOffset = {0, 0, 0};
+
+    BaseRecordingData* dataset =
+        hdf5io->createArrayDataSet(BaseDataType::I32,
+                                   SizeArray {depth, height, width},
+                                   SizeArray {0, 0, 0},
+                                   dataPath);
+    Status status =
+        static_cast<HDF5::HDF5RecordingData*>(dataset)->writeDataBlock(
+            dataShape, positionOffset, BaseDataType::I32, testData.data());
+
+    // Read back the 2D data block from 3D dataset
+    BaseRecordingData* dataRead2D = hdf5io->getDataSet(dataPath);
+    int* buffer2D =
+        new int[height * width];  // Assuming 'height' and 'width' define the
+                                  // 2D data block size
+    readH5DataBlock(
+        static_cast<HDF5::HDF5RecordingData*>(dataRead2D)->getDataSet(),
+        BaseDataType::I32,
+        buffer2D);
+    std::vector<int> dataOut2D(buffer2D, buffer2D + height * width);
+    delete[] buffer2D;
+
+    // Check if the written and read data match for 2D data block in 3D dataset
+    REQUIRE(dataOut2D == testData);
+    hdf5io->close();
   }
 }
@@ -100,4 +279,4 @@ TEST_CASE("writeAttributes", "[hdf5io]")
 
   // close file
   hdf5io.close();
-}
+}
\ No newline at end of file
diff --git a/tests/testNWBFile.cpp b/tests/testNWBFile.cpp
index cccade57..231b2e90 100644
--- a/tests/testNWBFile.cpp
+++ b/tests/testNWBFile.cpp
@@ -4,6 +4,7 @@
 #include "Utils.hpp"
 #include "hdf5/HDF5IO.hpp"
 #include "nwb/NWBFile.hpp"
+#include "nwb/base/TimeSeries.hpp"
 #include "testUtils.hpp"
 
 using namespace AQNWB;
@@ -19,9 +20,9 @@ TEST_CASE("saveNWBFile", "[nwb]")
   nwbfile.finalize();
 }
 
-TEST_CASE("startRecording", "[nwb]")
+TEST_CASE("createElectricalSeries", "[nwb]")
 {
-  std::string filename = getTestFilePath("test_recording.nwb");
+  std::string filename = getTestFilePath("createElectricalSeries.nwb");
 
   // initialize nwbfile object and create base structure
   NWB::NWBFile nwbfile(generateUuid(),
   nwbfile.initialize();
 
   // start recording
-  std::vector<Types::ChannelGroup> mockArrays = getMockChannelArrays();
-  Status result = nwbfile.startRecording(mockArrays);
+  std::vector<Types::ChannelVector> mockArrays = getMockChannelArrays(1, 2);
+  Status result = nwbfile.createElectricalSeries(mockArrays, BaseDataType::F32);
 
   // write timeseries data
   std::vector<float> mockData = {1.0f, 2.0f, 3.0f, 4.0f, 5.0f};
-  std::vector<float> mockTimestamps = {0.1f, 0.2f, 0.3f, 0.4f, 0.5f};
-  nwbfile.writeTimeseriesData(0, 0, 5, BaseDataType::F32, mockData.data());
-  nwbfile.writeTimeseriesTimestamps(
-      0, 5, BaseDataType::F32, mockTimestamps.data());
-  nwbfile.writeTimeseriesData(1, 0, 5, BaseDataType::F32, mockData.data());
-  nwbfile.writeTimeseriesTimestamps(
-      1, 5, BaseDataType::F32, mockTimestamps.data());
+  std::vector<double> mockTimestamps = {0.1, 0.2, 0.3, 0.4, 0.5};
+  std::vector<SizeType> positionOffset = {0, 0};
+  std::vector<SizeType> dataShape = {mockData.size(), 0};
+
+  NWB::TimeSeries* ts0 = nwbfile.getTimeSeries(0);
+  ts0->writeData(
+      dataShape, positionOffset, mockData.data(), mockTimestamps.data());
+  NWB::TimeSeries* ts1 = nwbfile.getTimeSeries(1);
+  ts1->writeData(
+      dataShape, positionOffset, mockData.data(), mockTimestamps.data());
 
   nwbfile.finalize();
diff --git a/tests/testNWBRecording.cpp b/tests/testNWBRecording.cpp
index 20d7de79..4d24840c 100644
--- a/tests/testNWBRecording.cpp
+++ b/tests/testNWBRecording.cpp
@@ -6,6 +6,7 @@
 #include "BaseIO.hpp"
 #include "Channel.hpp"
 #include "Types.hpp"
+#include "Utils.hpp"
 #include "hdf5/HDF5IO.hpp"
 #include "nwb/NWBRecording.hpp"
 #include "nwb/file/ElectrodeTable.hpp"
@@ -25,16 +26,16 @@ TEST_CASE("writeContinuousData", "[recording]")
 
   // setup mock data
   SizeType numChannels = 4;
-  SizeType numSamples = 1000;
+  SizeType numSamples = 300;
   SizeType samplesRecorded = 0;
   SizeType bufferSize = numSamples / 10;
   std::vector<float> dataBuffer(bufferSize);
   std::vector<double> timestampsBuffer(bufferSize);
 
-  std::vector<Types::ChannelGroup> mockRecordingArrays =
+  std::vector<Types::ChannelVector> mockRecordingArrays =
       getMockChannelArrays();
   std::vector<std::vector<float>> mockData =
-      getMockData(numChannels, numSamples);
+      getMockData2D(numSamples, numChannels);
   std::vector<double> mockTimestamps = getMockTimestamps(numSamples);
 
   // open files
@@ -46,23 +47,31 @@ TEST_CASE("writeContinuousData", "[recording]")
   while (isRecording) {
     // write data to the file for each channel
     for (SizeType i = 0; i < mockRecordingArrays.size(); ++i) {
-      const auto& channelGroup = mockRecordingArrays[i];
-      for (const auto& channel : channelGroup) {
+      const auto& channelVector = mockRecordingArrays[i];
+      for (const auto& channel : channelVector) {
         // copy data into buffer
         std::copy(mockData[channel.globalIndex].begin() + samplesRecorded,
                   mockData[channel.globalIndex].begin() + samplesRecorded
-                      + numSamples / 10,
+                      + bufferSize,
                   dataBuffer.begin());
         std::copy(mockTimestamps.begin() + samplesRecorded,
-                  mockTimestamps.begin() + samplesRecorded + numSamples / 10,
+                  mockTimestamps.begin() + samplesRecorded + bufferSize,
                   timestampsBuffer.begin());
 
         // write timseries data
-        nwbRecording.writeTimeseriesData(i,
+        std::vector<SizeType> positionOffset = {samplesRecorded,
+                                                channel.localIndex};
+        std::vector<SizeType> dataShape = {dataBuffer.size(), 1};
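+        // Scale the float samples to int16 using the channel's bitVolts
+        // factor before passing them to writeTimeseriesData().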
+        std::unique_ptr<int16_t[]> intBuffer = transformToInt16(
+            dataBuffer.size(), channel.getBitVolts(), dataBuffer.data());
+
+        nwbRecording.writeTimeseriesData("ElectricalSeries",
+                                         i,
                                          channel,
-                                         dataBuffer.data(),
-                                         timestampsBuffer.data(),
-                                         dataBuffer.size());
+                                         dataShape,
+                                         positionOffset,
+                                         intBuffer.get(),
+                                         timestampsBuffer.data());
       }
     }
     // check if recording is done
@@ -116,74 +125,6 @@ TEST_CASE("writeContinuousData", "[recording]")
                 Catch::Matchers::Approx(mockTimestamps).margin(tolerance));
   }
 
-  SECTION("test if more samples than buffer size", "[recording]")
-  {
-    // get file path and remove if exists
-    std::string path = getTestFilePath("testBufferOverrun");
-    if (fs::exists(path + "Recording1.nwb")) {
-      fs::remove(path + "Recording1.nwb");
-    }
-
-    // setup mock data
-    SizeType numChannels = 1;
-    SizeType numArrays = 1;
-    SizeType numSamples = 45000;
-    std::vector<float> dataBuffer(numSamples);
-    std::vector<double> timestampsBuffer(numSamples);
-
-    std::vector<Types::ChannelGroup> mockRecordingArrays =
-        getMockChannelArrays(numChannels, numArrays);
-    std::vector<std::vector<float>> mockData =
-        getMockData(numChannels, numSamples);
-    std::vector<double> mockTimestamps = getMockTimestamps(numSamples);
-
-    // open files
-    NWB::NWBRecording nwbRecording;
-    nwbRecording.openFile(path, "Recording", 1, mockRecordingArrays);
-
-    // write data to the file
-    const auto& channel = mockRecordingArrays[0][0];
-    std::copy(mockData[channel.globalIndex].begin(),
-              mockData[channel.globalIndex].begin() + numSamples,
-              dataBuffer.begin());
-    std::copy(mockTimestamps.begin(),
-              mockTimestamps.begin() + numSamples,
-              timestampsBuffer.begin());
-
-    // write timseries data
-    nwbRecording.writeTimeseriesData(0,
-                                     channel,
-                                     dataBuffer.data(),
-                                     timestampsBuffer.data(),
-                                     dataBuffer.size());
-
-    nwbRecording.closeFile();
-
-    // check contents of data
-    std::string dataPath = "/acquisition/array0/data";
-    std::unique_ptr<H5::H5File> file =
-        std::make_unique<H5::H5File>(path + "Recording1.nwb", H5F_ACC_RDONLY);
-    std::unique_ptr<H5::DataSet> dataset =
-        std::make_unique<H5::DataSet>(file->openDataSet(dataPath));
-
-    float* buffer = new float[numSamples * numChannels];
-
-    H5::DataSpace fSpace = dataset->getSpace();
-    hsize_t dims[1] = {numSamples * numChannels};
-    H5::DataSpace mSpace(1, dims);
-    dataset->read(buffer, H5::PredType::NATIVE_FLOAT, mSpace, fSpace);
-
-    std::vector<std::vector<float>> dataOut(numChannels,
-                                            std::vector<float>(numSamples));
-    for (SizeType i = 0; i < numChannels; ++i) {
-      for (SizeType j = 0; j < numSamples; ++j) {
-        dataOut[i][j] = buffer[j * numChannels + i] * (32767.0f * 0.000002f);
-      }
-    }
-    delete[] buffer;
-    REQUIRE_THAT(dataOut[0], Catch::Matchers::Approx(mockData[0]).margin(1));
-  }
-
   SECTION("add a new recording number to the same file", "[recording]")
   {
     // TODO
diff --git a/tests/testUtils.hpp b/tests/testUtils.hpp
index cc1df28d..608b9296 100644
--- a/tests/testUtils.hpp
+++ b/tests/testUtils.hpp
@@ -4,7 +4,9 @@
 #include <filesystem>
 #include <string>
+#include <H5Cpp.h>
 #include <catch2/catch_test_macros.hpp>
+#include <random>
 
 #include "Channel.hpp"
 #include "Types.hpp"
@@ -30,10 +32,10 @@ inline std::string getTestFilePath(std::string filename)
   return filepath.u8string();
 }
 
-inline std::vector<Types::ChannelGroup> getMockChannelArrays(
+inline std::vector<Types::ChannelVector> getMockChannelArrays(
     SizeType numChannels = 2, SizeType numArrays = 2)
 {
-  std::vector<Types::ChannelGroup> arrays(numArrays);
+  std::vector<Types::ChannelVector> arrays(numArrays);
   for (SizeType i = 0; i < numArrays; i++) {
     std::vector<Channel> chGroup;
     for (SizeType j = 0; j < numChannels; j++) {
@@ -48,8 +50,24 @@ inline std::vector<Types::ChannelGroup> getMockChannelArrays(
   return arrays;
 }
 
-inline std::vector<std::vector<float>> getMockData(SizeType numChannels = 4,
-                                                   SizeType numSamples = 1000)
+inline std::vector<float> getMockData1D(SizeType numSamples = 1000)
+{
+  std::vector<float> mockData(numSamples);
+
+  std::random_device rd;
+  std::mt19937 rng(rd());  // random number generator
+  std::uniform_real_distribution<> dis(-1.0, 1.0);  // range of floats
+
+  for (auto& data : mockData) {
+    data = static_cast<float>(dis(rng))
+        * 1000.f;  // approximate microvolt unit range
+  }
+
+  return mockData;
+}
+
+inline std::vector<std::vector<float>> getMockData2D(SizeType numSamples = 1000,
+                                                     SizeType numChannels = 4)
 {
   std::vector<std::vector<float>> mockData(numChannels,
                                            std::vector<float>(numSamples));
@@ -81,3 +99,12 @@ inline std::vector<double> getMockTimestamps(SizeType numSamples = 1000,
 
   return mockTimestamps;
 }
+
+inline void readH5DataBlock(const H5::DataSet* dSet,
+                            const BaseDataType& type,
+                            void* buffer)
+{
+  H5::DataSpace fSpace = dSet->getSpace();
+  H5::DataType nativeType = HDF5::HDF5IO::getNativeType(type);
+  dSet->read(buffer, nativeType, fSpace, fSpace);
+}
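+
+// Note: readH5DataBlock reads the entire dataset, using the file dataspace
+// for both the memory and file selections, so 'buffer' must be large enough
+// to hold every element of the dataset.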