Skip to content

Commit

Permalink
Merge branch 'main' into add_read
Browse files Browse the repository at this point in the history
  • Loading branch information
oruebel committed Dec 22, 2024
2 parents bfa740a + 264c893 commit ddb8f50
Show file tree
Hide file tree
Showing 28 changed files with 240 additions and 78 deletions.
11 changes: 9 additions & 2 deletions .github/workflows/tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -21,8 +21,8 @@ jobs:
strategy:
fail-fast: false
matrix:
os: [macos-latest, ubuntu-latest]

os: [macos-latest, ubuntu-latest, windows-latest]
runs-on: ${{ matrix.os }}

steps:
Expand All @@ -43,6 +43,13 @@ jobs:
- name: Install dependencies - macos
if: matrix.os == 'macos-latest'
run: brew install hdf5 boost catch2

- name: Install dependencies - windows
if: matrix.os == 'windows-latest'
run: |
cd "${VCPKG_INSTALLATION_ROOT}"
vcpkg install hdf5[cpp]:x64-windows boost-date-time:x64-windows boost-endian:x64-windows boost-uuid:x64-windows catch2:x64-windows
vcpkg integrate install
- name: Configure
shell: pwsh
Expand Down
3 changes: 3 additions & 0 deletions CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -92,6 +92,9 @@ find_package(Boost REQUIRED)
include_directories(${Boost_INCLUDE_DIRS})

target_link_libraries(aqnwb_aqnwb ${HDF5_CXX_LIBRARIES} ${Boost_LIBRARIES})
if (WIN32)
target_link_libraries(aqnwb_aqnwb bcrypt)
endif()

# ---- Install rules ----

Expand Down
3 changes: 3 additions & 0 deletions CMakePresets.json
Original file line number Diff line number Diff line change
Expand Up @@ -102,6 +102,9 @@
"inherits": ["flags-msvc", "ci-std"],
"generator": "Visual Studio 17 2022",
"architecture": "x64",
"cacheVariables": {
"CMAKE_TOOLCHAIN_FILE": "C:/vcpkg/scripts/buildsystems/vcpkg.cmake"
},
"hidden": true
},
{
Expand Down
7 changes: 6 additions & 1 deletion docs/pages/devdocs/install.dox
Original file line number Diff line number Diff line change
Expand Up @@ -6,7 +6,12 @@
* \section dev_requirements_sec Requirements
*
* Please ensure that the required libraries described in the
* \ref user_requirements_sec "User Requirements" section are installed.
* \ref user_requirements_sec "User Requirements" section are installed and
* clone the latest AqNWB source via:
*
* \code{.sh}
* git clone https://github.com/NeurodataWithoutBorders/aqnwb.git
* \endcode
*
 * For developers, we also recommend installing the following optional command line tools, which are used
 * for generating the documentation, for code quality checks, and for debugging.
Expand Down
5 changes: 5 additions & 0 deletions docs/pages/userdocs/hdf5io.dox
Original file line number Diff line number Diff line change
Expand Up @@ -127,6 +127,11 @@
* }
* \enddot
*
* \warning
* There are known issues using SWMR mode on Windows due to file locking by the reader processes. One workaround
* is to set the environment variable `HDF5_USE_FILE_LOCKING=FALSE` to prevent file access errors when using
* a writer process with other reader processes.
*
 * \subsection hdf5io_swmr_features Why does AqNWB use SWMR mode?
*
* Using SWMR has several key advantages for data acquisition applications:
Expand Down
10 changes: 10 additions & 0 deletions docs/pages/userdocs/install.dox
Original file line number Diff line number Diff line change
Expand Up @@ -10,6 +10,16 @@
* - [HDF5 >= 1.10](https://github.com/HDFGroup/hdf5)
* - [Boost](https://www.boost.org/)
*
* \section userbuild_source_sec Source
*
* The source code for AqNWB is available online via the
* [AqNWB GitHub](https://github.com/NeurodataWithoutBorders/aqnwb) repository.
* To checkout the latest developer version, clone the repository via:
*
* \code{.sh}
* git clone https://github.com/NeurodataWithoutBorders/aqnwb.git
* \endcode
*
* \section userbuild_build_sec Build
*
* Here are the steps for building in release mode with a multi-configuration generator:
Expand Down
61 changes: 51 additions & 10 deletions resources/generate_spec_files.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
# load file
yaml = YAML(typ='safe')
with open(file) as f:
namespace = yaml.load(file)
namespace = yaml.load(f)

# get all the sources
for i, ns in enumerate(namespace['namespaces']):
Expand All @@ -22,8 +22,8 @@
header_file = Path(f"./src/spec/{ns['name'].replace('-', '_')}.hpp").resolve()
with open(header_file, 'w') as fo:
fo.write('#pragma once\n\n')
fo.write('#include <string>\n#include <string_view>\n#include <array>\n\n')
fo.write(f'namespace AQNWB::SPEC::{ns['name'].upper().replace('-', '_')}\n{{\n\n')
fo.write('#include <array>\n#include <string>\n#include <string_view>\n\n')
fo.write(f'namespace AQNWB::SPEC::{ns["name"].upper().replace("-", "_")}\n{{\n\n')
fo.write(f'const std::string version = "{ns["version"]}";\n\n')

# load and convert schema files
Expand All @@ -33,14 +33,55 @@
# load file
schema_file = file.parent / s['source']
with open(schema_file) as f:
spec = yaml.load(schema_file)
spec = yaml.load(f)

# convert to cpp string
print(f'Generating file {header_file} - {s["source"]}')
with open(header_file, 'a') as fo:
json_str = json.dumps(spec, separators=(',', ':'))
chunk_size = 16000 # Adjust the chunk size as needed
if len(json_str) > chunk_size:
# Split string into chunks if needed
chunks = [json_str[i:i + chunk_size] for i in range(0, len(json_str), chunk_size)]

var_name = s['source'].replace('.yaml', '').replace('.', '_')
fo.write(f'constexpr std::string_view {var_name} = R"delimiter(\n{json.dumps(spec, separators=(',', ':'))})delimiter";\n\n')
chunk_var_names = []
for i, chunk in enumerate(chunks):
chunk_var_name = f'{var_name}_part{i}'
with open(header_file, 'a') as fo:
fo.write(f'constexpr std::string_view {chunk_var_name} = R"delimiter({chunk})delimiter";\n')
chunk_var_names.append(chunk_var_name)

# Concatenate chunks at compile-time
with open(header_file, 'a') as fo:
fo.write(f'constexpr std::array<std::string_view, {len(chunks)}> {var_name}_parts = {{{", ".join(chunk_var_names)}}};\n')
fo.write(f'constexpr std::size_t {var_name}_total_length = []() {{\n')
fo.write(f' std::size_t length = 0;\n')
fo.write(f' for (const auto& part : {var_name}_parts) {{\n')
fo.write(f' length += part.size();\n')
fo.write(f' }}\n')
fo.write(f' return length;\n')
fo.write(f'}}();\n')
fo.write(f'constexpr auto {var_name}_concatenate = []() {{\n')
fo.write(f' std::array<char, {var_name}_total_length + 1> result{{}};\n')
fo.write(f' std::size_t pos = 0;\n')
fo.write(f' for (const auto& part : {var_name}_parts) {{\n')
fo.write(f' for (char c : part) {{\n')
fo.write(f' result[pos++] = c;\n')
fo.write(f' }}\n')
fo.write(f' }}\n')
fo.write(f' result[pos] = \'\\0\';\n')
fo.write(f' return result;\n')
fo.write(f'}}();\n')
fo.write(f'constexpr std::string_view {var_name}({var_name}_concatenate.data(), {var_name}_concatenate.size() - 1);\n\n')
var_names.append(var_name)
else:
with open(header_file, 'a') as fo:
var_name = s['source'].replace('.yaml', '').replace('.', '_')
fo.write(f'constexpr std::string_view {var_name} = R"delimiter(\n{json.dumps(spec, separators=(',', ':'))})delimiter";\n\n')
var_names.append(var_name)




# reformat schema sources for namespace file
schema = list()
Expand All @@ -49,12 +90,12 @@
s = {'source': s['source'].split('.yaml')[0]}
schema.append(s)
ns['schema'] = schema

# convert to cpp variables
ns_output = {'namespaces': [ns]}
with open(header_file, 'a') as fo:
fo.write(f'constexpr std::string_view namespaces = R"delimiter(\n{json.dumps(ns_output, separators=(',', ':'))})delimiter";\n\n')
fo.write(f'constexpr std::array<std::pair<std::string_view, std::string_view>, {len(var_names) + 1}> specVariables {{{{\n')
fo.write(''.join([f' {{"{name.replace('_', '.')}", {name}}},\n' for name in var_names]))
fo.write(f'constexpr std::array<std::pair<std::string_view, std::string_view>, {len(var_names) + 1}>\n specVariables {{{{\n')
fo.write(''.join([f' {{"{name.replace("_", ".")}", {name}}},\n' for name in var_names]))
fo.write(' {"namespace", namespaces}\n')
fo.write(f'}}}};\n}} // namespace AQNWB::SPEC::{ns['name'].upper().replace('-', '_')}\n')
fo.write(f'}}}};\n}} // namespace AQNWB::SPEC::{ns["name"].upper().replace("-", "_")}\n')
9 changes: 5 additions & 4 deletions src/Utils.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -127,16 +127,17 @@ static inline std::string mergePaths(const std::string& path1,
*/
static inline void convertFloatToInt16LE(const float* source,
void* dest,
int numSamples)
SizeType numSamples)
{
// TODO - several steps in this function may be unnecessary for our use
// case. Consider simplifying the intermediate cast to char and the
// final cast to uint16_t.
auto maxVal = static_cast<double>(0x7fff);
auto intData = static_cast<char*>(dest);

for (int i = 0; i < numSamples; ++i) {
auto clampedValue = std::clamp(maxVal * source[i], -maxVal, maxVal);
for (SizeType i = 0; i < numSamples; ++i) {
auto clampedValue =
std::clamp(maxVal * static_cast<double>(source[i]), -maxVal, maxVal);
auto intValue =
static_cast<uint16_t>(static_cast<int16_t>(std::round(clampedValue)));
intValue = boost::endian::native_to_little(intValue);
Expand All @@ -158,7 +159,7 @@ static inline std::unique_ptr<int16_t[]> transformToInt16(
std::unique_ptr<int16_t[]> intData = std::make_unique<int16_t[]>(numSamples);

// copy data and multiply by scaling factor
double multFactor = 1 / (32767.0f * conversion_factor);
float multFactor = 1.0f / (32767.0f * conversion_factor);
std::transform(data,
data + numSamples,
scaledData.get(),
Expand Down
7 changes: 7 additions & 0 deletions src/io/hdf5/HDF5IO.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -682,7 +682,14 @@ Status HDF5IO::createReferenceDataSet(
delete[] rdata;

herr_t dsetStatus = H5Dclose(dset);
if (checkStatus(dsetStatus) == Status::Failure) {
return Status::Failure;
}

herr_t spaceStatus = H5Sclose(space);
if (checkStatus(spaceStatus) == Status::Failure) {
return Status::Failure;
}

return checkStatus(writeStatus);
}
Expand Down
5 changes: 3 additions & 2 deletions src/nwb/RecordingContainers.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -62,7 +62,8 @@ Status RecordingContainers::writeElectricalSeriesData(
if (es == nullptr)
return Status::Failure;

es->writeChannel(channel.getLocalIndex(), numSamples, data, timestamps);
return es->writeChannel(
channel.getLocalIndex(), numSamples, data, timestamps);
}

Status RecordingContainers::writeSpikeEventData(const SizeType& containerInd,
Expand All @@ -77,5 +78,5 @@ Status RecordingContainers::writeSpikeEventData(const SizeType& containerInd,
if (ses == nullptr)
return Status::Failure;

ses->writeSpike(numSamples, numChannels, data, timestamps);
return ses->writeSpike(numSamples, numChannels, data, timestamps);
}
12 changes: 6 additions & 6 deletions src/nwb/base/TimeSeries.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -55,25 +55,25 @@ void TimeSeries::initialize(const IO::BaseDataType& dataType,

Status TimeSeries::writeData(const std::vector<SizeType>& dataShape,
const std::vector<SizeType>& positionOffset,
const void* data,
const void* timestamps)
const void* dataInput,
const void* timestampsInput)
{
Status tsStatus = Status::Success;
if (timestamps != nullptr) {
if (timestampsInput != nullptr) {
const std::vector<SizeType> timestampsShape = {
dataShape[0]}; // timestamps should match shape of the first data
// dimension
const std::vector<SizeType> timestampsPositionOffset = {positionOffset[0]};
tsStatus = this->timestamps->writeDataBlock(timestampsShape,
timestampsPositionOffset,
this->timestampsType,
timestamps);
timestampsInput);
}

Status dataStatus = this->data->writeDataBlock(
dataShape, positionOffset, this->dataType, data);
dataShape, positionOffset, this->dataType, dataInput);

if ((dataStatus != Status::Success) or (tsStatus != Status::Success)) {
if ((dataStatus != Status::Success) || (tsStatus != Status::Success)) {
return Status::Failure;
} else {
return Status::Success;
Expand Down
8 changes: 4 additions & 4 deletions src/nwb/base/TimeSeries.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -35,16 +35,16 @@ class TimeSeries : public Container
* @brief Writes a timeseries data block to the file.
* @param dataShape The size of the data block.
* @param positionOffset The position of the data block to write to.
* @param data A pointer to the data block.
* @param timestamps A pointer to the timestamps block. May be null if
* @param dataInput A pointer to the data block.
* @param timestampsInput A pointer to the timestamps block. May be null if
* multidimensional TimeSeries and only need to write the timestamps once but
* write data in separate blocks.
* @return The status of the write operation.
*/
Status writeData(const std::vector<SizeType>& dataShape,
const std::vector<SizeType>& positionOffset,
const void* data,
const void* timestamps = nullptr);
const void* dataInput,
const void* timestampsInput = nullptr);

/**
* @brief Initializes the TimeSeries by creating NWB related attributes and
Expand Down
10 changes: 5 additions & 5 deletions src/nwb/ecephys/ElectricalSeries.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ void ElectricalSeries::initialize(const IO::BaseDataType& dataType,
offset);

// setup variables based on number of channels
std::vector<int> electrodeInds(channelVector.size());
std::vector<SizeType> electrodeInds(channelVector.size());
std::vector<float> channelConversions(channelVector.size());
for (size_t i = 0; i < channelVector.size(); ++i) {
electrodeInds[i] = channelVector[i].getGlobalIndex();
Expand Down Expand Up @@ -91,8 +91,8 @@ void ElectricalSeries::initialize(const IO::BaseDataType& dataType,

Status ElectricalSeries::writeChannel(SizeType channelInd,
const SizeType& numSamples,
const void* data,
const void* timestamps)
const void* dataInput,
const void* timestampsInput)
{
// get offsets and datashape
std::vector<SizeType> dataShape = {
Expand All @@ -105,8 +105,8 @@ Status ElectricalSeries::writeChannel(SizeType channelInd,

// write channel data
if (channelInd == 0) {
return writeData(dataShape, positionOffset, data, timestamps);
return writeData(dataShape, positionOffset, dataInput, timestampsInput);
} else {
return writeData(dataShape, positionOffset, data);
return writeData(dataShape, positionOffset, dataInput);
}
}
8 changes: 4 additions & 4 deletions src/nwb/ecephys/ElectricalSeries.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -64,14 +64,14 @@ class ElectricalSeries : public TimeSeries
* @brief Writes a channel to an ElectricalSeries dataset.
* @param channelInd The channel index within the ElectricalSeries
* @param numSamples The number of samples to write (length in time).
* @param data A pointer to the data block.
* @param timestamps A pointer to the timestamps block.
* @param dataInput A pointer to the data block.
* @param timestampsInput A pointer to the timestamps block.
* @return The status of the write operation.
*/
Status writeChannel(SizeType channelInd,
const SizeType& numSamples,
const void* data,
const void* timestamps);
const void* dataInput,
const void* timestampsInput);

/**
* @brief Channel group that this time series is associated with.
Expand Down
6 changes: 3 additions & 3 deletions src/nwb/ecephys/SpikeEventSeries.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -38,8 +38,8 @@ void SpikeEventSeries::initialize(const IO::BaseDataType& dataType,

Status SpikeEventSeries::writeSpike(const SizeType& numSamples,
const SizeType& numChannels,
const void* data,
const void* timestamps)
const void* dataInput,
const void* timestampsInput)
{
// get offsets and datashape
std::vector<SizeType> dataShape;
Expand All @@ -54,5 +54,5 @@ Status SpikeEventSeries::writeSpike(const SizeType& numSamples,
this->m_eventsRecorded += 1;

// write channel data
return writeData(dataShape, positionOffset, data, timestamps);
return writeData(dataShape, positionOffset, dataInput, timestampsInput);
}
8 changes: 4 additions & 4 deletions src/nwb/ecephys/SpikeEventSeries.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -67,13 +67,13 @@ class SpikeEventSeries : public ElectricalSeries
*
* @param numSamples The number of samples in the event
* @param numChannels The number of channels in the event
* @param data The data of the event
* @param timestamps The timestamps of the event
* @param dataInput The data of the event
* @param timestampsInput The timestamps of the event
*/
Status writeSpike(const SizeType& numSamples,
const SizeType& numChannels,
const void* data,
const void* timestamps);
const void* dataInput,
const void* timestampsInput);

DEFINE_FIELD(readData, DatasetField, std::any, "data", Spike waveforms)

Expand Down
Loading

0 comments on commit ddb8f50

Please sign in to comment.