Skip to content
Snippets Groups Projects
Commit b32a63e4 authored by mingf2's avatar mingf2
Browse files

save in h5 format

parent 6f867eff
No related branches found
No related tags found
No related merge requests found
......@@ -17,3 +17,15 @@ add_subdirectory(tests)
add_executable(example example.cpp)
target_link_libraries(example PUBLIC binFileParser)
include(FetchContent)
# Import FetchContent module:
# Fetch HighFive (C++ wrapper around the HDF5 C library) at configure time.
# NOTE(review): consider adding URL_HASH to pin the archive contents so the
# build fails loudly if the upstream zip ever changes — TODO confirm the hash.
FetchContent_Declare(
highfive
URL
https://github.com/BlueBrain/HighFive/archive/refs/tags/v2.7.1.zip
)
FetchContent_MakeAvailable(highfive)
# HDF5 export example: parses the binary file and writes datasets to .h5.
add_executable(example_h5 example_h5.cpp)
target_link_libraries(example_h5 PUBLIC binFileParser HighFive)
\ No newline at end of file
#include <chrono>
#include <cstdint>
#include <fstream>
#include <iostream>
#include <string>
#include <vector>
#include <highfive/H5DataSet.hpp>
#include <highfive/H5DataSpace.hpp>
#include <highfive/H5File.hpp>
#include "BinFileParser.h"
/// Prints the wall-clock time elapsed (in ms) since the previous call to this
/// function (or since program start, for the first call), then resets the
/// internal reference point. Useful for coarse section-by-section timing.
void print_execution_time()
{
    using Clock = std::chrono::high_resolution_clock;
    // Reference point persists across calls; initialized on first entry.
    static auto lastMark = Clock::now();
    const auto now = Clock::now();
    const auto elapsed =
        std::chrono::duration_cast<std::chrono::milliseconds>(now - lastMark);
    std::cout << elapsed.count() << "ms" << std::endl;
    lastMark = now;
}
int main(int argc, char** argv)
{
std::string fpath("/media/ming/DATA/projects/python_parser_for_DT5550W_bin_file/test_data/test.data");
DT5550WBinFile binFileHandle(fpath, true);
// Depending on available memory size, chunkSize can be made smaller/larger
const int chunkSize = 100000;
// Max number of events to process
const int maxN = 1000000;
int numberOfPulsesRead = 0;
// suppose the events you want to export are stored in a vector called `newEvents`
std::vector<CITIROCEvent> newEvents;
// Read the file in chunks of 100k events until end of file (EOF)
while (numberOfPulsesRead < maxN && binFileHandle.isGood())
{
auto newChunk = binFileHandle.readNextNEvents(chunkSize);
numberOfPulsesRead += newChunk.size();
std::cout << "Read " << newChunk.size() << "events" << '\n';
newEvents.insert(newEvents.end(), newChunk.begin(), newChunk.end()); // save the events we just read
}
print_execution_time();
// save data to txt files
std::ofstream outfile_LG("timestamps.txt");
if(!outfile_LG){// file couldn't be opened
std::cerr << "Error: timestamps.txt could not be opened" << std::endl;
exit(1);
}
for (int i=0; i<newEvents.size();i++)
{
for (int j=0; j<32; j++)
{
outfile_LG << newEvents[i].chargeLG[j] << ' '; // separated by space
}
outfile_LG << '\n';
}
outfile_LG.close();
print_execution_time();
std::vector<std::vector<uint16_t>> LG_charges(newEvents.size(), std::vector<uint16_t>(32, 0));
std::vector<uint64_t> timestamps(newEvents.size(), 0);
for (int i=0; i<newEvents.size();i++)
{
LG_charges[i] = newEvents[i].chargeLG;
}
for (int i=0; i<newEvents.size();i++)
{
timestamps[i] = newEvents[i].RunEventTimecode;
}
const std::string FILE_NAME("newEvents.h5");
try {
// Create a new file using the default property lists.
HighFive::File file("newEvents.h5", HighFive::File::ReadWrite | HighFive::File::Create | HighFive::File::Overwrite);
// Create the dataset
HighFive::DataSet LG_dataset = file.createDataSet<uint16_t>("LGcharges", HighFive::DataSpace::From(LG_charges));
// write it
LG_dataset.write(LG_charges);
HighFive::DataSet tt_dataset = file.createDataSet<uint16_t>("timestamps", HighFive::DataSpace::From(timestamps));
tt_dataset.write(timestamps);
} catch (HighFive::Exception& err) {
// catch and print any HDF5 error
std::cerr << err.what() << std::endl;
}
print_execution_time();
return 0;
}
\ No newline at end of file
'''
Description: Sanity check of the HDF5 file written by example_h5: prints the
    shape and first row of the LGcharges and timestamps datasets.
Author: Ming Fang
Date: 1969-12-31 18:00:00
LastEditors: Ming Fang
LastEditTime: 2023-08-15 16:00:04
'''
import numpy as np
import h5py

# FIX: open via a context manager so the file handle is closed deterministically;
# the original left it open for the life of the interpreter.
with h5py.File('newEvents.h5', 'r') as hf:
    # [()] reads the full dataset into an in-memory numpy array.
    LGcharges = hf['LGcharges'][()]
    timestamps = hf['timestamps'][()]

print(LGcharges.shape)
print(LGcharges[0])
print(timestamps.shape)
print(timestamps[0])
0% Loading or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment