Commit 296ca60a authored by Pierre Aubert

Add a method to read an input file per block

parent 6f14a77c
Pipeline #101269 passed with stages in 8 minutes and 11 seconds
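For context, the new per-block overloads let a generated table class load only a slice of rows instead of the full dataset. A minimal usage sketch, assuming the generated TableEvent class exercised by the test below; the header name, offset and row count are illustrative:

#include <iostream>
#include "TableEvent.h"	//hypothetical name of the generated header

int main(){
	TableEvent table;
	//Load 5 rows starting at row 3 instead of reading the whole dataset
	table.read("configTable.h5", 3lu, 5lu);
	//After the block read, the table is expected to hold only the requested rows
	std::cout << "Loaded " << table.getNbEntries() << " rows, first event id = " << table.getEventId(0lu) << std::endl;
	return 0;
}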
@@ -57,6 +57,7 @@ bool testReadData(const std::string & fileName){
//Get the number of rows and number of columns
size_t nbRow = table.getNbEntries(), nbPixel = table.getNbPixel(), nbSlice = table.getNbSlice();
std::cout << "testReadData : nbRow = " << nbRow << ", nbPixel = " << nbPixel << ", nbSlice = " << nbSlice << std::endl;
bool b(true);
for(size_t i(0lu); i < nbRow; ++i){
b &= table.getEventId(i) == i;
@@ -73,19 +74,56 @@ bool testReadData(const std::string & fileName){
b &= tabSignal[k] == 43lu*(i*nbRow + k) % 17lu;
}
}
std::cout << "testReadBlockData : b = " << b << std::endl;
return b;
}
///Test to read block data
/** @param fileName : name of the file to be read
* @param nbTotalRow : total number of rows in the dataset
* @param offset : offset of the first row to be read
* @param nbRow : number of rows to be read
* @return true on success, false otherwise
*/
bool testReadBlockData(const std::string & fileName, size_t nbTotalRow, size_t offset, size_t nbRow){
//Let's create our table
TableEvent table;
//Read the file
table.read(fileName, offset, nbRow);
//Get the number of rows and number of columns
size_t nbPixel = table.getNbPixel(), nbSlice = table.getNbSlice();
std::cout << "testReadBlockData : nbRow = " << nbRow << ", nbPixel = " << nbPixel << ", nbSlice = " << nbSlice << std::endl;
bool b(true);
for(size_t i(0lu); i < nbRow; ++i){
b &= table.getEventId(i) == (i + offset);
b &= table.getTimestamp(i) == 2lu*(i + offset);
unsigned short * waveform = table.getWaveform(i);
for(size_t j(0lu); j < nbSlice; ++j){
for(size_t k(0lu); k < nbPixel; ++k){
b &= waveform[j*nbPixel + k] == 29lu*((i + offset)*nbTotalRow + j*nbPixel + k) % 19lu;
}
}
float * tabSignal = table.getCalibSignal(i);
for(size_t k(0lu); k < nbPixel; ++k){
b &= tabSignal[k] == 43lu*((i + offset)*nbTotalRow + k) % 17lu;
}
}
std::cout << "testReadBlockData : b = " << b << std::endl;
return b;
}
int main(int argc, char** argv){
std::string fileName("configTable.h5");
if(!testWriteData(fileName)){
std::cerr << "Cannot write file '"<<fileName<<"'" << std::endl;
return -1;
}
if(!testReadData(fileName)){
std::cerr << "Cannot read file '"<<fileName<<"'" << std::endl;
return -1;
}
return 0;
bool b(true);
b &= testWriteData(fileName);
b &= testReadData(fileName);
b &= testReadBlockData(fileName, 10lu, 0lu, 10lu);
b &= testReadBlockData(fileName, 10lu, 1lu, 5lu);
b &= testReadBlockData(fileName, 10lu, 3lu, 4lu);
b &= testReadBlockData(fileName, 10lu, 5lu, 5lu);
b &= testReadBlockData(fileName, 10lu, 2lu, 8lu);
return b - 1;
}
@@ -235,6 +235,10 @@ std::string ph5_backendTableHeader(PTable & table){
body += "\t\tvoid read(const H5::H5File & file);\n";
body += "\t\tvoid read(const H5::Group & group);\n\n";
body += "\t\tvoid read(const std::string & fileName, size_t offset, size_t nbRow);\n";
body += "\t\tvoid read(const H5::H5File & file, size_t offset, size_t nbRow);\n";
body += "\t\tvoid read(const H5::Group & group, size_t offset, size_t nbRow);\n\n";
body += "\t\tvoid write(const std::string & fileName) const;\n";
body += "\t\tvoid write(H5::H5File & file) const;\n";
body += "\t\tvoid write(H5::Group & group) const;\n\n";
@@ -266,6 +270,7 @@ std::string ph5_backendTableHeader(PTable & table){
}
body += "\tprivate:\n";
body += "\t\tvoid readDataSet(const H5::DataSet & dataset);\n";
body += "\t\tvoid readDataSet(const H5::DataSet & dataset, size_t offset, size_t nbRow);\n";
body += "\t\tvoid writeDataSet(H5::DataSet & dataset) const;\n";
body += "\t\tvoid allocate(size_t nbRow);\n\n";
@@ -373,6 +378,36 @@ std::string ph5_backendTableSourceRead(const PTable & table){
body += "\tH5::DataSet dataset = group.openDataSet(p__tableName);\n";
body += "\treadDataSet(dataset);\n";
body += "}\n\n";
body += "///Read the table " + name + " from given file\n";
body += "/**\t@param fileName : name of the HDF5 file to be used\n";
body += " * \t@param offset : index of the first row the "+name+" class needs to load\n";
body += " * \t@param nbRow : number of rows the "+name+" class needs to load (Will reallocate the "+name+" is the number of rows is greater than the number off already allocated rows)\n";
body += "*/\n";
body += "void " + name + "::read(const std::string & fileName, size_t offset, size_t nbRow){\n";
body += "\tH5::H5File file(fileName, H5F_ACC_RDONLY);\n";
body += "\tread(file, offset, nbRow);\n";
body += "}\n\n";
body += "///Read the table " + name + " from given file\n";
body += "/**\t@param file : HDF5 file to be used\n";
body += " * \t@param offset : index of the first row the "+name+" class needs to load\n";
body += " * \t@param nbRow : number of rows the "+name+" class needs to load (Will reallocate the "+name+" is the number of rows is greater than the number off already allocated rows)\n";
body += "*/\n";
body += "void " + name + "::read(const H5::H5File & file, size_t offset, size_t nbRow){\n";
body += "\tH5::DataSet dataset = file.openDataSet(p__tableName);\n";
body += "\treadDataSet(dataset, offset, nbRow);\n";
body += "}\n\n";
body += "///Read the table " + name + " from given group\n";
body += "/**\t@param group : HDF5 group to be used\n";
body += " * \t@param offset : index of the first row the "+name+" class needs to load\n";
body += " * \t@param nbRow : number of rows the "+name+" class needs to load (Will reallocate the "+name+" is the number of rows is greater than the number off already allocated rows)\n";
body += "*/\n";
body += "void " + name + "::read(const H5::Group & group, size_t offset, size_t nbRow){\n";
body += "\tH5::DataSet dataset = group.openDataSet(p__tableName);\n";
body += "\treadDataSet(dataset, offset, nbRow);\n";
body += "}\n\n";
return body;
}
@@ -780,6 +815,42 @@ std::string ph5_backendTableSourceReadDataSet(PTable & table){
body += "\tdataset.read(p_"+it->getName()+", getCompType" + firstToUpper(it->getName()) + "());\n";
}
body += "}\n\n";
body += "///Read the given DataSet and fill the Table with it\n";
body += "/**\t@param dataset : dataset to be used\n";
body += " * \t@param offset : index of the first row the "+name+" class needs to load\n";
body += " * \t@param nbRow : number of rows the "+name+" class needs to load (Will reallocate the "+name+" is the number of rows is greater than the number off already allocated rows)\n";
body += "*/\n";
body += "void "+name+"::readDataSet(const H5::DataSet & dataset, size_t offset, size_t nbRow){\n";
body += "\tH5::CompType compType = dataset.getCompType();\n";
for(PVecAttribute::iterator it(vecAttriute.begin()); it != vecAttriute.end(); ++it){
if(!ph5_attributeIsTensor(*it)){continue;}
body += "\treadDim" + firstToUpper(it->getName()) + "(compType);\n";
}
body += "\tH5::DataSpace dataSpace = dataset.getSpace();\n";
body += "\tsize_t nbEntries(dataSpace.getSimpleExtentNpoints());\n";
body += "\tresize(nbRow);\n";
body += "\thsize_t dimBlockFile[1];\n";
body += "\tdimBlockFile[0] = nbEntries;\n";
body += "\thsize_t offsetBlockBase[1];\n";
body += "\toffsetBlockBase[0] = offset;\n";
body += "\thsize_t countFile[1];\n";
body += "\tcountFile[0] = nbRow;\n";
body += "\tH5::DataSpace spaceBlockFile(1, dimBlockFile);\n";
body += "\tspaceBlockFile.selectHyperslab(H5S_SELECT_SET, countFile, offsetBlockBase);\n";
body += "\thsize_t dimBlockMem[1];\n";
body += "\tdimBlockMem[0] = nbRow;\n";
body += "\tH5::DataSpace blockMem(1, dimBlockMem);\n";
for(PVecAttribute::iterator it(vecAttriute.begin()); it != vecAttriute.end(); ++it){
body += "\tdataset.read(p_"+it->getName()+", getCompType" + firstToUpper(it->getName()) + "(), blockMem, spaceBlockFile);\n";
}
body += "}\n\n";
return body;
}
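For reference, the block read emitted by this generator follows the standard HDF5 C++ hyperslab pattern: select the rows [offset, offset + nbRow) on the file dataspace, pair that selection with a memory dataspace of nbRow elements, and read each column with its compound type. A standalone sketch of the same pattern, assuming a 1-D compound dataset with a single uint64 column; the file name, dataset name and column name are illustrative:

#include <H5Cpp.h>
#include <vector>
#include <iostream>

int main(){
	H5::H5File file("block.h5", H5F_ACC_RDONLY);
	H5::DataSet dataset = file.openDataSet("eventTable");

	//Compound type describing the single column to be read
	H5::CompType colType(sizeof(unsigned long));
	colType.insertMember("eventId", 0lu, H5::PredType::NATIVE_ULONG);

	hsize_t offset[1] = {3lu};	//index of the first row to load
	hsize_t count[1] = {5lu};	//number of rows to load

	//File-side selection : a hyperslab of count rows starting at offset
	H5::DataSpace spaceBlockFile = dataset.getSpace();
	spaceBlockFile.selectHyperslab(H5S_SELECT_SET, count, offset);

	//Memory-side dataspace : a contiguous buffer of count rows
	H5::DataSpace blockMem(1, count);

	std::vector<unsigned long> eventId(count[0]);
	dataset.read(eventId.data(), colType, blockMem, spaceBlockFile);

	for(size_t i(0lu); i < eventId.size(); ++i){std::cout << eventId[i] << std::endl;}
	return 0;
}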