/*
 * zfits.h
 *
 *  Created on: May 16, 2013
 *      Author: lyard
 */

#ifndef MARS_ZFITS
#define MARS_ZFITS

#include <string.h> // memcpy

#include "fits.h"
#include "huffman.h"

#ifndef __MARS__
namespace std
{
#endif

class zfits : public fits
{
public:
    // Basic constructor
    zfits(const string& fname, const string& tableName="", bool force=false)
        : fits(fname, tableName, force),
          fBuffer(0),
          fTransposedBuffer(0),
          fCompressedBuffer(0),
          fNumTiles(0),
          fNumRowsPerTile(0),
          fCurrentRow(-1),
          fHeapOff(0)
    {
        InitCompressionReading();
    }

    // Alternative constructor
    zfits(const string& fname, const string& fout, const string& tableName, bool force=false)
        : fits(fname, fout, tableName, force),
          fBuffer(0),
          fTransposedBuffer(0),
          fCompressedBuffer(0),
          fNumTiles(0),
          fNumRowsPerTile(0),
          fCurrentRow(-1),
          fHeapOff(0)
    {
        InitCompressionReading();
    }

    // Skip the next row
    bool SkipNextRow()
    {
        if (!fTable.isCompressed)
            return fits::SkipNextRow();

        fRow++;
        return true;
    }

private:
    // Do what it takes to initialize the compressed structure
    void InitCompressionReading()
    {
        //The constructor may have failed
        if (!good())
            return;

        //Get compression-specific keywords
        fNumTiles       = fTable.isCompressed ? GetInt("NAXIS2")   : 0;
        fNumRowsPerTile = fTable.isCompressed ? GetInt("ZTILELEN") : 0;

        //give it some space for uncompressing
        AllocateBuffers();

        //read the file's catalog
        ReadCatalog();
    }

    // Stage the requested row to internal buffer
    // Does NOT return data to users
    void StageRow(size_t row, char* dest)
    {
        if (!fTable.isCompressed)
        {
            fits::StageRow(row, dest);
            return;
        }

        ReadBinaryRow(row, dest);
    }

    // Copy decompressed data to location requested by user
    void MoveColumnDataToUserSpace(char* dest, const char* src, const Table::Column& c)
    {
        if (!fTable.isCompressed)
        {
            fits::MoveColumnDataToUserSpace(dest, src, c);
            return;
        }

        memcpy(dest, src, c.num*c.size);
    }

    vector<char> fBuffer;           ///< buffer to store the uncompressed rows
    vector<char> fTransposedBuffer; ///< buffer to store the transposed rows
    vector<char> fCompressedBuffer; ///< buffer to store the compressed data

    size_t  fNumTiles;       ///< Total number of tiles (groups of rows)
    size_t  fNumRowsPerTile; ///< Number of rows per tile
    int64_t fCurrentRow;     ///< Row currently loaded in memory (-1 if none)

    streamoff fHeapOff;      ///< Offset from the beginning of the file to the heap, i.e. the compressed data

    vector<vector<pair<int64_t, int64_t> > > fCatalog; ///< Catalog, i.e. the main table that points to the compressed data.

    void AllocateBuffers()
    {
        if (!fTable.isCompressed)
            return;

        fBuffer.resize(fTable.bytes_per_row*fNumRowsPerTile);
        fTransposedBuffer.resize(fTable.bytes_per_row*fNumRowsPerTile);
        fCompressedBuffer.resize(fTable.bytes_per_row*fNumRowsPerTile + 1024*1024); //use a bit more memory, in case the compression algorithm uses more
    }

    // Read catalog data, i.e. the addresses of the compressed data inside the heap
    void ReadCatalog()
    {
        if (!fTable.isCompressed)
            return;

        char readBuf[16];
        fCatalog.resize(fNumTiles);

        //one catalog entry, i.e. one pair of 64-bit integers, per tile and per column
        for (uint32_t i=0;i<fNumTiles;i++)
            for (uint32_t j=0;j<fTable.num_cols;j++)
            {
                read(readBuf, 2*sizeof(int64_t));

                //swap the bytes (FITS data is big-endian)
                int64_t tempValues[2] = {0, 0};
                revcpy<8>(reinterpret_cast<char*>(tempValues), readBuf, 2);

                //add catalog entry
                fCatalog[i].emplace_back(tempValues[0], tempValues[1]);
            }

        //see if there is a gap before heap data
        fHeapOff = tellg()+fTable.GetHeapShift();
    }
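    // Layout assumed by ReadBinaryRow() below: the compressed data lives in the
    // FITS heap as a sequence of tiles of fNumRowsPerTile rows each. For every
    // tile, the catalog holds one (compressed size, offset in the heap) pair per
    // column. A whole tile is therefore fetched in one go by seeking to the
    // offset of its first column and reading the sum of its columns' sizes.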
    // Compressed version of the read row
    bool ReadBinaryRow(const size_t &rowNum, char *bufferToRead)
    {
        if (rowNum >= GetNumRows())
            return false;

        const uint32_t requestedTile = rowNum/fNumRowsPerTile;
        const uint32_t currentTile   = fCurrentRow/fNumRowsPerTile;

        fCurrentRow = rowNum;

        //should we read yet another chunk of data ?
        if (requestedTile != currentTile)
        {
            //read yet another chunk from the file
            //the size that we should read is in the catalog: we should sum up the sizes of all columns
            const uint32_t currentCatRow = fCurrentRow/fNumRowsPerTile;

            int64_t sizeToRead = 0;
            for (uint32_t i=0;i<fCatalog[currentCatRow].size();i++)
                sizeToRead += fCatalog[currentCatRow][i].first;

            //jump to the start of the tile in the heap and read its compressed data
            seekg(fHeapOff+fCatalog[currentCatRow][0].second);
            read(fCompressedBuffer.data(), sizeToRead);

            //uncompress the tile, column by column, into the transposed buffer
            UncompressBuffer();

            //how many rows does this tile actually contain ? (the last one may be shorter)
            const uint32_t thisRoundNumRows = (GetNumRows()<fCurrentRow + fNumRowsPerTile) ? GetNumRows()%fNumRowsPerTile : fNumRowsPerTile;

            //copy the tile and transpose it back to row-wise ordering
            const char *src = fTransposedBuffer.data();

            for (auto it=fTable.sorted_cols.begin(); it!=fTable.sorted_cols.end(); it++)
            {
                char *buffer = fBuffer.data() + it->offset; // pointer to column (destination buffer)

                switch (it->comp)
                {
                case UNCOMPRESSED:
                case SMOOTHMAN:
                    // regular, "semi-transposed" copy
                    for (char *dest=buffer; dest<buffer+thisRoundNumRows*fTable.bytes_per_row; dest+=fTable.bytes_per_row) // row-by-row
                    {
                        memcpy(dest, src, it->bytes);
                        src += it->bytes;  // next column
                    }
                    break;

                default:
                    // transposed copy
                    for (char *elem=buffer; elem<buffer+it->bytes; elem+=it->size) // element-by-element (arrays)
                    {
                        for (char *dest=elem; dest<elem+thisRoundNumRows*fTable.bytes_per_row; dest+=fTable.bytes_per_row) // row-by-row
                        {
                            memcpy(dest, src, it->size);
                            src += it->size; // next element
                        }
                    }
                    break;
                };
            }
        }

        //Data loaded and uncompressed. Copy it to destination
        memcpy(bufferToRead, fBuffer.data()+fTable.bytes_per_row*(fCurrentRow%fNumRowsPerTile), fTable.bytes_per_row);

        return good();
    }

    // Read a bunch of uncompressed data
    uint32_t UncompressUNCOMPRESSED(char* dest, const char* src, uint32_t numRows, uint32_t sizeOfElems, uint32_t numRowElems)
    {
        memcpy(dest, src, numRows*sizeOfElems*numRowElems);

        return numRows*sizeOfElems*numRowElems;
    }

    // Read a bunch of data compressed with the Huffman algorithm
    uint32_t UncompressHUFFMAN(char* dest, const char* src, uint32_t /*numRows*/, uint32_t sizeOfElems, uint32_t numRowElems)
    {
        if (sizeOfElems < 2)
        {
            cout << "Error, Huffman only works on shorts or longer types. (here: " << sizeOfElems << "). Aborting." << endl;
            return -1;
        }

        vector<uint16_t> uncompressed;

        //read compressed sizes (one per row)
        const uint32_t* compressedSizes = reinterpret_cast<const uint32_t*>(src);
        src += sizeof(uint32_t)*numRowElems;

        //uncompress the rows, one by one
        uint32_t sizeWritten = 0;
        for (uint32_t j=0;j<numRowElems;j++)
        {
            Huffman::Decode(reinterpret_cast<const unsigned char*>(src), compressedSizes[j], uncompressed);

            memcpy(dest, uncompressed.data(), uncompressed.size()*sizeof(uint16_t));

            sizeWritten += uncompressed.size()*sizeof(uint16_t);
            dest        += uncompressed.size()*sizeof(uint16_t);
            src         += compressedSizes[j];
        }

        return sizeWritten;
    }

    //Read a bunch of data compressed with the smoothman algorithm
    uint32_t UncompressSMOOTHMAN(int16_t* dest, const char* src, uint32_t numRows, uint32_t sizeOfElems, uint32_t numRowElems)
    {
        //call huffman transposed
        const uint32_t sizeWritten = UncompressHUFFMAN(reinterpret_cast<char*>(dest), src, numRowElems, sizeOfElems, numRows);

        //un-do the integer smoothing
        for (uint32_t j=2;j<numRows*numRowElems;j++)
            dest[j] = dest[j] + (dest[j-1]+dest[j-2])/2;

        return sizeWritten;
    }

    //Uncompress the current tile, column by column, into fTransposedBuffer
    void UncompressBuffer()
    {
        char *dest = fTransposedBuffer.data();

        const uint32_t catRow = fCurrentRow/fNumRowsPerTile;

        //how many rows does this tile actually contain ? (the last one may be shorter)
        const uint32_t thisRoundNumRows = (GetNumRows()<fCurrentRow + fNumRowsPerTile) ? GetNumRows()%fNumRowsPerTile : fNumRowsPerTile;

        //uncompress column by column
        for (uint32_t i=0;i<fTable.sorted_cols.size();i++)
        {
            const Table::Column &col = fTable.sorted_cols[i];
            if (col.num == 0)
                continue;

            //locate the compressed data of this column inside the tile that was just read
            const char *src = fCompressedBuffer.data() + (fCatalog[catRow][i].second - fCatalog[catRow][0].second);

            switch (col.comp)
            {
            case UNCOMPRESSED:
                dest += UncompressUNCOMPRESSED(dest, src, thisRoundNumRows, col.size, col.num);
                break;

            case SMOOTHMAN:
                dest += UncompressSMOOTHMAN(reinterpret_cast<int16_t*>(dest), src, thisRoundNumRows, col.size, col.num);
                break;

            default:
                ;
            }
        }
    }

};//class zfits

#ifndef __MARS__
}; //namespace std
#endif

#endif
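// Minimal usage sketch (illustration only, kept as a comment; the file and table
// names are hypothetical). It relies solely on members visible above: the
// constructor, good(), GetNumRows() and SkipNextRow(). Column data itself is
// accessed through the regular fits base-class interface, which transparently
// goes through the compressed-reading path implemented in this header.
//
//     #include "zfits.h"
//     #include <iostream>
//
//     using namespace std; // the non-MARS build places zfits in namespace std (see above)
//
//     int main()
//     {
//         zfits file("events.fits.fz", "Events"); // hypothetical input
//         if (!file.good())
//             return 1;
//
//         cout << file.GetNumRows() << " rows" << endl;
//
//         for (size_t i=0; i<file.GetNumRows(); i++)
//             file.SkipNextRow();                 // walk through the table
//
//         return 0;
//     }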