/*
 * zfits.h
 *
 *  Created on: May 16, 2013
 *      Author: lyard
 */
#ifndef ZFITS_H_
#define ZFITS_H_

#include "fits.h"

#include <cstring>
#include <utility>
#include <vector>

#include "huffman.h"

#define COMPRESSED_FLAG   0x1
#define UNCOMPRESSED_FLAG 0x0

#ifndef __MARS__
namespace std
{
#else
using namespace std;
#endif

class zfits : public fits
{
public:
    /*
     * Basic constructor
     */
    zfits(const string& fname, const string& tableName="", bool force=false, bool mightFail=false)
        : fits(fname, tableName, force, mightFail),
          fBuffer(0), fTransposedBuffer(0), fCompressedBuffer(0),
          fNumTiles(0), fNumRowsPerTile(0), fHeapPtr(0), fCurrentRow(-1)
    {
        InitCompressionReading();
    }

    /*
     * Alternative constructor
     */
    zfits(const string& fname, const string& fout, const string& tableName="", bool force=false, bool mightFail=false)
        : fits(fname, fout, tableName, force, mightFail),
          fBuffer(0), fTransposedBuffer(0), fCompressedBuffer(0),
          fNumTiles(0), fNumRowsPerTile(0), fHeapPtr(0), fCurrentRow(-1)
    {
        InitCompressionReading();
    }

    /*
     * Default destructor
     */
    ~zfits() {}

    /*
     * Skip the next row
     */
    virtual bool SkipNextRow()
    {
        if (fTable.isCompressed)
        {
            fRow++;
            return true;
        }
        else
            return fits::SkipNextRow();
    }

private:
    /*
     * Do what it takes to initialize the compressed structure
     */
    void InitCompressionReading()
    {
        //The constructor may have failed
        if (!good())
            return;

        //Get compression-specific keywords
        fNumTiles       = fTable.isCompressed ? GetInt("NAXIS2")   : 0;
        fNumRowsPerTile = fTable.isCompressed ? GetInt("ZTILELEN") : 0;

        //give it some space for uncompressing
        AllocateBuffers();

        //read the file's catalog
        ReadCatalog();
    }

    /*
     * Stage the requested row to the internal buffer
     * Does NOT return data to users
     */
    virtual void StageRow(size_t row, char* dest)
    {
        if (fTable.isCompressed)
            ReadBinaryRow(row, dest);
        else
            fits::StageRow(row, dest);
    }

    /*
     * Copy decompressed data to the location requested by the user
     */
    virtual void MoveColumnDataToUserSpace(char* dest, const char* src, const Table::Column& c)
    {
        if (fTable.isCompressed)
            memcpy(dest, src, c.num*c.size);
        else
            fits::MoveColumnDataToUserSpace(dest, src, c);
    }

    vector<char> fBuffer;           ///< buffer for the current tile, uncompressed, row-wise
    vector<char> fTransposedBuffer; ///< buffer for the current tile, uncompressed, column-wise
    vector<char> fCompressedBuffer; ///< buffer for the compressed data read from the heap

    size_t    fNumTiles;       ///< Total number of compressed tiles in the table
    size_t    fNumRowsPerTile; ///< Number of rows stored in each tile
    streamoff fHeapPtr;        ///< Offset from the beginning of the file to the data heap
    int64_t   fCurrentRow;     ///< Row currently loaded in memory (-1 if none)

    vector<vector<pair<int64_t, int64_t> > > fCatalog; ///< Catalog, i.e. the main table that points to the compressed data.

    void AllocateBuffers()
    {
        if (!fTable.isCompressed)
            return;

        fBuffer.resize(fTable.bytes_per_row*fNumRowsPerTile);
        fTransposedBuffer.resize(fTable.bytes_per_row*fNumRowsPerTile);
        fCompressedBuffer.resize(fTable.bytes_per_row*fNumRowsPerTile + 1024*1024); //use a bit more memory, in case the compression algorithm uses more
    }

    /*
     * Read catalog data, i.e. the addresses of the compressed data inside the heap
     */
    void ReadCatalog()
    {
        if (!fTable.isCompressed)
            return;

        char readBuf[16];
        fCatalog.resize(fNumTiles);

        for (uint32_t i=0;i<fNumTiles;i++)
            for (uint32_t j=0;j<fTable.sortedCols.size();j++)
            {
                //one catalog entry is a pair of big-endian 64 bit integers: (size, offset)
                read(readBuf, 2*sizeof(int64_t));

                //swap the bytes
                int64_t tempValues[2] = {0,0};
                revcpy<8>((char*)(&tempValues[0]), readBuf, 2);

                //add catalog entry
                fCatalog[i].push_back(make_pair(tempValues[0], tempValues[1]));
            }

        //see if there is a gap before heap data
        const off_t heapShift = ComputeGapFromDataToHeap(fTable);

        fHeapPtr = tellg() + heapShift;
    }

    /*
     * Compressed version of the read row
     */
    virtual bool ReadBinaryRow(uint64_t rowNum, char* bufferToRead)
    {
        if (rowNum >= GetNumRows())
            return false;

        //figure out which tile holds the requested row, and which tile is currently loaded
        const int64_t requestedTile = rowNum/fNumRowsPerTile;
        const int64_t currentTile   = (fCurrentRow == -1) ? -1 : fCurrentRow/fNumRowsPerTile;

        fCurrentRow = rowNum;

        //should we read yet another chunk of data ?
        if (requestedTile != currentTile)
        {
            //read yet another chunk from the file
            //the size that we should read is in the catalog:
            //we should sum up the sizes of all columns
            int64_t sizeToRead = 0;
            const uint32_t currentCatRow = fCurrentRow/fNumRowsPerTile;

            for (uint32_t i=0;i<fCatalog[currentCatRow].size();i++)
                sizeToRead += fCatalog[currentCatRow][i].first;

            //move to the beginning of this tile in the heap and read it in one go
            seekg(fHeapPtr + fCatalog[currentCatRow][0].second);
            read(fCompressedBuffer.data(), sizeToRead);

            //uncompress the tile, column by column
            UncompressBuffer();

            //turn the column-wise data back into regular, row-wise rows
            CopyTransposedTile();
        }

        //the requested row is now in the row-wise buffer: copy it to the destination
        memcpy(bufferToRead, fBuffer.data() + fTable.bytes_per_row*(fCurrentRow%fNumRowsPerTile), fTable.bytes_per_row);

        return good();
    }

    /*
     * Read a bunch of data compressed with the huffman algorithm
     */
    uint32_t UncompressHUFFMAN(char* dest, const char* src, uint32_t numRows, uint32_t sizeOfElems, uint32_t numRowElems)
    {
        vector<uint16_t> uncompressed;

        //read compressed sizes (one per row)
        const uint32_t* compressedSizes = reinterpret_cast<const uint32_t*>(src);
        src += sizeof(uint32_t)*numRowElems;

        //uncompress the rows, one by one
        uint32_t sizeWritten = 0;
        for (uint32_t j=0;j<numRowElems;j++)
        {
            Huffman::Decode(reinterpret_cast<const unsigned char*>(src), compressedSizes[j], uncompressed);

            memcpy(dest, uncompressed.data(), uncompressed.size()*sizeof(uint16_t));

            sizeWritten += uncompressed.size()*sizeof(uint16_t);
            dest        += uncompressed.size()*sizeof(uint16_t);
            src         += compressedSizes[j];
        }

        return sizeWritten;
    }

    /*
     * Read a bunch of data compressed with the smoothman algorithm
     */
    uint32_t UncompressSMOOTHMAN(int16_t* dest, const char* src, uint32_t numRows, uint32_t sizeOfElems, uint32_t numRowElems)
    {
        //call huffman transposed
        uint32_t sizeWritten = UncompressHUFFMAN(reinterpret_cast<char*>(dest), src, numRowElems, sizeOfElems, numRows);

        //un-do the integer smoothing
        for (uint32_t j=2;j<numRows*numRowElems;j++)
            dest[j] = dest[j] + (dest[j-1]+dest[j-2])/2;

        return sizeWritten;
    }

    /*
     * Data has been read from disk: uncompress it, one column at a time
     */
    void UncompressBuffer()
    {
        const uint32_t currentCatRow = fCurrentRow/fNumRowsPerTile;

        //the last tile of the file may hold fewer rows than the others
        const uint32_t thisRoundNumRows = (GetNumRows() < fCurrentRow + fNumRowsPerTile) ?
                                           GetNumRows()%fNumRowsPerTile : fNumRowsPerTile;

        for (uint32_t i=0;i<fTable.sortedCols.size();i++)
        {
            if (fTable.sortedCols[i].num == 0)
                continue;

            //destination of this column in the column-wise buffer
            const uint32_t offset = fTable.sortedCols[i].offset*thisRoundNumRows;

            //start of this column's block, relative to the start of the tile;
            //its first byte carries the compression flag
            int64_t compressedOffset = fCatalog[currentCatRow][i].second - fCatalog[currentCatRow][0].second;
            const char compressionFlag = fCompressedBuffer[compressedOffset];
            compressedOffset++;

            switch (compressionFlag)
            {
            case UNCOMPRESSED_FLAG:
                memcpy(fTransposedBuffer.data()+offset, fCompressedBuffer.data()+compressedOffset,
                       thisRoundNumRows*fTable.sortedCols[i].size*fTable.sortedCols[i].num);
                break;

            case COMPRESSED_FLAG:
                UncompressSMOOTHMAN(reinterpret_cast<int16_t*>(fTransposedBuffer.data()+offset),
                                    fCompressedBuffer.data()+compressedOffset,
                                    thisRoundNumRows,
                                    fTable.sortedCols[i].size,
                                    fTable.sortedCols[i].num);
                break;

            default:
                ;
            };
        }
    }

    /*
     * Copy the column-wise tile to the row-wise buffer handed out to the user
     */
    void CopyTransposedTile()
    {
        const uint32_t thisRoundNumRows = (GetNumRows() < fCurrentRow + fNumRowsPerTile) ?
                                           GetNumRows()%fNumRowsPerTile : fNumRowsPerTile;

        //move every column of every row back to its row-wise location
        for (uint32_t i=0;i<fTable.sortedCols.size();i++)
        {
            const Table::Column& col = fTable.sortedCols[i];
            const char* src = fTransposedBuffer.data() + col.offset*thisRoundNumRows;

            for (uint32_t j=0;j<thisRoundNumRows;j++)
                memcpy(fBuffer.data() + j*fTable.bytes_per_row + col.offset,
                       src + j*col.size*col.num,
                       col.size*col.num);
        }
    }
};//class zfits

#ifndef __MARS__
}; //namespace std
#endif

#endif /* ZFITS_H_ */
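/*
 * Usage sketch (illustration only): how a compressed table might be read through the
 * same row-wise interface as the plain fits reader. The file name, table name "Events",
 * column name "Data" and its element count are hypothetical, and the sketch assumes
 * that fits.h exposes SetPtrAddress(name, ptr) to bind a user buffer to a column and
 * GetNextRow() to iterate rows, as the FACT fits reader does; adjust to the actual
 * fits.h API if it differs.
 *
 *     #include "zfits.h"
 *
 *     int main()
 *     {
 *         using namespace std;
 *
 *         zfits file("20130516_042.fits.fz", "Events");   // hypothetical file/table
 *         if (!file.good())
 *             return 1;
 *
 *         vector<int16_t> data(1440*300);                 // hypothetical column size
 *         file.SetPtrAddress("Data", data.data());        // bind user buffer to column
 *
 *         // each call stages one row; whenever a tile boundary is crossed, the tile is
 *         // read from the heap, huffman-decoded and un-smoothed transparently
 *         while (file.GetNextRow())
 *         {
 *             // process data[] for this event
 *         }
 *         return 0;
 *     }
 */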