#include "lazperf/Extractor.hpp"
#include "lazperf/filestream.hpp"
#include "lazperf/header.hpp"
#include "lazperf/readers.hpp"
#include "lazperf/vlr.hpp"
#include "lazperf/writers.hpp"

using namespace Qt::StringLiterals;
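  // Read one hierarchy page: each COPC hierarchy entry is 32 bytes, decoded
  // with a little-endian extractor.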
  std::vector<char> buf( 32 );
  int numEntries = static_cast<int>( size / 32 );
  file.seekg( static_cast<int64_t>( offset ) );
  while ( numEntries-- )
    file.read( buf.data(), static_cast<long>( buf.size() ) );
    lazperf::LeExtractor s( buf.data(), buf.size() );
    s >> d >> x >> y >> z;
bool QgsCopcUpdate::write( const QString &outputFilename, const QHash<QgsPointCloudNodeId, UpdatedChunk> &updatedChunks )

  m_f.open( QgsLazDecoder::toNativePath( outputFilename ), std::ios::out | std::ios::binary );
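  // Copy the header block (LAS header + VLRs, i.e. everything before the
  // point data) from the input file to the output unchanged.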
  std::vector<char> allHeaderData;
  allHeaderData.resize( mHeader.point_offset );
  mFile.read( allHeaderData.data(), static_cast<long>( allHeaderData.size() ) );
  m_f.write( allHeaderData.data(), static_cast<long>( allHeaderData.size() ) );
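  // Reserve 8 bytes for the chunk table offset; the real value is patched in
  // with seekp() at the end of write().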
  m_f.write( "XXXXXXXX", 8 );

  uint64_t currentChunkOffset = mHeader.point_offset + 8;
  mFile.seekg( static_cast<long>( currentChunkOffset ) );
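  // Walk the chunk table: chunks whose voxel appears in updatedChunks are
  // replaced with the newly compressed data, all other chunks are copied
  // verbatim from the input file (in this loop ch.offset holds the chunk's
  // compressed size).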
  QHash<QgsPointCloudNodeId, uint64_t> voxelToNewOffset;

  for ( lazperf::chunk ch : mChunks )
    Q_ASSERT( mOffsetToVoxel.contains( currentChunkOffset ) );

    uint64_t newOffset = m_f.tellp();
    voxelToNewOffset[n] = newOffset;

    if ( updatedChunks.contains( n ) )
      mFile.seekg( static_cast<long>( mFile.tellg() ) + static_cast<long>( ch.offset ) );
      mChunks[chIndex].offset = updatedChunk.chunkData.size();
      std::vector<char> originalChunkData;
      originalChunkData.resize( ch.offset );
      mFile.read( originalChunkData.data(), static_cast<long>( originalChunkData.size() ) );
      m_f.write( originalChunkData.data(), static_cast<long>( originalChunkData.size() ) );

    currentChunkOffset += ch.offset;
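  // Write the chunk table: a 4-byte version field (zero), the chunk count,
  // then the table itself compressed by lazperf.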
  const uint64_t newChunkTableOffset = m_f.tellp();

  m_f.write( "\0\0\0\0", 4 );
  m_f.write( reinterpret_cast<const char *>( &mChunkCount ), sizeof( mChunkCount ) );

  lazperf::OutFileStream outStream( m_f );
  lazperf::compress_chunk_table( outStream.cb(), mChunks, true );
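  // Everything after the chunk table has moved: compute by how many bytes the
  // hierarchy shifts (the 60 bytes account for the EVLR header written in
  // front of the hierarchy blob) and update the 32-byte hierarchy entries.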
  const long hierPositionShift = static_cast<long>( m_f.tellp() ) + 60 - static_cast<long>( mHierarchyOffset );

  const int nEntries = static_cast<int>( mHierarchyBlob.size() / 32 );
  for ( int i = 0; i < nEntries; ++i )
    Q_ASSERT( voxelToNewOffset.contains( e.key ) );

    if ( updatedChunks.contains( e.key ) )
      uint64_t newByteSize = updatedChunks[e.key].chunkData.size();
      e.byteSize = static_cast<int>( newByteSize );

    e.offset += hierPositionShift;
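  // Write the COPC EPT hierarchy EVLR (user_id "copc", record_id 1000)
  // followed by the updated hierarchy blob.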
  const uint64_t newEvlrOffset = m_f.tellp();

  lazperf::evlr_header outCopcHierEvlr;
  outCopcHierEvlr.reserved = 0;
  outCopcHierEvlr.user_id = "copc";
  outCopcHierEvlr.record_id = 1000;
  outCopcHierEvlr.data_length = mHierarchyBlob.size();
  outCopcHierEvlr.description = "EPT Hierarchy";

  outCopcHierEvlr.write( m_f );
  m_f.write( mHierarchyBlob.data(), static_cast<long>( mHierarchyBlob.size() ) );
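  // Append the remaining EVLRs captured from the input file.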
  for ( size_t i = 0; i < mEvlrHeaders.size(); ++i )
    lazperf::evlr_header evlrHeader = mEvlrHeaders[i];
    std::vector<char> evlrBody = mEvlrData[i];

    evlrHeader.write( m_f );
    m_f.write( evlrBody.data(), static_cast<long>( evlrBody.size() ) );
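  // Patch the locations that moved: the EVLR offset in the header, the COPC
  // root hierarchy offset, and the chunk table offset placeholder written
  // right after the header block.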
  m_f.write( reinterpret_cast<const char *>( &newEvlrOffset ), 8 );

  const uint64_t newRootHierOffset = mCopcVlr.root_hier_offset + hierPositionShift;

  m_f.write( reinterpret_cast<const char *>( &newRootHierOffset ), 8 );

  m_f.seekp( mHeader.point_offset );
  m_f.write( reinterpret_cast<const char *>( &newChunkTableOffset ), 8 );
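// QgsCopcUpdate::read(): remember the input file name and open it for reading.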
  mInputFilename = inputFilename;

  mFile.open( QgsLazDecoder::toNativePath( inputFilename ), std::ios::binary | std::ios::in );

  mErrorMessage = u"Could not open file for reading: %1"_s.arg( inputFilename );
bool QgsCopcUpdate::readHeader()

  mHeader = lazperf::header14::create( mFile );

  mErrorMessage = u"Error reading COPC header"_s;

  lazperf::vlr_header vh = lazperf::vlr_header::create( mFile );
  mCopcVlr = lazperf::copc_info_vlr::create( mFile );

  int baseCount = lazperf::baseCount( mHeader.point_format_id );
  if ( baseCount == 0 )
    mErrorMessage = u"Bad point record format: %1"_s.arg( mHeader.point_format_id );
void QgsCopcUpdate::readChunkTable()

  uint64_t chunkTableOffset;

  mFile.seekg( mHeader.point_offset );
  mFile.read( reinterpret_cast<char *>( &chunkTableOffset ), sizeof( chunkTableOffset ) );
  mFile.seekg( static_cast<long>( chunkTableOffset ) + 4 );
  mFile.read( reinterpret_cast<char *>( &mChunkCount ), sizeof( mChunkCount ) );

  bool variable = true;
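  // Decompress the chunk table through a temporary stream positioned at the
  // current read offset, then accumulate the per-chunk compressed sizes into
  // absolute file offsets.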
  std::ifstream copcFileTmp;
  copcFileTmp.open( QgsLazDecoder::toNativePath( mInputFilename ), std::ios::binary | std::ios::in );
  copcFileTmp.seekg( mFile.tellg() );
  lazperf::InFileStream copcInFileStream( copcFileTmp );

  mChunks = lazperf::decompress_chunk_table( copcInFileStream.cb(), mChunkCount, variable );
  std::vector<lazperf::chunk> chunksWithAbsoluteOffsets;
  uint64_t nextChunkOffset = mHeader.point_offset + 8;
  for ( lazperf::chunk ch : mChunks )
    chunksWithAbsoluteOffsets.push_back( {nextChunkOffset, ch.count} );
    nextChunkOffset += ch.offset;
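// QgsCopcUpdate::readHierarchy(): starting from the root page, walk all
// hierarchy pages and record which voxel key owns the chunk at each absolute
// offset; entries with pointCount < 0 refer to further hierarchy pages and are
// queued for processing.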
void QgsCopcUpdate::readHierarchy()

  childEntriesToProcess.push_back( HierarchyEntry
    QgsPointCloudNodeId( 0, 0, 0, 0 ),
    mCopcVlr.root_hier_offset,
    static_cast<int32_t>( mCopcVlr.root_hier_size ),

  while ( !childEntriesToProcess.empty() )
    HierarchyEntry childEntry = childEntriesToProcess.back();
    childEntriesToProcess.pop_back();

    for ( const HierarchyEntry &e : page )
      if ( e.pointCount > 0 )
        Q_ASSERT( !mOffsetToVoxel.contains( e.offset ) );
        mOffsetToVoxel[e.offset] = e.key;
      else if ( e.pointCount < 0 )
        childEntriesToProcess.push_back( e );
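  // Scan the EVLRs of the input file: the COPC hierarchy (user_id "copc",
  // record_id 1000) is stored in mHierarchyBlob; the other EVLRs are kept so
  // they can be copied to the output file.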
  lazperf::evlr_header evlr1;
  mFile.seekg( static_cast<long>( mHeader.evlr_offset ) );

  mHierarchyOffset = 0;

  for ( uint32_t i = 0; i < mHeader.evlr_count; ++i )

    if ( evlr1.user_id == "copc" && evlr1.record_id == 1000 )
      mHierarchyBlob.resize( evlr1.data_length );
      mHierarchyOffset = mFile.tellg();
      mFile.read( mHierarchyBlob.data(), static_cast<long>( evlr1.data_length ) );

    mEvlrHeaders.push_back( evlr1 );
    std::vector<char> evlrBlob;
    evlrBlob.resize( evlr1.data_length );
    mFile.read( evlrBlob.data(), static_cast<long>( evlrBlob.size() ) );
    mEvlrData.emplace_back( std::move( evlrBlob ) );
  Q_ASSERT( !mHierarchyBlob.empty() );
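// QgsCopcUpdate::writeUpdatedFile(): convenience wrapper that runs read() on
// the input file and then write() with the updated chunks.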
    const QString &outputFilename,
    const QHash<QgsPointCloudNodeId, UpdatedChunk> &updatedChunks,

  if ( !copcUpdate.read( inputFilename ) )

  if ( !copcUpdate.write( outputFilename, updatedChunks ) )
Handles update operations on a COPC file.
QString errorMessage() const
Returns the error message.
static bool writeUpdatedFile(const QString &inputFilename, const QString &outputFilename, const QHash< QgsPointCloudNodeId, UpdatedChunk > &updatedChunks, QString *errorMessage=nullptr)
Convenience function to do the whole process in one go: load a COPC file, then write a new COPC file ...
bool read(const QString &inputFilename)
Reads the input COPC file and initializes all members.
bool write(const QString &outputFilename, const QHash< QgsPointCloudNodeId, UpdatedChunk > &updatedChunks)
Writes a COPC file with updated chunks.
Represents an indexed point cloud node's position in the octree.
QVector< HierarchyEntry > HierarchyEntries
HierarchyEntries getHierarchyPage(std::ifstream &file, uint64_t offset, uint64_t size)
Keeps one entry of COPC hierarchy.
QgsPointCloudNodeId key
Key of the data to which this entry corresponds.
uint64_t offset
Absolute offset to the data chunk if pointCount > 0.
int32_t pointCount
If > 0, represents the number of points in the data chunk.
int32_t byteSize
Size of the data chunk in bytes (compressed size) if pointCount > 0.
Keeps information about how the points of a single chunk have been modified.
QByteArray chunkData
Data of the chunk (already compressed with the LAZ compressor).
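A minimal usage sketch of the API documented above. The helper name and the assumption that UpdatedChunk is the nested struct accepted by writeUpdatedFile() are illustrative, not taken from the source; includes for the QGIS classes are omitted because the header names depend on the source tree.

#include <QDebug>

// Hypothetical helper: replace the compressed data of a single octree node and
// save the result as a new COPC file. newLazData must already be LAZ-compressed.
bool replaceNodeChunk( const QString &inPath, const QString &outPath,
                       const QgsPointCloudNodeId &node, const QByteArray &newLazData )
{
  QHash<QgsPointCloudNodeId, QgsCopcUpdate::UpdatedChunk> updatedChunks;

  QgsCopcUpdate::UpdatedChunk chunk;
  chunk.chunkData = newLazData;
  updatedChunks.insert( node, chunk );

  QString error;
  if ( !QgsCopcUpdate::writeUpdatedFile( inPath, outPath, updatedChunks, &error ) )
  {
    qDebug() << "COPC update failed:" << error;
    return false;
  }
  return true;
}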