Compare commits

34 Commits

| Author | SHA1 | Date |
|---|---|---|
| | 40eaaffa6a | |
| | c5913bd323 | |
| | cc55012180 | |
| | 87c73d7980 | |
| | 1f97fa8455 | |
| | d3cda623f2 | |
| | 865f80ab31 | |
| | a1503e244c | |
| | c52f098d5e | |
| | 67c38711b4 | |
| | b659e4314f | |
| | 7182c682f6 | |
| | b582abf8ea | |
| | 5f518f1da1 | |
| | 6740fc57d8 | |
| | 0c9406ff20 | |
| | 8446425437 | |
| | 20981e4579 | |
| | 9937f9c142 | |
| | cdd98386b8 | |
| | 88d487a35c | |
| | 3d290761dd | |
| | 3e5703eb3d | |
| | 09d5d10d16 | |
| | 4ae4695888 | |
| | 1460ec5088 | |
| | 3462079d9c | |
| | 6b8475f7c4 | |
| | 034a255702 | |
| | a4adee4d0f | |
| | 3e77799c32 | |
| | 3405bc89d2 | |
| | 7748467a27 | |
| | df993bae7d | |
.vscode/keybindings.json (vendored, new file, 20 lines)
@@ -0,0 +1,20 @@
// Place your key bindings in this file to overwrite the defaults
[
    {
        "key": "ctrl+l",
        "command": "editor.action.deleteLines",
        "when": "editorTextFocus && !editorReadonly"
    },{
        "key": "ctrl+shift+i",
        "command": "editor.action.toggleRenderWhitespace"
    },{
        "key": "ctrl+d",
        "command": "editor.action.copyLinesDownAction"
    },{
        "key": "alt+2",
        "command": "type",
        "args": {
            "text": "`"
        }
    }
]
.vscode/settings.json (vendored, new file, 19 lines)
@@ -0,0 +1,19 @@
{
    "workbench.editor.enablePreview": false,
    "files.associations": {
        "*.vs": "cpp",
        "*.fs": "cpp"
    },
    "files.trimTrailingWhitespace": false,
    "editor.fontSize": 28,
    "editor.autoIndent": false,
    "editor.detectIndentation": false,
    "editor.insertSpaces": false,
    "editor.minimap.enabled": false,
    "editor.autoClosingBrackets": false,
    "editor.formatOnType": false,
    "editor.acceptSuggestionOnEnter": "off",
    "editor.acceptSuggestionOnCommitCharacter": false,
    "editor.mouseWheelZoom": true,
    "editor.renderWhitespace": "all",
}
@@ -1,4 +1,4 @@
-cmake_minimum_required(VERSION 2.8)
+cmake_minimum_required(VERSION 3.3)

project(Potree)

@@ -1,82 +0,0 @@

#ifndef AABB_H
#define AABB_H

#include <math.h>
#include <algorithm>

#include "Vector3.h"

using std::min;
using std::max;
using std::endl;

namespace Potree{

class AABB{

public:
    Vector3<double> min;
    Vector3<double> max;
    Vector3<double> size;

    AABB(){
        min = Vector3<double>(std::numeric_limits<float>::max());
        max = Vector3<double>(-std::numeric_limits<float>::max());
        size = Vector3<double>(std::numeric_limits<float>::max());
    }

    AABB(Vector3<double> min, Vector3<double> max){
        this->min = min;
        this->max = max;
        size = max-min;
    }

    bool isInside(const Vector3<double> &p){
        if(min.x <= p.x && p.x <= max.x){
            if(min.y <= p.y && p.y <= max.y){
                if(min.z <= p.z && p.z <= max.z){
                    return true;
                }
            }
        }

        return false;
    }

    void update(const Vector3<double> &point){
        min.x = std::min(min.x, point.x);
        min.y = std::min(min.y, point.y);
        min.z = std::min(min.z, point.z);

        max.x = std::max(max.x, point.x);
        max.y = std::max(max.y, point.y);
        max.z = std::max(max.z, point.z);

        size = max - min;
    }

    void update(const AABB &aabb){
        update(aabb.min);
        update(aabb.max);
    }

    void makeCubic(){
        max = min + size.maxValue();
        size = max - min;
    }

    friend ostream &operator<<( ostream &output, const AABB &value ){
        output << "min: " << value.min << endl;
        output << "max: " << value.max << endl;
        output << "size: " << value.size << endl;
        return output;
    }

};

}

#endif
@@ -1,55 +0,0 @@

#ifndef BINPOINTREADER_H
#define BINPOINTREADER_H

#include <string>
#include <iostream>
#include <vector>

#include "Point.h"
#include "PointReader.h"
#include "PointAttributes.hpp"

using std::string;

using std::ifstream;
using std::cout;
using std::endl;
using std::vector;

namespace Potree{

class BINPointReader : public PointReader{
private:
    AABB aabb;
    double scale;
    string path;
    vector<string> files;
    vector<string>::iterator currentFile;
    ifstream *reader;
    PointAttributes attributes;
    Point point;

public:

    BINPointReader(string path, AABB aabb, double scale, PointAttributes pointAttributes);

    ~BINPointReader();

    bool readNextPoint();

    Point getPoint();

    AABB getAABB();

    long long numPoints();

    void close();

    Vector3<double> getScale();
};

}

#endif
@@ -1,151 +0,0 @@

#ifndef BINPOINTWRITER_H
#define BINPOINTWRITER_H

#include <string>
#include <vector>
#include <iostream>
#include <fstream>

#include "AABB.h"
#include "PointAttributes.hpp"
#include "PointWriter.hpp"
#include "stuff.h"


using std::string;
using std::vector;
using std::ofstream;
using std::ios;

namespace Potree{

class BINPointWriter : public PointWriter{

public:
    PointAttributes attributes;
    ofstream *writer;
    AABB aabb;
    double scale;

    BINPointWriter(string file, AABB aabb, double scale, PointAttributes pointAttributes) {
        this->file = file;
        this->aabb = aabb;
        this->scale = scale;
        numPoints = 0;

        attributes = pointAttributes;

        writer = new ofstream(file, ios::out | ios::binary);
    }

    BINPointWriter(string file, PointAttributes attributes) {
        this->file = file;
        numPoints = 0;
        this->attributes = attributes;

        writer = new ofstream(file, ios::out | ios::binary);
    }

    ~BINPointWriter(){
        close();
    }

    void write(Point &point){
        for(int i = 0; i < attributes.size(); i++){

            PointAttribute attribute = attributes[i];

            if(attribute == PointAttribute::POSITION_CARTESIAN){
                //float pos[3] = {(float) point.x,(float) point.y,(float) point.z};
                int x = (int)((point.position.x - aabb.min.x) / scale);
                int y = (int)((point.position.y - aabb.min.y) / scale);
                int z = (int)((point.position.z - aabb.min.z) / scale);
                int pos[3] = {x, y, z};
                writer->write((const char*)pos, 3*sizeof(int));
            }else if(attribute == PointAttribute::COLOR_PACKED){
                unsigned char rgba[4] = {point.color.x, point.color.y, point.color.z, 255};
                writer->write((const char*)rgba, 4*sizeof(unsigned char));
            }else if(attribute == PointAttribute::INTENSITY){
                writer->write((const char*)&point.intensity, sizeof(unsigned short));
            }else if(attribute == PointAttribute::CLASSIFICATION){
                writer->write((const char*)&point.classification, sizeof(unsigned char));
            } else if (attribute == PointAttribute::RETURN_NUMBER) {
                writer->write((const char*)&point.returnNumber, sizeof(unsigned char));
            } else if (attribute == PointAttribute::NUMBER_OF_RETURNS) {
                writer->write((const char*)&point.numberOfReturns, sizeof(unsigned char));
            } else if (attribute == PointAttribute::SOURCE_ID) {
                writer->write((const char*)&point.pointSourceID, sizeof(unsigned short));
            } else if (attribute == PointAttribute::GPS_TIME) {
                writer->write((const char*)&point.gpsTime, sizeof(double));
            } else if(attribute == PointAttribute::NORMAL_SPHEREMAPPED){
                // see http://aras-p.info/texts/CompactNormalStorage.html
                float nx = point.normal.x;
                float ny = point.normal.y;
                float nz = point.normal.z;
                float lengthxy = sqrt(nx * nx + ny * ny);

                float ex = 0.5f * (nx / lengthxy) * sqrt(-nz * 0.5f + 0.5f) + 0.5f;
                float ey = 0.5f * (ny / lengthxy) * sqrt(-nz * 0.5f + 0.5f) + 0.5f;

                unsigned char bx = (unsigned char)(ex * 255);
                unsigned char by = (unsigned char)(ey * 255);

                writer->write((const char*)&bx, 1);
                writer->write((const char*)&by, 1);
            }else if(attribute == PointAttribute::NORMAL_OCT16){
                // see http://lgdv.cs.fau.de/get/1602

                float nx = point.normal.x;
                float ny = point.normal.y;
                float nz = point.normal.z;

                float norm1 = abs(nx) + abs(ny) + abs(nz);

                nx = nx / norm1;
                ny = ny / norm1;
                nz = nz / norm1;

                float u = 0;
                float v = 0;

                if(nz >= 0){
                    u = nx;
                    v = ny;
                }else{
                    u = psign(nx)*(1-psign(ny)*ny);
                    v = psign(ny)*(1-psign(nx)*nx);
                }

                unsigned char bx = (unsigned char)(min((u + 1) * 128, 255.0f));
                unsigned char by = (unsigned char)(min((v + 1) * 128, 255.0f));

                writer->write((const char*)&bx, 1);
                writer->write((const char*)&by, 1);
            }else if(attribute == PointAttribute::NORMAL){
                writer->write((const char*)&point.normal.x, sizeof(float));
                writer->write((const char*)&point.normal.y, sizeof(float));
                writer->write((const char*)&point.normal.z, sizeof(float));
            }
        }

        writer->write(reinterpret_cast<const char*>(point.extraBytes.data()), point.extraBytes.size());

        numPoints++;
    }

    void close(){
        if(writer != NULL){
            writer->close();
            delete writer;
            writer = NULL;
        }
    }

};

}

#endif
PotreeConverter/include/ChunkProcessor.h (new file, 55 lines)
@@ -0,0 +1,55 @@

#pragma once

#include <string>
#include <filesystem>
#include <fstream>
#include <thread>
#include <mutex>
#include <future>
#include <vector>
#include <iostream>

#include "Metadata.h"
#include "Points.h"
#include "Node.h"
#include "SparseGrid.h"
#include "stuff.h"

using std::atomic;
using std::future;
using std::vector;
using std::thread;
using std::mutex;
using std::unique_lock;
using std::lock_guard;
using std::string;
using std::fstream;
using std::cout;
using std::endl;

namespace fs = std::experimental::filesystem;

struct Chunk {

    string file = "";
    //Points* points = nullptr;
    string id = "";
    //int index = -1;
    //Vector3<int> index3D;
    Vector3<double> min = {Infinity, Infinity, Infinity};
    Vector3<double> max = {-Infinity, -Infinity, -Infinity };

    Chunk() {

    }
};

class ChunkLoader;

vector<shared_ptr<Chunk>> getListOfChunks(Metadata& metadata);

shared_ptr<Points> loadChunk(shared_ptr<Chunk> chunk, Attributes attributes);

shared_ptr<Node> processChunk(shared_ptr<Chunk> chunk, shared_ptr<Points> points, double spacing);
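The three free functions declared above form a load-then-index pipeline for the chunk files. Below is a minimal, hypothetical driver loop showing how they could be chained; it assumes the types this header pulls in (Metadata, Attributes, Points, Node) behave as used elsewhere in this PR and that the caller has already populated Metadata and Attributes. It is an illustration, not code from the PR.

```cpp
// Hypothetical driver for the chunk pipeline declared in ChunkProcessor.h.
#include "ChunkProcessor.h"

void processAllChunks(Metadata& metadata, Attributes attributes, double spacing) {
    // Enumerate the chunk files produced by the chunking phase.
    vector<shared_ptr<Chunk>> chunks = getListOfChunks(metadata);

    for (shared_ptr<Chunk> chunk : chunks) {
        // Load the chunk's points into memory ...
        shared_ptr<Points> points = loadChunk(chunk, attributes);

        // ... and build the octree hierarchy for this chunk.
        shared_ptr<Node> root = processChunk(chunk, points, spacing);

        cout << chunk->id << ": " << points->points.size() << " points" << endl;
    }
}
```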
PotreeConverter/include/Chunker.h (new file, 380 lines)
@@ -0,0 +1,380 @@

#pragma once

#include <string>
#include <assert.h>
#include <filesystem>
#include <atomic>
#include <thread>
#include <memory>


#include "Points.h"
#include "Vector3.h"
#include "LASWriter.hpp"
#include "TaskPool.h"

#include "json.hpp"

using json = nlohmann::json;

using std::shared_ptr;
using std::string;
using std::atomic_bool;
namespace fs = std::experimental::filesystem;

struct ChunkerCell {
    uint32_t count = 0;
    vector<shared_ptr<Points>> batches;
    //bool flushing = false;
    atomic<bool> isFlusing = false;
    int index = 0;
    int ix = 0;
    int iy = 0;
    int iz = 0;
    string name = "";

    ChunkerCell() {
        //flushing = false;
    }
};

// might be better off using https://github.com/progschj/ThreadPool
struct FlushTask {
    shared_ptr<ChunkerCell> cell;
    string filepath = "";
    vector<shared_ptr<Points>> batches;
};

mutex mtx_abc;

auto flushProcessor = [](shared_ptr<FlushTask> task) {

    double tStart = now();

    uint64_t numPoints = 0;

    for (auto batch : task->batches) {
        numPoints += batch->points.size();
    }

    uint64_t bytesPerPoint = 28;
    uint64_t fileDataSize = numPoints * bytesPerPoint;
    void* fileData = malloc(fileDataSize);
    uint8_t* fileDataU8 = reinterpret_cast<uint8_t*>(fileData);

    uint64_t i = 0;
    for (shared_ptr<Points> batch : task->batches) {

        uint8_t* attBuffer = batch->attributeBuffer->dataU8;
        int attributesByteSize = 4;

        //vector<uint8_t> dbgSrc(attBuffer, attBuffer + batch->attributeBuffer->size);

        for (Point& point : batch->points) {

            int fileDataOffset = i * bytesPerPoint;

            memcpy(fileDataU8 + fileDataOffset, &point, 24);

            uint8_t* attSrc = attBuffer + (point.index * attributesByteSize);
            memcpy(fileDataU8 + fileDataOffset + 24, attSrc, attributesByteSize);

            i++;
        }
    }

    lock_guard<mutex> lock(mtx_abc);

    fstream file;
    file.open(task->filepath, ios::out | ios::binary | ios::app);
    file.write(reinterpret_cast<const char*>(fileData), fileDataSize);
    file.close();

    //cout << task->filepath << endl;

    free(fileData);

    task->cell->isFlusing = false;

    // shouldn't be necessary with shared pointers?
    //for (auto batch : task->batches) {
    //    delete batch;
    //}

    double duration = now() - tStart;

};

class Chunker {
public:

    vector<Points*> batchesToDo;
    int32_t gridSize = 1;
    string path = "";
    Attributes attributes;

    vector<shared_ptr<ChunkerCell>> cells;
    Vector3<double> min;
    Vector3<double> max;
    atomic<int> flushThreads = 0;

    // debug
    mutex mtx_debug_message;
    vector<string> debugMessages;

    shared_ptr<TaskPool<FlushTask>> pool;

    Chunker(string path, Attributes attributes, Vector3<double> min, Vector3<double> max, int gridSize) {
        this->path = path;
        this->attributes = attributes;
        this->min = min;
        this->max = max;
        this->gridSize = gridSize;


        int numCells = gridSize * gridSize * gridSize;
        cells.resize(numCells, nullptr);

        int numFlushThreads = 10;
        pool = make_shared<TaskPool<FlushTask>>(numFlushThreads, flushProcessor);

    }

    void add(shared_ptr<Points> batch) {

        Attributes attributes = batch->attributes;

        int attributesByteSize = 4;

        double gridSizeD = double(gridSize);
        Vector3<double> size = max - min;
        Vector3<double> cellsD = Vector3<double>(gridSizeD, gridSizeD, gridSizeD);

        int64_t numPoints = batch->points.size();
        int numCells = gridSize * gridSize * gridSize;
        vector<int> cells_numNew(numCells);

        for (int64_t i = 0; i < numPoints; i++) {
            Point& point = batch->points[i];

            double x = point.x;
            double y = point.y;
            double z = point.z;

            int32_t ux = int32_t(cellsD.x * (x - min.x) / size.x);
            int32_t uy = int32_t(cellsD.y * (y - min.y) / size.y);
            int32_t uz = int32_t(cellsD.z * (z - min.z) / size.z);

            ux = std::min(ux, gridSize - 1);
            uy = std::min(uy, gridSize - 1);
            uz = std::min(uz, gridSize - 1);

            int32_t index = ux + gridSize * uy + gridSize * gridSize * uz;

            cells_numNew[index]++;

        }

        // allocate necessary space for each cell
        for (uint64_t i = 0; i < cells_numNew.size(); i++) {
            int numNew = cells_numNew[i];

            if (numNew == 0) {
                continue;
            }

            uint64_t attributeBufferSize = numNew * attributes.byteSize;

            auto cellBatch = make_shared<Points>();
            cellBatch->attributeBuffer = make_shared<Buffer>(attributeBufferSize);
            cellBatch->attributes = attributes;

            if (cells[i] == nullptr) {
                auto cell = make_shared<ChunkerCell>();
                cell->index = i;

                int ix = i % gridSize;
                int iy = ((i - ix) / gridSize) % gridSize;
                int iz = (i - ix - iy * gridSize) / (gridSize * gridSize);

                cell->ix = ix;
                cell->iy = iy;
                cell->iz = iz;

                string name = "r";
                int levels = std::log2(gridSize);

                int div = gridSize;
                for (int j = 0; j < levels; j++) {

                    int lIndex = 0;

                    if (ix >= (div / 2)) {
                        lIndex = lIndex + 0b100;
                        ix = ix - div / 2;
                    }

                    if (iy >= (div / 2)) {
                        lIndex = lIndex + 0b010;
                        iy = iy - div / 2;
                    }

                    if (iz >= (div / 2)) {
                        lIndex = lIndex + 0b001;
                        iz = iz - div / 2;
                    }

                    name += to_string(lIndex);
                    div = div / 2;
                }
                cell->name = name;

                cells[i] = cell;

            }

            cells[i]->batches.push_back(cellBatch);
        }

        vector<shared_ptr<ChunkerCell>> toFlush;

        // now add them
        for (int64_t i = 0; i < batch->points.size(); i++) {
            Point point = batch->points[i];
            double x = point.x;
            double y = point.y;
            double z = point.z;

            int32_t ux = int32_t(cellsD.x * (x - min.x) / size.x);
            int32_t uy = int32_t(cellsD.y * (y - min.y) / size.y);
            int32_t uz = int32_t(cellsD.z * (z - min.z) / size.z);

            ux = std::min(ux, gridSize - 1);
            uy = std::min(uy, gridSize - 1);
            uz = std::min(uz, gridSize - 1);

            int32_t index = ux + gridSize * uy + gridSize * gridSize * uz;

            auto cell = cells[index];

            auto cellBatch = cell->batches.back();
            uint8_t* attBuffer = batch->attributeBuffer->dataU8;

            // copy point and its attribute buffer
            uint64_t srcIndex = point.index;
            uint64_t destIndex = cellBatch->points.size();

            point.index = destIndex;

            cellBatch->points.push_back(point);

            uint8_t* attDest = cellBatch->attributeBuffer->dataU8 + (destIndex * attributesByteSize);
            uint8_t* attSrc = batch->attributeBuffer->dataU8 + (srcIndex * attributesByteSize);
            memcpy(attDest, attSrc, attributesByteSize);

            //vector<uint8_t> dbgSrc(attSrc, attSrc + 4);
            //vector<uint8_t> dbgDest(attDest, attDest + 4);

            cell->count++;

            if (cell->count > 1'000'000 && cell->isFlusing == false) {
                toFlush.push_back(cell);
                cell->isFlusing = true;
            }
        }

        //for (ChunkerCell* cell : cells) {
        //    if (cell == nullptr) {
        //        continue;
        //    }

        //    for (Points* batch : cell->batches) {
        //        vector<uint8_t> dbgSrc(
        //            batch->attributeBuffer->dataU8,
        //            batch->attributeBuffer->dataU8 + batch->attributeBuffer->size);

        //        int a = 10;
        //    }
        //}


        for (auto cell : toFlush) {
            flushCell(cell);
        }

    }

    void addDebugMessage(string message) {

        lock_guard<mutex> lock(mtx_debug_message);

        debugMessages.push_back(message);

    }

    void flushCell(shared_ptr<ChunkerCell> cell) {

        auto task = make_shared<FlushTask>();
        task->cell = cell;
        task->filepath = path + "/" + cell->name + ".bin";
        task->batches = std::move(cell->batches);

        cell->count = 0;
        cell->batches = vector<shared_ptr<Points>>();
        cell->isFlusing = true;

        pool->addTask(task);

    }

    void saveMetadata() {

        string filepath = path + "/chunks.json";

        auto min = this->min;
        auto max = this->max;

        json js = {
            {"min", {min.x, min.y, min.z}},
            {"max", {max.x, max.y, max.z}},
        };

        fstream file;
        file.open(filepath, ios::out);
        file << js.dump(4);
        file.close();

    }

    void close() {

        // finish all other flushes first
        // used to make sure that we are not flushing to a file that's already being flushed
        pool->waitTillEmpty();

        vector<shared_ptr<ChunkerCell>> populatedCells;
        int numCells = 0;
        for (auto cell : cells) {
            if (cell != nullptr && cell->count > 0) {
                populatedCells.push_back(cell);
            }
        }

        // now flush all the remaining cells
        for (auto cell : populatedCells) {
            flushCell(cell);
        }


        saveMetadata();


        cout << "waiting flush" << endl;
        pool->close();
        cout << "all flushed" << endl;


    }

};
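A hypothetical caller of the grid Chunker above might look like the sketch below. The target directory and grid size are made-up values and the batch source is left abstract; the sketch only assumes the Points/Attributes types from Points.h used throughout this header.

```cpp
// Hypothetical use of Chunker: bin point batches into grid cells on disk.
#include "Chunker.h"

void chunkBatches(vector<shared_ptr<Points>> batches, Attributes attributes,
        Vector3<double> min, Vector3<double> max) {
    int gridSize = 16; // assumed; a power of two keeps the cell-naming loop clean
    Chunker chunker("./chunks", attributes, min, max, gridSize);

    for (auto batch : batches) {
        chunker.add(batch); // counts, copies, and flushes cells that exceed 1M points
    }

    chunker.close(); // flush remaining cells and write chunks.json
}
```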
PotreeConverter/include/Chunker_Tree.h (new file, 236 lines)
@@ -0,0 +1,236 @@

#pragma once

#include <string>
#include <assert.h>
#include <filesystem>

#include "Points.h"
#include "Vector3.h"
#include "LASWriter.hpp"

using std::string;
namespace fs = std::experimental::filesystem;

struct ChunkerCell {

    uint32_t count = 0;
    vector<Points*> batches;

};

struct ChunkNode {

    struct BoundingBox {
        Vector3<double> min;
        Vector3<double> max;
    };

    Vector3<double> min;
    Vector3<double> max;
    Vector3<double> size;

    uint64_t storeSize = 1'000'000;

    uint64_t totalPoints = 0;

    vector<Point> points;
    vector<ChunkNode*> children;

    vector<int> childBinSize;

    string name = "";

    ChunkNode(string name, Vector3<double> min, Vector3<double> max) {
        this->name = name;
        this->min = min;
        this->max = max;
        this->size = max - min;

        childBinSize.resize(8, 0);

        points.reserve(1'000'000);
    }

    void add(Point& point) {

        totalPoints++;

        if (totalPoints <= storeSize) {
            points.push_back(point);

            {
                double nx = (point.x - min.x) / size.x;
                double ny = (point.y - min.y) / size.y;
                double nz = (point.z - min.z) / size.z;

                int index = 0;

                index = index | (nx > 0.5 ? 0b100 : 0b000);
                index = index | (ny > 0.5 ? 0b010 : 0b000);
                index = index | (nz > 0.5 ? 0b001 : 0b000);

                childBinSize[index]++;
            }

        }else if(totalPoints == storeSize + 1){
            split();

            addToChild(point);
        } else {
            addToChild(point);
        }

    }

    void addToChild(Point& point) {

        double nx = (point.x - min.x) / size.x;
        double ny = (point.y - min.y) / size.y;
        double nz = (point.z - min.z) / size.z;

        int index = 0;

        if (index == 2) {
            int a = 0;
        }

        if (nx < 0.0 || nx > 1.0) {
            int b = 0;
        }

        if (ny < 0.0 || ny > 1.0) {
            int b = 0;
        }

        if (nz < 0.0 || nz > 1.0) {
            int b = 0;
        }

        index = index | (nx > 0.5 ? 0b100 : 0b000);
        index = index | (ny > 0.5 ? 0b010 : 0b000);
        index = index | (nz > 0.5 ? 0b001 : 0b000);

        auto child = children[index];

        if (point.x < child->min.x || point.x > child->max.x) {
            int damn = 0;
        }

        children[index]->add(point);
    }

    void split() {

        for (int i = 0; i < 8; i++) {
            BoundingBox box = childBoundingBoxOf(i);
            string childName = this->name + to_string(i);
            ChunkNode* child = new ChunkNode(childName, box.min, box.max);

            //child->points.reserve(childBinSize[i]);

            children.push_back(child);
        }

        for (Point& point : points) {
            addToChild(point);
        }

        points = vector<Point>();

    }

    BoundingBox childBoundingBoxOf(int index) {
        BoundingBox box;
        Vector3<double> center = min + (size * 0.5);

        if ((index & 0b100) == 0) {
            box.min.x = min.x;
            box.max.x = center.x;
        } else {
            box.min.x = center.x;
            box.max.x = max.x;
        }

        if ((index & 0b010) == 0) {
            box.min.y = min.y;
            box.max.y = center.y;
        } else {
            box.min.y = center.y;
            box.max.y = max.y;
        }

        if ((index & 0b001) == 0) {
            box.min.z = min.z;
            box.max.z = center.z;
        } else {
            box.min.z = center.z;
            box.max.z = max.z;
        }

        return box;
    }
};

class Chunker {
public:

    vector<Points*> batchesToDo;
    int32_t gridSize = 1;
    string path = "";

    //vector<ChunkerCell> cells;
    ChunkNode* root = nullptr;

    //Vector3<double> min = {0.0, 0.0, 0.0};
    //Vector3<double> max = {0.0, 0.0, 0.0};

    //Chunker(string targetDirectory, int gridSize) {
    Chunker(string path, Vector3<double> min, Vector3<double> max) {
        this->path = path;
        this->gridSize = gridSize;

        //cells.resize(gridSize * gridSize * gridSize);

        root = new ChunkNode("r", min, max);
    }

    void close() {

        function<void(ChunkNode*)> traverse = [&traverse, this](ChunkNode* node) {

            cout << node->name << ": " << node->totalPoints << ", " << node->points.size() << endl;

            string lasPath = this->path + "/" + node->name + ".las";
            LASHeader header;
            header.min = node->min;
            header.max = node->max;
            header.numPoints = node->points.size();
            header.scale = { 0.001, 0.001, 0.001 };

            writeLAS(lasPath, header, node->points);

            for (ChunkNode* child : node->children) {

                if (child->totalPoints == 0) {
                    continue;
                }

                traverse(child);
            }

        };

        traverse(root);

    }

    void add(Points* batch) {

        for (Point& point : batch->points) {
            root->add(point);
        }

    }

};
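For comparison, a hypothetical use of this tree-based chunker variant is sketched below. Note that it defines its own Chunker class, so it would be used instead of, not alongside, the one in Chunker.h; the directory path is made up and Points*/Point are assumed from Points.h.

```cpp
// Hypothetical use of the tree-based chunker in Chunker_Tree.h.
#include "Chunker_Tree.h"

void chunkWithTree(vector<Points*> batches, Vector3<double> min, Vector3<double> max) {
    Chunker chunker("./chunks", min, max);

    for (Points* batch : batches) {
        chunker.add(batch); // ChunkNode::add splits a node once it exceeds 1'000'000 points
    }

    chunker.close(); // writes one .las file per populated node
}
```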
@@ -1,196 +0,0 @@

#ifndef CLOUDJS_H
#define CLOUDJS_H


#include <string>
#include <vector>
#include <sstream>
#include <list>

#include "rapidjson/document.h"
#include "rapidjson/prettywriter.h"
#include "rapidjson/stringbuffer.h"

#include "AABB.h"
#include "definitions.hpp"
#include "PointAttributes.hpp"

using std::string;
using std::vector;
using std::stringstream;
using std::list;
using rapidjson::Document;
using rapidjson::StringBuffer;
using rapidjson::Writer;
using rapidjson::PrettyWriter;
using rapidjson::Value;

namespace Potree{

class CloudJS{
public:

    class Node{
    public:
        string name;
        int pointCount;

        Node(string name, int pointCount){
            this->name = name;
            this->pointCount = pointCount;
        }
    };

    string version;
    string octreeDir = "data";
    AABB boundingBox;
    AABB tightBoundingBox;
    OutputFormat outputFormat;
    PointAttributes pointAttributes;
    double spacing;
    vector<Node> hierarchy;
    double scale;
    int hierarchyStepSize = -1;
    long long numAccepted = 0;
    string projection = "";

    CloudJS() = default;

    CloudJS(string content){
        Document d;
        d.Parse(content.c_str());

        Value &vVersion = d["version"];
        Value &vOctreeDir = d["octreeDir"];
        Value &vPoints = d["points"];
        Value &vBoundingBox = d["boundingBox"];
        Value &vTightBoundingBox = d["tightBoundingBox"];
        Value &vPointAttributes = d["pointAttributes"];
        Value &vSpacing = d["spacing"];
        Value &vScale = d["scale"];
        Value &vHierarchyStepSize = d["hierarchyStepSize"];



        version = vVersion.GetString();
        octreeDir = vOctreeDir.GetString();

        if(d.HasMember("projection")){
            Value &vProjection = d["projection"];
            projection = vProjection.GetString();
        }

        numAccepted = vPoints.GetInt64();
        boundingBox = AABB(
            Vector3<double>(vBoundingBox["lx"].GetDouble(), vBoundingBox["ly"].GetDouble(), vBoundingBox["lz"].GetDouble()),
            Vector3<double>(vBoundingBox["ux"].GetDouble(), vBoundingBox["uy"].GetDouble(), vBoundingBox["uz"].GetDouble())
        );
        tightBoundingBox = AABB(
            Vector3<double>(vTightBoundingBox["lx"].GetDouble(), vTightBoundingBox["ly"].GetDouble(), vTightBoundingBox["lz"].GetDouble()),
            Vector3<double>(vTightBoundingBox["ux"].GetDouble(), vTightBoundingBox["uy"].GetDouble(), vTightBoundingBox["uz"].GetDouble())
        );

        if(vPointAttributes.IsArray()){
            outputFormat = OutputFormat::BINARY;
            pointAttributes = PointAttributes();

            for (Value::ConstValueIterator itr = vPointAttributes.Begin(); itr != vPointAttributes.End(); ++itr){
                string strpa = itr->GetString();
                PointAttribute pa = PointAttribute::fromString(strpa);
                pointAttributes.add(pa);
            }


        }else{
            string pa = vPointAttributes.GetString();
            if(pa == "LAS"){
                outputFormat = OutputFormat::LAS;
            }else if(pa == "LAZ"){
                outputFormat = OutputFormat::LAZ;
            }
        }

        spacing = vSpacing.GetDouble();
        scale = vScale.GetDouble();
        hierarchyStepSize = vHierarchyStepSize.GetInt();

    }

    string getString(){

        Document d(rapidjson::kObjectType);

        Value version(this->version.c_str(), (rapidjson::SizeType)this->version.size());
        Value octreeDir("data");
        Value projection(this->projection.c_str(), (rapidjson::SizeType)this->projection.size());

        Value boundingBox(rapidjson::kObjectType);
        {
            boundingBox.AddMember("lx", this->boundingBox.min.x, d.GetAllocator());
            boundingBox.AddMember("ly", this->boundingBox.min.y, d.GetAllocator());
            boundingBox.AddMember("lz", this->boundingBox.min.z, d.GetAllocator());
            boundingBox.AddMember("ux", this->boundingBox.max.x, d.GetAllocator());
            boundingBox.AddMember("uy", this->boundingBox.max.y, d.GetAllocator());
            boundingBox.AddMember("uz", this->boundingBox.max.z, d.GetAllocator());
        }

        Value tightBoundingBox(rapidjson::kObjectType);
        {
            tightBoundingBox.AddMember("lx", this->tightBoundingBox.min.x, d.GetAllocator());
            tightBoundingBox.AddMember("ly", this->tightBoundingBox.min.y, d.GetAllocator());
            tightBoundingBox.AddMember("lz", this->tightBoundingBox.min.z, d.GetAllocator());
            tightBoundingBox.AddMember("ux", this->tightBoundingBox.max.x, d.GetAllocator());
            tightBoundingBox.AddMember("uy", this->tightBoundingBox.max.y, d.GetAllocator());
            tightBoundingBox.AddMember("uz", this->tightBoundingBox.max.z, d.GetAllocator());
        }

        Value pointAttributes;
        if(outputFormat == OutputFormat::BINARY){
            pointAttributes.SetArray();
            for(int i = 0; i < this->pointAttributes.size(); i++){
                PointAttribute attribute = this->pointAttributes[i];


                Value vAttribute(rapidjson::kObjectType);
                vAttribute.AddMember("name", Value(attribute.name.c_str(), d.GetAllocator()), d.GetAllocator());
                vAttribute.AddMember("size", attribute.byteSize, d.GetAllocator());
                vAttribute.AddMember("elements", attribute.numElements, d.GetAllocator());
                vAttribute.AddMember("elementSize", attribute.byteSize / attribute.numElements, d.GetAllocator());
                vAttribute.AddMember("type", Value(attribute.type.c_str(), d.GetAllocator()), d.GetAllocator());
                vAttribute.AddMember("description", Value(attribute.description.c_str(), d.GetAllocator()), d.GetAllocator());

                pointAttributes.PushBack(vAttribute, d.GetAllocator());
            }
        }else if(outputFormat == OutputFormat::LAS){
            pointAttributes = "LAS";
        }else if(outputFormat == OutputFormat::LAZ){
            pointAttributes = "LAZ";
        }
        Value spacing(this->spacing);
        Value scale(this->scale);
        Value hierarchyStepSize(this->hierarchyStepSize);


        d.AddMember("version", version, d.GetAllocator());
        d.AddMember("octreeDir", octreeDir, d.GetAllocator());
        d.AddMember("projection", projection, d.GetAllocator());
        d.AddMember("points", (uint64_t)numAccepted, d.GetAllocator());
        d.AddMember("boundingBox", boundingBox, d.GetAllocator());
        d.AddMember("tightBoundingBox", tightBoundingBox, d.GetAllocator());
        d.AddMember("pointAttributes", pointAttributes, d.GetAllocator());
        d.AddMember("spacing", spacing, d.GetAllocator());
        d.AddMember("scale", scale, d.GetAllocator());
        d.AddMember("hierarchyStepSize", hierarchyStepSize, d.GetAllocator());

        StringBuffer buffer;
        PrettyWriter<StringBuffer> writer(buffer);
        d.Accept(writer);

        return buffer.GetString();
    }
};

}

#endif
@@ -1,76 +0,0 @@

#pragma once

#include <unordered_map>

using std::unordered_map;

// see LAS spec 1.4
// https://www.asprs.org/wp-content/uploads/2010/12/LAS_1_4_r13.pdf
// total of 192 bytes
struct ExtraBytesRecord {
    unsigned char reserved[2];
    unsigned char data_type;
    unsigned char options;
    char name[32];
    unsigned char unused[4];
    int64_t no_data[3]; // 24 = 3*8 bytes // hack: not really int, can be double too
    int64_t min[3]; // 24 = 3*8 bytes // hack: not really int, can be double too
    int64_t max[3]; // 24 = 3*8 bytes // hack: not really int, can be double too
    double scale[3];
    double offset[3];
    char description[32];
};

struct ExtraType {
    string type = "";
    int size = 0;
    int numElements = 0;
};

//ExtraType extraTypeFromID(int id) {
//    if (id == 0) {
//        return ExtraType{ "undefined", 0, 1 };
//    }else if (id == 1) {
//        return ExtraType{ "uint8", 1, 1 };
//    }else if (id == 2) {
//        return ExtraType{ "int8", 1, 1 };
//    }else if (id == 3) {
//        return ExtraType{ "uint16", 2, 1 };
//    }else if (id == 4) {
//        return ExtraType{ "int16", 2, 1 };
//    }else if (id == 5) {
//        return ExtraType{ "uint32", 4, 1 };
//    }else if (id == 6) {
//        return ExtraType{ "int32", 4, 1 };
//    }else if (id == 7) {
//        return ExtraType{ "uint64", 8, 1 };
//    }else if (id == 8) {
//        return ExtraType{ "int64", 8, 1 };
//    }else if (id == 9) {
//        return ExtraType{ "float", 4, 1 };
//    }else if (id == 10) {
//        return ExtraType{ "double", 8, 1 };
//    }
//
//    cout << "ERROR: unsupported extra type: " << id << endl;
//    exit(123);
//}

const unordered_map<unsigned char, ExtraType> typeToExtraType = {
    {0, ExtraType{"undefined", 0, 1}},
    {1, ExtraType{"uint8", 1, 1}},
    {2, ExtraType{"int8", 1, 1}},
    {3, ExtraType{"uint16", 2, 1}},
    {4, ExtraType{"int16", 2, 1}},
    {5, ExtraType{"uint32", 4, 1}},
    {6, ExtraType{"int32", 4, 1}},
    {7, ExtraType{"uint64", 8, 1}},
    {8, ExtraType{"int64", 8, 1}},
    {9, ExtraType{"float", 4, 1}},
    {10, ExtraType{"double", 8, 1}},
};
@@ -1,35 +0,0 @@

#ifndef GRID_CELL_H
#define GRID_CELL_H

#include "Point.h"
#include "GridIndex.h"

#include <math.h>
#include <vector>

using std::vector;

namespace Potree{

class SparseGrid;


class GridCell{
public:
    vector<Vector3<double> > points;
    vector<GridCell*> neighbours;
    SparseGrid *grid;

    GridCell();

    GridCell(SparseGrid *grid, GridIndex &index);

    void add(Vector3<double> p);

    bool isDistant(const Vector3<double> &p, const double &squaredSpacing) const;
};

}

#endif
@@ -1,45 +0,0 @@


#ifndef GRID_INDEX_H
#define GRID_INDEX_H

namespace Potree{

class GridIndex{
public:
    int i,j,k;

    GridIndex(){
        i = 0;
        j = 0;
        k = 0;
    }

    GridIndex(int i, int j, int k){
        this->i = i;
        this->j = j;
        this->k = k;
    }

    bool operator<(const GridIndex& b) const{
        if(i < b.i){
            return true;
        }else if(i == b.i && j < b.j){
            return true;
        }else if(i == b.i && j == b.j && k < b.k){
            return true;
        }

        return false;
    }

    friend ostream &operator<<( ostream &output, const GridIndex &value ){
        output << "[" << value.i << ", " << value.j << ", " << value.k << "]" ;
        return output;
    }

};

}

#endif
PotreeConverter/include/LASLoader.hpp (new file, 205 lines)
@@ -0,0 +1,205 @@

#pragma once

#include <string>
#include <iostream>
#include <vector>
#include <thread>
#include <mutex>
#include <future>
#include <experimental/coroutine>

#include "laszip_api.h"

#include "Points.h"
#include "stuff.h"
#include "Vector3.h"

using std::string;
using std::cout;
using std::endl;
using std::vector;
using std::thread;
using std::future;
using std::mutex;
using std::lock_guard;
using std::unique_lock;



class LASLoader {

public:

    laszip_POINTER laszip_reader = nullptr;
    laszip_header* header = nullptr;
    laszip_point* point;

    uint64_t batchSize = 1'000'000;
    vector<shared_ptr<Points>> batches;
    bool finishedLoading = false;

    uint64_t numPoints = 0;
    Vector3<double> min = { 0.0, 0.0, 0.0 };
    Vector3<double> max = { 0.0, 0.0, 0.0 };


    mutex mtx_batches;
    mutex mtx_finishedLoading;

    LASLoader(string path) {

        laszip_create(&laszip_reader);

        laszip_BOOL request_reader = 1;
        laszip_request_compatibility_mode(laszip_reader, request_reader);

        laszip_BOOL is_compressed = iEndsWith(path, ".laz") ? 1 : 0;
        laszip_open_reader(laszip_reader, path.c_str(), &is_compressed);

        laszip_get_header_pointer(laszip_reader, &header);

        this->min = {header->min_x, header->min_y, header->min_z};
        this->max = {header->max_x, header->max_y, header->max_z};

        uint64_t npoints = (header->number_of_point_records ? header->number_of_point_records : header->extended_number_of_point_records);

        this->numPoints = npoints;

        spawnLoadThread();

    }

    future<shared_ptr<Points>> nextBatch() {

        auto fut = std::async(std::launch::async, [=]() -> shared_ptr<Points> {

            bool done = false;

            while (!done) {
                {
                    lock_guard<mutex> guard1(mtx_finishedLoading);
                    lock_guard<mutex> guard2(mtx_batches);

                    bool nothingLeftTodo = finishedLoading && batches.size() == 0;

                    if (nothingLeftTodo) {
                        return nullptr;
                    }
                }

                //unique<mutex> guard(mtx_batches);
                unique_lock<mutex> lock(mtx_batches, std::defer_lock);
                lock.lock();
                if (batches.size() > 0) {
                    auto batch = batches.back();
                    batches.pop_back();

                    lock.unlock();

                    return batch;
                } else {
                    lock.unlock();
                    std::this_thread::sleep_for(std::chrono::milliseconds(10));
                }
            }

            cout << "damn" << endl;

            return nullptr;
        });

        return fut;
    }

    Attributes getAttributes() {
        Attributes attributes;
        Attribute aColor;
        aColor.byteOffset = 12;
        aColor.bytes = 4;
        aColor.name = "color";
        attributes.list.push_back(aColor);
        attributes.byteSize += aColor.bytes;

        return attributes;
    }

    void loadStuff() {

        uint64_t npoints = (header->number_of_point_records ? header->number_of_point_records : header->extended_number_of_point_records);

        laszip_get_point_pointer(laszip_reader, &point);

        shared_ptr<Points> points;

        double coordinates[3];

        Attributes attributes = getAttributes();

        for (uint64_t i = 0; i < npoints; i++) {

            if ((i % batchSize) == 0) {

                if (points != nullptr) {
                    lock_guard<mutex> guard(mtx_batches);

                    batches.push_back(points);
                }

                uint64_t currentBatchSize = std::min(npoints - i, batchSize);

                points = make_shared<Points>();
                uint64_t attributeBufferSize = currentBatchSize * attributes.byteSize;
                points->attributes = attributes;
                points->attributeBuffer = make_shared<Buffer>(attributeBufferSize);
            }

            laszip_read_point(laszip_reader);

            uint8_t r = point->rgb[0] / 256;
            uint8_t g = point->rgb[1] / 256;
            uint8_t b = point->rgb[2] / 256;

            laszip_get_coordinates(laszip_reader, coordinates);

            uint64_t reli = i % batchSize;

            Point point = {
                coordinates[0],
                coordinates[1],
                coordinates[2],
                reli
            };
            points->points.push_back(point);

            uint8_t* rgbBuffer = points->attributeBuffer->dataU8 + (4 * reli + 0);

            rgbBuffer[0] = r;
            rgbBuffer[1] = g;
            rgbBuffer[2] = b;
            rgbBuffer[3] = 255;
        }

        {
            lock_guard<mutex> guard1(mtx_batches);
            lock_guard<mutex> guard2(mtx_finishedLoading);

            batches.push_back(points);
            finishedLoading = true;
        }

        cout << "#points: " << npoints << endl;

        cout << batches.size() << endl;
    }

    void spawnLoadThread() {

        thread t([&](){
            loadStuff();
        });
        t.detach();

    }

};
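The loader above fills a batch queue from a background thread and hands batches out via futures. A minimal, hypothetical consumer is sketched below; it assumes Points.h and the laszip library are available, and that a nullptr batch signals the end of the file, as implemented in nextBatch().

```cpp
// Hypothetical consumer of LASLoader: drain batches until nullptr is returned.
#include "LASLoader.hpp"

void printBatchSizes(string path) {
    LASLoader loader(path); // constructor spawns the load thread

    while (true) {
        future<shared_ptr<Points>> fut = loader.nextBatch();
        shared_ptr<Points> batch = fut.get(); // blocks until a batch is ready

        if (batch == nullptr) {
            break; // loading finished and the queue is drained
        }

        cout << "batch with " << batch->points.size() << " points" << endl;
    }
}
```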
@@ -1,170 +0,0 @@


#ifndef LASPOINTREADER_H
#define LASPOINTREADER_H

#include <string>
#include <iostream>
#include <vector>

#include "laszip_api.h"

#include "Point.h"
#include "PointReader.h"
#include "stuff.h"
#include "ExtraBytes.hpp"

using std::string;

using std::ifstream;
using std::cout;
using std::endl;
using std::vector;

namespace Potree{

class LIBLASReader{
private:

    double tr[16];
    bool hasTransform = false;

    Point transform(double x, double y, double z) const {
        Point p;
        if (hasTransform) {
            p.position.x = tr[0] * x + tr[4] * y + tr[8] * z + tr[12];
            p.position.y = tr[1] * x + tr[5] * y + tr[9] * z + tr[13];
            p.position.z = tr[2] * x + tr[6] * y + tr[10] * z + tr[14];
        } else {
            p.position = Vector3<double>{x,y,z};
        }
        return p;
    }
public:

    laszip_POINTER laszip_reader;
    laszip_header* header;
    laszip_point* point;
    int colorScale;
    double coordinates[3];
    long long pointsRead = 0;

    LIBLASReader(string path) {

        laszip_create(&laszip_reader);

        laszip_BOOL request_reader = 1;
        laszip_request_compatibility_mode(laszip_reader, request_reader);


        {// read first x points to find if color is 1 or 2 bytes
            laszip_BOOL is_compressed = iEndsWith(path, ".laz") ? 1 : 0;
            laszip_open_reader(laszip_reader, path.c_str(), &is_compressed);

            laszip_get_header_pointer(laszip_reader, &header);

            long long npoints = (header->number_of_point_records ? header->number_of_point_records : header->extended_number_of_point_records);

            laszip_get_point_pointer(laszip_reader, &point);

            colorScale = 1;
            for(int i = 0; i < 100'000 && i < npoints; i++){
                laszip_read_point(laszip_reader);

                auto r = point->rgb[0];
                auto g = point->rgb[1];
                auto b = point->rgb[2];

                if(r > 255 || g > 255 || b > 255){
                    colorScale = 256;
                    break;
                };
            }
        }

        laszip_seek_point(laszip_reader, 0);
    }

    long long numPoints() {
        if (header->version_major >= 1 && header->version_minor >= 4) {
            return header->extended_number_of_point_records;
        } else {
            return header->number_of_point_records;
        }
    }

    ~LIBLASReader(){
        laszip_close_reader(laszip_reader);
        laszip_destroy(laszip_reader);
    }

    bool readPoint(){
        if(pointsRead < numPoints()){
            laszip_read_point(laszip_reader);
            pointsRead++;

            return true;
        }else{
            return false;
        }
    }

    Point GetPoint() {

        laszip_get_coordinates(laszip_reader, coordinates);

        Point p = transform(coordinates[0], coordinates[1], coordinates[2]);
        p.intensity = point->intensity;
        p.classification = point->classification;

        p.color.x = point->rgb[0] / colorScale;
        p.color.y = point->rgb[1] / colorScale;
        p.color.z = point->rgb[2] / colorScale;

        p.returnNumber = point->return_number;
        p.numberOfReturns = point->number_of_returns;
        p.pointSourceID = point->point_source_ID;
        p.gpsTime = point->gps_time;

        if (point->num_extra_bytes > 0) {
            p.extraBytes = vector<uint8_t>(point->extra_bytes, point->extra_bytes + point->num_extra_bytes);
        }

        return p;
    }
    void close(){

    }

    AABB getAABB();
};

class LASPointReader : public PointReader{
private:
    AABB aabb;
    string path;
    LIBLASReader *reader;
    vector<string> files;
    vector<string>::iterator currentFile;
public:

    LASPointReader(string path);

    ~LASPointReader();

    bool readNextPoint();

    Point getPoint();

    AABB getAABB();

    long long numPoints();

    void close();

    Vector3<double> getScale();
};

}

#endif
@@ -1,104 +0,0 @@

#ifndef LASPOINTWRITER_H
#define LASPOINTWRITER_H

#include <string>
#include <iostream>
#include <fstream>
#include <stdio.h>
#include <string.h>

#include "laszip_api.h"

#include "AABB.h"
#include "PointWriter.hpp"
#include "Point.h"
#include "stuff.h"

using std::string;
using std::fstream;
using std::ios;

namespace Potree{

class LASPointWriter : public PointWriter{

public:
    AABB aabb;
    laszip_POINTER writer = NULL;
    laszip_header header;
    laszip_point* point;
    double coordinates[3];

    LASPointWriter(string file, AABB aabb, double scale) {
        this->file = file;
        this->aabb = aabb;
        numPoints = 0;


        memset(&header, 0, sizeof(laszip_header));
        strcpy(header.generating_software, "potree");

        header.version_major = 1;
        header.version_minor = 2;
        header.header_size = 227;
        header.offset_to_point_data = 227;
        header.point_data_format = 2;
        header.min_x = aabb.min.x;
        header.min_y = aabb.min.y;
        header.min_z = aabb.min.z;
        header.max_x = aabb.max.x;
        header.max_y = aabb.max.y;
        header.max_z = aabb.max.z;
        header.x_offset = aabb.min.x;
        header.y_offset = aabb.min.y;
        header.z_offset = aabb.min.z;
        header.x_scale_factor = scale;
        header.y_scale_factor = scale;
        header.z_scale_factor = scale;
        header.point_data_record_length = 26;
        header.number_of_point_records = 111;


        laszip_create(&writer);

        laszip_BOOL compress = iEndsWith(file, ".laz") ? 1 : 0;
        if(compress){
            laszip_BOOL request_writer = 1;
            laszip_request_compatibility_mode(writer, request_writer);
        }

        laszip_set_header(writer, &header);
        laszip_open_writer(writer, file.c_str(), compress);

        laszip_get_point_pointer(writer, &point);
    }

    ~LASPointWriter(){
        close();
    }

    void write(Point &point);

    void close(){

        if(writer != NULL){
            laszip_close_writer(writer);
            laszip_destroy(writer);
            writer = NULL;

            fstream *stream = new fstream(file, ios::out | ios::binary | ios::in );
            stream->seekp(107);
            stream->write(reinterpret_cast<const char*>(&numPoints), 4);
            stream->close();
            delete stream;

        }

    }

};

}

#endif
PotreeConverter/include/LASWriter.hpp (new file, 19 lines)
@@ -0,0 +1,19 @@

#pragma once

#include "Points.h"
#include "Vector3.h"

struct LASHeader {

    int headerSize = 375;
    uint64_t numPoints = 0;
    Vector3<double> min;
    Vector3<double> max;
    Vector3<double> scale;

};


void writeLAS(string path, LASHeader header, vector<Point> points);
void writeLAS(string path, LASHeader header, vector<Point> sample, Points* points);
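A hypothetical call to the first writeLAS() overload declared above is sketched below; the scale value mirrors the 0.001 used elsewhere in this PR, and the Point and Vector3 types are assumed to come from the included headers.

```cpp
// Hypothetical use of writeLAS(): dump a point vector with a filled-in header.
#include "LASWriter.hpp"

void dumpPoints(string path, vector<Point>& points,
        Vector3<double> min, Vector3<double> max) {
    LASHeader header;
    header.min = min;
    header.max = max;
    header.numPoints = points.size();
    header.scale = {0.001, 0.001, 0.001}; // millimeter precision, as in Chunker_Tree.h

    writeLAS(path, header, points);
}
```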
PotreeConverter/include/Metadata.h (new file, 26 lines)
@@ -0,0 +1,26 @@

#pragma once

#include <cstdint>
#include <string>


#include "Vector3.h"

using std::string;

struct Metadata {

    string targetDirectory = "";

    Vector3<double> min;
    Vector3<double> max;
    uint64_t numPoints = 0;

    uint32_t chunkGridSize = 0;

    Metadata() {

    }

};
PotreeConverter/include/Node.h (new file, 170 lines)
@@ -0,0 +1,170 @@

#pragma once

#include <functional>
#include <string>
#include <vector>

#include "Vector3.h"
#include "Points.h"
#include "SparseGrid.h"

class Node {

public:

    struct BoundingBox {
        Vector3<double> min;
        Vector3<double> max;
    };

    Vector3<double> min;
    Vector3<double> max;
    Vector3<double> size;
    double spacing = 1.0;

    string name = "";
    int index = 0;

    Node* parent = nullptr;
    vector<shared_ptr<Node>> children = vector<shared_ptr<Node>>(8, nullptr);

    shared_ptr<SparseGrid> grid;
    vector<Point> accepted;

    vector<Point> store;
    bool storeExceeded = false;
    int maxStoreSize = 1'000;

    Node(Vector3<double> min, Vector3<double> max, double spacing) {
        this->min = min;
        this->max = max;
        this->size = max - min;
        this->spacing = spacing;

        grid = make_shared<SparseGrid>(min, max, spacing);
    }

    ~Node() {

    }

    void add(Point& candidate) {

        bool isDistant = grid->isDistant(candidate);

        if (isDistant) {
            accepted.push_back(candidate);
            grid->add(candidate);
        } else if (!storeExceeded) {
            store.push_back(candidate);

            if (store.size() > maxStoreSize) {
                processStore();
            }
        } else {
            addToChild(candidate);
        }

    }

    void addToChild(Point& point) {
        int childIndex = childIndexOf(point);

        if (children[childIndex] == nullptr) {
            BoundingBox box = childBoundingBoxOf(point);
            shared_ptr<Node> child = make_shared<Node>(box.min, box.max, spacing * 0.5);
            child->index = childIndex;
            child->name = this->name + to_string(childIndex);
            child->parent = this;

            children[childIndex] = child;
        }

        children[childIndex]->add(point);
    }

    void processStore() {
        storeExceeded = true;

        for (Point& point : store) {
            addToChild(point);
        }

        store.clear();
    }

    int childIndexOf(Point& point) {

        double nx = (point.x - min.x) / size.x;
        double ny = (point.y - min.y) / size.y;
        double nz = (point.z - min.z) / size.z;

        int childIndex = 0;

        if (nx > 0.5) {
            childIndex = childIndex | 0b100;
        }

        if (ny > 0.5) {
            childIndex = childIndex | 0b010;
        }

        if (nz > 0.5) {
            childIndex = childIndex | 0b001;
        }

        return childIndex;
    }

    BoundingBox childBoundingBoxOf(Point& point) {
        BoundingBox box;
        Vector3<double> center = min + (size * 0.5);

        double nx = (point.x - min.x) / size.x;
        double ny = (point.y - min.y) / size.y;
        double nz = (point.z - min.z) / size.z;

        if (nx <= 0.5) {
            box.min.x = min.x;
            box.max.x = center.x;
        } else {
            box.min.x = center.x;
            box.max.x = max.x;
        }

        if (ny <= 0.5) {
            box.min.y = min.y;
            box.max.y = center.y;
        } else {
            box.min.y = center.y;
            box.max.y = max.y;
        }

        if (nz <= 0.5) {
            box.min.z = min.z;
            box.max.z = center.z;
        } else {
            box.min.z = center.z;
            box.max.z = max.z;
        }

        return box;
    }

    void traverse(function<void(Node*)> callback) {

        callback(this);

        for (auto child : children) {
            if (child == nullptr) {
                continue;
            }

            child->traverse(callback);
        }


    }

};
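A small, hypothetical example of building and inspecting a subsample octree with the Node class above: the spacing heuristic and root name assignment are invented for the sketch, and Point/Points are assumed from Points.h.

```cpp
// Hypothetical octree build: add a batch of points, then report accepted counts.
#include "Node.h"

shared_ptr<Node> buildTree(shared_ptr<Points> batch,
        Vector3<double> min, Vector3<double> max) {
    double spacing = (max - min).x / 128.0; // made-up heuristic, not from this PR
    auto root = make_shared<Node>(min, max, spacing);
    root->name = "r"; // naming convention used elsewhere in the PR

    for (Point& point : batch->points) {
        root->add(point); // spacing-based subsampling per node, overflow goes to children
    }

    root->traverse([](Node* node) {
        std::cout << node->name << ": " << node->accepted.size() << std::endl;
    });

    return root;
}
```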
@@ -1,87 +0,0 @@
|
||||
#ifndef PTXPOINTREADER_H
|
||||
#define PTXPOINTREADER_H
|
||||
|
||||
#include <map>
|
||||
#include "PointReader.h"
|
||||
|
||||
using std::string;
|
||||
using std::fstream;
|
||||
using std::vector;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
/**
 * This reader imports PTX files. We assume that PTX files are a concatenation
 * of multiple PTX "chunks", all of which have the same structure. Every point
 * has exactly 4 double-precision fields: X, Y, Z, Intensity (from 0.0 to 1.0).
 */
|
||||
class PTXPointReader : public PointReader {
|
||||
private:
|
||||
double tr[16];
|
||||
Point p;
|
||||
long currentChunk;
|
||||
static std::map<string, AABB> aabbs;
|
||||
static std::map<string, long> counts;
|
||||
|
||||
inline Point transform(double tr[16], double x, double y, double z) const {
|
||||
Point p(tr[0] * x + tr[4] * y + tr[8] * z + tr[12],
|
||||
tr[1] * x + tr[5] * y + tr[9] * z + tr[13],
|
||||
tr[2] * x + tr[6] * y + tr[10] * z + tr[14]);
|
||||
return p;
|
||||
}
|
||||
|
||||
fstream *stream;
|
||||
string path;
|
||||
vector<string> files;
|
||||
vector<string>::iterator currentFile;
|
||||
Vector3<double> origin;
|
||||
|
||||
/**
 * Returns false if there is no next chunk.
 */
|
||||
bool loadChunk(fstream *stream, long currentChunk, double tr[16]);
|
||||
|
||||
void scanForAABB();
|
||||
|
||||
bool doReadNextPoint();
|
||||
|
||||
public:
|
||||
|
||||
PTXPointReader(string path);
|
||||
|
||||
~PTXPointReader() {
|
||||
close();
|
||||
}
|
||||
|
||||
bool readNextPoint();
|
||||
|
||||
inline Point getPoint() {
|
||||
return p;
|
||||
}
|
||||
|
||||
inline Vector3<double> getOrigin() {
|
||||
return origin;
|
||||
}
|
||||
|
||||
inline AABB getAABB() {
|
||||
if (PTXPointReader::aabbs.find(path) == aabbs.end()) {
|
||||
scanForAABB();
|
||||
}
|
||||
return PTXPointReader::aabbs[path];
|
||||
}
|
||||
|
||||
inline long long numPoints() {
|
||||
if (PTXPointReader::counts.find(path) == counts.end()) {
|
||||
scanForAABB();
|
||||
}
|
||||
return PTXPointReader::counts[path];
|
||||
}
|
||||
|
||||
inline void close() {
|
||||
stream->close();
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -1,300 +0,0 @@
|
||||
|
||||
|
||||
#ifndef PLYPOINTREADER_H
|
||||
#define PLYPOINTREADER_H
|
||||
|
||||
#include <string>
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <regex>
|
||||
|
||||
#include "Point.h"
|
||||
#include "PointReader.h"
|
||||
|
||||
using std::ifstream;
|
||||
using std::string;
|
||||
using std::vector;
|
||||
using std::map;
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
const int PLY_FILE_FORMAT_ASCII = 0;
|
||||
const int PLY_FILE_FORMAT_BINARY_LITTLE_ENDIAN = 1;
|
||||
|
||||
struct PlyPropertyType{
|
||||
string name;
|
||||
int size;
|
||||
|
||||
PlyPropertyType(){}
|
||||
|
||||
PlyPropertyType(string name, int size)
|
||||
:name(name)
|
||||
,size(size)
|
||||
{
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
struct PlyProperty{
|
||||
string name;
|
||||
PlyPropertyType type;
|
||||
|
||||
PlyProperty(string name, PlyPropertyType type)
|
||||
:name(name)
|
||||
,type(type)
|
||||
{
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
struct PlyElement{
|
||||
string name;
|
||||
vector<PlyProperty> properties;
|
||||
int size;
|
||||
|
||||
PlyElement(string name)
|
||||
:name(name)
|
||||
{
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
unordered_map<string, PlyPropertyType> plyPropertyTypes = {
|
||||
{ "char", PlyPropertyType("char", 1) },
|
||||
{ "int8", PlyPropertyType("char", 1) },
|
||||
{ "uchar", PlyPropertyType("uchar", 1) },
|
||||
{ "uint8", PlyPropertyType("uchar", 1) },
|
||||
{ "short", PlyPropertyType("short", 2) },
|
||||
{ "int16", PlyPropertyType("short", 2) },
|
||||
{ "ushort", PlyPropertyType("ushort", 2) },
|
||||
{ "uint16", PlyPropertyType("ushort", 2) },
|
||||
{ "int", PlyPropertyType("int", 4) },
|
||||
{ "int32", PlyPropertyType("int", 4) },
|
||||
{ "uint", PlyPropertyType("uint", 4) },
|
||||
{ "uint32", PlyPropertyType("uint", 4) },
|
||||
{ "float", PlyPropertyType("float", 4) },
|
||||
{ "float32", PlyPropertyType("float", 4) },
|
||||
{ "double", PlyPropertyType("double", 8) },
|
||||
{ "float64", PlyPropertyType("double", 8) }
|
||||
};
|
||||
|
||||
vector<string> plyRedNames = { "r", "red", "diffuse_red" };
|
||||
vector<string> plyGreenNames = { "g", "green", "diffuse_green" };
|
||||
vector<string> plyBlueNames = { "b", "blue", "diffuse_blue" };
|
||||
|
||||
class PlyPointReader : public PointReader{
|
||||
private:
|
||||
AABB *aabb;
|
||||
ifstream stream;
|
||||
int format;
|
||||
long pointCount;
|
||||
long pointsRead;
|
||||
PlyElement vertexElement;
|
||||
char *buffer;
|
||||
int pointByteSize;
|
||||
Point point;
|
||||
string file;
|
||||
|
||||
public:
|
||||
PlyPointReader(string file)
|
||||
: stream(file, std::ios::in | std::ios::binary)
|
||||
,vertexElement("vertexElement"){
|
||||
format = -1;
|
||||
pointCount = 0;
|
||||
pointsRead = 0;
|
||||
pointByteSize = 0;
|
||||
buffer = new char[100];
|
||||
aabb = NULL;
|
||||
this->file = file;
|
||||
|
||||
std::regex rEndHeader("^end_header.*");
|
||||
std::regex rFormat("^format (ascii|binary_little_endian).*");
|
||||
std::regex rElement("^element (\\w*) (\\d*)");
|
||||
std::regex rProperty("^property (char|int8|uchar|uint8|short|int16|ushort|uint16|int|int32|uint|uint32|float|float32|double|float64) (\\w*)");
|
||||
|
||||
string line;
|
||||
while(std::getline(stream, line)){
|
||||
line = trim(line);
|
||||
|
||||
std::cmatch sm;
|
||||
if(std::regex_match(line, rEndHeader)){
|
||||
// stop line parsing when end_header is encountered
|
||||
break;
|
||||
}else if(std::regex_match(line.c_str(), sm, rFormat)){
|
||||
// parse format
|
||||
string f = sm[1];
|
||||
if(f == "ascii"){
|
||||
format = PLY_FILE_FORMAT_ASCII;
|
||||
}else if(f == "binary_little_endian"){
|
||||
format = PLY_FILE_FORMAT_BINARY_LITTLE_ENDIAN;
|
||||
}
|
||||
}else if(std::regex_match(line.c_str(), sm, rElement)){
|
||||
// parse vertex element declaration
|
||||
string name = sm[1];
|
||||
long count = atol(string(sm[2]).c_str());
|
||||
|
||||
if(name != "vertex"){
|
||||
continue;
|
||||
}
|
||||
pointCount = count;
|
||||
|
||||
while(true){
|
||||
std::streamoff len = stream.tellg();
|
||||
getline(stream, line);
|
||||
line = trim(line);
|
||||
if(std::regex_match(line.c_str(), sm, rProperty)){
|
||||
string name = sm[2];
|
||||
PlyPropertyType type = plyPropertyTypes[sm[1]];
|
||||
PlyProperty property(name, type);
|
||||
vertexElement.properties.push_back(property);
|
||||
pointByteSize += type.size;
|
||||
}else{
|
||||
// abort if line was not a property definition
|
||||
stream.seekg(len ,std::ios_base::beg);
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
bool readNextPoint(){
|
||||
if(pointsRead == pointCount){
|
||||
return false;
|
||||
}
|
||||
|
||||
double x = 0;
|
||||
double y = 0;
|
||||
double z = 0;
|
||||
float dummy;
|
||||
float nx = 0;
|
||||
float ny = 0;
|
||||
float nz = 0;
|
||||
unsigned char r = 0;
|
||||
unsigned char g = 0;
|
||||
unsigned char b = 0;
|
||||
|
||||
if(format == PLY_FILE_FORMAT_ASCII){
|
||||
string line;
|
||||
getline(stream, line);
|
||||
line = trim(line);
|
||||
|
||||
//vector<string> tokens;
|
||||
//split(tokens, line, is_any_of("\t "));
|
||||
vector<string> tokens = split(line, {'\t', ' '});
|
||||
int i = 0;
|
||||
for(const auto &prop : vertexElement.properties){
|
||||
string token = tokens[i++];
|
||||
if(prop.name == "x" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
x = stof(token);
|
||||
}else if(prop.name == "y" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
y = stof(token);
|
||||
}else if(prop.name == "z" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
z = stof(token);
|
||||
}else if(prop.name == "x" && prop.type.name == plyPropertyTypes["double"].name){
|
||||
x = stod(token);
|
||||
}else if(prop.name == "y" && prop.type.name == plyPropertyTypes["double"].name){
|
||||
y = stod(token);
|
||||
}else if(prop.name == "z" && prop.type.name == plyPropertyTypes["double"].name){
|
||||
z = stod(token);
|
||||
}else if(std::find(plyRedNames.begin(), plyRedNames.end(), prop.name) != plyRedNames.end() && prop.type.name == plyPropertyTypes["uchar"].name){
|
||||
r = (unsigned char)stof(token);
|
||||
}else if(std::find(plyGreenNames.begin(), plyGreenNames.end(), prop.name) != plyGreenNames.end() && prop.type.name == plyPropertyTypes["uchar"].name){
|
||||
g = (unsigned char)stof(token);
|
||||
}else if(std::find(plyBlueNames.begin(), plyBlueNames.end(), prop.name) != plyBlueNames.end() && prop.type.name == plyPropertyTypes["uchar"].name){
|
||||
b = (unsigned char)stof(token);
|
||||
}else if(prop.name == "nx" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
nx = stof(token);
|
||||
}else if(prop.name == "ny" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
ny = stof(token);
|
||||
}else if(prop.name == "nz" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
nz = stof(token);
|
||||
}
|
||||
}
|
||||
}else if(format == PLY_FILE_FORMAT_BINARY_LITTLE_ENDIAN){
|
||||
stream.read(buffer, pointByteSize);
|
||||
|
||||
int offset = 0;
|
||||
for(const auto &prop : vertexElement.properties){
|
||||
if(prop.name == "x" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
memcpy(&dummy, (buffer+offset), prop.type.size);
|
||||
x=dummy;
|
||||
}else if(prop.name == "y" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
memcpy(&dummy, (buffer+offset), prop.type.size);
|
||||
y=dummy;
|
||||
}else if(prop.name == "z" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
memcpy(&dummy, (buffer+offset), prop.type.size);
|
||||
z=dummy;
|
||||
}else if(prop.name == "x" && prop.type.name == plyPropertyTypes["double"].name){
|
||||
memcpy(&x, (buffer+offset), prop.type.size);
|
||||
}else if(prop.name == "y" && prop.type.name == plyPropertyTypes["double"].name){
|
||||
memcpy(&y, (buffer+offset), prop.type.size);
|
||||
}else if(prop.name == "z" && prop.type.name == plyPropertyTypes["double"].name){
|
||||
memcpy(&z, (buffer+offset), prop.type.size);
|
||||
}else if(std::find(plyRedNames.begin(), plyRedNames.end(), prop.name) != plyRedNames.end() && prop.type.name == plyPropertyTypes["uchar"].name){
|
||||
memcpy(&r, (buffer+offset), prop.type.size);
|
||||
}else if(std::find(plyGreenNames.begin(), plyGreenNames.end(), prop.name) != plyGreenNames.end() && prop.type.name == plyPropertyTypes["uchar"].name){
|
||||
memcpy(&g, (buffer+offset), prop.type.size);
|
||||
}else if(std::find(plyBlueNames.begin(), plyBlueNames.end(), prop.name) != plyBlueNames.end() && prop.type.name == plyPropertyTypes["uchar"].name){
|
||||
memcpy(&b, (buffer+offset), prop.type.size);
|
||||
}else if(prop.name == "nx" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
memcpy(&nx, (buffer+offset), prop.type.size);
|
||||
}else if(prop.name == "ny" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
memcpy(&ny, (buffer+offset), prop.type.size);
|
||||
}else if(prop.name == "nz" && prop.type.name == plyPropertyTypes["float"].name){
|
||||
memcpy(&nz, (buffer+offset), prop.type.size);
|
||||
}
|
||||
|
||||
|
||||
offset += prop.type.size;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
point = Point(x,y,z,r,g,b);
|
||||
point.normal.x = nx;
|
||||
point.normal.y = ny;
|
||||
point.normal.z = nz;
|
||||
pointsRead++;
|
||||
return true;
|
||||
}
|
||||
|
||||
Point getPoint(){
|
||||
return point;
|
||||
}
|
||||
|
||||
AABB getAABB(){
|
||||
if(aabb == NULL){
|
||||
|
||||
aabb = new AABB();
|
||||
|
||||
PlyPointReader *reader = new PlyPointReader(file);
|
||||
while(reader->readNextPoint()){
|
||||
Point p = reader->getPoint();
|
||||
aabb->update(p.position);
|
||||
}
|
||||
|
||||
reader->close();
|
||||
delete reader;
|
||||
|
||||
}
|
||||
|
||||
return *aabb;
|
||||
}
|
||||
|
||||
long long numPoints(){
|
||||
return pointCount;
|
||||
}
|
||||
|
||||
void close(){
|
||||
stream.close();
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -1,56 +0,0 @@
|
||||
|
||||
#ifndef POINT_H
|
||||
#define POINT_H
|
||||
|
||||
#include "Vector3.h"
|
||||
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
||||
using std::ostream;
|
||||
using std::vector;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
class Point{
|
||||
public:
|
||||
|
||||
Vector3<double> position{0};
|
||||
Vector3<unsigned char> color{255};
|
||||
Vector3<float> normal{0};
|
||||
unsigned short intensity = 0;
|
||||
unsigned char classification = 0;
|
||||
unsigned char returnNumber = 0;
|
||||
unsigned char numberOfReturns = 0;
|
||||
unsigned short pointSourceID = 0;
|
||||
double gpsTime = 0.0;
|
||||
vector<uint8_t> extraBytes;
|
||||
|
||||
Point() = default;
|
||||
|
||||
Point(double x, double y, double z) :
|
||||
position(x, y, z)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
Point(double x, double y, double z, unsigned char r, unsigned char g, unsigned char b) :
|
||||
position(x, y, z), color(r, g, b)
|
||||
{
|
||||
|
||||
}
|
||||
|
||||
Point(const Point &other) = default;
|
||||
|
||||
~Point() = default;
|
||||
|
||||
friend ostream &operator<<( ostream &output, const Point &value ){
|
||||
output << value.position ;
|
||||
return output;
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -1,107 +0,0 @@
|
||||
|
||||
|
||||
#ifndef POINT_ATTRIBUTES_H
|
||||
#define POINT_ATTRIBUTES_H
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <unordered_map>
|
||||
|
||||
using std::string;
|
||||
using std::vector;
|
||||
using std::unordered_map;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
#define ATTRIBUTE_TYPE_INT8 "int8"
|
||||
#define ATTRIBUTE_TYPE_INT16 "int16"
|
||||
#define ATTRIBUTE_TYPE_INT32 "int32"
|
||||
#define ATTRIBUTE_TYPE_INT64 "int64"
|
||||
#define ATTRIBUTE_TYPE_UINT8 "uint8"
|
||||
#define ATTRIBUTE_TYPE_UINT16 "uint16"
|
||||
#define ATTRIBUTE_TYPE_UINT32 "uint32"
|
||||
#define ATTRIBUTE_TYPE_UINT64 "uint64"
|
||||
#define ATTRIBUTE_TYPE_FLOAT "float"
|
||||
#define ATTRIBUTE_TYPE_DOUBLE "double"
|
||||
|
||||
const unordered_map<string, int> attributeTypeSize = {
|
||||
{ATTRIBUTE_TYPE_INT8, 1},
|
||||
{ATTRIBUTE_TYPE_INT16, 2},
|
||||
{ATTRIBUTE_TYPE_INT32, 4},
|
||||
{ATTRIBUTE_TYPE_INT64, 8},
|
||||
{ATTRIBUTE_TYPE_UINT8, 1},
|
||||
{ATTRIBUTE_TYPE_UINT16, 2},
|
||||
{ATTRIBUTE_TYPE_UINT32, 4},
|
||||
{ATTRIBUTE_TYPE_UINT64, 8},
|
||||
{ATTRIBUTE_TYPE_FLOAT, 4},
|
||||
{ATTRIBUTE_TYPE_DOUBLE, 8}
|
||||
};
|
||||
|
||||
class PointAttribute{
|
||||
public:
|
||||
static const PointAttribute POSITION_CARTESIAN;
|
||||
static const PointAttribute COLOR_PACKED;
|
||||
static const PointAttribute INTENSITY;
|
||||
static const PointAttribute CLASSIFICATION;
|
||||
static const PointAttribute RETURN_NUMBER;
|
||||
static const PointAttribute NUMBER_OF_RETURNS;
|
||||
static const PointAttribute SOURCE_ID;
|
||||
static const PointAttribute GPS_TIME;
|
||||
static const PointAttribute NORMAL_SPHEREMAPPED;
|
||||
static const PointAttribute NORMAL_OCT16;
|
||||
static const PointAttribute NORMAL;
|
||||
|
||||
int ordinal;
|
||||
string name;
|
||||
string description;
|
||||
string type;
|
||||
int numElements;
|
||||
int byteSize;
|
||||
|
||||
PointAttribute(int ordinal, string name, string type, int numElements, int byteSize){
|
||||
this->ordinal = ordinal;
|
||||
this->name = name;
|
||||
this->type = type;
|
||||
this->numElements = numElements;
|
||||
this->byteSize = byteSize;
|
||||
}
|
||||
|
||||
static PointAttribute fromString(string name);
|
||||
|
||||
};
|
||||
|
||||
bool operator==(const PointAttribute& lhs, const PointAttribute& rhs);
|
||||
|
||||
|
||||
class PointAttributes{
|
||||
public:
|
||||
vector<PointAttribute> attributes;
|
||||
int byteSize;
|
||||
|
||||
PointAttributes(){
|
||||
byteSize = 0;
|
||||
}
|
||||
|
||||
void add(PointAttribute attribute){
|
||||
attributes.push_back(attribute);
|
||||
byteSize += attribute.byteSize;
|
||||
}
|
||||
|
||||
int size(){
|
||||
return (int)attributes.size();
|
||||
}
|
||||
|
||||
PointAttribute& operator[](int i) {
|
||||
return attributes[i];
|
||||
}
|
||||
|
||||
|
||||
};
|
||||
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
#endif
|
||||
@@ -1,33 +0,0 @@
#ifndef POINTREADER_H
#define POINTREADER_H

#include <experimental/filesystem>

#include "Point.h"
#include "AABB.h"

namespace fs = std::experimental::filesystem;

namespace Potree{

class PointReader{
public:

	virtual ~PointReader(){};

	virtual bool readNextPoint() = 0;

	virtual Point getPoint() = 0;

	virtual AABB getAABB() = 0;

	virtual long long numPoints() = 0;

	virtual void close() = 0;
};

}

#endif
|
||||
@@ -1,32 +0,0 @@
#ifndef POINTWRITER_H
#define POINTWRITER_H

#include <string>
#include <iostream>

#include "Point.h"

using std::string;

namespace Potree{

class PointWriter{

public:
	string file;
	int numPoints = 0;

	virtual ~PointWriter(){};

	virtual void write(Point &point) = 0;

	virtual void close() = 0;

};

}

#endif
|
||||
|
||||
|
||||
102 PotreeConverter/include/Points.h Normal file
@@ -0,0 +1,102 @@
|
||||
|
||||
#pragma once
|
||||
#include <cstdint>
|
||||
#include <iostream>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <memory>
|
||||
|
||||
using std::string;
|
||||
using std::vector;
|
||||
using std::shared_ptr;
|
||||
|
||||
struct Buffer {
|
||||
void* data = nullptr;
|
||||
uint8_t* dataU8 = nullptr;
|
||||
uint16_t* dataU16 = nullptr;
|
||||
uint32_t* dataU32 = nullptr;
|
||||
int8_t* dataI8 = nullptr;
|
||||
int16_t* dataI16 = nullptr;
|
||||
int32_t* dataI32 = nullptr;
|
||||
float* dataF = nullptr;
|
||||
double* dataD = nullptr;
|
||||
char* dataChar = nullptr;
|
||||
|
||||
uint64_t size = 0;
|
||||
|
||||
Buffer(uint64_t size) {
|
||||
|
||||
this->data = malloc(size);
|
||||
|
||||
this->dataU8 = reinterpret_cast<uint8_t*>(this->data);
|
||||
this->dataU16 = reinterpret_cast<uint16_t*>(this->data);
|
||||
this->dataU32 = reinterpret_cast<uint32_t*>(this->data);
|
||||
this->dataI8 = reinterpret_cast<int8_t*>(this->data);
|
||||
this->dataI16 = reinterpret_cast<int16_t*>(this->data);
|
||||
this->dataI32 = reinterpret_cast<int32_t*>(this->data);
|
||||
this->dataF = reinterpret_cast<float*>(this->data);
|
||||
this->dataD = reinterpret_cast<double*>(this->data);
|
||||
this->dataChar = reinterpret_cast<char*>(this->data);
|
||||
|
||||
this->size = size;
|
||||
}
|
||||
|
||||
~Buffer() {
|
||||
free(this->data);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
struct Attribute {
|
||||
|
||||
string name = "undefined";
|
||||
int64_t byteOffset = 0;
|
||||
int64_t bytes = 0;
|
||||
|
||||
Attribute() {
|
||||
|
||||
}
|
||||
};
|
||||
|
||||
struct Attributes {
|
||||
|
||||
vector<Attribute> list;
|
||||
int byteSize = 0;
|
||||
|
||||
};
|
||||
|
||||
struct Point {
|
||||
|
||||
double x = 0.0;
|
||||
double y = 0.0;
|
||||
double z = 0.0;
|
||||
|
||||
uint64_t index = 0;
|
||||
|
||||
double squaredDistanceTo(Point& b) {
|
||||
double dx = b.x - this->x;
|
||||
double dy = b.y - this->y;
|
||||
double dz = b.z - this->z;
|
||||
|
||||
double dd = dx * dx + dy * dy + dz * dz;
|
||||
|
||||
return dd;
|
||||
};
|
||||
};
|
||||
|
||||
|
||||
struct Points {
|
||||
vector<Point> points;
|
||||
|
||||
Attributes attributes;
|
||||
shared_ptr<Buffer> attributeBuffer;
|
||||
|
||||
//~Points() {
|
||||
// delete attributeBuffer;
|
||||
//}
|
||||
|
||||
};
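For reference, a hypothetical sketch of how these containers fit together: a Points batch owns one interleaved attribute Buffer, and each Point carries an index into that buffer. The single 4-byte "rgba" attribute and the color values used here are assumptions for illustration only.

// Hypothetical sketch (not part of the diff): build a tiny Points batch with
// one 4-byte "rgba" attribute per point and query the squared distance.
inline shared_ptr<Points> sketch_makeTinyBatch() {
	auto batch = std::make_shared<Points>();

	Attribute rgba;
	rgba.name = "rgba";
	rgba.byteOffset = 0;
	rgba.bytes = 4;
	batch->attributes.list.push_back(rgba);
	batch->attributes.byteSize = 4;

	Point a; a.x = 0.0; a.y = 0.0; a.z = 0.0; a.index = 0;
	Point b; b.x = 1.0; b.y = 2.0; b.z = 2.0; b.index = 1;
	batch->points = { a, b };

	// one attribute record per point, addressed via Point::index
	batch->attributeBuffer = std::make_shared<Buffer>(
		batch->points.size() * batch->attributes.byteSize);
	batch->attributeBuffer->dataU32[a.index] = 0xff0000ff; // e.g. red, full alpha
	batch->attributeBuffer->dataU32[b.index] = 0xff00ff00; // e.g. green, full alpha

	double dd = a.squaredDistanceTo(b); // 1*1 + 2*2 + 2*2 = 9
	(void)dd;

	return batch;
}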
|
||||
|
||||
|
||||
|
||||
|
||||
@@ -1,75 +0,0 @@
|
||||
|
||||
|
||||
#ifndef POTREE_CONVERTER_H
|
||||
#define POTREE_CONVERTER_H
|
||||
|
||||
#include "AABB.h"
|
||||
#include "CloudJS.hpp"
|
||||
#include "definitions.hpp"
|
||||
#include "PointReader.h"
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <cstdint>
|
||||
|
||||
using std::vector;
|
||||
using std::string;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
class SparseGrid;
|
||||
|
||||
struct FileInfos {
|
||||
AABB aabb;
|
||||
uint64_t numPoints = 0;
|
||||
};
|
||||
|
||||
class PotreeConverter{
|
||||
|
||||
private:
|
||||
AABB aabb;
|
||||
vector<string> sources;
|
||||
string workDir;
|
||||
CloudJS cloudjs;
|
||||
PointAttributes pointAttributes;
|
||||
|
||||
PointReader *createPointReader(string source, PointAttributes pointAttributes);
|
||||
void prepare();
|
||||
FileInfos computeInfos();
|
||||
void generatePage(string name);
|
||||
|
||||
public:
|
||||
float spacing;
|
||||
int maxDepth;
|
||||
string format;
|
||||
OutputFormat outputFormat;
|
||||
vector<string> outputAttributes;
|
||||
vector<double> colorRange;
|
||||
vector<double> intensityRange;
|
||||
double scale = 0.01;
|
||||
int diagonalFraction = 250;
|
||||
vector<double> aabbValues;
|
||||
string pageName = "";
|
||||
string pageTemplatePath = "";
|
||||
StoreOption storeOption = StoreOption::ABORT_IF_EXISTS;
|
||||
string projection = "";
|
||||
bool sourceListingOnly = false;
|
||||
ConversionQuality quality = ConversionQuality::DEFAULT;
|
||||
string title = "PotreeViewer";
|
||||
string description = "";
|
||||
bool edlEnabled = false;
|
||||
bool showSkybox = false;
|
||||
string material = "RGB";
|
||||
string executablePath;
|
||||
int storeSize = 20'000;
|
||||
int flushLimit = 10'000'000;
|
||||
|
||||
PotreeConverter(string executablePath, string workDir, vector<string> sources);
|
||||
|
||||
void convert();
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -1,34 +0,0 @@
|
||||
|
||||
#ifndef POTREEEXCEPTION_H
|
||||
#define POTREEEXCEPTION_H
|
||||
|
||||
// using standard exceptions
|
||||
#include <iostream>
|
||||
#include <exception>
|
||||
#include <string>
|
||||
|
||||
using std::exception;
|
||||
using std::string;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
class PotreeException: public exception{
|
||||
private:
|
||||
string message;
|
||||
|
||||
public:
|
||||
PotreeException(string message){
|
||||
this->message = message;
|
||||
}
|
||||
|
||||
virtual ~PotreeException() throw(){
|
||||
}
|
||||
|
||||
virtual const char* what() const throw(){
|
||||
return message.c_str();
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -1,155 +1,688 @@
|
||||
|
||||
|
||||
#ifndef POTREEWRITER_H
|
||||
#define POTREEWRITER_H
|
||||
|
||||
#include <string>
|
||||
#include <thread>
|
||||
#include <vector>
|
||||
#include <functional>
|
||||
|
||||
#include "AABB.h"
|
||||
#include "SparseGrid.h"
|
||||
#include "CloudJS.hpp"
|
||||
#include "PointAttributes.hpp"
|
||||
|
||||
using std::string;
|
||||
using std::thread;
|
||||
using std::vector;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
class PotreeWriter;
|
||||
class PointReader;
|
||||
class PointWriter;
|
||||
|
||||
class PWNode{
|
||||
|
||||
public:
|
||||
int index = -1;
|
||||
AABB aabb;
|
||||
AABB acceptedAABB;
|
||||
int level = 0;
|
||||
SparseGrid *grid;
|
||||
unsigned int numAccepted = 0;
|
||||
PWNode *parent = NULL;
|
||||
vector<PWNode*> children;
|
||||
bool addedSinceLastFlush = true;
|
||||
bool addCalledSinceLastFlush = false;
|
||||
PotreeWriter *potreeWriter;
|
||||
vector<Point> cache;
|
||||
//int storeLimit = 20'000;
|
||||
vector<Point> store;
|
||||
bool isInMemory = true;
|
||||
|
||||
PWNode(PotreeWriter* potreeWriter, AABB aabb);
|
||||
|
||||
PWNode(PotreeWriter* potreeWriter, int index, AABB aabb, int level);
|
||||
|
||||
~PWNode();
|
||||
|
||||
string name() const;
|
||||
|
||||
float spacing();
|
||||
|
||||
bool isLeafNode(){
|
||||
return children.size() == 0;
|
||||
}
|
||||
|
||||
bool isInnerNode(){
|
||||
return children.size() > 0;
|
||||
}
|
||||
|
||||
void loadFromDisk();
|
||||
|
||||
PWNode *add(Point &point);
|
||||
|
||||
PWNode *createChild(int childIndex);
|
||||
|
||||
void split();
|
||||
|
||||
string workDir();
|
||||
|
||||
string hierarchyPath();
|
||||
|
||||
string path();
|
||||
|
||||
void flush();
|
||||
|
||||
void traverse(std::function<void(PWNode*)> callback);
|
||||
|
||||
void traverseBreadthFirst(std::function<void(PWNode*)> callback);
|
||||
|
||||
vector<PWNode*> getHierarchy(int levels);
|
||||
|
||||
PWNode* findNode(string name);
|
||||
|
||||
private:
|
||||
|
||||
PointReader *createReader(string path);
|
||||
PointWriter *createWriter(string path);
|
||||
|
||||
};
|
||||
|
||||
|
||||
|
||||
class PotreeWriter{
|
||||
|
||||
public:
|
||||
|
||||
AABB aabb;
|
||||
AABB tightAABB;
|
||||
string workDir;
|
||||
float spacing;
|
||||
double scale = 0;
|
||||
int maxDepth = -1;
|
||||
PWNode *root;
|
||||
long long numAdded = 0;
|
||||
long long numAccepted = 0;
|
||||
CloudJS cloudjs;
|
||||
OutputFormat outputFormat;
|
||||
PointAttributes pointAttributes;
|
||||
int hierarchyStepSize = 5;
|
||||
vector<Point> store;
|
||||
thread storeThread;
|
||||
int pointsInMemory = 0;
|
||||
string projection = "";
|
||||
ConversionQuality quality = ConversionQuality::DEFAULT;
|
||||
int storeSize = 20'000;
|
||||
|
||||
|
||||
PotreeWriter(string workDir, ConversionQuality quality);
|
||||
|
||||
PotreeWriter(string workDir, AABB aabb, float spacing, int maxDepth, double scale, OutputFormat outputFormat, PointAttributes pointAttributes, ConversionQuality quality);
|
||||
|
||||
~PotreeWriter(){
|
||||
close();
|
||||
|
||||
delete root;
|
||||
}
|
||||
|
||||
string getExtension();
|
||||
|
||||
void processStore();
|
||||
|
||||
void waitUntilProcessed();
|
||||
|
||||
void add(Point &p);
|
||||
|
||||
void flush();
|
||||
|
||||
void close(){
|
||||
flush();
|
||||
}
|
||||
|
||||
void setProjection(string projection);
|
||||
|
||||
void loadStateFromDisk();
|
||||
|
||||
private:
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <thread>
|
||||
#include <mutex>
|
||||
#include <filesystem>
|
||||
#include <fstream>
|
||||
#include <random>
|
||||
|
||||
|
||||
#include "json.hpp"
|
||||
|
||||
using json = nlohmann::json;
|
||||
using namespace std;
|
||||
|
||||
namespace fs = std::experimental::filesystem;
|
||||
|
||||
struct PWNode {
|
||||
|
||||
string name = "";
|
||||
int64_t numPoints = 0;
|
||||
vector<PWNode*> children;
|
||||
|
||||
int64_t byteOffset = 0;
|
||||
int64_t byteSize = 0;
|
||||
|
||||
PWNode(string name) {
|
||||
this->name = name;
|
||||
this->children.resize(8, nullptr);
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
struct Subsample {
|
||||
vector<Point> subsample;
|
||||
vector<Point> remaining;
|
||||
};
|
||||
|
||||
Subsample subsampleLevel(vector<Point>& samples, double spacing, Vector3<double> min, Vector3<double> max) {
|
||||
|
||||
Vector3<double> size = max - min;
|
||||
double gridSizeD = (size.x / spacing) / 2.0;
|
||||
int gridSize = int(gridSizeD);
|
||||
|
||||
random_device rd;
|
||||
mt19937 mt(rd());
|
||||
uniform_real_distribution<double> random(0.0, 1.0);
|
||||
|
||||
vector<vector<int>> grid(gridSize * gridSize * gridSize);
|
||||
|
||||
// binning of points into cells
|
||||
for (int i = 0; i < samples.size(); i++) {
|
||||
|
||||
Point& point = samples[i];
|
||||
|
||||
double nx = (point.x - min.x) / size.x;
|
||||
double ny = (point.y - min.y) / size.y;
|
||||
double nz = (point.z - min.z) / size.z;
|
||||
|
||||
int ux = std::min(gridSize * nx, gridSize - 1.0);
|
||||
int uy = std::min(gridSize * ny, gridSize - 1.0);
|
||||
int uz = std::min(gridSize * nz, gridSize - 1.0);
|
||||
|
||||
int index = ux + uy * gridSize + uz * gridSize * gridSize;
|
||||
|
||||
vector<int>& cell = grid[index];
|
||||
cell.push_back(i);
|
||||
}
|
||||
|
||||
// select random point from each cell
|
||||
vector<Point> subsample;
|
||||
vector<Point> remaining;
|
||||
for (auto& cell : grid) {
|
||||
if (cell.size() == 0) {
|
||||
continue;
|
||||
}
|
||||
|
||||
double r = double(cell.size()) * random(mt);
|
||||
int selected = cell[int(r)];
|
||||
|
||||
for (int i : cell) {
if (i == selected) {
subsample.push_back(samples[i]);
} else {
remaining.push_back(samples[i]);
}
}
|
||||
}
|
||||
|
||||
return {subsample, remaining};
|
||||
}
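A hypothetical call site for subsampleLevel (the bounds and spacing here are made up for illustration): one call splits a level's points into the coarser parent-level selection and the points that stay at the current level.

// Hypothetical sketch (not part of the diff): subsample one level of points.
void sketch_subsampleOneLevel(vector<Point>& levelPoints) {
	Vector3<double> min = { 0.0, 0.0, 0.0 };
	Vector3<double> max = { 64.0, 64.0, 64.0 };
	double spacing = 1.0; // spacing of the coarser level being produced

	Subsample result = subsampleLevel(levelPoints, spacing, min, max);

	// result.subsample is promoted to the parent level,
	// result.remaining stays at the current level.
	cout << result.subsample.size() << " promoted, "
		<< result.remaining.size() << " remaining" << endl;
}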
|
||||
|
||||
struct SubsampleData {
|
||||
shared_ptr<Points> points = nullptr;
|
||||
string nodeName = "";
|
||||
};
|
||||
|
||||
shared_ptr<Points> toBufferData(vector<Point>& subsample, shared_ptr<Chunk> chunk, shared_ptr<Points> pointsInChunk) {
|
||||
|
||||
int numPoints = subsample.size();
|
||||
|
||||
Attributes attributes = pointsInChunk->attributes;
|
||||
|
||||
shared_ptr<Points> points = make_shared<Points>();
|
||||
// TODO potential source of error? does it copy or move the referenced data?
|
||||
// would be bad if it kept pointing to the reference, even after it is deleted
|
||||
points->points = subsample;
|
||||
points->attributes = attributes;
|
||||
uint64_t attributeBufferSize = attributes.byteSize * numPoints;
|
||||
points->attributeBuffer = make_shared<Buffer>(attributeBufferSize);
|
||||
|
||||
for (int i = 0; i < points->points.size(); i++) {
|
||||
|
||||
Point& point = points->points[i];
|
||||
|
||||
int srcIndex = point.index;
|
||||
int destIndex = i;
|
||||
|
||||
uint8_t* attSrc = pointsInChunk->attributeBuffer->dataU8 + (attributes.byteSize * srcIndex);
|
||||
uint8_t* attDest = points->attributeBuffer->dataU8 + (attributes.byteSize * destIndex);
|
||||
|
||||
memcpy(attDest, attSrc, attributes.byteSize);
|
||||
|
||||
point.index = destIndex;
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
vector<SubsampleData> subsampleLowerLevels(shared_ptr<Chunk> chunk, shared_ptr<Points> pointsInChunk, shared_ptr<Node> chunkRoot) {
|
||||
|
||||
int startLevel = chunkRoot->name.size() - 1;
|
||||
|
||||
string currentName = chunkRoot->name;
|
||||
vector<Point>& currentSample = chunkRoot->accepted;
|
||||
double currentSpacing = chunkRoot->spacing / 2.0;
|
||||
auto min = chunk->min;
|
||||
auto max = chunk->max;
|
||||
|
||||
vector<SubsampleData> subsamples;
|
||||
|
||||
for (int level = startLevel; level >= 0; level--) {
|
||||
|
||||
Subsample subsample = subsampleLevel(currentSample, currentSpacing, min, max);
|
||||
|
||||
if (level == startLevel) {
|
||||
chunkRoot->accepted = subsample.remaining;
|
||||
}
|
||||
|
||||
shared_ptr<Points> subsampleBuffer = toBufferData(subsample.remaining, chunk, pointsInChunk);
|
||||
|
||||
SubsampleData subData = { subsampleBuffer, currentName };
|
||||
|
||||
subsamples.push_back(subData);
|
||||
|
||||
currentName = currentName.substr(0, currentName.size() - 1);
|
||||
currentSample = subsample.subsample;
|
||||
currentSpacing = currentSpacing * 2.0;
|
||||
}
|
||||
|
||||
|
||||
return subsamples;
|
||||
}
|
||||
|
||||
class PotreeWriter {
|
||||
public:
|
||||
|
||||
string targetDirectory = "";
|
||||
string pathData = "";
|
||||
string pathCloudJs = "";
|
||||
string pathHierarchy = "";
|
||||
|
||||
Vector3<double> min;
|
||||
Vector3<double> max;
|
||||
double scale = 1.0;
|
||||
double spacing = 1.0;
|
||||
int upperLevels = 1;
|
||||
|
||||
PWNode* root = nullptr;
|
||||
unordered_map<Node*, PWNode*> pwNodes;
|
||||
|
||||
mutex mtx_writeChunk;
|
||||
mutex* mtx_test = new mutex();
|
||||
|
||||
int currentByteOffset = 0;
|
||||
mutex mtx_byteOffset;
|
||||
|
||||
vector<SubsampleData> lowerLevelSubsamples;
|
||||
|
||||
fstream* fsFile = nullptr;
|
||||
|
||||
PotreeWriter(string targetDirectory,
|
||||
Vector3<double> min, Vector3<double> max,
|
||||
double spacing, double scale, int upperLevels,
|
||||
vector<shared_ptr<Chunk>> chunks) {
|
||||
|
||||
|
||||
this->targetDirectory = targetDirectory;
|
||||
this->min = min;
|
||||
this->max = max;
|
||||
this->spacing = spacing;
|
||||
this->scale = scale;
|
||||
this->upperLevels = upperLevels;
|
||||
|
||||
fs::create_directories(targetDirectory);
|
||||
|
||||
pathData = targetDirectory + "/octree.data";
|
||||
pathCloudJs = targetDirectory + "/cloud.json";
|
||||
pathHierarchy = targetDirectory + "/hierarchy.json";
|
||||
|
||||
fs::remove(pathData);
|
||||
|
||||
root = new PWNode("r");
|
||||
|
||||
vector<string> nodeIDs;
|
||||
for (auto chunk : chunks) {
|
||||
nodeIDs.push_back(chunk->id);
|
||||
}
|
||||
createNodes(nodeIDs);
|
||||
}
|
||||
|
||||
struct ChildParams {
|
||||
Vector3<double> min;
|
||||
Vector3<double> max;
|
||||
Vector3<double> size;
|
||||
int id;
|
||||
};
|
||||
|
||||
uint64_t increaseByteOffset(uint64_t amount) {
|
||||
|
||||
lock_guard<mutex> lock(mtx_byteOffset);
|
||||
|
||||
uint64_t old = currentByteOffset;
|
||||
|
||||
currentByteOffset += amount;
|
||||
|
||||
return old;
|
||||
}
|
||||
|
||||
ChildParams computeChildParameters(Vector3<double>& min, Vector3<double>& max, Vector3<double> point){
|
||||
|
||||
auto size = max - min;
|
||||
|
||||
double nx = (point.x - min.x) / size.x;
|
||||
double ny = (point.y - min.y) / size.y;
|
||||
double nz = (point.z - min.z) / size.z;
|
||||
|
||||
Vector3<double> childMin;
|
||||
Vector3<double> childMax;
|
||||
Vector3<double> center = min + size / 2.0;
|
||||
|
||||
int childIndex = 0;
|
||||
|
||||
if (nx > 0.5) {
|
||||
childIndex = childIndex | 0b100;
|
||||
childMin.x = center.x;
|
||||
childMax.x = max.x;
|
||||
} else {
|
||||
childMin.x = min.x;
|
||||
childMax.x = center.x;
|
||||
}
|
||||
|
||||
if (ny > 0.5) {
|
||||
childIndex = childIndex | 0b010;
|
||||
childMin.y = center.y;
|
||||
childMax.y = max.y;
|
||||
} else {
|
||||
childMin.y = min.y;
|
||||
childMax.y = center.y;
|
||||
}
|
||||
|
||||
if (nz > 0.5) {
|
||||
childIndex = childIndex | 0b001;
|
||||
childMin.z = center.z;
|
||||
childMax.z = max.z;
|
||||
} else {
|
||||
childMin.z = min.z;
|
||||
childMax.z = center.z;
|
||||
}
|
||||
|
||||
ChildParams params;
|
||||
params.min = childMin;
|
||||
params.max = childMax;
|
||||
params.size = childMax - childMin;
|
||||
params.id = childIndex;
|
||||
|
||||
return params;
|
||||
}
|
||||
|
||||
vector<int> toVectorID(string stringID) {
|
||||
vector<int> id;
|
||||
|
||||
for (int i = 1; i < stringID.size(); i++) {
|
||||
|
||||
int index = stringID[i] - '0'; // ... ouch
|
||||
|
||||
id.push_back(index);
|
||||
}
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
vector<int> computeNodeID(Node* node) {
|
||||
|
||||
auto min = this->min;
|
||||
auto max = this->max;
|
||||
auto target = (node->min + node->max) / 2.0;
|
||||
|
||||
vector<int> id;
|
||||
|
||||
for (int i = 0; i < upperLevels; i++) {
|
||||
auto childParams = computeChildParameters(min, max, target);
|
||||
|
||||
id.push_back(childParams.id);
|
||||
|
||||
min = childParams.min;
|
||||
max = childParams.max;
|
||||
}
|
||||
|
||||
return id;
|
||||
}
|
||||
|
||||
void createNodes(vector<string> nodeIDs) {
|
||||
|
||||
for (string nodeID : nodeIDs) {
|
||||
PWNode* node = root;
|
||||
|
||||
vector<int> id = toVectorID(nodeID);
|
||||
|
||||
for (int childIndex : id) {
|
||||
|
||||
if (node->children[childIndex] == nullptr) {
|
||||
string childName = node->name + to_string(childIndex);
|
||||
PWNode* child = new PWNode(childName);
|
||||
|
||||
node->children[childIndex] = child;
|
||||
}
|
||||
|
||||
node = node->children[childIndex];
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
PWNode* findPWNode(vector<int> id) {
|
||||
|
||||
PWNode* node = root;
|
||||
|
||||
for (int childIndex : id) {
|
||||
|
||||
//if (node->children[childIndex] == nullptr) {
|
||||
// string childName = node->name + to_string(childIndex);
|
||||
// PWNode* child = new PWNode(childName);
|
||||
|
||||
// node->children[childIndex] = child;
|
||||
//}
|
||||
|
||||
node = node->children[childIndex];
|
||||
}
|
||||
|
||||
return node;
|
||||
}
|
||||
|
||||
|
||||
|
||||
void writeChunk(shared_ptr<Chunk> chunk, shared_ptr<Points> points, shared_ptr<Node> chunkRoot) {
|
||||
|
||||
double tStart = now();
|
||||
|
||||
// returns subsamples and removes subsampled points from nodes
|
||||
// TODO not happy with passing a pointer here
|
||||
auto subsamples = subsampleLowerLevels(chunk, points, chunkRoot);
|
||||
|
||||
struct NodePairing {
|
||||
shared_ptr<Node> node = nullptr;
|
||||
PWNode* pwNode = nullptr;
|
||||
|
||||
NodePairing(shared_ptr<Node> node, PWNode* pwNode) {
|
||||
this->node = node;
|
||||
this->pwNode = pwNode;
|
||||
}
|
||||
};
|
||||
|
||||
function<void(shared_ptr<Node>, PWNode*, vector<NodePairing> & nodes)> flatten = [&flatten](shared_ptr<Node> node, PWNode* pwNode, vector<NodePairing>& nodes) {
|
||||
nodes.emplace_back(node, pwNode);
|
||||
|
||||
//for (int i = 0; i < node->children.size(); i++) {
|
||||
|
||||
//auto child = node->children[i];
|
||||
for(auto child : node->children){
|
||||
|
||||
if (child == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
PWNode* pwChild = new PWNode(child->name);
|
||||
pwNode->children.push_back(pwChild);
|
||||
|
||||
flatten(child, pwChild, nodes);
|
||||
}
|
||||
|
||||
return nodes;
|
||||
};
|
||||
|
||||
PWNode* pwChunkRoot = new PWNode(chunkRoot->name);
|
||||
vector<NodePairing> nodes;
|
||||
flatten(chunkRoot, pwChunkRoot, nodes);
|
||||
|
||||
Attributes attributes = points->attributes;
|
||||
auto attributeBuffer = points->attributeBuffer;
|
||||
const char* ccAttributeBuffer = attributeBuffer->dataChar;
|
||||
|
||||
auto min = this->min;
|
||||
auto scale = this->scale;
|
||||
|
||||
uint64_t bufferSize = 0;
|
||||
int bytesPerPoint = 12 + attributes.byteSize;
|
||||
|
||||
for (NodePairing pair : nodes) {
|
||||
int numPoints = pair.node->accepted.size() + pair.node->store.size();
|
||||
int nodeBufferSize = numPoints * bytesPerPoint;
|
||||
|
||||
bufferSize += nodeBufferSize;
|
||||
}
|
||||
|
||||
vector<uint8_t> buffer(bufferSize, 0);
|
||||
uint64_t bufferOffset = 0;
|
||||
|
||||
auto writePoint = [&bufferOffset, &bytesPerPoint , &buffer, &min, &scale, &attributes, &attributeBuffer](Point& point) {
|
||||
int32_t ix = int32_t((point.x - min.x) / scale);
|
||||
int32_t iy = int32_t((point.y - min.y) / scale);
|
||||
int32_t iz = int32_t((point.z - min.z) / scale);
|
||||
|
||||
memcpy(buffer.data() + bufferOffset + 0, reinterpret_cast<void*>(&ix), sizeof(int32_t));
|
||||
memcpy(buffer.data() + bufferOffset + 4, reinterpret_cast<void*>(&iy), sizeof(int32_t));
|
||||
memcpy(buffer.data() + bufferOffset + 8, reinterpret_cast<void*>(&iz), sizeof(int32_t));
|
||||
|
||||
int64_t attributeOffset = point.index * attributes.byteSize;
|
||||
|
||||
auto attributeTarget = buffer.data() + bufferOffset + 12;
|
||||
auto attributeSource = attributeBuffer->dataU8 + attributeOffset;
|
||||
memcpy(attributeTarget, attributeSource, attributes.byteSize);
|
||||
|
||||
bufferOffset += bytesPerPoint;
|
||||
};
|
||||
|
||||
|
||||
for (NodePairing& pair: nodes) {
|
||||
|
||||
for (Point& point : pair.node->accepted) {
|
||||
writePoint(point);
|
||||
}
|
||||
|
||||
for (Point& point : pair.node->store) {
|
||||
writePoint(point);
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
// ==============================================================================
|
||||
// FROM HERE ON, ONLY ONE THREAD UPDATES THE HIERARCHY DATA AND WRITES TO FILE
|
||||
// ==============================================================================
|
||||
|
||||
double tLockStart = now();
|
||||
lock_guard<mutex> lock(mtx_writeChunk);
|
||||
|
||||
//lowerLevelSubsamples.push_back(subsamples);
|
||||
lowerLevelSubsamples.insert(
|
||||
lowerLevelSubsamples.begin(),
|
||||
subsamples.begin(),
|
||||
subsamples.end()
|
||||
);
|
||||
|
||||
double lockDuration = now() - tLockStart;
|
||||
if (lockDuration > 0.1) {
|
||||
cout << "long lock duration: " << lockDuration << " s" << endl;
|
||||
}
|
||||
|
||||
for (NodePairing& pair : nodes) {
|
||||
int numPoints = pair.node->accepted.size() + pair.node->store.size();
|
||||
int nodeBufferSize = numPoints * bytesPerPoint;
|
||||
|
||||
pair.pwNode->byteOffset = currentByteOffset;
|
||||
pair.pwNode->byteSize = nodeBufferSize;
|
||||
pair.pwNode->numPoints = numPoints;
|
||||
|
||||
currentByteOffset += nodeBufferSize;
|
||||
}
|
||||
|
||||
// attach local chunk-root to global hierarchy, by replacing previously created dummy
|
||||
vector<int> pwid = toVectorID(pwChunkRoot->name);
|
||||
PWNode* pwMain = findPWNode(pwid);
|
||||
PWNode* pwLocal = pwChunkRoot;
|
||||
|
||||
pwMain->byteOffset = pwLocal->byteOffset;
|
||||
pwMain->byteSize = pwLocal->byteSize;
|
||||
pwMain->children = pwLocal->children;
|
||||
pwMain->name = pwLocal->name;
|
||||
pwMain->numPoints = pwLocal->numPoints;
|
||||
|
||||
// now write everything to file
|
||||
if (fsFile == nullptr) {
|
||||
fsFile = new fstream();
|
||||
fsFile->open(pathData, ios::out | ios::binary | ios::app);
|
||||
}
|
||||
|
||||
fsFile->write(reinterpret_cast<const char*>(buffer.data()), buffer.size());
|
||||
|
||||
// fsFile is closed in PotreeWriter::close()
|
||||
}
|
||||
|
||||
void processLowerLevelSubsamples() {
|
||||
|
||||
unordered_map<string, vector<shared_ptr<Points>>> data;
|
||||
|
||||
for (SubsampleData& subsample : lowerLevelSubsamples) {
|
||||
data[subsample.nodeName].push_back(subsample.points);
|
||||
}
|
||||
|
||||
for (auto it : data) {
|
||||
string nodeName = it.first;
|
||||
vector<shared_ptr<Points>> batches = it.second;
|
||||
|
||||
vector<int> id = toVectorID(nodeName);
|
||||
PWNode* pwNode = findPWNode(id);
|
||||
|
||||
int numPoints = 0;
|
||||
for (auto batch : batches) {
|
||||
numPoints += batch->points.size();
|
||||
}
|
||||
|
||||
Attributes attributes = batches[0]->attributes;
|
||||
|
||||
int bytesPerPoint = 12 + attributes.byteSize;
|
||||
int bufferSize = numPoints * bytesPerPoint;
|
||||
vector<uint8_t> buffer(bufferSize, 0);
|
||||
uint64_t bufferOffset = 0;
|
||||
auto min = this->min;
|
||||
auto scale = this->scale;
|
||||
|
||||
auto writePoint = [&bufferOffset, &bytesPerPoint, &buffer, &min, &scale](Point& point, shared_ptr<Points> points) {
|
||||
int32_t ix = int32_t((point.x - min.x) / scale);
|
||||
int32_t iy = int32_t((point.y - min.y) / scale);
|
||||
int32_t iz = int32_t((point.z - min.z) / scale);
|
||||
|
||||
memcpy(buffer.data() + bufferOffset + 0, reinterpret_cast<void*>(&ix), sizeof(int32_t));
|
||||
memcpy(buffer.data() + bufferOffset + 4, reinterpret_cast<void*>(&iy), sizeof(int32_t));
|
||||
memcpy(buffer.data() + bufferOffset + 8, reinterpret_cast<void*>(&iz), sizeof(int32_t));
|
||||
|
||||
int64_t attributeOffset = point.index * points->attributes.byteSize;
|
||||
|
||||
auto attributeTarget = buffer.data() + bufferOffset + 12;
|
||||
auto attributeSource = points->attributeBuffer->dataU8 + attributeOffset;
|
||||
memcpy(attributeTarget, attributeSource, points->attributes.byteSize);
|
||||
|
||||
bufferOffset += bytesPerPoint;
|
||||
};
|
||||
|
||||
for (auto batch : batches) {
|
||||
for (Point& point : batch->points) {
|
||||
writePoint(point, batch);
|
||||
}
|
||||
}
|
||||
|
||||
fsFile->write(reinterpret_cast<const char*>(buffer.data()), buffer.size());
|
||||
|
||||
|
||||
pwNode->numPoints = numPoints;
|
||||
pwNode->byteSize = bufferSize;
|
||||
pwNode->byteOffset = currentByteOffset;
|
||||
|
||||
currentByteOffset += bufferSize;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
void close() {
|
||||
|
||||
processLowerLevelSubsamples();
|
||||
|
||||
fsFile->close();
|
||||
|
||||
writeHierarchy();
|
||||
writeCloudJson();
|
||||
|
||||
}
|
||||
|
||||
void writeCloudJson() {
|
||||
|
||||
auto min = this->min;
|
||||
auto max = this->max;
|
||||
|
||||
json box = {
|
||||
{"min", {min.x, min.y, min.z}},
|
||||
{"max", {max.x, max.y, max.z}},
|
||||
};
|
||||
|
||||
json aPosition = {
|
||||
{"name", "position"},
|
||||
{"elements", 3},
|
||||
{"elementSize", 4},
|
||||
{"type", "int32"},
|
||||
};
|
||||
|
||||
json aRGBA = {
|
||||
{"name", "rgba"},
|
||||
{"elements", 4},
|
||||
{"elementSize", 1},
|
||||
{"type", "uint8"},
|
||||
};
|
||||
|
||||
json attributes = {
|
||||
{"bla", "blubb"}
|
||||
};
|
||||
|
||||
json js = {
|
||||
{"version", "1.9"},
|
||||
{"projection", ""},
|
||||
{"boundingBox", box},
|
||||
{"spacing", spacing},
|
||||
{"scale", scale},
|
||||
{"attributes", {aPosition, aRGBA}},
|
||||
};
|
||||
|
||||
|
||||
{
|
||||
string str = js.dump(4);
|
||||
|
||||
fstream file;
|
||||
file.open(pathCloudJs, ios::out);
|
||||
|
||||
file << str;
|
||||
|
||||
file.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
void writeHierarchy() {
|
||||
|
||||
function<json(PWNode*)> traverse = [&traverse](PWNode* node) -> json {
|
||||
|
||||
vector<json> jsChildren;
|
||||
for (PWNode* child : node->children) {
|
||||
if (child == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
json jsChild = traverse(child);
|
||||
jsChildren.push_back(jsChild);
|
||||
}
|
||||
|
||||
uint64_t numPoints = node->numPoints;
|
||||
int64_t byteOffset = node->byteOffset;
|
||||
int64_t byteSize = node->byteSize;
|
||||
|
||||
json jsNode = {
|
||||
{"name", node->name},
|
||||
{"numPoints", numPoints},
|
||||
{"byteOffset", byteOffset},
|
||||
{"byteSize", byteSize},
|
||||
{"children", jsChildren}
|
||||
};
|
||||
|
||||
return jsNode;
|
||||
};
|
||||
|
||||
json js;
|
||||
js["hierarchy"] = traverse(root);
|
||||
|
||||
{ // write to file
|
||||
string str = js.dump(4);
|
||||
|
||||
string jsPath = pathHierarchy;
|
||||
|
||||
fstream file;
|
||||
file.open(jsPath, ios::out);
|
||||
|
||||
file << str;
|
||||
|
||||
file.close();
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
};
|
||||
@@ -1,59 +1,130 @@
|
||||
|
||||
|
||||
#ifndef SPARSE_GRID_H
|
||||
#define SPARSE_GRID_H
|
||||
|
||||
#include "AABB.h"
|
||||
#include "Point.h"
|
||||
#include "GridCell.h"
|
||||
|
||||
#include <map>
|
||||
#include <unordered_map>
|
||||
#include <vector>
|
||||
#include <math.h>
|
||||
|
||||
using std::vector;
|
||||
using std::map;
|
||||
using std::unordered_map;
|
||||
using std::min;
|
||||
using std::max;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
#define MAX_FLOAT std::numeric_limits<float>::max()
|
||||
|
||||
class SparseGrid : public unordered_map<long long, GridCell*>{
|
||||
public:
|
||||
int width;
|
||||
int height;
|
||||
int depth;
|
||||
AABB aabb;
|
||||
float squaredSpacing;
|
||||
unsigned int numAccepted = 0;
|
||||
|
||||
SparseGrid(AABB aabb, float minGap);
|
||||
|
||||
SparseGrid(const SparseGrid &other)
|
||||
: width(other.width), height(other.height), depth(other.depth), aabb(other.aabb), squaredSpacing(other.squaredSpacing), numAccepted(other.numAccepted)
|
||||
{
|
||||
}
|
||||
|
||||
~SparseGrid();
|
||||
|
||||
bool isDistant(const Vector3<double> &p, GridCell *cell);
|
||||
|
||||
bool isDistant(const Vector3<double> &p, GridCell *cell, float &squaredSpacing);
|
||||
|
||||
bool willBeAccepted(const Vector3<double> &p);
|
||||
|
||||
bool willBeAccepted(const Vector3<double> &p, float &squaredSpacing);
|
||||
|
||||
bool add(Vector3<double> &p);
|
||||
|
||||
void addWithoutCheck(Vector3<double> &p);
|
||||
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <functional>
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <unordered_map>
|
||||
#include <algorithm>
|
||||
|
||||
#include "Vector3.h"
|
||||
#include "Points.h"
|
||||
|
||||
using namespace std;
|
||||
|
||||
|
||||
struct Cell {
|
||||
vector<Point> accepted;
|
||||
};
|
||||
|
||||
|
||||
class SparseGrid {
|
||||
|
||||
public:
|
||||
|
||||
Vector3<double> min;
|
||||
Vector3<double> max;
|
||||
Vector3<double> size;
|
||||
double spacing = 1.0;
|
||||
double squaredSpacing = 1.0;
|
||||
|
||||
unordered_map<uint64_t, Cell*> grid;
|
||||
int64_t gridSize = 1;
|
||||
double gridSizeD = 1.0;
|
||||
|
||||
SparseGrid(Vector3<double> min, Vector3<double> max, double spacing) {
|
||||
this->min = min;
|
||||
this->max = max;
|
||||
this->size = max - min;
|
||||
this->spacing = spacing;
|
||||
this->squaredSpacing = spacing * spacing;
|
||||
|
||||
gridSize = 5.0 * size.x / spacing;
|
||||
gridSizeD = double(gridSize);
|
||||
}
|
||||
|
||||
bool isDistant(Point& candidate) {
|
||||
|
||||
Vector3<int64_t> gridCoord = toGridCoordinate(candidate);
|
||||
|
||||
int64_t xStart = std::max(gridCoord.x - 1, int64_t(0));
int64_t yStart = std::max(gridCoord.y - 1, int64_t(0));
int64_t zStart = std::max(gridCoord.z - 1, int64_t(0));
|
||||
int64_t xEnd = std::min(gridCoord.x + 1, gridSize - 1);
|
||||
int64_t yEnd = std::min(gridCoord.y + 1, gridSize - 1);
|
||||
int64_t zEnd = std::min(gridCoord.z + 1, gridSize - 1);
|
||||
|
||||
for (uint64_t x = xStart; x <= xEnd; x++) {
|
||||
for (uint64_t y = yStart; y <= yEnd; y++) {
|
||||
for (uint64_t z = zStart; z <= zEnd; z++) {
|
||||
|
||||
uint64_t index = x + y * gridSize + z * gridSize * gridSize;
|
||||
|
||||
auto it = grid.find(index);
|
||||
|
||||
if (it == grid.end()) {
|
||||
continue;
|
||||
}
|
||||
|
||||
Cell* cell = it->second;
|
||||
|
||||
for (Point& alreadyAccepted : cell->accepted) {
|
||||
double dd = candidate.squaredDistanceTo(alreadyAccepted);
|
||||
|
||||
if (dd < squaredSpacing) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void add(Point& point) {
|
||||
uint64_t index = toGridIndex(point);
|
||||
|
||||
if (grid.find(index) == grid.end()) {
|
||||
Cell* cell = new Cell();
|
||||
|
||||
grid.insert(std::make_pair(index, cell));
|
||||
}
|
||||
|
||||
grid[index]->accepted.push_back(point);
|
||||
}
|
||||
|
||||
Vector3<int64_t> toGridCoordinate(Point& point) {
|
||||
|
||||
double x = point.x;
|
||||
double y = point.y;
|
||||
double z = point.z;
|
||||
|
||||
int64_t ux = int32_t(gridSizeD * (x - min.x) / size.x);
|
||||
int64_t uy = int32_t(gridSizeD * (y - min.y) / size.y);
|
||||
int64_t uz = int32_t(gridSizeD * (z - min.z) / size.z);
|
||||
|
||||
return { ux, uy, uz };
|
||||
}
|
||||
|
||||
uint64_t toGridIndex(Point& point) {
|
||||
|
||||
double x = point.x;
|
||||
double y = point.y;
|
||||
double z = point.z;
|
||||
|
||||
int64_t ux = int32_t(gridSizeD * (x - min.x) / size.x);
|
||||
int64_t uy = int32_t(gridSizeD * (y - min.y) / size.y);
|
||||
int64_t uz = int32_t(gridSizeD * (z - min.z) / size.z);
|
||||
|
||||
ux = std::min(ux, gridSize - 1);
|
||||
uy = std::min(uy, gridSize - 1);
|
||||
uz = std::min(uz, gridSize - 1);
|
||||
|
||||
uint64_t index = ux + gridSize * uy + gridSize * gridSize * uz;
|
||||
|
||||
return index;
|
||||
}
|
||||
|
||||
};
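The pattern in which this grid is used elsewhere in the changeset, written out as a standalone hypothetical sketch: a point is accepted only if it is at least the spacing away from everything accepted so far.

// Hypothetical sketch (not part of the diff): greedy distance-based filtering
// with SparseGrid, mirroring how Node::add uses isDistant()/add().
inline vector<Point> sketch_poissonFilter(vector<Point>& candidates,
	Vector3<double> min, Vector3<double> max, double spacing) {

	SparseGrid grid(min, max, spacing);
	vector<Point> accepted;

	for (Point& candidate : candidates) {
		if (grid.isDistant(candidate)) {
			grid.add(candidate);
			accepted.push_back(candidate);
		}
	}

	return accepted;
}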
|
||||
122 PotreeConverter/include/Subsampler.h Normal file
@@ -0,0 +1,122 @@
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <assert.h>
|
||||
#include <filesystem>
|
||||
#include <iostream>
|
||||
#include <limits>
|
||||
|
||||
#include "Points.h"
|
||||
#include "Vector3.h"
|
||||
|
||||
using std::string;
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
using std::numeric_limits;
|
||||
namespace fs = std::experimental::filesystem;
|
||||
|
||||
|
||||
|
||||
class Subsampler {
|
||||
|
||||
struct Cell {
|
||||
Vector3<double> point;
|
||||
double squaredDistToCellCenter = numeric_limits<double>::infinity();
|
||||
};
|
||||
|
||||
|
||||
public:
|
||||
|
||||
string targetDirectory = "";
|
||||
double spacing = 1.0;
|
||||
Vector3<double> min = { 0.0, 0.0, 0.0 };
|
||||
Vector3<double> max = { 0.0, 0.0, 0.0 };
|
||||
Vector3<double> size = { 0.0, 0.0, 0.0 };
|
||||
double cubeSize = 0.0;
|
||||
|
||||
vector<Cell*> grid;
|
||||
vector<Vector3<double>> accepted;
|
||||
int gridSize = 1;
|
||||
|
||||
Subsampler(string targetDirectory, double spacing, Vector3<double> min, Vector3<double> max) {
|
||||
this->targetDirectory = targetDirectory;
|
||||
this->spacing = spacing;
|
||||
this->min = min;
|
||||
this->max = max;
|
||||
this->size = max - min;
|
||||
this->cubeSize = std::max(std::max(size.x, size.y), size.z);
|
||||
|
||||
this->gridSize = int(cubeSize / spacing);
|
||||
|
||||
cout << "gridSize: " << gridSize << endl;
|
||||
int numCells = gridSize * gridSize * gridSize;
|
||||
grid.resize(numCells, nullptr);
|
||||
|
||||
}
|
||||
|
||||
void add(Points* batch) {
|
||||
|
||||
double gridSizeD = double(gridSize);
|
||||
Vector3<double> cellsD = Vector3<double>(gridSizeD, gridSizeD, gridSizeD);
|
||||
|
||||
for (int64_t i = 0; i < batch->points.size(); i++) {
|
||||
Point point = batch->points[i];
|
||||
double x = point.x;
|
||||
double y = point.y;
|
||||
double z = point.z;
|
||||
|
||||
double gx = cellsD.x * (x - min.x) / size.x;
|
||||
double gy = cellsD.y * (y - min.y) / size.y;
|
||||
double gz = cellsD.z * (z - min.z) / size.z;
|
||||
|
||||
gx = std::min(gx, std::nextafter(gx, min.x));
|
||||
gy = std::min(gy, std::nextafter(gy, min.y));
|
||||
gz = std::min(gz, std::nextafter(gz, min.z));
|
||||
|
||||
double dx = fmod(gx, 1.0);
|
||||
double dy = fmod(gy, 1.0);
|
||||
double dz = fmod(gz, 1.0);
|
||||
double squaredDistToCellCenter = dx * dx + dy * dy + dz * dz;
|
||||
|
||||
int32_t ux = int32_t(gx);
|
||||
int32_t uy = int32_t(gy);
|
||||
int32_t uz = int32_t(gz);
|
||||
|
||||
int32_t index = ux + gridSize * uy + gridSize * gridSize * uz;
|
||||
|
||||
assert(index >= 0 && index < grid.size());
|
||||
|
||||
if (grid[index] == nullptr) {
|
||||
Cell* cell = new Cell();
|
||||
|
||||
cell->point = { x, y, z };
|
||||
grid[index] = cell;
|
||||
} else {
|
||||
Cell* cell = grid[index];
|
||||
|
||||
if (squaredDistToCellCenter < cell->squaredDistToCellCenter) {
|
||||
cell->point = {x, y, z};
|
||||
cell->squaredDistToCellCenter = squaredDistToCellCenter;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
vector<Vector3<double>> getPoints() {
|
||||
|
||||
vector<Vector3<double>> points;
|
||||
|
||||
for (Cell* cell : grid) {
|
||||
if (cell == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
auto point = cell->point;
|
||||
points.push_back(point);
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
};
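A hypothetical driver for the grid subsampler above; the batches vector and the "./subsampled" directory name are placeholders for whatever reader and output path the caller actually uses.

// Hypothetical sketch (not part of the diff): feed batches into the subsampler,
// then collect one representative point per occupied grid cell.
inline vector<Vector3<double>> sketch_runSubsampler(vector<Points*>& batches,
	Vector3<double> min, Vector3<double> max, double spacing) {

	Subsampler subsampler("./subsampled", spacing, min, max);

	for (Points* batch : batches) {
		subsampler.add(batch);
	}

	return subsampler.getPoints();
}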
|
||||
206 PotreeConverter/include/Subsampler_PoissonDisc.h Normal file
@@ -0,0 +1,206 @@
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <assert.h>
|
||||
#include <filesystem>
|
||||
#include <iostream>
|
||||
#include <limits>
|
||||
#include <unordered_map>
|
||||
#include <vector>
|
||||
|
||||
#include "Points.h"
|
||||
#include "Vector3.h"
|
||||
|
||||
using std::string;
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
using std::numeric_limits;
|
||||
using std::vector;
|
||||
using std::unordered_map;
|
||||
namespace fs = std::experimental::filesystem;
|
||||
|
||||
|
||||
|
||||
class Subsampler_PoissonDisc {
|
||||
|
||||
struct Cell {
|
||||
Vector3<double> point;
|
||||
double squaredDistToCellCenter = numeric_limits<double>::infinity();
|
||||
};
|
||||
|
||||
|
||||
public:
|
||||
|
||||
string targetDirectory = "";
|
||||
double spacing = 1.0;
|
||||
double spacingSquared = 1.0;
|
||||
Vector3<double> min = { 0.0, 0.0, 0.0 };
|
||||
Vector3<double> max = { 0.0, 0.0, 0.0 };
|
||||
Vector3<double> size = { 0.0, 0.0, 0.0 };
|
||||
double cubeSize = 0.0;
|
||||
|
||||
vector<Cell*> grid;
|
||||
//unordered_map<int64_t, Cell*> grid;
|
||||
vector<Vector3<double>> accepted;
|
||||
int gridSize = 1;
|
||||
vector<int64_t> randomIndices;
|
||||
|
||||
Subsampler_PoissonDisc(string targetDirectory, double spacing, Vector3<double> min, Vector3<double> max) {
|
||||
this->targetDirectory = targetDirectory;
|
||||
this->spacing = spacing;
|
||||
this->min = min;
|
||||
this->max = max;
|
||||
this->size = max - min;
|
||||
this->cubeSize = std::max(std::max(size.x, size.y), size.z);
|
||||
|
||||
this->spacingSquared = spacing * spacing;
|
||||
|
||||
double cellSize = sqrt(2.0 * spacingSquared);
|
||||
this->gridSize = int(cubeSize / cellSize);
|
||||
|
||||
cout << "gridSize: " << gridSize << endl;
|
||||
// vector variant
|
||||
int numCells = gridSize * gridSize * gridSize;
|
||||
grid.resize(numCells, nullptr);
|
||||
|
||||
randomIndices.reserve(1'000'000);
|
||||
for (int64_t i = 0; i < 1'000'000; i++) {
|
||||
randomIndices.push_back(i);
|
||||
}
|
||||
std::random_shuffle(randomIndices.begin(), randomIndices.end());
|
||||
}
|
||||
|
||||
double closestDistanceToNeighbor(
|
||||
double x, double y, double z,
|
||||
int32_t ux, int32_t uy, int32_t uz) {
|
||||
|
||||
//int32_t index = ux + gridSize * uy + gridSize * gridSize * uz;
|
||||
|
||||
double closestDistance = 10000000.0;
|
||||
|
||||
int64_t xStart = std::max(ux - 2, 0);
|
||||
int64_t yStart = std::max(uy - 2, 0);
|
||||
int64_t zStart = std::max(uz - 2, 0);
|
||||
int64_t xEnd = std::min(ux + 2, gridSize - 1);
|
||||
int64_t yEnd = std::min(uy + 2, gridSize - 1);
|
||||
int64_t zEnd = std::min(uz + 2, gridSize - 1);
|
||||
|
||||
for (int64_t nx = xStart; nx <= xEnd; nx++) {
|
||||
for (int64_t ny = yStart; ny <= yEnd; ny++) {
|
||||
for (int64_t nz = zStart; nz <= zEnd; nz++) {
|
||||
|
||||
int32_t index = nx + gridSize * ny + gridSize * gridSize * nz;
|
||||
|
||||
// map variant
|
||||
//if (grid.find(index) == grid.end()) {
|
||||
// continue;
|
||||
//}
|
||||
|
||||
Cell* cell = grid[index];
|
||||
|
||||
// vector variant
|
||||
if (cell == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
double dx = cell->point.x - x;
|
||||
double dy = cell->point.y - y;
|
||||
double dz = cell->point.z - z;
|
||||
|
||||
double dd = dx * dx + dy * dy + dz * dz;
|
||||
|
||||
closestDistance = std::min(closestDistance, dd);
|
||||
|
||||
if (closestDistance <= spacingSquared) {
|
||||
return closestDistance;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return closestDistance;
|
||||
}
|
||||
|
||||
void add(Points* batch) {
|
||||
|
||||
double gridSizeD = double(gridSize);
|
||||
Vector3<double> cellsD = Vector3<double>(gridSizeD, gridSizeD, gridSizeD);
|
||||
|
||||
|
||||
|
||||
|
||||
//for (int64_t i = 0; i < batch->count; i++) {
|
||||
for (int64_t iii = 0; iii < batch->points.size(); iii++) {
|
||||
|
||||
int64_t i = iii;
|
||||
if (batch->points.size() == randomIndices.size()) {
|
||||
i = randomIndices[iii];
|
||||
}
|
||||
|
||||
Point point = batch->points[i];
|
||||
double x = point.x;
|
||||
double y = point.y;
|
||||
double z = point.z;
|
||||
|
||||
double gx = cellsD.x * (x - min.x) / size.x;
|
||||
double gy = cellsD.y * (y - min.y) / size.y;
|
||||
double gz = cellsD.z * (z - min.z) / size.z;
|
||||
|
||||
gx = std::min(gx, std::nextafter(gx, min.x));
|
||||
gy = std::min(gy, std::nextafter(gy, min.y));
|
||||
gz = std::min(gz, std::nextafter(gz, min.z));
|
||||
|
||||
int32_t ux = int32_t(gx);
|
||||
int32_t uy = int32_t(gy);
|
||||
int32_t uz = int32_t(gz);
|
||||
|
||||
int32_t index = ux + gridSize * uy + gridSize * gridSize * uz;
|
||||
|
||||
// vector variant
|
||||
if (grid[index] != nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// map variant
|
||||
//if (grid.find(index) != grid.end()) {
|
||||
// continue;
|
||||
//}
|
||||
|
||||
double closestSquared = closestDistanceToNeighbor(x, y, z, ux, uy, uz);
|
||||
|
||||
if (closestSquared > spacingSquared) {
|
||||
Cell* cell = new Cell();
|
||||
cell->point = { x, y, z };
|
||||
grid[index] = cell;
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
vector<Vector3<double>> getPoints() {
|
||||
|
||||
vector<Vector3<double>> points;
|
||||
|
||||
// map variant
|
||||
//for (auto it : grid) {
|
||||
|
||||
// auto point = it.second->point;
|
||||
// points.push_back(point);
|
||||
//}
|
||||
|
||||
// vector variant
|
||||
for (Cell* cell : grid) {
|
||||
if (cell == nullptr) {
|
||||
continue;
|
||||
}
|
||||
|
||||
auto point = cell->point;
|
||||
points.push_back(point);
|
||||
}
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
};
|
||||
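The subsampler above is driven by the converter itself; as a rough orientation, a minimal usage sketch could look like the following. The bounding box, spacing and output directory are made-up values, not converter defaults.

	// Illustrative sketch only; values are arbitrary.
	#include "Points.h"
	#include "Subsampler_PoissonDisc.h"

	void subsampleExample(Points* batch) {
		Vector3<double> min = { 0.0, 0.0, 0.0 };
		Vector3<double> max = { 10.0, 10.0, 10.0 };
		double spacing = 0.5;

		Subsampler_PoissonDisc subsampler("./converted", spacing, min, max);

		// batches can be added incrementally, e.g. one chunk at a time
		subsampler.add(batch);

		// points that survived the poisson-disc test, roughly "spacing" apart
		vector<Vector3<double>> accepted = subsampler.getPoints();
	}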
105	PotreeConverter/include/TaskPool.h	Normal file
@@ -0,0 +1,105 @@

#pragma once

#include <thread>
#include <mutex>
#include <atomic>
#include <deque>
#include <vector>
#include <chrono>     // std::chrono::milliseconds
#include <functional> // std::function
#include <memory>     // std::shared_ptr

using namespace std;

// might be better off using https://github.com/progschj/ThreadPool
template<class Task>
class TaskPool {
public:
	int numThreads = 0;
	deque<shared_ptr<Task>> tasks;
	using TaskProcessorType = function<void(shared_ptr<Task>)>;
	TaskProcessorType processor;

	vector<thread> threads;

	atomic<bool> isClosed = false;

	mutex mtx_task;

	TaskPool(int numThreads, TaskProcessorType processor){
		this->numThreads = numThreads;
		this->processor = processor;

		for (int i = 0; i < numThreads; i++) {

			threads.emplace_back([this](){

				while(true){

					shared_ptr<Task> task = nullptr;

					{ // retrieve task or leave thread if done
						lock_guard<mutex> lock(mtx_task);

						bool allDone = tasks.size() == 0 && isClosed;
						bool waitingForWork = tasks.size() == 0 && !allDone;
						bool workAvailable = tasks.size() > 0;

						if (allDone) {
							break;
						} else if (workAvailable) {
							task = tasks.front();
							tasks.pop_front();
						}

					}

					if (task != nullptr) {
						this->processor(task);
					}

					std::this_thread::sleep_for(std::chrono::milliseconds(10));
				}

			});
		}

	}

	void addTask(shared_ptr<Task> t) {
		lock_guard<mutex> lock(mtx_task);

		tasks.push_back(t);
	}

	void close() {
		isClosed = true;

		for (thread& t : threads) {
			t.join();
		}
	}

	void waitTillEmpty() {

		while (true) {

			int size = 0;

			{
				lock_guard<mutex> lock(mtx_task);

				size = tasks.size();
			}

			if (size == 0) {
				return;
			} else {
				std::this_thread::sleep_for(std::chrono::milliseconds(10));
			}

		}

	}

};
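A minimal usage sketch for the TaskPool above, assuming a made-up DummyTask type and processor lambda that are not part of the converter:

	// Hypothetical example; "DummyTask" is invented for illustration.
	#include <iostream>
	#include "TaskPool.h"

	struct DummyTask {
		int id = 0;
	};

	int main() {
		TaskPool<DummyTask> pool(4, [](shared_ptr<DummyTask> task) {
			std::cout << "processing task " << task->id << std::endl;
		});

		for (int i = 0; i < 10; i++) {
			auto task = make_shared<DummyTask>();
			task->id = i;
			pool.addTask(task);
		}

		pool.close(); // joins the workers once the queue has drained

		return 0;
	}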
@@ -1,96 +1,83 @@
|
||||
|
||||
#ifndef VECTOR3_H
|
||||
#define VECTOR3_H
|
||||
|
||||
#include <math.h>
|
||||
#include <iostream>
|
||||
#include <iomanip>
|
||||
#include <sstream>
|
||||
|
||||
using std::ostream;
|
||||
#ifndef _MSC_VER
|
||||
using std::max;
|
||||
#endif
|
||||
|
||||
namespace Potree{
|
||||
|
||||
template<class T>
|
||||
class Vector3{
|
||||
|
||||
public:
|
||||
T x = 0;
|
||||
T y = 0;
|
||||
T z = 0;
|
||||
|
||||
Vector3() = default;
|
||||
|
||||
Vector3(T x, T y, T z){
|
||||
this->x = x;
|
||||
this->y = y;
|
||||
this->z = z;
|
||||
}
|
||||
|
||||
Vector3(T value){
|
||||
this->x = value;
|
||||
this->y = value;
|
||||
this->z = value;
|
||||
}
|
||||
|
||||
Vector3(const Vector3<T> &other)
|
||||
:x(other.x), y(other.y), z(other.z)
|
||||
{
|
||||
}
|
||||
|
||||
~Vector3() = default;
|
||||
|
||||
|
||||
T length(){
|
||||
return sqrt(x*x + y*y + z*z);
|
||||
}
|
||||
|
||||
T squaredLength(){
|
||||
return x*x + y*y + z*z;
|
||||
}
|
||||
|
||||
T distanceTo(Vector3<T> p) const{
|
||||
return ((*this) - p).length();
|
||||
}
|
||||
|
||||
T squaredDistanceTo(const Vector3<T> &p) const{
|
||||
return ((*this) - p).squaredLength();
|
||||
}
|
||||
|
||||
T maxValue(){
|
||||
return max(x, max(y,z));
|
||||
}
|
||||
|
||||
Vector3<T> operator-(const Vector3<T>& right) const {
|
||||
return Vector3<T>(x - right.x, y - right.y, z - right.z);
|
||||
}
|
||||
|
||||
Vector3<T> operator+(const Vector3<T>& right) const {
|
||||
return Vector3<T>(x + right.x, y + right.y, z + right.z);
|
||||
}
|
||||
|
||||
Vector3<T> operator+(const T right) const {
|
||||
return Vector3<T>(x + right, y + right, z + right);
|
||||
}
|
||||
|
||||
Vector3<T> operator/(const T &a) const{
|
||||
return Vector3<T>(x / a, y / a, z / a);
|
||||
}
|
||||
|
||||
friend ostream &operator<<( ostream &output, const Vector3<T> &value ){
|
||||
std::stringstream ss;
|
||||
ss << std::setprecision(6) << std::fixed;
|
||||
ss << "[" << value.x << ", " << value.y << ", " << value.z << "]";
|
||||
|
||||
output << ss.str();
|
||||
|
||||
return output;
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
#pragma once

#include <algorithm> // std::max, used by Vector3::max()
#include <cmath>     // std::sqrt, used by Vector3::distanceTo()
#include <limits>
#include <string>

using namespace std;

static double Infinity = std::numeric_limits<double>::infinity();

template<typename T>
|
||||
struct Vector3{
|
||||
|
||||
T x = T(0.0);
|
||||
T y = T(0.0);
|
||||
T z = T(0.0);
|
||||
|
||||
Vector3<T>() {
|
||||
|
||||
}
|
||||
|
||||
Vector3<T>(T x, T y, T z) {
|
||||
this->x = x;
|
||||
this->y = y;
|
||||
this->z = z;
|
||||
}
|
||||
|
||||
T squaredDistanceTo(const Vector3<T>& right) {
|
||||
double dx = right.x - x;
|
||||
double dy = right.y - y;
|
||||
double dz = right.z - z;
|
||||
|
||||
double dd = dx * dx + dy * dy + dz * dz;
|
||||
|
||||
return dd;
|
||||
}
|
||||
|
||||
T distanceTo(const Vector3<T>& right) {
|
||||
double dx = right.x - x;
|
||||
double dy = right.y - y;
|
||||
double dz = right.z - z;
|
||||
|
||||
double dd = dx * dx + dy * dy + dz * dz;
|
||||
double d = std::sqrt(dd);
|
||||
|
||||
return d;
|
||||
}
|
||||
|
||||
T max() {
|
||||
return std::max(std::max(x, y), z);
|
||||
}
|
||||
|
||||
string toString() {
|
||||
string str = to_string(x) + ", " + to_string(y) + ", " + to_string(z);
|
||||
|
||||
return str;
|
||||
}
|
||||
|
||||
Vector3<T> operator-(const Vector3<T>& right) const {
|
||||
return Vector3<T>(x - right.x, y - right.y, z - right.z);
|
||||
}
|
||||
|
||||
Vector3<T> operator+(const Vector3<T>& right) const {
|
||||
return Vector3<T>(x + right.x, y + right.y, z + right.z);
|
||||
}
|
||||
|
||||
Vector3<T> operator+(const double& scalar) const {
|
||||
return Vector3<T>(x + scalar, y + scalar, z + scalar);
|
||||
}
|
||||
|
||||
Vector3<T> operator/(const double& scalar) const {
|
||||
return Vector3<T>(x / scalar, y / scalar, z / scalar);
|
||||
}
|
||||
|
||||
Vector3<T> operator*(const Vector3<T>& right) const {
|
||||
return Vector3<T>(x * right.x, y * right.y, z * right.z);
|
||||
}
|
||||
|
||||
Vector3<T> operator*(const double& scalar) const {
|
||||
return Vector3<T>(x * scalar, y * scalar, z * scalar);
|
||||
}
|
||||
|
||||
};
|
||||
@@ -1,226 +0,0 @@
|
||||
#ifndef XYZPOINTREADER_H
|
||||
#define XYZPOINTREADER_H
|
||||
|
||||
#include "Point.h"
|
||||
#include "PointReader.h"
|
||||
#include "PotreeException.h"
|
||||
|
||||
#include <string>
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <regex>
|
||||
#include <vector>
|
||||
#include <sstream>
|
||||
#include <algorithm>
|
||||
|
||||
using std::getline;
|
||||
using std::ifstream;
|
||||
using std::string;
|
||||
using std::vector;
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
using std::stringstream;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
class XYZPointReader : public PointReader{
|
||||
private:
|
||||
AABB aabb;
|
||||
ifstream stream;
|
||||
long pointsRead;
|
||||
long pointCount;
|
||||
char *buffer;
|
||||
int pointByteSize;
|
||||
Point point;
|
||||
string format;
|
||||
|
||||
float colorOffset;
|
||||
float colorScale;
|
||||
|
||||
float intensityOffset;
|
||||
float intensityScale;
|
||||
|
||||
int linesSkipped;
|
||||
|
||||
public:
|
||||
XYZPointReader(string file, string format, vector<double> colorRange, vector<double> intensityRange)
|
||||
: stream(file, std::ios::in | std::ios::binary)
|
||||
{
|
||||
this->format = format;
|
||||
pointsRead = 0;
|
||||
linesSkipped = 0;
|
||||
pointCount = 0;
|
||||
colorScale = -1;
|
||||
|
||||
if(intensityRange.size() == 2){
|
||||
intensityOffset = (float)intensityRange[0];
|
||||
intensityScale = (float)intensityRange[1]-(float)intensityRange[0];
|
||||
}else if(intensityRange.size() == 1){
|
||||
intensityOffset = 0.0f;
|
||||
intensityScale = (float)intensityRange[0];
|
||||
}else{
|
||||
intensityOffset = 0.0f;
|
||||
intensityScale = 1.0f;
|
||||
}
|
||||
|
||||
if(colorRange.size() == 2){
|
||||
colorOffset = (float)colorRange[0];
|
||||
colorScale = (float)colorRange[1];
|
||||
}else if(colorRange.size() == 1){
|
||||
colorOffset = 0.0f;
|
||||
colorScale = (float)colorRange[0];
|
||||
}else if(colorRange.size() == 0){
|
||||
colorOffset = 0.0f;
|
||||
|
||||
// try to find color range by evaluating the first x points.
|
||||
float max = 0;
|
||||
int j = 0;
|
||||
string line;
|
||||
while(getline(stream, line) && j < 1000){
|
||||
trim(line);
|
||||
vector<string> tokens = split(line, { '\t', ' ', ',' });
|
||||
|
||||
if(this->format == "" && tokens.size() >= 3){
|
||||
string f(tokens.size(), 's');
|
||||
f.replace(0, 3, "xyz");
|
||||
|
||||
if(tokens.size() >= 6){
|
||||
f.replace(tokens.size() - 3, 3, "rgb");
|
||||
}
|
||||
|
||||
this->format = f;
|
||||
cout << "using format: '" << this->format << "'" << endl;
|
||||
}
|
||||
|
||||
if(tokens.size() < this->format.size()){
|
||||
continue;
|
||||
}
|
||||
|
||||
int i = 0;
|
||||
for(const auto &f : format) {
|
||||
string token = tokens[i++];
|
||||
if(f == 'r'){
|
||||
max = std::max(max, stof(token));
|
||||
}else if(f == 'g'){
|
||||
max = std::max(max, stof(token));
|
||||
}else if(f == 'b'){
|
||||
max = std::max(max, stof(token));
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
|
||||
j++;
|
||||
}
|
||||
|
||||
if(max <= 1.0f){
|
||||
colorScale = 1.0f;
|
||||
} else if(max <= 255){
|
||||
colorScale = 255.0f;
|
||||
}else if(max <= pow(2, 16) - 1){
|
||||
colorScale =(float)pow(2, 16) - 1;
|
||||
}else{
|
||||
colorScale = (float)max;
|
||||
}
|
||||
|
||||
stream.clear();
|
||||
stream.seekg(0, stream.beg);
|
||||
|
||||
}
|
||||
|
||||
// read through once to calculate aabb and number of points
|
||||
while(readNextPoint()){
|
||||
Point p = getPoint();
|
||||
aabb.update(p.position);
|
||||
pointCount++;
|
||||
}
|
||||
stream.clear();
|
||||
stream.seekg(0, stream.beg);
|
||||
}
|
||||
|
||||
bool readNextPoint(){
|
||||
double x = 0;
|
||||
double y = 0;
|
||||
double z = 0;
|
||||
float nx = 0;
|
||||
float ny = 0;
|
||||
float nz = 0;
|
||||
unsigned char r = 255;
|
||||
unsigned char g = 255;
|
||||
unsigned char b = 255;
|
||||
// unsigned char a = 255; // unused variable
|
||||
unsigned short intensity = 0;
|
||||
|
||||
string line;
|
||||
while(getline(stream, line)){
|
||||
trim(line);
|
||||
vector<string> tokens = split(line, {'\t', ' ', ','});
|
||||
if(tokens.size() != format.size()){
|
||||
//throw PotreeException("Not enough tokens for the given format");
|
||||
|
||||
if(linesSkipped == 0){
|
||||
cout << "some lines may be skipped because they do not match the given format: '" << format << "'" << endl;
|
||||
}
|
||||
|
||||
linesSkipped++;
|
||||
continue;
|
||||
}
|
||||
|
||||
int i = 0;
|
||||
for(const auto &f : format) {
|
||||
string token = tokens[i++];
|
||||
if(f == 'x'){
|
||||
x = stod(token);
|
||||
}else if(f == 'y'){
|
||||
y = stod(token);
|
||||
}else if(f == 'z'){
|
||||
z = stod(token);
|
||||
}else if(f == 'r'){
|
||||
r = (unsigned char)(255.0f * (stof(token) - colorOffset) / colorScale);
|
||||
}else if(f == 'g'){
|
||||
g = (unsigned char)(255.0f * (stof(token) - colorOffset) / colorScale);
|
||||
}else if(f == 'b'){
|
||||
b = (unsigned char)(255.0f * (stof(token) - colorOffset) / colorScale);
|
||||
}else if(f == 'i'){
|
||||
intensity = (unsigned short)( 65535 * (stof(token) - intensityOffset) / intensityScale);
|
||||
}else if(f == 's'){
|
||||
// skip
|
||||
}else if(f == 'X'){
|
||||
nx = stof(token);
|
||||
}else if(f == 'Y'){
|
||||
ny = stof(token);
|
||||
}else if(f == 'Z'){
|
||||
nz = stof(token);
|
||||
}
|
||||
}
|
||||
|
||||
point = Point(x,y,z,r,g,b);
|
||||
point.normal.x = nx;
|
||||
point.normal.y = ny;
|
||||
point.normal.z = nz;
|
||||
point.intensity = intensity;
|
||||
pointsRead++;
|
||||
return true;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
Point getPoint(){
|
||||
return point;
|
||||
}
|
||||
|
||||
AABB getAABB(){
|
||||
return aabb;
|
||||
}
|
||||
long long numPoints(){
|
||||
return pointCount;
|
||||
}
|
||||
void close(){
|
||||
stream.close();
|
||||
}
|
||||
};
|
||||
|
||||
}
|
||||
|
||||
#endif
|
||||
@@ -1,27 +0,0 @@

#ifndef DEFINITIONS_H
#define DEFINITIONS_H

namespace Potree{

	enum class OutputFormat{
		BINARY,
		LAS,
		LAZ
	};

	enum class StoreOption{
		ABORT_IF_EXISTS,
		OVERWRITE,
		INCREMENTAL
	};

	enum class ConversionQuality{
		FAST,
		DEFAULT,
		NICE
	};

}

#endif
@@ -1,129 +1,33 @@
|
||||
|
||||
#ifndef STUFF_H
|
||||
#define STUFF_H
|
||||
|
||||
#include <vector>
|
||||
#include <map>
|
||||
#include <iostream>
|
||||
#include <math.h>
|
||||
#include <string>
|
||||
#include <fstream>
|
||||
#include <cctype>
|
||||
|
||||
//#include <unistd.h>
|
||||
#include <sys/stat.h>
|
||||
#include <sys/types.h>
|
||||
|
||||
#include <experimental/filesystem>
|
||||
|
||||
#include "Vector3.h"
|
||||
#include "AABB.h"
|
||||
#include "Point.h"
|
||||
#include "SparseGrid.h"
|
||||
#include "GridCell.h"
|
||||
|
||||
using std::ifstream;
|
||||
using std::ofstream;
|
||||
using std::ios;
|
||||
using std::string;
|
||||
using std::min;
|
||||
using std::max;
|
||||
using std::ostream;
|
||||
using std::cout;
|
||||
using std::cin;
|
||||
using std::endl;
|
||||
using std::vector;
|
||||
using std::binary_function;
|
||||
using std::map;
|
||||
|
||||
namespace fs = std::experimental::filesystem;
|
||||
|
||||
namespace Potree {
|
||||
|
||||
AABB readAABB(string fIn, int numPoints);
|
||||
|
||||
AABB readAABB(string fIn);
|
||||
|
||||
/**
|
||||
* y
|
||||
* |-z
|
||||
* |/
|
||||
* O----x
|
||||
*
|
||||
* 3----7
|
||||
* /| /|
|
||||
* 2----6 |
|
||||
* | 1--|-5
|
||||
* |/ |/
|
||||
* 0----4
|
||||
*
|
||||
*/
|
||||
AABB childAABB(const AABB &aabb, const int &index);
|
||||
|
||||
|
||||
/**
|
||||
* y
|
||||
* |-z
|
||||
* |/
|
||||
* O----x
|
||||
*
|
||||
* 3----7
|
||||
* /| /|
|
||||
* 2----6 |
|
||||
* | 1--|-5
|
||||
* |/ |/
|
||||
* 0----4
|
||||
*
|
||||
*/
|
||||
int nodeIndex(const AABB &aabb, const Point &point);
|
||||
|
||||
|
||||
/**
|
||||
* from http://stackoverflow.com/questions/5840148/how-can-i-get-a-files-size-in-c
|
||||
*/
|
||||
long filesize(string filename);
|
||||
|
||||
|
||||
/**
|
||||
* from http://stackoverflow.com/questions/874134/find-if-string-endswith-another-string-in-c
|
||||
*/
|
||||
bool endsWith(std::string const &fullString, std::string const &ending);
|
||||
|
||||
/**
|
||||
* see http://stackoverflow.com/questions/735204/convert-a-string-in-c-to-upper-case
|
||||
*/
|
||||
string toUpper(string str);
|
||||
|
||||
bool copyDir(fs::path source, fs::path destination);
|
||||
|
||||
float psign(float value);
|
||||
|
||||
// see https://stackoverflow.com/questions/23943728/case-insensitive-standard-string-comparison-in-c
|
||||
bool icompare_pred(unsigned char a, unsigned char b);
|
||||
|
||||
// see https://stackoverflow.com/questions/23943728/case-insensitive-standard-string-comparison-in-c
|
||||
bool icompare(string const& a, string const& b);
|
||||
|
||||
bool endsWith(const string &str, const string &suffix);
|
||||
|
||||
bool iEndsWith(const string &str, const string &suffix);
|
||||
|
||||
vector<string> split(string str, vector<char> delimiters);
|
||||
|
||||
vector<string> split(string str, char delimiter);
|
||||
|
||||
// see https://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
|
||||
string ltrim(string s);
|
||||
|
||||
// see https://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
|
||||
string rtrim(string s);
|
||||
|
||||
// see https://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
|
||||
string trim(string s);
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
#endif
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <string>
|
||||
#include <chrono>
|
||||
#include <cstdarg>
|
||||
#include <sstream>
|
||||
|
||||
using std::string;
|
||||
using std::stringstream;
|
||||
|
||||
string stringReplace(string str, string search, string replacement);
|
||||
|
||||
// see https://stackoverflow.com/questions/23943728/case-insensitive-standard-string-comparison-in-c
|
||||
bool icompare_pred(unsigned char a, unsigned char b);
|
||||
|
||||
// see https://stackoverflow.com/questions/23943728/case-insensitive-standard-string-comparison-in-c
|
||||
bool icompare(std::string const& a, std::string const& b);
|
||||
|
||||
bool endsWith(const string& str, const string& suffix);
|
||||
|
||||
bool iEndsWith(const std::string& str, const std::string& suffix);
|
||||
|
||||
double now();
|
||||
|
||||
void printElapsedTime(string label, double startTime);
|
||||
|
||||
void printThreadsafe(string str);
|
||||
void printThreadsafe(string str1, string str2);
|
||||
void printThreadsafe(string str1, string str2, string str3);
|
||||
void printThreadsafe(string str1, string str2, string str3, string str4);
|
||||
|
||||
|
||||
|
||||
98	PotreeConverter/lib/ThreadPool/ThreadPool.h	Normal file
@@ -0,0 +1,98 @@
|
||||
|
||||
// from https://github.com/progschj/ThreadPool/blob/master/ThreadPool.h
|
||||
|
||||
#pragma once
|
||||
|
||||
#include <vector>
|
||||
#include <queue>
|
||||
#include <memory>
|
||||
#include <thread>
|
||||
#include <mutex>
|
||||
#include <condition_variable>
|
||||
#include <future>
|
||||
#include <functional>
|
||||
#include <stdexcept>
|
||||
|
||||
class ThreadPool {
|
||||
public:
|
||||
ThreadPool(size_t);
|
||||
template<class F, class... Args>
|
||||
auto enqueue(F&& f, Args&& ... args)
|
||||
->std::future<typename std::result_of<F(Args...)>::type>;
|
||||
~ThreadPool();
|
||||
private:
|
||||
// need to keep track of threads so we can join them
|
||||
std::vector< std::thread > workers;
|
||||
// the task queue
|
||||
std::queue< std::function<void()> > tasks;
|
||||
|
||||
// synchronization
|
||||
std::mutex queue_mutex;
|
||||
std::condition_variable condition;
|
||||
bool stop;
|
||||
};
|
||||
|
||||
// the constructor just launches some amount of workers
|
||||
inline ThreadPool::ThreadPool(size_t threads)
|
||||
: stop(false)
|
||||
{
|
||||
for (size_t i = 0; i < threads; ++i)
|
||||
workers.emplace_back(
|
||||
[this]
|
||||
{
|
||||
for (;;)
|
||||
{
|
||||
std::function<void()> task;
|
||||
|
||||
{
|
||||
std::unique_lock<std::mutex> lock(this->queue_mutex);
|
||||
this->condition.wait(lock,
|
||||
[this] { return this->stop || !this->tasks.empty(); });
|
||||
if (this->stop && this->tasks.empty())
|
||||
return;
|
||||
task = std::move(this->tasks.front());
|
||||
this->tasks.pop();
|
||||
}
|
||||
|
||||
task();
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
// add new work item to the pool
|
||||
template<class F, class... Args>
|
||||
auto ThreadPool::enqueue(F&& f, Args&& ... args)
|
||||
-> std::future<typename std::result_of<F(Args...)>::type>
|
||||
{
|
||||
using return_type = typename std::result_of<F(Args...)>::type;
|
||||
|
||||
auto task = std::make_shared< std::packaged_task<return_type()> >(
|
||||
std::bind(std::forward<F>(f), std::forward<Args>(args)...)
|
||||
);
|
||||
|
||||
std::future<return_type> res = task->get_future();
|
||||
{
|
||||
std::unique_lock<std::mutex> lock(queue_mutex);
|
||||
|
||||
// don't allow enqueueing after stopping the pool
|
||||
if (stop)
|
||||
throw std::runtime_error("enqueue on stopped ThreadPool");
|
||||
|
||||
tasks.emplace([task]() { (*task)(); });
|
||||
}
|
||||
condition.notify_one();
|
||||
return res;
|
||||
}
|
||||
|
||||
// the destructor joins all threads
|
||||
inline ThreadPool::~ThreadPool()
|
||||
{
|
||||
{
|
||||
std::unique_lock<std::mutex> lock(queue_mutex);
|
||||
stop = true;
|
||||
}
|
||||
condition.notify_all();
|
||||
for (std::thread& worker : workers)
|
||||
worker.join();
|
||||
}
|
||||
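For reference, the upstream ThreadPool is typically used through enqueue(), which returns a std::future for the task's result; a small sketch based on that documented pattern:

	#include <iostream>
	#include "ThreadPool.h"

	int main() {
		ThreadPool pool(4);

		// enqueue returns a std::future for the callable's return value
		auto result = pool.enqueue([](int a, int b) { return a + b; }, 2, 3);

		std::cout << result.get() << std::endl; // prints 5

		return 0; // the pool's destructor joins the worker threads
	}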
@@ -1,300 +0,0 @@
|
||||
|
||||
#include <string>
|
||||
#include <vector>
|
||||
#include <unordered_map>
|
||||
#include <iostream>
|
||||
#include <algorithm>
|
||||
|
||||
using std::unordered_map;
|
||||
using std::vector;
|
||||
using std::string;
|
||||
using std::cout;
|
||||
using std::cerr;
|
||||
using std::endl;
|
||||
|
||||
class AValue{
|
||||
public:
|
||||
vector<string> values;
|
||||
|
||||
AValue(vector<string> values) {
|
||||
this->values = values;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
T as(T alternative) {
|
||||
return !values.empty() ? T(values[0]) : alternative;
|
||||
}
|
||||
|
||||
template<typename T>
|
||||
T as() {
|
||||
return !values.empty() ? T(values[0]) : T();
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
template<> vector<string> AValue::as<vector<string>>(vector<string> alternative) {
|
||||
return !values.empty() ? values : alternative;
|
||||
}
|
||||
|
||||
template<> vector<string> AValue::as<vector<string>>() {
|
||||
return !values.empty() ? values : vector<string>{};
|
||||
}
|
||||
|
||||
template<> vector<double> AValue::as<vector<double>>(vector<double> alternative) {
|
||||
vector<double> res;
|
||||
for (auto &v : values) {
|
||||
res.push_back(std::stod(v));
|
||||
}
|
||||
return !res.empty() ? res : alternative;
|
||||
}
|
||||
|
||||
template<> vector<double> AValue::as<vector<double>>() {
|
||||
return as<vector<double>>({});
|
||||
}
|
||||
|
||||
template<> double AValue::as<double>(double alternative) {
|
||||
return !values.empty() ? std::stod(values[0]) : alternative;
|
||||
}
|
||||
|
||||
template<> double AValue::as<double>() {
|
||||
return !values.empty() ? std::stod(values[0]) : 0.0;
|
||||
}
|
||||
|
||||
template<> int AValue::as<int>(int alternative) {
|
||||
return !values.empty() ? std::stoi(values[0]) : alternative;
|
||||
}
|
||||
|
||||
template<> int AValue::as<int>() {
|
||||
return !values.empty() ? std::stoi(values[0]) : 0;
|
||||
}
|
||||
|
||||
class Argument {
|
||||
private:
|
||||
|
||||
vector<string> split(string str, vector<char> delimiters) {
|
||||
|
||||
vector<string> tokens;
|
||||
|
||||
auto isDelimiter = [&delimiters](char ch) {
|
||||
for (auto &delimiter : delimiters) {
|
||||
if (ch == delimiter) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
};
|
||||
|
||||
int start = 0;
|
||||
for (int i = 0; i < str.size(); i++) {
|
||||
if (isDelimiter(str[i])) {
|
||||
if (start < i) {
|
||||
auto token = str.substr(start, i - start);
|
||||
tokens.push_back(token);
|
||||
} else {
|
||||
tokens.push_back("");
|
||||
}
|
||||
|
||||
start = i + 1;
|
||||
}
|
||||
}
|
||||
|
||||
if (start < str.size()) {
|
||||
tokens.push_back(str.substr(start));
|
||||
} else if (isDelimiter(str[str.size() - 1])) {
|
||||
tokens.push_back("");
|
||||
}
|
||||
|
||||
return tokens;
|
||||
}
|
||||
|
||||
public:
|
||||
string id = "";
|
||||
string description = "";
|
||||
|
||||
Argument(string id, string description) {
|
||||
this->id = id;
|
||||
this->description = description;
|
||||
}
|
||||
|
||||
bool is(string name) {
|
||||
auto tokens = split(id, { ',' });
|
||||
|
||||
for (auto token : tokens) {
|
||||
if (token == name) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
string fullname() {
|
||||
auto tokens = split(id, { ',' });
|
||||
|
||||
for (auto token : tokens) {
|
||||
if (token.size() > 1) {
|
||||
return token;
|
||||
}
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
|
||||
string shortname() {
|
||||
auto tokens = split(id, { ',' });
|
||||
|
||||
for (auto token : tokens) {
|
||||
if (token.size() == 1) {
|
||||
return token;
|
||||
}
|
||||
}
|
||||
|
||||
return "";
|
||||
}
|
||||
};
|
||||
|
||||
class Arguments {
|
||||
|
||||
private:
|
||||
bool startsWith(const string &str, const string &prefix) {
|
||||
if (str.size() < prefix.size()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
return str.substr(0, prefix.size()).compare(prefix) == 0;
|
||||
}
|
||||
|
||||
public:
|
||||
|
||||
int argc = 0;
|
||||
char **argv = nullptr;
|
||||
|
||||
bool ignoreFirst = true;
|
||||
|
||||
vector<string> tokens;
|
||||
vector<Argument> argdefs;
|
||||
unordered_map<string, vector<string>> map;
|
||||
|
||||
Arguments(int argc, char **argv, bool ignoreFirst = true) {
|
||||
this->argc = argc;
|
||||
this->argv = argv;
|
||||
this->ignoreFirst = ignoreFirst;
|
||||
|
||||
for (int i = ignoreFirst ? 1 : 0; i < argc; i++) {
|
||||
string token = string(argv[i]);
|
||||
tokens.push_back(token);
|
||||
}
|
||||
|
||||
string currentKey = "";
|
||||
map.insert({ currentKey, {} });
|
||||
for (string token : tokens) {
|
||||
if(startsWith(token, "---")) {
|
||||
cerr << "Invalid argument: " << token << endl;
|
||||
exit(1);
|
||||
} else if (startsWith(token, "--")) {
|
||||
currentKey = token.substr(2);
|
||||
map.insert({ currentKey,{} });
|
||||
} else if (startsWith(token, "-")) {
|
||||
currentKey = token.substr(1);
|
||||
map.insert({ currentKey,{} });
|
||||
} else {
|
||||
map[currentKey].push_back(token);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void addArgument(string id, string description) {
|
||||
Argument arg(id, description);
|
||||
|
||||
argdefs.push_back(arg);
|
||||
}
|
||||
|
||||
Argument *getArgument(string name) {
|
||||
for (Argument &arg : argdefs) {
|
||||
if (arg.is(name)) {
|
||||
return &arg;
|
||||
}
|
||||
}
|
||||
|
||||
return nullptr;
|
||||
}
|
||||
|
||||
vector<string> keys() {
|
||||
vector<string> keys;
|
||||
for (auto entry : map) {
|
||||
keys.push_back(entry.first);
|
||||
}
|
||||
|
||||
return keys;
|
||||
}
|
||||
|
||||
string usage() {
|
||||
std::stringstream ss;
|
||||
|
||||
vector<string> keys;
|
||||
|
||||
for (auto argdef : argdefs) {
|
||||
stringstream ssKey;
|
||||
if (!argdef.shortname().empty()) {
|
||||
ssKey << " -" << argdef.shortname();
|
||||
|
||||
if (!argdef.fullname().empty()) {
|
||||
ssKey << " [ --" << argdef.fullname() << " ]";
|
||||
}
|
||||
|
||||
} else if(!argdef.fullname().empty()) {
|
||||
ssKey << " --" << argdef.fullname();
|
||||
}
|
||||
|
||||
keys.push_back(ssKey.str());
|
||||
}
|
||||
|
||||
int keyColumnLength = 0;
|
||||
for (auto key : keys) {
|
||||
keyColumnLength = std::max(int(key.size()), keyColumnLength);
|
||||
}
|
||||
keyColumnLength = keyColumnLength + 2;
|
||||
|
||||
for (int i = 0; i < argdefs.size(); i++) {
|
||||
keys[i].resize(keyColumnLength, ' ');
|
||||
ss << keys[i] << argdefs[i].description << endl;
|
||||
}
|
||||
|
||||
|
||||
return ss.str();
|
||||
}
|
||||
|
||||
bool has(string name) {
|
||||
Argument *arg = getArgument(name);
|
||||
|
||||
if (arg == nullptr) {
|
||||
return false;
|
||||
}
|
||||
|
||||
for (auto entry : map) {
|
||||
if (arg->is(entry.first)) {
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
AValue get(string name) {
|
||||
Argument *arg = getArgument(name);
|
||||
|
||||
for (auto entry : map) {
|
||||
if (arg->is(entry.first)) {
|
||||
return AValue(entry.second);
|
||||
}
|
||||
}
|
||||
|
||||
return AValue({});
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
};
|
||||
|
||||
22875	PotreeConverter/lib/json/json.hpp	Normal file
File diff suppressed because it is too large
24	PotreeConverter/lib/json/readme.txt	Normal file
@@ -0,0 +1,24 @@
from: https://github.com/nlohmann/json/releases

LICENSE:
MIT License

Copyright (c) 2013-2019 Niels Lohmann

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
@@ -1,210 +0,0 @@
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
||||
#include <experimental/filesystem>
|
||||
|
||||
#include "BINPointReader.hpp"
|
||||
#include "stuff.h"
|
||||
|
||||
namespace fs = std::experimental::filesystem;
|
||||
|
||||
using std::ifstream;
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
using std::vector;
|
||||
using std::ios;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
BINPointReader::BINPointReader(string path, AABB aabb, double scale, PointAttributes pointAttributes){
|
||||
this->path = path;
|
||||
this->aabb = aabb;
|
||||
this->scale = scale;
|
||||
this->attributes = pointAttributes;
|
||||
|
||||
if(fs::is_directory(path)){
|
||||
// if directory is specified, find all las and laz files inside directory
|
||||
|
||||
for(fs::directory_iterator it(path); it != fs::directory_iterator(); it++){
|
||||
fs::path filepath = it->path();
|
||||
if(fs::is_regular_file(filepath)){
|
||||
files.push_back(filepath.string());
|
||||
}
|
||||
}
|
||||
}else{
|
||||
files.push_back(path);
|
||||
}
|
||||
|
||||
currentFile = files.begin();
|
||||
reader = new ifstream(*currentFile, ios::in | ios::binary);
|
||||
}
|
||||
|
||||
BINPointReader::~BINPointReader(){
|
||||
close();
|
||||
}
|
||||
|
||||
void BINPointReader::close(){
|
||||
if(reader != NULL){
|
||||
reader->close();
|
||||
delete reader;
|
||||
reader = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
long long BINPointReader::numPoints(){
|
||||
//TODO
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
bool BINPointReader::readNextPoint(){
|
||||
bool hasPoints = reader->good();
|
||||
|
||||
if(!hasPoints){
|
||||
// try to open next file, if available
|
||||
reader->close();
|
||||
delete reader;
|
||||
reader = NULL;
|
||||
currentFile++;
|
||||
|
||||
if(currentFile != files.end()){
|
||||
reader = new ifstream(*currentFile, ios::in | ios::binary);
|
||||
hasPoints = reader->good();
|
||||
}
|
||||
}
|
||||
|
||||
if(hasPoints){
|
||||
point = Point();
|
||||
char* buffer = new char[attributes.byteSize];
|
||||
reader->read(buffer, attributes.byteSize);
|
||||
|
||||
if(!reader->good()){
|
||||
delete [] buffer;
|
||||
return false;
|
||||
}
|
||||
|
||||
int offset = 0;
|
||||
for(int i = 0; i < attributes.size(); i++){
|
||||
const PointAttribute attribute = attributes[i];
|
||||
if(attribute == PointAttribute::POSITION_CARTESIAN){
|
||||
int* iBuffer = reinterpret_cast<int*>(buffer+offset);
|
||||
point.position.x = (iBuffer[0] * scale) + aabb.min.x;
|
||||
point.position.y = (iBuffer[1] * scale) + aabb.min.y;
|
||||
point.position.z = (iBuffer[2] * scale) + aabb.min.z;
|
||||
}else if(attribute == PointAttribute::COLOR_PACKED){
|
||||
unsigned char* ucBuffer = reinterpret_cast<unsigned char*>(buffer+offset);
|
||||
point.color.x = ucBuffer[0];
|
||||
point.color.y = ucBuffer[1];
|
||||
point.color.z = ucBuffer[2];
|
||||
}else if(attribute == PointAttribute::INTENSITY){
|
||||
unsigned short* usBuffer = reinterpret_cast<unsigned short*>(buffer+offset);
|
||||
point.intensity = usBuffer[0];
|
||||
}else if(attribute == PointAttribute::CLASSIFICATION){
|
||||
unsigned char* ucBuffer = reinterpret_cast<unsigned char*>(buffer+offset);
|
||||
point.classification = ucBuffer[0];
|
||||
} else if (attribute == PointAttribute::RETURN_NUMBER) {
|
||||
unsigned char* ucBuffer = reinterpret_cast<unsigned char*>(buffer + offset);
|
||||
point.returnNumber = ucBuffer[0];
|
||||
} else if (attribute == PointAttribute::NUMBER_OF_RETURNS) {
|
||||
unsigned char* ucBuffer = reinterpret_cast<unsigned char*>(buffer + offset);
|
||||
point.numberOfReturns = ucBuffer[0];
|
||||
} else if (attribute == PointAttribute::SOURCE_ID) {
|
||||
unsigned short* usBuffer = reinterpret_cast<unsigned short*>(buffer + offset);
|
||||
point.pointSourceID = usBuffer[0];
|
||||
} else if (attribute == PointAttribute::GPS_TIME) {
|
||||
double* dBuffer = reinterpret_cast<double*>(buffer + offset);
|
||||
point.gpsTime = dBuffer[0];
|
||||
} else if(attribute == PointAttribute::NORMAL_SPHEREMAPPED){
|
||||
// see http://aras-p.info/texts/CompactNormalStorage.html
|
||||
unsigned char* ucBuffer = reinterpret_cast<unsigned char*>(buffer+offset);
|
||||
unsigned char bx = ucBuffer[0];
|
||||
unsigned char by = ucBuffer[1];
|
||||
|
||||
float ex = (float)bx / 255.0f;
|
||||
float ey = (float)by / 255.0f;
|
||||
|
||||
float nx = ex * 2 - 1;
|
||||
float ny = ey * 2 - 1;
|
||||
float nz = 1;
|
||||
float nw = -1;
|
||||
|
||||
float l = (nx * (-nx) + ny * (-ny) + nz * (-nw));
|
||||
nz = l;
|
||||
nx = nx * sqrt(l);
|
||||
ny = ny * sqrt(l);
|
||||
|
||||
nx = nx * 2;
|
||||
ny = ny * 2;
|
||||
nz = nz * 2 -1;
|
||||
|
||||
point.normal.x = nx;
|
||||
point.normal.y = ny;
|
||||
point.normal.z = nz;
|
||||
|
||||
}else if(attribute == PointAttribute::NORMAL_OCT16){
|
||||
unsigned char* ucBuffer = reinterpret_cast<unsigned char*>(buffer+offset);
|
||||
unsigned char bx = ucBuffer[0];
|
||||
unsigned char by = ucBuffer[1];
|
||||
|
||||
float u = (float)((bx / 255.0) * 2.0 - 1.0);
|
||||
float v = (float)((by / 255.0) * 2.0 - 1.0);
|
||||
|
||||
float x = 0.0f;
|
||||
float y = 0.0f;
|
||||
float z = 1.0f - abs(u) - abs(v);
|
||||
|
||||
if(z >= 0){
|
||||
x = u;
|
||||
y = v;
|
||||
}else{
|
||||
x = float(-( v / psign(v) - 1.0 ) / psign(u));
|
||||
y = float(-( u / psign(u) - 1.0 ) / psign(v));
|
||||
}
|
||||
|
||||
float length = sqrt(x*x + y*y + z*z);
|
||||
x = x / length;
|
||||
y = y / length;
|
||||
z = z / length;
|
||||
|
||||
point.normal.x = x;
|
||||
point.normal.y = y;
|
||||
point.normal.z = z;
|
||||
}else if(attribute == PointAttribute::NORMAL){
|
||||
float* fBuffer = reinterpret_cast<float*>(buffer+offset);
|
||||
point.normal.x = fBuffer[0];
|
||||
point.normal.y = fBuffer[1];
|
||||
point.normal.z = fBuffer[2];
|
||||
} else {
|
||||
cout << "ERROR: attribute reader not implemented: " << attribute.name << endl;
|
||||
exit(1);
|
||||
}
|
||||
|
||||
offset += attribute.byteSize;
|
||||
}
|
||||
|
||||
delete [] buffer;
|
||||
}
|
||||
|
||||
return hasPoints;
|
||||
}
|
||||
|
||||
Point BINPointReader::getPoint(){
|
||||
return point;
|
||||
}
|
||||
|
||||
AABB BINPointReader::getAABB(){
|
||||
AABB aabb;
|
||||
//TODO
|
||||
|
||||
return aabb;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
|
||||
228	PotreeConverter/src/ChunkProcessor.cpp	Normal file
@@ -0,0 +1,228 @@
|
||||
|
||||
#include <unordered_map>
|
||||
#include <fstream>
|
||||
#include <iomanip>
|
||||
#include <iterator>
|
||||
#include <stack>
|
||||
|
||||
#include "ChunkProcessor.h"
|
||||
#include "LASWriter.hpp"
|
||||
|
||||
using namespace std;
|
||||
|
||||
uint64_t toGridIndex(Chunk* chunk, Point& point, uint64_t gridSize) {
|
||||
|
||||
double x = point.x;
|
||||
double y = point.y;
|
||||
double z = point.z;
|
||||
|
||||
double gridSizeD = double(gridSize);
|
||||
Vector3<double>& min = chunk->min;
|
||||
Vector3<double> size = chunk->max - chunk->min;
|
||||
|
||||
uint64_t ux = int32_t(gridSizeD * (x - min.x) / size.x);
|
||||
uint64_t uy = int32_t(gridSizeD * (y - min.y) / size.y);
|
||||
uint64_t uz = int32_t(gridSizeD * (z - min.z) / size.z);
|
||||
|
||||
ux = std::min(ux, gridSize - 1);
|
||||
uy = std::min(uy, gridSize - 1);
|
||||
uz = std::min(uz, gridSize - 1);
|
||||
|
||||
uint64_t index = ux + gridSize * uy + gridSize * gridSize * uz;
|
||||
|
||||
return index;
|
||||
}
|
||||
|
||||
|
||||
struct BoundingBox {
|
||||
Vector3<double> min;
|
||||
Vector3<double> max;
|
||||
};
|
||||
|
||||
BoundingBox childBoundingBoxOf(BoundingBox box, int index) {
|
||||
Vector3<double> center = (box.max + box.min) / 2.0;
|
||||
BoundingBox childBox;
|
||||
|
||||
if ((index & 0b100) == 0) {
|
||||
childBox.min.x = box.min.x;
|
||||
childBox.max.x = center.x;
|
||||
} else {
|
||||
childBox.min.x = center.x;
|
||||
childBox.max.x = box.max.x;
|
||||
}
|
||||
|
||||
if ((index & 0b010) == 0) {
|
||||
childBox.min.y = box.min.y;
|
||||
childBox.max.y = center.y;
|
||||
} else {
|
||||
childBox.min.y = center.y;
|
||||
childBox.max.y = box.max.y;
|
||||
}
|
||||
|
||||
if ((index & 0b01) == 0) {
|
||||
childBox.min.z = box.min.z;
|
||||
childBox.max.z = center.z;
|
||||
} else {
|
||||
childBox.min.z = center.z;
|
||||
childBox.max.z = box.max.z;
|
||||
}
|
||||
|
||||
return childBox;
|
||||
}
|
||||
|
||||
vector<shared_ptr<Chunk>> getListOfChunks(Metadata& metadata) {
|
||||
string chunkDirectory = metadata.targetDirectory + "/chunks";
|
||||
|
||||
auto toID = [](string filename) -> string {
|
||||
string strID = stringReplace(filename, "chunk_", "");
|
||||
strID = stringReplace(strID, ".bin", "");
|
||||
|
||||
return strID;
|
||||
};
|
||||
|
||||
vector<shared_ptr<Chunk>> chunksToLoad;
|
||||
for (const auto& entry : fs::directory_iterator(chunkDirectory)) {
|
||||
string filename = entry.path().filename().string();
|
||||
string chunkID = toID(filename);
|
||||
|
||||
if (!iEndsWith(filename, ".bin")) {
|
||||
continue;
|
||||
}
|
||||
|
||||
shared_ptr<Chunk> chunk = make_shared<Chunk>();
|
||||
chunk->file = entry.path().string();
|
||||
chunk->id = chunkID;
|
||||
|
||||
Vector3<double> min = metadata.min;
|
||||
Vector3<double> max = metadata.max;
|
||||
BoundingBox box = { min, max };
|
||||
|
||||
for (int i = 1; i < chunkID.size(); i++) {
|
||||
int index = chunkID[i] - '0'; // this feels so wrong...
|
||||
|
||||
box = childBoundingBoxOf(box, index);
|
||||
}
|
||||
|
||||
chunk->min = box.min;
|
||||
chunk->max = box.max;
|
||||
|
||||
chunksToLoad.push_back(chunk);
|
||||
}
|
||||
|
||||
return chunksToLoad;
|
||||
}
|
||||
|
||||
shared_ptr<Points> loadChunk(shared_ptr<Chunk> chunk, Attributes attributes) {
|
||||
auto filesize = fs::file_size(chunk->file);
|
||||
uint64_t bytesPerPoint = 28;
|
||||
int numPoints = filesize / bytesPerPoint;
|
||||
|
||||
uint64_t attributeBufferSize = numPoints * attributes.byteSize;
|
||||
|
||||
shared_ptr<Points> points = make_shared<Points>();
|
||||
points->attributes = attributes;
|
||||
points->attributeBuffer = make_shared<Buffer>(attributeBufferSize);
|
||||
|
||||
int attributesByteSize = 4;
|
||||
|
||||
auto file = fstream(chunk->file, std::ios::in | std::ios::binary);
|
||||
|
||||
|
||||
ifstream inputFile("shorts.txt", std::ios::binary);
|
||||
|
||||
int bufferSize = numPoints * bytesPerPoint;
|
||||
void* buffer = malloc(bufferSize);
|
||||
uint8_t* bufferU8 = reinterpret_cast<uint8_t*>(buffer);
|
||||
|
||||
file.read(reinterpret_cast<char*>(buffer), bufferSize);
|
||||
|
||||
for (uint64_t i = 0; i < numPoints; i++) {
|
||||
|
||||
uint64_t pointOffset = i * bytesPerPoint;
|
||||
double* coordinates = reinterpret_cast<double*>(bufferU8 + pointOffset);
|
||||
|
||||
Point point;
|
||||
point.x = coordinates[0];
|
||||
point.y = coordinates[1];
|
||||
point.z = coordinates[2];
|
||||
|
||||
uint8_t* attSrc = bufferU8 + (bytesPerPoint * i + 24);
|
||||
uint8_t* attDest = points->attributeBuffer->dataU8 + (4 * i);
|
||||
memcpy(attDest, attSrc, attributesByteSize);
|
||||
|
||||
point.index = i;
|
||||
|
||||
points->points.push_back(point);
|
||||
}
|
||||
|
||||
free(buffer);
|
||||
|
||||
file.close();
|
||||
|
||||
//chunk->points = points;
|
||||
|
||||
return points;
|
||||
}
|
||||
|
||||
|
||||
|
||||
vector<Points*> split(Chunk* chunk, Points* input, int gridSize) {
|
||||
struct Cell {
|
||||
|
||||
Points* points;
|
||||
|
||||
int32_t ux = 0;
|
||||
int32_t uy = 0;
|
||||
int32_t uz = 0;
|
||||
};
|
||||
|
||||
auto min = chunk->min;
|
||||
auto max = chunk->max;
|
||||
auto size = max - min;
|
||||
double gridSizeD = double(gridSize);
|
||||
vector<Cell*> grid(gridSize * gridSize * gridSize, nullptr);
|
||||
|
||||
for (int i = 0; i < input->points.size(); i++) {
|
||||
|
||||
Point point = input->points[i];
|
||||
|
||||
uint64_t index = toGridIndex(chunk, point, gridSize);
|
||||
|
||||
Cell* cell = grid[index];
|
||||
|
||||
if (cell == nullptr) {
|
||||
cell = new Cell();
|
||||
cell->points = new Points();
|
||||
cell->points->attributes = input->attributes;
|
||||
grid[index] = cell;
|
||||
}
|
||||
|
||||
cell->points->points.push_back(point);
|
||||
}
|
||||
|
||||
vector<Points*> pointCells;
|
||||
for (Cell* cell : grid) {
|
||||
if (cell != nullptr) {
|
||||
pointCells.push_back(cell->points);
|
||||
}
|
||||
}
|
||||
|
||||
return pointCells;
|
||||
}
|
||||
|
||||
|
||||
shared_ptr<Node> processChunk(shared_ptr<Chunk> chunk, shared_ptr<Points> points, double spacing) {
|
||||
|
||||
shared_ptr<Node> chunkRoot = make_shared<Node>(chunk->min, chunk->max, spacing);
|
||||
chunkRoot->name = chunk->id;
|
||||
|
||||
double tStart = now();
|
||||
|
||||
for (Point& point : points->points) {
|
||||
chunkRoot->add(point);
|
||||
}
|
||||
|
||||
printElapsedTime("processing", tStart);
|
||||
|
||||
return chunkRoot;
|
||||
}
|
||||
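The octant convention used by childBoundingBoxOf above (bit 2 selects the x half, bit 1 the y half, bit 0 the z half, with a set bit meaning the upper half) can be expressed as a small helper. This is an illustrative sketch, not part of ChunkProcessor.cpp:

	// Sketch: maps a point to the child octant index that childBoundingBoxOf() expects.
	int childIndexOf(const BoundingBox& box, const Point& point) {
		Vector3<double> center = (box.max + box.min) / 2.0;

		int index = 0;
		if (point.x >= center.x) index |= 0b100;
		if (point.y >= center.y) index |= 0b010;
		if (point.z >= center.z) index |= 0b001;

		return index;
	}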
@@ -1,60 +0,0 @@
|
||||
|
||||
#include "GridCell.h"
|
||||
#include "SparseGrid.h"
|
||||
|
||||
#include <iostream>
|
||||
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
|
||||
using std::min;
|
||||
using std::max;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
#define MAX_FLOAT std::numeric_limits<float>::max()
|
||||
|
||||
GridCell::GridCell(){
|
||||
|
||||
}
|
||||
|
||||
GridCell::GridCell(SparseGrid *grid, GridIndex &index){
|
||||
this->grid = grid;
|
||||
neighbours.reserve(26);
|
||||
|
||||
for(int i = max(index.i -1, 0); i <= min(grid->width-1, index.i + 1); i++){
|
||||
for(int j = max(index.j -1, 0); j <= min(grid->height-1, index.j + 1); j++){
|
||||
for(int k = max(index.k -1, 0); k <= min(grid->depth-1, index.k + 1); k++){
|
||||
|
||||
long long key = ((long long)k << 40) | ((long long)j << 20) | i;
|
||||
SparseGrid::iterator it = grid->find(key);
|
||||
if(it != grid->end()){
|
||||
GridCell *neighbour = it->second;
|
||||
if(neighbour != this){
|
||||
neighbours.push_back(neighbour);
|
||||
neighbour->neighbours.push_back(this);
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
void GridCell::add(Vector3<double> p){
|
||||
points.push_back(p);
|
||||
}
|
||||
|
||||
bool GridCell::isDistant(const Vector3<double> &p, const double &squaredSpacing) const {
|
||||
for(const Vector3<double> &point : points){
|
||||
|
||||
if(p.squaredDistanceTo(point) < squaredSpacing){
|
||||
return false;
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,133 +0,0 @@
|
||||
|
||||
|
||||
#include <fstream>
|
||||
#include <iostream>
|
||||
#include <vector>
|
||||
|
||||
#include <experimental/filesystem>
|
||||
#include "laszip_api.h"
|
||||
|
||||
#include "LASPointReader.h"
|
||||
#include "stuff.h"
|
||||
|
||||
|
||||
namespace fs = std::experimental::filesystem;
|
||||
|
||||
using std::ifstream;
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
using std::vector;
|
||||
using std::ios;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
AABB LIBLASReader::getAABB(){
|
||||
AABB aabb;
|
||||
|
||||
Point minp = transform(header->min_x, header->min_y, header->min_z);
|
||||
Point maxp = transform(header->max_x, header->max_y, header->max_z);
|
||||
aabb.update(minp.position);
|
||||
aabb.update(maxp.position);
|
||||
|
||||
return aabb;
|
||||
}
|
||||
|
||||
LASPointReader::LASPointReader(string path){
|
||||
this->path = path;
|
||||
|
||||
|
||||
if(fs::is_directory(path)){
|
||||
// if directory is specified, find all las and laz files inside directory
|
||||
|
||||
for(fs::directory_iterator it(path); it != fs::directory_iterator(); it++){
|
||||
fs::path filepath = it->path();
|
||||
if(fs::is_regular_file(filepath)){
|
||||
if(icompare(fs::path(filepath).extension().string(), ".las") || icompare(fs::path(filepath).extension().string(), ".laz")){
|
||||
files.push_back(filepath.string());
|
||||
}
|
||||
}
|
||||
}
|
||||
}else{
|
||||
files.push_back(path);
|
||||
}
|
||||
|
||||
|
||||
// read bounding box
|
||||
for (const auto &file : files) {
|
||||
LIBLASReader aabbReader(file);
|
||||
AABB lAABB = aabbReader.getAABB();
|
||||
|
||||
aabb.update(lAABB.min);
|
||||
aabb.update(lAABB.max);
|
||||
|
||||
aabbReader.close();
|
||||
}
|
||||
|
||||
// open first file
|
||||
currentFile = files.begin();
|
||||
reader = new LIBLASReader(*currentFile);
|
||||
// cout << "let's go..." << endl;
|
||||
}
|
||||
|
||||
LASPointReader::~LASPointReader(){
|
||||
close();
|
||||
}
|
||||
|
||||
void LASPointReader::close(){
|
||||
if(reader != NULL){
|
||||
reader->close();
|
||||
delete reader;
|
||||
reader = NULL;
|
||||
}
|
||||
}
|
||||
|
||||
long long LASPointReader::numPoints(){
|
||||
if (reader->header->version_major >= 1 && reader->header->version_minor >= 4) {
|
||||
return reader->header->extended_number_of_point_records;
|
||||
} else {
|
||||
return reader->header->number_of_point_records;
|
||||
}
|
||||
}
|
||||
|
||||
bool LASPointReader::readNextPoint(){
|
||||
|
||||
bool hasPoints = reader->readPoint();
|
||||
|
||||
if(!hasPoints){
|
||||
// try to open next file, if available
|
||||
reader->close();
|
||||
delete reader;
|
||||
reader = NULL;
|
||||
|
||||
currentFile++;
|
||||
|
||||
if(currentFile != files.end()){
|
||||
reader = new LIBLASReader(*currentFile);
|
||||
hasPoints = reader->readPoint();
|
||||
}
|
||||
}
|
||||
|
||||
return hasPoints;
|
||||
}
|
||||
|
||||
Point LASPointReader::getPoint(){
|
||||
Point const p = reader->GetPoint();
|
||||
//cout << p.position.x << ", " << p.position.y << ", " << p.position.z << endl;
|
||||
return p;
|
||||
}
|
||||
|
||||
AABB LASPointReader::getAABB(){
|
||||
return aabb;
|
||||
}
|
||||
|
||||
Vector3<double> LASPointReader::getScale(){
|
||||
|
||||
Vector3<double> scale;
|
||||
scale.x =reader->header->x_scale_factor;
|
||||
scale.y =reader->header->y_scale_factor;
|
||||
scale.z =reader->header->z_scale_factor;
|
||||
|
||||
return scale;
|
||||
}
|
||||
|
||||
}
|
||||
@@ -1,35 +0,0 @@
|
||||
|
||||
#include <vector>
|
||||
|
||||
#include "LASPointWriter.hpp"
|
||||
|
||||
using std::vector;
|
||||
|
||||
namespace Potree{
|
||||
|
||||
void LASPointWriter::write(Point &point){
|
||||
|
||||
coordinates[0] = point.position.x;
|
||||
coordinates[1] = point.position.y;
|
||||
coordinates[2] = point.position.z;
|
||||
laszip_set_coordinates(writer, coordinates);
|
||||
|
||||
this->point->rgb[0] = point.color.x * 256;
|
||||
this->point->rgb[1] = point.color.y * 256;
|
||||
this->point->rgb[2] = point.color.z * 256;
|
||||
|
||||
this->point->intensity = point.intensity;
|
||||
this->point->classification = point.classification;
|
||||
this->point->return_number = point.returnNumber;
|
||||
this->point->number_of_returns = point.numberOfReturns;
|
||||
this->point->point_source_ID = point.pointSourceID;
|
||||
this->point->extra_bytes = reinterpret_cast<laszip_U8*>(&point.extraBytes[0]);
|
||||
this->point->num_extra_bytes = point.extraBytes.size();
|
||||
|
||||
laszip_set_point(writer, this->point);
|
||||
laszip_write_point(writer);
|
||||
|
||||
numPoints++;
|
||||
}
|
||||
|
||||
}
|
||||
139	PotreeConverter/src/LASWriter.cpp	Normal file
@@ -0,0 +1,139 @@
|
||||
|
||||
#include "LASWriter.hpp"
|
||||
|
||||
#include <fstream>
|
||||
|
||||
using namespace std;
|
||||
|
||||
vector<uint8_t> makeHeaderBuffer(LASHeader header) {
|
||||
int headerSize = header.headerSize;
|
||||
vector<uint8_t> buffer(headerSize, 0);
|
||||
uint8_t* data = buffer.data();
|
||||
|
||||
// file signature
|
||||
data[0] = 'L';
|
||||
data[1] = 'A';
|
||||
data[2] = 'S';
|
||||
data[3] = 'F';
|
||||
|
||||
// version major & minor -> 1.4
|
||||
data[24] = 1;
|
||||
data[25] = 4;
|
||||
|
||||
// header size
|
||||
reinterpret_cast<uint16_t*>(data + 94)[0] = headerSize;
|
||||
|
||||
// point data format
|
||||
data[104] = 2;
|
||||
|
||||
// bytes per point
|
||||
reinterpret_cast<uint16_t*>(data + 105)[0] = 26;
|
||||
|
||||
// #points
|
||||
uint64_t numPoints = header.numPoints;
|
||||
reinterpret_cast<uint64_t*>(data + 247)[0] = numPoints;
|
||||
|
||||
// min
|
||||
reinterpret_cast<double*>(data + 187)[0] = header.min.x;
|
||||
reinterpret_cast<double*>(data + 203)[0] = header.min.y;
|
||||
reinterpret_cast<double*>(data + 219)[0] = header.min.z;
|
||||
|
||||
// offset
|
||||
reinterpret_cast<double*>(data + 155)[0] = header.min.x;
|
||||
reinterpret_cast<double*>(data + 163)[0] = header.min.y;
|
||||
reinterpret_cast<double*>(data + 171)[0] = header.min.z;
|
||||
|
||||
// max
|
||||
reinterpret_cast<double*>(data + 179)[0] = header.max.x;
|
||||
reinterpret_cast<double*>(data + 195)[0] = header.max.y;
|
||||
reinterpret_cast<double*>(data + 211)[0] = header.max.z;
|
||||
|
||||
// scale
|
||||
reinterpret_cast<double*>(data + 131)[0] = header.scale.x;
|
||||
reinterpret_cast<double*>(data + 139)[0] = header.scale.y;
|
||||
reinterpret_cast<double*>(data + 147)[0] = header.scale.z;
|
||||
|
||||
// offset to point data
|
||||
uint32_t offSetToPointData = headerSize;
|
||||
reinterpret_cast<uint32_t*>(data + 96)[0] = offSetToPointData;
|
||||
|
||||
return buffer;
|
||||
}
|
||||
|
||||
struct LASPointF2 {
|
||||
int32_t x;
|
||||
int32_t y;
|
||||
int32_t z;
|
||||
uint16_t intensity;
|
||||
uint8_t returnNumber;
|
||||
uint8_t classification;
|
||||
uint8_t scanAngleRank;
|
||||
uint8_t userData;
|
||||
uint16_t pointSourceID;
|
||||
uint16_t r;
|
||||
uint16_t g;
|
||||
uint16_t b;
|
||||
};
|
||||
|
||||
void writeLAS(string path, LASHeader header, vector<Point> points) {
|
||||
|
||||
vector<uint8_t> headerBuffer = makeHeaderBuffer(header);
|
||||
|
||||
fstream file(path, ios::out | ios::binary);
|
||||
|
||||
file.write(reinterpret_cast<const char*>(headerBuffer.data()), header.headerSize);
|
||||
|
||||
LASPointF2 laspoint;
|
||||
|
||||
for (Point& point : points) {
|
||||
|
||||
int32_t ix = (point.x - header.min.x) / header.scale.x;
|
||||
int32_t iy = (point.y - header.min.y) / header.scale.y;
|
||||
int32_t iz = (point.z - header.min.z) / header.scale.z;
|
||||
|
||||
laspoint.x = ix;
|
||||
laspoint.y = iy;
|
||||
laspoint.z = iz;
|
||||
laspoint.r = 255;
|
||||
laspoint.g = 0;
|
||||
laspoint.b = 0;
|
||||
|
||||
file.write(reinterpret_cast<const char*>(&laspoint), 26);
|
||||
}
|
||||
|
||||
file.close();
|
||||
|
||||
|
||||
}
|
||||
|
||||
void writeLAS(string path, LASHeader header, vector<Point> sample, Points* points) {
|
||||
vector<uint8_t> headerBuffer = makeHeaderBuffer(header);
|
||||
|
||||
fstream file(path, ios::out | ios::binary);
|
||||
|
||||
file.write(reinterpret_cast<const char*>(headerBuffer.data()), header.headerSize);
|
||||
|
||||
LASPointF2 laspoint;
|
||||
auto attributeBuffer = points->attributeBuffer;
|
||||
int bytesPerPointAttribute = 4;
|
||||
|
||||
for (Point& point : sample) {
|
||||
|
||||
int32_t ix = (point.x - header.min.x) / header.scale.x;
|
||||
int32_t iy = (point.y - header.min.y) / header.scale.y;
|
||||
int32_t iz = (point.z - header.min.z) / header.scale.z;
|
||||
|
||||
laspoint.x = ix;
|
||||
laspoint.y = iy;
|
||||
laspoint.z = iz;
|
||||
|
||||
uint8_t* pointAttributeData = attributeBuffer->dataU8 + (point.index * bytesPerPointAttribute);
|
||||
laspoint.r = pointAttributeData[0];
|
||||
laspoint.g = pointAttributeData[1];
|
||||
laspoint.b = pointAttributeData[2];
|
||||
|
||||
file.write(reinterpret_cast<const char*>(&laspoint), 26);
|
||||
}
|
||||
|
||||
file.close();
|
||||
}
|
||||
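A hedged usage sketch for writeLAS above. It assumes LASHeader (declared in LASWriter.hpp, not shown here) is default-constructible and exposes the headerSize, numPoints, min, max and scale members that makeHeaderBuffer reads; the file name and scale values are arbitrary.

	// Sketch only: LASHeader's exact definition and these values are assumptions.
	#include <vector>
	#include "LASWriter.hpp"

	void writeExample() {
		std::vector<Point> points;

		Point p;
		p.x = 1.0;
		p.y = 2.0;
		p.z = 3.0;
		points.push_back(p);

		LASHeader header;
		header.headerSize = 375;           // size of a LAS 1.4 header
		header.numPoints = points.size();
		header.min = { 0.0, 0.0, 0.0 };
		header.max = { 1.0, 2.0, 3.0 };
		header.scale = { 0.001, 0.001, 0.001 };

		writeLAS("test.las", header, points);
	}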
@@ -1,265 +0,0 @@
#include <fstream>
#include <sstream>

#include "PTXPointReader.h"
#include "stuff.h"

using std::cout;
using std::endl;
using std::vector;
using std::ios;
using std::string;

namespace Potree{

static const int INVALID_INTENSITY = 32767;

std::map<string, AABB> PTXPointReader::aabbs = std::map<string, AABB>();
std::map<string, long> PTXPointReader::counts = std::map<string, long>();

//inline void split(vector<double> &v, char (&str)[512]) {
//	// vector<std::pair<string::const_iterator, string::const_iterator> > sp;
//	if (strlen(str) > 200) return;
//
//	//string strstr(str);
//	//split(sp, strstr, is_space(), token_compress_on);
//	// vector<string>
//	for (auto beg = sp.begin(); beg != sp.end(); ++beg) {
//		string token(beg->first, beg->second);
//		if (!token.empty()) {
//			v.push_back(atof(token.c_str()));
//		}
//	}
//}

inline void getlined(fstream &stream, vector<double> &result) {
	char str[512];
	result.clear();
	stream.getline(str, 512);
	//split(result, str);

	vector<string> tokens = split(str, ' ');
	for (auto &token : tokens) {
		result.push_back(std::stod(token));
	}
}

inline void skipline(fstream &stream) {
	string str;
	getline(stream, str);
}

bool assertd(fstream &stream, size_t i) {
	vector<double> tokens;
	getlined(stream, tokens);
	return i == tokens.size();
}

/**
 * The constructor needs to scan the whole PTX file to find out the bounding box. Unfortunately.
 * TODO: during the scan all the points are read and transformed. Afterwards, during loading,
 * the points are read again and transformed again. It would be nice to save the
 * transformed points in a temporary file. That would mean a LAS file or something similar,
 * which is not that useful. It's better to convert the PTX to LAS files.
 * TODO: it seems that the PTXPointReader is asked to produce the bounding box more than once.
 * Maybe it should be saved somewhere. On my machine, scanning 14m points needs 90 secs. The
 * processing speed of the PTX file is about 1m points every 50 secs.
 */
PTXPointReader::PTXPointReader(string path) {
	this->path = path;

	if (fs::is_directory(path)) {
		// if directory is specified, find all ptx files inside directory

		for (fs::directory_iterator it(path); it != fs::directory_iterator(); it++) {
			fs::path filepath = it->path();
			if (fs::is_regular_file(filepath)) {
				if (icompare(fs::path(filepath).extension().string(), ".ptx")) {
					files.push_back(filepath.string());
				}
			}
		}
	} else {
		files.push_back(path);
	}

	// open first file
	this->currentFile = files.begin();
	this->stream = new fstream(*(this->currentFile), ios::in);
	this->currentChunk = 0;
	skipline(*this->stream);
	loadChunk(this->stream, this->currentChunk, this->tr);
}

void PTXPointReader::scanForAABB() {
	// read bounding box
	double x(0), y(0), z(0);
	// TODO: verify that these initial values are ok
	double minx = std::numeric_limits<float>::max();
	double miny = std::numeric_limits<float>::max();
	double minz = std::numeric_limits<float>::max();
	double maxx = -std::numeric_limits<float>::max();
	double maxy = -std::numeric_limits<float>::max();
	double maxz = -std::numeric_limits<float>::max();
	double intensity(0);
	bool firstPoint = true;
	bool pleaseStop = false;
	long currentChunk = 0;
	long count = 0;
	double tr[16];
	vector<double> split;
	for (const auto &file : files) {
		fstream stream(file, ios::in);
		currentChunk = 0;
		getlined(stream, split);
		while (!pleaseStop) {
			if (1 == split.size()) {
				if (!loadChunk(&stream, currentChunk, tr)) {
					break;
				}
			}
			while (true) {
				getlined(stream, split);
				if (4 == split.size() || 7 == split.size()) {
					x = split[0];
					y = split[1];
					z = split[2];
					intensity = split[3];
					if (0.5 != intensity) {
						Point p = transform(tr, x, y, z);
						if (firstPoint) {
							maxx = minx = p.position.x;
							maxy = miny = p.position.y;
							maxz = minz = p.position.z;
							firstPoint = false;
						} else {
							minx = p.position.x < minx ? p.position.x : minx;
							maxx = p.position.x > maxx ? p.position.x : maxx;
							miny = p.position.y < miny ? p.position.y : miny;
							maxy = p.position.y > maxy ? p.position.y : maxy;
							minz = p.position.z < minz ? p.position.z : minz;
							maxz = p.position.z > maxz ? p.position.z : maxz;
						}
						count++;
						if (0 == count % 1000000)
							cout << "AABB-SCANNING: " << count << " points; " << currentChunk << " chunks" << endl;
					}
				} else {
					break;
				}
			}
			if (stream.eof()) {
				pleaseStop = true;
				break;
			}
			currentChunk++;
		}
		stream.close();
	}

	counts[path] = count;
	AABB lAABB(Vector3<double>(minx, miny, minz), Vector3<double>(maxx, maxy, maxz));
	PTXPointReader::aabbs[path] = lAABB;
}

bool PTXPointReader::loadChunk(fstream *stream, long currentChunk, double tr[16]) {
	vector<double> split;

	// The first 5 lines should have 1, 3, 3, 3 and 3 numbers, respectively.
	if (!assertd(*stream, 1) || !assertd(*stream, 3) || !assertd(*stream, 3) || !assertd(*stream, 3) || !assertd(*stream, 3))
		return false;

	getlined(*stream, split);
	if (4 != split.size()) {
		return false;
	}
	tr[0] = split[0];
	tr[1] = split[1];
	tr[2] = split[2];
	tr[3] = split[3];

	getlined(*stream, split);
	if (4 != split.size()) {
		return false;
	}
	tr[4] = split[0];
	tr[5] = split[1];
	tr[6] = split[2];
	tr[7] = split[3];

	getlined(*stream, split);
	if (4 != split.size()) {
		return false;
	}
	tr[8] = split[0];
	tr[9] = split[1];
	tr[10] = split[2];
	tr[11] = split[3];

	getlined(*stream, split);
	if (4 != split.size()) {
		return false;
	}
	tr[12] = split[0];
	tr[13] = split[1];
	tr[14] = split[2];
	tr[15] = split[3];
	origin = Vector3<double>(split[0], split[1], split[2]);

	return true;
}

bool PTXPointReader::readNextPoint() {
	while (true) {
		bool result = doReadNextPoint();
		if (!result)
			return false;
		if (INVALID_INTENSITY != p.intensity)
			return true;
	}
	return false;
}

bool PTXPointReader::doReadNextPoint() {
	if (this->stream->eof()) {
		this->stream->close();
		this->currentFile++;

		if (this->currentFile != files.end()) {
			this->stream = new fstream(*(this->currentFile), ios::in);
			this->currentChunk = 0;
			skipline(*stream);
			loadChunk(stream, currentChunk, tr);
		} else {
			return false;
		}
	}
	vector<double> split;
	getlined(*stream, split);
	if (1 == split.size()) {
		this->currentChunk++;
		loadChunk(stream, currentChunk, tr);
		getlined(*stream, split);
	}
	auto size1 = split.size();
	if (size1 > 3) {
		this->p = transform(tr, split[0], split[1], split[2]);
		double intensity = split[3];
		this->p.intensity = (unsigned short)(65535.0 * intensity);
		if (4 == size1) {
			this->p.color.x = (unsigned char)(intensity * 255.0);
			this->p.color.y = (unsigned char)(intensity * 255.0);
			this->p.color.z = (unsigned char)(intensity * 255.0);
		} else if (7 == size1) {
			this->p.color.x = (unsigned char)(split[4]);
			this->p.color.y = (unsigned char)(split[5]);
			this->p.color.z = (unsigned char)(split[6]);
		}
	} else {
		this->p.intensity = INVALID_INTENSITY;
	}
	return true;
}

}
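// Based on what the constructor and loadChunk() consume, each PTX chunk appears to begin
// with a plain-text header: the column count (skipped via skipline by the caller), the
// row count, the scanner position, the three scanner axes, and a 4x4 transformation whose
// last row supplies the translation stored in origin. A hypothetical header with
// placeholder values, for orientation only:
//
//	4000            column count (skipped before loadChunk is called)
//	5000            row count
//	0 0 0           scanner position
//	1 0 0           scanner x-axis
//	0 1 0           scanner y-axis
//	0 0 1           scanner z-axis
//	1 0 0 0         transformation row 1  -> tr[0..3]
//	0 1 0 0         transformation row 2  -> tr[4..7]
//	0 0 1 0         transformation row 3  -> tr[8..11]
//	0 0 0 1         transformation row 4  -> tr[12..15], origin = (0, 0, 0)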
@@ -1,49 +0,0 @@

#include "PointAttributes.hpp"
#include "PotreeException.h"

namespace Potree{

const PointAttribute PointAttribute::POSITION_CARTESIAN = PointAttribute(0, "POSITION_CARTESIAN", ATTRIBUTE_TYPE_INT32, 3, 12);
const PointAttribute PointAttribute::COLOR_PACKED = PointAttribute(1, "RGBA", ATTRIBUTE_TYPE_UINT8, 4, 4);
const PointAttribute PointAttribute::INTENSITY = PointAttribute(2, "intensity", ATTRIBUTE_TYPE_UINT16, 1, 2);
const PointAttribute PointAttribute::CLASSIFICATION = PointAttribute(3, "classification", ATTRIBUTE_TYPE_UINT8, 1, 1);
const PointAttribute PointAttribute::RETURN_NUMBER = PointAttribute(4, "return number", ATTRIBUTE_TYPE_UINT8, 1, 1);
const PointAttribute PointAttribute::NUMBER_OF_RETURNS = PointAttribute(5, "number of returns", ATTRIBUTE_TYPE_UINT8, 1, 1);
const PointAttribute PointAttribute::SOURCE_ID = PointAttribute(6, "source id", ATTRIBUTE_TYPE_UINT16, 1, 2);
const PointAttribute PointAttribute::GPS_TIME = PointAttribute(7, "gps-time", ATTRIBUTE_TYPE_DOUBLE, 1, 8);
const PointAttribute PointAttribute::NORMAL_SPHEREMAPPED = PointAttribute(8, "NORMAL_SPHEREMAPPED", ATTRIBUTE_TYPE_INT8, 2, 2);
const PointAttribute PointAttribute::NORMAL_OCT16 = PointAttribute(9, "NORMAL_OCT16", ATTRIBUTE_TYPE_INT8, 2, 2);
const PointAttribute PointAttribute::NORMAL = PointAttribute(10, "NORMAL", ATTRIBUTE_TYPE_FLOAT, 3, 12);

PointAttribute PointAttribute::fromString(string name){
	if(name == "POSITION_CARTESIAN"){
		return PointAttribute::POSITION_CARTESIAN;
	}else if(name == "COLOR_PACKED"){
		return PointAttribute::COLOR_PACKED;
	}else if(name == "INTENSITY"){
		return PointAttribute::INTENSITY;
	}else if(name == "CLASSIFICATION"){
		return PointAttribute::CLASSIFICATION;
	}else if(name == "RETURN_NUMBER"){
		return PointAttribute::RETURN_NUMBER;
	}else if(name == "NUMBER_OF_RETURNS"){
		return PointAttribute::NUMBER_OF_RETURNS;
	}else if(name == "SOURCE_ID"){
		return PointAttribute::SOURCE_ID;
	}else if(name == "GPS_TIME"){
		return PointAttribute::GPS_TIME;
	}else if(name == "NORMAL_OCT16"){
		return PointAttribute::NORMAL_OCT16;
	}else if(name == "NORMAL"){
		return PointAttribute::NORMAL;
	}

	throw PotreeException("Invalid PointAttribute name: '" + name + "'");
}

bool operator==(const PointAttribute& lhs, const PointAttribute& rhs){
	return lhs.ordinal == rhs.ordinal;
}

}
@@ -1,705 +0,0 @@


#include <experimental/filesystem>

#include "rapidjson/document.h"
#include "rapidjson/prettywriter.h"
#include "rapidjson/stringbuffer.h"

#include "PotreeConverter.h"
#include "stuff.h"
#include "LASPointReader.h"
#include "PTXPointReader.h"
#include "PotreeException.h"
#include "PotreeWriter.h"
#include "LASPointWriter.hpp"
#include "BINPointWriter.hpp"
#include "BINPointReader.hpp"
#include "PlyPointReader.h"
#include "XYZPointReader.hpp"
#include "ExtraBytes.hpp"

#include <chrono>
#include <sstream>
#include <string>
#include <map>
#include <vector>
#include <math.h>
#include <fstream>

using rapidjson::Document;
using rapidjson::StringBuffer;
using rapidjson::Writer;
using rapidjson::PrettyWriter;
using rapidjson::Value;

using std::stringstream;
using std::map;
using std::string;
using std::vector;
using std::find;
using std::chrono::high_resolution_clock;
using std::chrono::milliseconds;
using std::chrono::duration_cast;
using std::fstream;

namespace fs = std::experimental::filesystem;

namespace Potree{

PointReader *PotreeConverter::createPointReader(string path, PointAttributes pointAttributes){
	PointReader *reader = NULL;
	if(iEndsWith(path, ".las") || iEndsWith(path, ".laz")){
		reader = new LASPointReader(path);
	}else if(iEndsWith(path, ".ptx")){
		reader = new PTXPointReader(path);
	}else if(iEndsWith(path, ".ply")){
		reader = new PlyPointReader(path);
	}else if(iEndsWith(path, ".xyz") || iEndsWith(path, ".txt")){
		reader = new XYZPointReader(path, format, colorRange, intensityRange);
	}else if(iEndsWith(path, ".pts")){
		vector<double> intensityRange;

		if(this->intensityRange.size() == 0){
			intensityRange.push_back(-2048);
			intensityRange.push_back(+2047);
		}

		reader = new XYZPointReader(path, format, colorRange, intensityRange);
	}else if(iEndsWith(path, ".bin")){
		reader = new BINPointReader(path, aabb, scale, pointAttributes);
	}

	return reader;
}

PotreeConverter::PotreeConverter(string executablePath, string workDir, vector<string> sources){
	this->executablePath = executablePath;
	this->workDir = workDir;
	this->sources = sources;
}

vector<PointAttribute> checkAvailableStandardAttributes(string file) {

	vector<PointAttribute> attributes;

	bool isLas = iEndsWith(file, ".las") || iEndsWith(file, ".laz");
	if (!isLas) {
		return attributes;
	}

	laszip_POINTER laszip_reader;
	laszip_header* header;

	laszip_create(&laszip_reader);

	laszip_BOOL request_reader = 1;
	laszip_request_compatibility_mode(laszip_reader, request_reader);

	bool hasClassification = false;
	bool hasGpsTime = false;
	bool hasIntensity = false;
	bool hasNumberOfReturns = false;
	bool hasReturnNumber = false;
	bool hasPointSourceId = false;

	{
		laszip_BOOL is_compressed = iEndsWith(file, ".laz") ? 1 : 0;
		laszip_open_reader(laszip_reader, file.c_str(), &is_compressed);

		laszip_get_header_pointer(laszip_reader, &header);

		long long npoints = (header->number_of_point_records ? header->number_of_point_records : header->extended_number_of_point_records);

		laszip_point* point;
		laszip_get_point_pointer(laszip_reader, &point);

		for (int i = 0; i < 1'000'000 && i < npoints; i++) {
			laszip_read_point(laszip_reader);

			hasClassification |= point->classification != 0;
			hasGpsTime |= point->gps_time != 0;
			hasIntensity |= point->intensity != 0;
			hasNumberOfReturns |= point->number_of_returns != 0;
			hasReturnNumber |= point->return_number != 0;
			hasPointSourceId |= point->point_source_ID != 0;
		}
	}

	laszip_close_reader(laszip_reader);
	laszip_destroy(laszip_reader);

	if (hasClassification) {
		attributes.push_back(PointAttribute::CLASSIFICATION);
	}

	if (hasGpsTime) {
		attributes.push_back(PointAttribute::GPS_TIME);
	}

	if (hasIntensity) {
		attributes.push_back(PointAttribute::INTENSITY);
	}

	if (hasNumberOfReturns) {
		attributes.push_back(PointAttribute::NUMBER_OF_RETURNS);
	}

	if (hasReturnNumber) {
		attributes.push_back(PointAttribute::RETURN_NUMBER);
	}

	if (hasPointSourceId) {
		attributes.push_back(PointAttribute::SOURCE_ID);
	}

	return attributes;
}

vector<PointAttribute> parseExtraAttributes(string file) {

	vector<PointAttribute> attributes;

	bool isLas = iEndsWith(file, ".las") || iEndsWith(file, ".laz");
	if (!isLas) {
		return attributes;
	}

	laszip_POINTER laszip_reader;
	laszip_header* header;

	laszip_create(&laszip_reader);

	laszip_BOOL request_reader = 1;
	laszip_request_compatibility_mode(laszip_reader, request_reader);

	laszip_BOOL is_compressed = iEndsWith(file, ".laz") ? 1 : 0;
	laszip_open_reader(laszip_reader, file.c_str(), &is_compressed);

	laszip_get_header_pointer(laszip_reader, &header);

	{ // read extra bytes

		for (int i = 0; i < header->number_of_variable_length_records; i++) {
			laszip_vlr_struct vlr = header->vlrs[i];

			if (vlr.record_id != 4) {
				continue;
			}

			cout << "record id: " << vlr.record_id << endl;
			cout << "record_length_after_header: " << vlr.record_length_after_header << endl;

			int numExtraBytes = vlr.record_length_after_header / sizeof(ExtraBytesRecord);

			ExtraBytesRecord* extraBytes = reinterpret_cast<ExtraBytesRecord*>(vlr.data);

			for (int j = 0; j < numExtraBytes; j++) {
				ExtraBytesRecord extraAttribute = extraBytes[j];

				string name = string(extraAttribute.name);

				cout << "name: " << name << endl;

				//ExtraType type = extraTypeFromID(extraAttribute.data_type);
				ExtraType type = typeToExtraType.at(extraAttribute.data_type);
				int byteSize = type.size;
				PointAttribute attribute(123, name, type.type, type.numElements, byteSize);

				attributes.push_back(attribute);
			}
		}
	}

	laszip_close_reader(laszip_reader);
	laszip_destroy(laszip_reader);

	return attributes;
}
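// The record_id == 4 check above selects what the LAS 1.4 specification calls the
// "Extra Bytes" VLR; each attribute descriptor in that record is 192 bytes long, which
// is why dividing record_length_after_header by sizeof(ExtraBytesRecord) is expected
// to come out exact. A sanity check along these lines could make that assumption
// explicit, assuming ExtraBytesRecord is meant to mirror the spec layout:
//
//	static_assert(sizeof(ExtraBytesRecord) == 192,
//		"ExtraBytesRecord should match the 192-byte LAS 1.4 extra bytes descriptor");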

void PotreeConverter::prepare(){

	// if sources contains directories, use files inside the directory instead
	vector<string> sourceFiles;
	for (const auto &source : sources) {
		fs::path pSource(source);
		if(fs::is_directory(pSource)){
			fs::directory_iterator it(pSource);
			for(;it != fs::directory_iterator(); it++){
				fs::path pDirectoryEntry = it->path();
				if(fs::is_regular_file(pDirectoryEntry)){
					string filepath = pDirectoryEntry.string();
					if(iEndsWith(filepath, ".las")
						|| iEndsWith(filepath, ".laz")
						|| iEndsWith(filepath, ".xyz")
						|| iEndsWith(filepath, ".pts")
						|| iEndsWith(filepath, ".ptx")
						|| iEndsWith(filepath, ".ply")){
						sourceFiles.push_back(filepath);
					}
				}
			}
		}else if(fs::is_regular_file(pSource)){
			sourceFiles.push_back(source);
		}
	}
	this->sources = sourceFiles;

	pointAttributes = PointAttributes();
	pointAttributes.add(PointAttribute::POSITION_CARTESIAN);

	bool addExtraAttributes = false;

	if(outputAttributes.size() > 0){
		for(const auto &attribute : outputAttributes){
			if(attribute == "RGB"){
				pointAttributes.add(PointAttribute::COLOR_PACKED);
			}else if(attribute == "INTENSITY"){
				pointAttributes.add(PointAttribute::INTENSITY);
			}else if(attribute == "CLASSIFICATION"){
				pointAttributes.add(PointAttribute::CLASSIFICATION);
			}else if(attribute == "RETURN_NUMBER"){
				pointAttributes.add(PointAttribute::RETURN_NUMBER);
			}else if(attribute == "NUMBER_OF_RETURNS"){
				pointAttributes.add(PointAttribute::NUMBER_OF_RETURNS);
			}else if(attribute == "SOURCE_ID"){
				pointAttributes.add(PointAttribute::SOURCE_ID);
			}else if(attribute == "GPS_TIME"){
				pointAttributes.add(PointAttribute::GPS_TIME);
			}else if(attribute == "NORMAL"){
				pointAttributes.add(PointAttribute::NORMAL_OCT16);
			}else if(attribute == "EXTRA"){
				addExtraAttributes = true;
			}
		}
	}else{
		string file = sourceFiles[0];

		// always add colors?
		pointAttributes.add(PointAttribute::COLOR_PACKED);

		vector<PointAttribute> attributes = checkAvailableStandardAttributes(file);

		for (PointAttribute attribute : attributes) {
			pointAttributes.add(attribute);

			//cout << attribute.name << ", " << attribute.byteSize << endl;
		}

		addExtraAttributes = true;
	}

	if(addExtraAttributes){
		string file = sourceFiles[0];

		vector<PointAttribute> extraAttributes = parseExtraAttributes(file);
		for (PointAttribute attribute : extraAttributes) {
			pointAttributes.add(attribute);

			//cout << attribute.name << ", " << attribute.byteSize << endl;
		}
	}

	cout << "processing the following attributes: " << endl;
	for (PointAttribute& attribute : pointAttributes.attributes) {
		cout << attribute.name << endl;
	}
	cout << endl;
}

FileInfos PotreeConverter::computeInfos(){

	AABB aabb;
	uint64_t numPoints = 0;

	if(aabbValues.size() == 6){
		Vector3<double> userMin(aabbValues[0],aabbValues[1],aabbValues[2]);
		Vector3<double> userMax(aabbValues[3],aabbValues[4],aabbValues[5]);
		aabb = AABB(userMin, userMax);
	}else{
		for(string source : sources){

			PointReader *reader = createPointReader(source, pointAttributes);

			numPoints += reader->numPoints();

			AABB lAABB = reader->getAABB();
			aabb.update(lAABB.min);
			aabb.update(lAABB.max);

			reader->close();
			delete reader;
		}
	}

	FileInfos infos = {aabb, numPoints};

	return infos;
}

void PotreeConverter::generatePage(string name){

	string pagedir = this->workDir;
	string templateSourcePath = this->executablePath + "/resources/page_template/viewer_template.html";
	string mapTemplateSourcePath = this->executablePath + "/resources/page_template/lasmap_template.html";
	string templateDir = this->executablePath + "/resources/page_template";

	if(!this->pageTemplatePath.empty()) {
		templateSourcePath = this->pageTemplatePath + "/viewer_template.html";
		mapTemplateSourcePath = this->pageTemplatePath + "/lasmap_template.html";
		templateDir = this->pageTemplatePath;
	}

	string templateTargetPath = pagedir + "/" + name + ".html";
	string mapTemplateTargetPath = pagedir + "/lasmap_" + name + ".html";

	Potree::copyDir(fs::path(templateDir), fs::path(pagedir));
	fs::remove(pagedir + "/viewer_template.html");
	fs::remove(pagedir + "/lasmap_template.html");

	if(!this->sourceListingOnly){ // change viewer template
		ifstream in( templateSourcePath );
		ofstream out( templateTargetPath );

		string line;
		while(getline(in, line)){
			if(line.find("<!-- INCLUDE POINTCLOUD -->") != string::npos){
				out << "\t\tPotree.loadPointCloud(\"pointclouds/" << name << "/cloud.js\", \"" << name << "\", e => {" << endl;
				out << "\t\t\tlet pointcloud = e.pointcloud;\n";
				out << "\t\t\tlet material = pointcloud.material;\n";

				out << "\t\t\tviewer.scene.addPointCloud(pointcloud);" << endl;

				out << "\t\t\t" << "material.pointColorType = Potree.PointColorType." << material << "; // any Potree.PointColorType.XXXX \n";
				out << "\t\t\tmaterial.size = 1;\n";
				out << "\t\t\tmaterial.pointSizeType = Potree.PointSizeType.ADAPTIVE;\n";
				out << "\t\t\tmaterial.shape = Potree.PointShape.SQUARE;\n";

				out << "\t\t\tviewer.fitToScreen();" << endl;
				out << "\t\t});" << endl;
			}else if(line.find("<!-- INCLUDE SETTINGS HERE -->") != string::npos){
				out << std::boolalpha;
				out << "\t\t" << "document.title = \"" << title << "\";\n";
				out << "\t\t" << "viewer.setEDLEnabled(" << edlEnabled << ");\n";
				if(showSkybox){
					out << "\t\t" << "viewer.setBackground(\"skybox\"); // [\"skybox\", \"gradient\", \"black\", \"white\"];\n";
				}else{
					out << "\t\t" << "viewer.setBackground(\"gradient\"); // [\"skybox\", \"gradient\", \"black\", \"white\"];\n";
				}

				string descriptionEscaped = string(description);
				std::replace(descriptionEscaped.begin(), descriptionEscaped.end(), '`', '\'');

				out << "\t\t" << "viewer.setDescription(`" << descriptionEscaped << "`);\n";
			}else{
				out << line << endl;
			}
		}

		in.close();
		out.close();
	}

	// change lasmap template
	if(!this->projection.empty()){
		ifstream in( mapTemplateSourcePath );
		ofstream out( mapTemplateTargetPath );

		string line;
		while(getline(in, line)){
			if(line.find("<!-- INCLUDE SOURCE -->") != string::npos){
				out << "\tvar source = \"" << "pointclouds/" << name << "/sources.json" << "\";";
			}else{
				out << line << endl;
			}
		}

		in.close();
		out.close();
	}

	//{ // write settings
	//	stringstream ssSettings;
	//
	//	ssSettings << "var sceneProperties = {" << endl;
	//	ssSettings << "\tpath: \"" << "../resources/pointclouds/" << name << "/cloud.js\"," << endl;
	//	ssSettings << "\tcameraPosition: null, // other options: cameraPosition: [10,10,10]," << endl;
	//	ssSettings << "\tcameraTarget: null, // other options: cameraTarget: [0,0,0]," << endl;
	//	ssSettings << "\tfov: 60, // field of view in degrees," << endl;
	//	ssSettings << "\tsizeType: \"Adaptive\", // other options: \"Fixed\", \"Attenuated\"" << endl;
	//	ssSettings << "\tquality: null, // other options: \"Circles\", \"Interpolation\", \"Splats\"" << endl;
	//	ssSettings << "\tmaterial: \"RGB\", // other options: \"Height\", \"Intensity\", \"Classification\"" << endl;
	//	ssSettings << "\tpointLimit: 1, // max number of points in millions" << endl;
	//	ssSettings << "\tpointSize: 1, // " << endl;
	//	ssSettings << "\tnavigation: \"Orbit\", // other options: \"Orbit\", \"Flight\"" << endl;
	//	ssSettings << "\tuseEDL: false, " << endl;
	//	ssSettings << "};" << endl;
	//
	//	ofstream fSettings;
	//	fSettings.open(pagedir + "/examples/" + name + ".js", ios::out);
	//	fSettings << ssSettings.str();
	//	fSettings.close();
	//}
}

void writeSources(string path, vector<string> sourceFilenames, vector<int> numPoints, vector<AABB> boundingBoxes, string projection){
	Document d(rapidjson::kObjectType);

	AABB bb;

	Value jProjection(projection.c_str(), (rapidjson::SizeType)projection.size());

	Value jSources(rapidjson::kObjectType);
	jSources.SetArray();
	for(int i = 0; i < sourceFilenames.size(); i++){
		string &source = sourceFilenames[i];
		int points = numPoints[i];
		AABB boundingBox = boundingBoxes[i];

		bb.update(boundingBox);

		Value jSource(rapidjson::kObjectType);

		Value jName(source.c_str(), (rapidjson::SizeType)source.size());
		Value jPoints(points);
		Value jBounds(rapidjson::kObjectType);

		{
			Value bbMin(rapidjson::kObjectType);
			Value bbMax(rapidjson::kObjectType);

			bbMin.SetArray();
			bbMin.PushBack(boundingBox.min.x, d.GetAllocator());
			bbMin.PushBack(boundingBox.min.y, d.GetAllocator());
			bbMin.PushBack(boundingBox.min.z, d.GetAllocator());

			bbMax.SetArray();
			bbMax.PushBack(boundingBox.max.x, d.GetAllocator());
			bbMax.PushBack(boundingBox.max.y, d.GetAllocator());
			bbMax.PushBack(boundingBox.max.z, d.GetAllocator());

			jBounds.AddMember("min", bbMin, d.GetAllocator());
			jBounds.AddMember("max", bbMax, d.GetAllocator());
		}

		jSource.AddMember("name", jName, d.GetAllocator());
		jSource.AddMember("points", jPoints, d.GetAllocator());
		jSource.AddMember("bounds", jBounds, d.GetAllocator());

		jSources.PushBack(jSource, d.GetAllocator());
	}

	Value jBoundingBox(rapidjson::kObjectType);
	{
		Value bbMin(rapidjson::kObjectType);
		Value bbMax(rapidjson::kObjectType);

		bbMin.SetArray();
		bbMin.PushBack(bb.min.x, d.GetAllocator());
		bbMin.PushBack(bb.min.y, d.GetAllocator());
		bbMin.PushBack(bb.min.z, d.GetAllocator());

		bbMax.SetArray();
		bbMax.PushBack(bb.max.x, d.GetAllocator());
		bbMax.PushBack(bb.max.y, d.GetAllocator());
		bbMax.PushBack(bb.max.z, d.GetAllocator());

		jBoundingBox.AddMember("min", bbMin, d.GetAllocator());
		jBoundingBox.AddMember("max", bbMax, d.GetAllocator());
	}

	d.AddMember("bounds", jBoundingBox, d.GetAllocator());
	d.AddMember("projection", jProjection, d.GetAllocator());
	d.AddMember("sources", jSources, d.GetAllocator());

	StringBuffer buffer;
	//PrettyWriter<StringBuffer> writer(buffer);
	Writer<StringBuffer> writer(buffer);
	d.Accept(writer);

	if(!fs::exists(fs::path(path))){
		fs::path pcdir(path);
		fs::create_directories(pcdir);
	}

	ofstream sourcesOut(path + "/sources.json", ios::out);
	sourcesOut << buffer.GetString();
	sourcesOut.close();
}
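// For reference, writeSources() above produces JSON of roughly the following shape
// (file name, point count, coordinates and projection string are placeholders; the
// compact Writer emits it without whitespace, it is pretty-printed here for readability):
//
//	{
//		"bounds":     { "min": [0.0, 0.0, 0.0], "max": [100.0, 100.0, 50.0] },
//		"projection": "<proj4 string or empty>",
//		"sources": [
//			{
//				"name": "scan1.las",
//				"points": 12345678,
//				"bounds": { "min": [0.0, 0.0, 0.0], "max": [100.0, 100.0, 50.0] }
//			}
//		]
//	}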

void PotreeConverter::convert(){
	auto start = high_resolution_clock::now();

	prepare();

	long long pointsProcessed = 0;

	FileInfos infos = computeInfos();
	AABB aabb = infos.aabb;

	{
		cout << "AABB: {" << endl;
		cout << "\t\"min\": " << aabb.min << "," << endl;
		cout << "\t\"max\": " << aabb.max << "," << endl;
		cout << "\t\"size\": " << aabb.size << endl;
		cout << "}" << endl << endl;

		aabb.makeCubic();

		cout << "cubicAABB: {" << endl;
		cout << "\t\"min\": " << aabb.min << "," << endl;
		cout << "\t\"max\": " << aabb.max << "," << endl;
		cout << "\t\"size\": " << aabb.size << endl;
		cout << "}" << endl << endl;
	}

	cout << "total number of points: " << infos.numPoints << endl;

	if (diagonalFraction != 0) {
		spacing = (float)(aabb.size.length() / diagonalFraction);
		cout << "spacing calculated from diagonal: " << spacing << endl;
	}

	if(pageName.size() > 0){
		generatePage(pageName);
		workDir = workDir + "/pointclouds/" + pageName;
	}

	PotreeWriter *writer = NULL;
	if(fs::exists(fs::path(this->workDir + "/cloud.js"))){

		if(storeOption == StoreOption::ABORT_IF_EXISTS){
			cout << "ABORTING CONVERSION: target already exists: " << this->workDir << "/cloud.js" << endl;
			cout << "If you want to overwrite the existing conversion, specify --overwrite" << endl;
			cout << "If you want to add new points to the existing conversion, make sure the new points ";
			cout << "are contained within the bounding box of the existing conversion and then specify --incremental" << endl;

			return;
		}else if(storeOption == StoreOption::OVERWRITE){
			fs::remove_all(workDir + "/data");
			fs::remove_all(workDir + "/temp");
			fs::remove(workDir + "/cloud.js");
			writer = new PotreeWriter(this->workDir, aabb, spacing, maxDepth, scale, outputFormat, pointAttributes, quality);
			writer->setProjection(this->projection);
		}else if(storeOption == StoreOption::INCREMENTAL){
			writer = new PotreeWriter(this->workDir, quality);
			writer->loadStateFromDisk();
		}
	}else{
		writer = new PotreeWriter(this->workDir, aabb, spacing, maxDepth, scale, outputFormat, pointAttributes, quality);
		writer->setProjection(this->projection);
	}

	if(writer == NULL){
		return;
	}

	writer->storeSize = storeSize;

	vector<AABB> boundingBoxes;
	vector<int> numPoints;
	vector<string> sourceFilenames;

	for (const auto &source : sources) {
		cout << "READING: " << source << endl;

		PointReader *reader = createPointReader(source, pointAttributes);

		boundingBoxes.push_back(reader->getAABB());
		numPoints.push_back(reader->numPoints());
		sourceFilenames.push_back(fs::path(source).filename().string());

		writeSources(this->workDir, sourceFilenames, numPoints, boundingBoxes, this->projection);
		if(this->sourceListingOnly){
			reader->close();
			delete reader;

			continue;
		}

		while(reader->readNextPoint()){
			pointsProcessed++;

			Point p = reader->getPoint();
			writer->add(p);

			if((pointsProcessed % (1'000'000)) == 0){
				writer->processStore();
				writer->waitUntilProcessed();

				auto end = high_resolution_clock::now();
				long long duration = duration_cast<milliseconds>(end-start).count();
				float seconds = duration / 1'000.0f;

				stringstream ssMessage;

				ssMessage.imbue(std::locale(""));

				//ssMessage << "INDEXING: ";
				//ssMessage << pointsProcessed << " points processed; ";
				//ssMessage << writer->numAccepted << " points written; ";
				//ssMessage << seconds << " seconds passed";

				int percent = 100.0f * float(pointsProcessed) / float(infos.numPoints);

				ssMessage << "INDEXING: ";
				ssMessage << pointsProcessed << " of " << infos.numPoints << " processed (" << percent << "%); ";
				ssMessage << writer->numAccepted << " written; ";
				ssMessage << seconds << " seconds passed";

				cout << ssMessage.str() << endl;
			}
			if((pointsProcessed % (flushLimit)) == 0){
				cout << "FLUSHING: ";

				auto start = high_resolution_clock::now();

				writer->flush();

				auto end = high_resolution_clock::now();
				long long duration = duration_cast<milliseconds>(end-start).count();
				float seconds = duration / 1'000.0f;

				cout << seconds << "s" << endl;
			}

			//if(pointsProcessed >= 10'000'000){
			//	break;
			//}
		}
		reader->close();
		delete reader;
	}

	cout << "closing writer" << endl;
	writer->flush();
	writer->close();

	writeSources(this->workDir + "/sources.json", sourceFilenames, numPoints, boundingBoxes, this->projection);

	float percent = (float)writer->numAccepted / (float)pointsProcessed;
	percent = percent * 100;

	auto end = high_resolution_clock::now();
	long long duration = duration_cast<milliseconds>(end-start).count();

	cout << endl;
	cout << "conversion finished" << endl;
	cout << pointsProcessed << " points were processed and " << writer->numAccepted << " points ( " << percent << "% ) were written to the output. " << endl;

	cout << "duration: " << (duration / 1000.0f) << "s" << endl;
}

}
@@ -1,765 +0,0 @@


#include <cmath>
#include <sstream>
#include <stack>
#include <chrono>
#include <fstream>
#include <iomanip>

#include <experimental/filesystem>

#include "AABB.h"
#include "SparseGrid.h"
#include "stuff.h"
#include "CloudJS.hpp"
#include "PointAttributes.hpp"
#include "PointReader.h"
#include "PointWriter.hpp"
#include "LASPointReader.h"
#include "BINPointReader.hpp"
#include "LASPointWriter.hpp"
#include "BINPointWriter.hpp"
#include "PotreeException.h"

#include "PotreeWriter.h"

using std::ifstream;
using std::stack;
using std::stringstream;
using std::chrono::high_resolution_clock;
using std::chrono::milliseconds;
using std::chrono::duration_cast;

namespace fs = std::experimental::filesystem;

namespace Potree{

PWNode::PWNode(PotreeWriter* potreeWriter, AABB aabb){
	this->potreeWriter = potreeWriter;
	this->aabb = aabb;
	this->grid = new SparseGrid(aabb, spacing());
}

PWNode::PWNode(PotreeWriter* potreeWriter, int index, AABB aabb, int level){
	this->index = index;
	this->aabb = aabb;
	this->level = level;
	this->potreeWriter = potreeWriter;
	this->grid = new SparseGrid(aabb, spacing());
}

PWNode::~PWNode(){
	for(PWNode *child : children){
		if(child != NULL){
			delete child;
		}
	}
	delete grid;
}

string PWNode::name() const {
	if(parent == NULL){
		return "r";
	}else{
		return parent->name() + std::to_string(index);
	}
}

float PWNode::spacing(){
	return float(potreeWriter->spacing / pow(2.0, float(level)));
}

string PWNode::workDir(){
	return potreeWriter->workDir;
}

string PWNode::hierarchyPath(){
	string path = "r/";

	int hierarchyStepSize = potreeWriter->hierarchyStepSize;
	string indices = name().substr(1);

	int numParts = (int)floor((float)indices.size() / (float)hierarchyStepSize);
	for(int i = 0; i < numParts; i++){
		path += indices.substr(i * hierarchyStepSize, hierarchyStepSize) + "/";
	}

	return path;
}

string PWNode::path(){
	string path = hierarchyPath() + name() + potreeWriter->getExtension();
	return path;
}

PointReader *PWNode::createReader(string path){
	PointReader *reader = NULL;
	OutputFormat outputFormat = this->potreeWriter->outputFormat;
	if(outputFormat == OutputFormat::LAS || outputFormat == OutputFormat::LAZ){
		reader = new LASPointReader(path);
	}else if(outputFormat == OutputFormat::BINARY){
		reader = new BINPointReader(path, aabb, potreeWriter->scale, this->potreeWriter->pointAttributes);
	}

	return reader;
}

PointWriter *PWNode::createWriter(string path){
	PointWriter *writer = NULL;
	OutputFormat outputFormat = this->potreeWriter->outputFormat;
	if(outputFormat == OutputFormat::LAS || outputFormat == OutputFormat::LAZ){
		writer = new LASPointWriter(path, aabb, potreeWriter->scale);
	}else if(outputFormat == OutputFormat::BINARY){
		writer = new BINPointWriter(path, aabb, potreeWriter->scale, this->potreeWriter->pointAttributes);
	}

	return writer;
}

void PWNode::loadFromDisk(){

	PointReader *reader = createReader(workDir() + "/data/" + path());
	while(reader->readNextPoint()){
		Point p = reader->getPoint();

		if(isLeafNode()){
			store.push_back(p);
		}else{
			grid->addWithoutCheck(p.position);
		}
	}
	grid->numAccepted = numAccepted;
	reader->close();
	delete reader;

	isInMemory = true;
}

PWNode *PWNode::createChild(int childIndex){
	AABB cAABB = childAABB(aabb, childIndex);
	PWNode *child = new PWNode(potreeWriter, childIndex, cAABB, level+1);
	child->parent = this;
	children[childIndex] = child;

	return child;
}

void PWNode::split(){
	children.resize(8, NULL);

	string filepath = workDir() + "/data/" + path();
	if(fs::exists(filepath)){
		fs::remove(filepath);
	}

	for(Point &point : store){
		add(point);
	}

	store = vector<Point>();
}

PWNode *PWNode::add(Point &point){
	addCalledSinceLastFlush = true;

	if(!isInMemory){
		loadFromDisk();
	}

	if(isLeafNode()){
		store.push_back(point);
		if(int(store.size()) >= potreeWriter->storeSize){
			split();
		}

		return this;
	}else{

		bool accepted = false;
		//if(potreeWriter->quality == ConversionQuality::FAST){
			accepted = grid->add(point.position);
		//}else/* if(potreeWriter->quality == ConversionQuality::DEFAULT)*/{
		//	PWNode *node = this;
		//	accepted = true;
		//	while(accepted && node != NULL){
		//		accepted = accepted && node->grid->willBeAccepted(point.position, grid->squaredSpacing);
		//		node = node->parent;
		//	}
		//
		//	//node = this;
		//	//while(accepted && node != NULL && node->children.size() > 0){
		//	//	int childIndex = nodeIndex(node->aabb, point);
		//	//
		//	//	if(childIndex == -1){
		//	//		break;
		//	//	}
		//	//
		//	//	node = node->children[childIndex];
		//	//
		//	//	if(node == NULL){
		//	//		break;
		//	//	}
		//	//
		//	//	accepted = accepted && node->grid->willBeAccepted(point.position, grid->squaredSpacing);
		//	//}
		//
		//	if(accepted){
		//		grid->addWithoutCheck(point.position);
		//	}
		//}/*else if(potreeWriter->quality == ConversionQuality::NICE){
		//	PWNode *node = this;
		//	accepted = true;
		//	while(accepted && node != NULL){
		//		accepted = accepted && node->grid->willBeAccepted(point.position, grid->squaredSpacing);
		//		node = node->parent;
		//	}
		//
		//	node = this;
		//	while(accepted && node != NULL && node->children.size() > 0){
		//		int childIndex = nodeIndex(node->aabb, point);
		//
		//		if(childIndex == -1){
		//			break;
		//		}
		//
		//		node = node->children[childIndex];
		//
		//		if(node == NULL){
		//			break;
		//		}
		//
		//		accepted = accepted && node->grid->willBeAccepted(point.position, grid->squaredSpacing);
		//	}
		//
		//
		//	if(accepted){
		//		grid->addWithoutCheck(point.position);
		//	}
		//}*/

		if(accepted){
			cache.push_back(point);
			acceptedAABB.update(point.position);
			numAccepted++;

			return this;
		}else{
			// try adding point to higher level

			if(potreeWriter->maxDepth != -1 && level >= potreeWriter->maxDepth){
				return NULL;
			}

			int childIndex = nodeIndex(aabb, point);
			if(childIndex >= 0){
				if(isLeafNode()){
					children.resize(8, NULL);
				}
				PWNode *child = children[childIndex];

				// create child node if not existent
				if(child == NULL){
					child = createChild(childIndex);
				}

				return child->add(point);
				//child->add(point, targetLevel);
			} else {
				return NULL;
			}
		}
		return NULL;
	}
}

void PWNode::flush(){

	std::function<void(vector<Point> &points, bool append)> writeToDisk = [&](vector<Point> &points, bool append){
		string filepath = workDir() + "/data/" + path();
		PointWriter *writer = NULL;

		if(!fs::exists(workDir() + "/data/" + hierarchyPath())){
			fs::create_directories(workDir() + "/data/" + hierarchyPath());
		}

		if(append){
			string temppath = workDir() + "/temp/prepend" + potreeWriter->getExtension();
			if(fs::exists(filepath)){
				fs::rename(fs::path(filepath), fs::path(temppath));
			}

			writer = createWriter(filepath);
			if(fs::exists(temppath)){
				PointReader *reader = createReader(temppath);
				while(reader->readNextPoint()){
					writer->write(reader->getPoint());
				}
				reader->close();
				delete reader;
				fs::remove(temppath);
			}
		}else{
			if(fs::exists(filepath)){
				fs::remove(filepath);
			}
			writer = createWriter(filepath);
		}

		for(auto &e_c : points){
			writer->write(e_c);
		}

		if(append && (writer->numPoints != this->numAccepted)){
			cout << "writeToDisk " << writer->numPoints << " != " << this->numAccepted << endl;
			exit(1);
		}

		writer->close();
		delete writer;
	};

	if(isLeafNode()){
		if(addCalledSinceLastFlush){
			writeToDisk(store, false);

			//if(store.size() != this->numAccepted){
			//	cout << "store " << store.size() << " != " << this->numAccepted << " - " << this->name() << endl;
			//}
		}else if(!addCalledSinceLastFlush && isInMemory){
			store = vector<Point>();

			isInMemory = false;
		}
	}else{
		if(addCalledSinceLastFlush){
			writeToDisk(cache, true);
			//if(cache.size() != this->numAccepted){
			//	cout << "cache " << cache.size() << " != " << this->numAccepted << " - " << this->name() << endl;
			//
			//	exit(1);
			//}
			cache = vector<Point>();
		}else if(!addCalledSinceLastFlush && isInMemory){
			delete grid;
			grid = new SparseGrid(aabb, spacing());
			isInMemory = false;
		}
	}

	addCalledSinceLastFlush = false;

	for(PWNode *child : children){
		if(child != NULL){
			child->flush();
		}
	}
}

vector<PWNode*> PWNode::getHierarchy(int levels){

	vector<PWNode*> hierarchy;

	list<PWNode*> stack;
	stack.push_back(this);
	while(!stack.empty()){
		PWNode *node = stack.front();
		stack.pop_front();

		if(node->level >= this->level + levels){
			break;
		}
		hierarchy.push_back(node);

		for(PWNode *child : node->children){
			if(child != NULL){
				stack.push_back(child);
			}
		}
	}

	return hierarchy;
}

void PWNode::traverse(std::function<void(PWNode*)> callback){
	callback(this);

	for(PWNode *child : this->children){
		if(child != NULL){
			child->traverse(callback);
		}
	}
}

void PWNode::traverseBreadthFirst(std::function<void(PWNode*)> callback){

	// https://en.wikipedia.org/wiki/Iterative_deepening_depth-first_search

	int currentLevel = 0;
	int visitedAtLevel = 0;

	do{

		// doing depth first search until node->level == currentLevel
		stack<PWNode*> st;
		st.push(this);
		while(!st.empty()){
			PWNode *node = st.top();
			st.pop();

			if(node->level == currentLevel){
				callback(node);
				visitedAtLevel++;
			}else if(node->level < currentLevel){
				for(PWNode *child : node->children){
					if(child != NULL){
						st.push(child);
					}
				}
			}
		}

		currentLevel++;

	}while(visitedAtLevel > 0);
}

PWNode* PWNode::findNode(string name){
	string thisName = this->name();

	if(name.size() == thisName.size()){
		return (name == thisName) ? this : NULL;
	}else if(name.size() > thisName.size()){
		int childIndex = stoi(string(1, name[thisName.size()]));
		if(!isLeafNode() && children[childIndex] != NULL){
			return children[childIndex]->findNode(name);
		}else{
			return NULL;
		}
	}else{
		return NULL;
	}
}

PotreeWriter::PotreeWriter(string workDir, ConversionQuality quality){
	this->workDir = workDir;
	this->quality = quality;
}

PotreeWriter::PotreeWriter(string workDir, AABB aabb, float spacing, int maxDepth, double scale, OutputFormat outputFormat, PointAttributes pointAttributes, ConversionQuality quality){
	this->workDir = workDir;
	this->aabb = aabb;
	this->spacing = spacing;
	this->scale = scale;
	this->maxDepth = maxDepth;
	this->outputFormat = outputFormat;
	this->quality = quality;

	this->pointAttributes = pointAttributes;

	if(this->scale == 0){
		if(aabb.size.length() > 1'000'000){
			this->scale = 0.01;
		}else if(aabb.size.length() > 100'000){
			this->scale = 0.001;
		}else if(aabb.size.length() > 1){
			this->scale = 0.001;
		}else{
			this->scale = 0.0001;
		}
	}

	cloudjs.outputFormat = outputFormat;
	cloudjs.boundingBox = aabb;
	cloudjs.octreeDir = "data";
	cloudjs.spacing = spacing;
	cloudjs.version = "1.8";
	cloudjs.scale = this->scale;
	cloudjs.pointAttributes = pointAttributes;

	root = new PWNode(this, aabb);
}

string PotreeWriter::getExtension(){
	if(outputFormat == OutputFormat::LAS){
		return ".las";
	}else if(outputFormat == OutputFormat::LAZ){
		return ".laz";
	}else if(outputFormat == OutputFormat::BINARY){
		return ".bin";
	}

	return "";
}

void PotreeWriter::waitUntilProcessed(){
	if(storeThread.joinable()){
		storeThread.join();
	}
}

void PotreeWriter::add(Point &p){
	if(numAdded == 0){
		fs::path dataDir(workDir + "/data");
		fs::path tempDir(workDir + "/temp");

		fs::create_directories(dataDir);
		fs::create_directories(tempDir);
	}

	store.push_back(p);
	numAdded++;

	if(store.size() > 10'000){
		processStore();
	}
}

void PotreeWriter::processStore(){
	vector<Point> st = store;
	store = vector<Point>();

	waitUntilProcessed();

	storeThread = thread([this, st]{
		for(Point p : st){
			PWNode *acceptedBy = root->add(p);
			if(acceptedBy != NULL){
				tightAABB.update(p.position);

				pointsInMemory++;
				numAccepted++;
			}
		}
	});
}

void PotreeWriter::flush(){
	processStore();

	if(storeThread.joinable()){
		storeThread.join();
	}

	//auto start = high_resolution_clock::now();

	root->flush();

	//auto end = high_resolution_clock::now();
	//long long duration = duration_cast<milliseconds>(end-start).count();
	//float seconds = duration / 1'000.0f;
	//cout << "flush nodes: " << seconds << "s" << endl;

	{// update cloud.js
		cloudjs.hierarchy = vector<CloudJS::Node>();
		cloudjs.hierarchyStepSize = hierarchyStepSize;
		cloudjs.tightBoundingBox = tightAABB;
		cloudjs.numAccepted = numAccepted;
		cloudjs.projection = projection;

		ofstream cloudOut(workDir + "/cloud.js", ios::out);
		cloudOut << cloudjs.getString();
		cloudOut.close();
	}

	{// write hierarchy
		//auto start = high_resolution_clock::now();

		int hrcTotal = 0;
		int hrcFlushed = 0;

		list<PWNode*> stack;
		stack.push_back(root);
		while(!stack.empty()){
			PWNode *node = stack.front();
			stack.pop_front();

			hrcTotal++;

			vector<PWNode*> hierarchy = node->getHierarchy(hierarchyStepSize + 1);
			bool needsFlush = false;
			for(const auto &descendant : hierarchy){
				if(descendant->level == node->level + hierarchyStepSize){
					stack.push_back(descendant);
				}

				needsFlush = needsFlush || descendant->addedSinceLastFlush;
			}

			if(needsFlush){
				string dest = workDir + "/data/" + node->hierarchyPath() + "/" + node->name() + ".hrc";
				ofstream fout;
				fout.open(dest, ios::out | ios::binary);

				for(const auto &descendant : hierarchy){
					char children = 0;
					for(int j = 0; j < (int)descendant->children.size(); j++){
						if(descendant->children[j] != NULL){
							children = children | (1 << j);
						}
					}

					fout.write(reinterpret_cast<const char*>(&children), 1);
					fout.write(reinterpret_cast<const char*>(&(descendant->numAccepted)), 4);
				}

				fout.close();
				hrcFlushed++;
			}
		}

		root->traverse([](PWNode* node){
			node->addedSinceLastFlush = false;
		});

		//cout << "hrcTotal: " << hrcTotal << "; " << "hrcFlushed: " << hrcFlushed << endl;

		//auto end = high_resolution_clock::now();
		//long long duration = duration_cast<milliseconds>(end-start).count();
		//float seconds = duration / 1'000.0f;
		//cout << "writing hierarchy: " << seconds << "s" << endl;
	}
}

void PotreeWriter::setProjection(string projection){
	this->projection = projection;
}

void PotreeWriter::loadStateFromDisk(){

	{// cloudjs
		string cloudJSPath = workDir + "/cloud.js";
		ifstream file(cloudJSPath);
		string line;
		string content;
		while (std::getline(file, line)){
			content += line + "\n";
		}
		cloudjs = CloudJS(content);
	}

	{
		this->outputFormat = cloudjs.outputFormat;
		this->pointAttributes = cloudjs.pointAttributes;
		this->hierarchyStepSize = cloudjs.hierarchyStepSize;
		this->spacing = cloudjs.spacing;
		this->scale = cloudjs.scale;
		this->aabb = cloudjs.boundingBox;
		this->numAccepted = cloudjs.numAccepted;
	}

	{// tree
		vector<string> hrcPaths;
		fs::path rootDir(workDir + "/data/r");
		for (fs::recursive_directory_iterator iter(rootDir), end; iter != end; ++iter){
			fs::path path = iter->path();
			if(fs::is_regular_file(path)){
				if(iEndsWith(path.extension().string(), ".hrc")){
					hrcPaths.push_back(path.string());
				}
			}
		}
		std::sort(hrcPaths.begin(), hrcPaths.end(), [](string &a, string &b){
			return a.size() < b.size();
		});

		PWNode *root = new PWNode(this, cloudjs.boundingBox);
		for(string hrcPath : hrcPaths){

			fs::path pHrcPath(hrcPath);
			string hrcName = pHrcPath.stem().string();
			PWNode *hrcRoot = root->findNode(hrcName);

			PWNode *current = hrcRoot;
			current->addedSinceLastFlush = false;
			current->isInMemory = false;
			vector<PWNode*> nodes;
			nodes.push_back(hrcRoot);

			ifstream fin(hrcPath, ios::in | ios::binary);
			std::vector<char> buffer((std::istreambuf_iterator<char>(fin)), (std::istreambuf_iterator<char>()));

			for(int i = 0; 5*i < (int)buffer.size(); i++){
				PWNode *current = nodes[i];

				char children = buffer[i*5];
				char *p = &buffer[i*5+1];
				unsigned int* ip = reinterpret_cast<unsigned int*>(p);
				unsigned int numPoints = *ip;

				//std::bitset<8> bs(children);
				//cout << i << "\t: " << "children: " << bs << "; " << "numPoints: " << numPoints << endl;

				current->numAccepted = numPoints;

				if(children != 0){
					current->children.resize(8, NULL);
					for(int j = 0; j < 8; j++){
						if((children & (1 << j)) != 0){
							AABB cAABB = childAABB(current->aabb, j);
							PWNode *child = new PWNode(this, j, cAABB, current->level + 1);
							child->parent = current;
							child->addedSinceLastFlush = false;
							child->isInMemory = false;
							current->children[j] = child;
							nodes.push_back(child);
						}
					}
				}
			}
		}

		this->root = root;

		// TODO set it to actual number
		this->numAdded = 1;

		//int numNodes = 0;
		//root->traverse([&](PWNode *node){
		//	if(numNodes < 50){
		//		cout << std::left << std::setw(10) << node->name();
		//		cout << std::right << std::setw(10) << node->numAccepted << "; ";
		//		cout << node->aabb.min << " - " << node->aabb.max << endl;
		//	}
		//
		//	numNodes++;
		//
		//});
	}

}

}
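// The .hrc files written in PotreeWriter::flush() above are flat sequences of 5-byte
// entries: one byte whose bit j marks the existence of child j, followed by a uint32
// point count (numAccepted), which is also how loadStateFromDisk() parses them back.
// A minimal standalone reader sketch, assuming the same little-endian layout that the
// raw fout.write calls produce:
#include <cstddef>
#include <cstdint>
#include <cstring>
#include <fstream>
#include <iostream>
#include <iterator>
#include <string>
#include <vector>

void dumpHrc(const std::string &hrcPath){
	std::ifstream in(hrcPath, std::ios::binary);
	std::vector<char> buffer((std::istreambuf_iterator<char>(in)), std::istreambuf_iterator<char>());

	// each entry: [child bitmask (1 byte)][number of points (4 bytes)]
	for(std::size_t i = 0; 5 * i + 5 <= buffer.size(); i++){
		uint8_t childMask = static_cast<uint8_t>(buffer[5 * i]);
		uint32_t numPoints = 0;
		std::memcpy(&numPoints, &buffer[5 * i + 1], 4);

		std::cout << "node " << i << ": childMask=" << int(childMask)
			<< ", points=" << numPoints << std::endl;
	}
}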
@@ -1,195 +0,0 @@

#include <iostream>
#include <math.h>

#include "SparseGrid.h"
#include "GridIndex.h"

using std::min;

namespace Potree{

const double cellSizeFactor = 5.0;

SparseGrid::SparseGrid(AABB aabb, float spacing){
	this->aabb = aabb;
	this->width = (int)(aabb.size.x / (spacing * cellSizeFactor));
	this->height = (int)(aabb.size.y / (spacing * cellSizeFactor));
	this->depth = (int)(aabb.size.z / (spacing * cellSizeFactor));
	this->squaredSpacing = spacing * spacing;
}

SparseGrid::~SparseGrid(){
	SparseGrid::iterator it;
	for(it = begin(); it != end(); it++){
		delete it->second;
	}
}

bool SparseGrid::isDistant(const Vector3<double> &p, GridCell *cell){
	if(!cell->isDistant(p, squaredSpacing)){
		return false;
	}

	for(const auto &neighbour : cell->neighbours) {
		if(!neighbour->isDistant(p, squaredSpacing)){
			return false;
		}
	}

	return true;
}

bool SparseGrid::isDistant(const Vector3<double> &p, GridCell *cell, float &squaredSpacing){
	if(!cell->isDistant(p, squaredSpacing)){
		return false;
	}

	for(const auto &neighbour : cell->neighbours) {
		if(!neighbour->isDistant(p, squaredSpacing)){
			return false;
		}
	}

	return true;
}

bool SparseGrid::willBeAccepted(const Vector3<double> &p, float &squaredSpacing){
	int nx = (int)(width*(p.x - aabb.min.x) / aabb.size.x);
	int ny = (int)(height*(p.y - aabb.min.y) / aabb.size.y);
	int nz = (int)(depth*(p.z - aabb.min.z) / aabb.size.z);

	int i = min(nx, width-1);
	int j = min(ny, height-1);
	int k = min(nz, depth-1);

	GridIndex index(i,j,k);
	long long key = ((long long)k << 40) | ((long long)j << 20) | (long long)i;
	SparseGrid::iterator it = find(key);
	if(it == end()){
		it = this->insert(value_type(key, new GridCell(this, index))).first;
	}

	if(isDistant(p, it->second, squaredSpacing)){
		return true;
	}else{
		return false;
	}
}
|
||||
|
||||
//bool SparseGrid::willBeAccepted(const Vector3<double> &p, float &squaredSpacing){
|
||||
// float spacing = sqrt(squaredSpacing);
|
||||
// float cellSize = sqrt(this->squaredSpacing) * cellSizeFactor;
|
||||
//
|
||||
// float fx = (width*(p.x - aabb.min.x) / aabb.size.x);
|
||||
// float fy = (height*(p.y - aabb.min.y) / aabb.size.y);
|
||||
// float fz = (depth*(p.z - aabb.min.z) / aabb.size.z);
|
||||
//
|
||||
// float cx = fmod(fx, cellSize);
|
||||
// float cy = fmod(fy, cellSize);
|
||||
// float cz = fmod(fz, cellSize);
|
||||
//
|
||||
// bool inner = cx < spacing || cx > (cellSize - spacing);
|
||||
// inner = inner && (cy < spacing || cy > (cellSize - spacing));
|
||||
// inner = inner && (cz < spacing || cz > (cellSize - spacing));
|
||||
//
|
||||
// int nx = (int)fx;
|
||||
// int ny = (int)fy;
|
||||
// int nz = (int)fz;
|
||||
//
|
||||
// int i = min(nx, width-1);
|
||||
// int j = min(ny, height-1);
|
||||
// int k = min(nz, depth-1);
|
||||
//
|
||||
// GridIndex index(i,j,k);
|
||||
// long long key = ((long long)k << 40) | ((long long)j << 20) | (long long)i;
|
||||
// SparseGrid::iterator it = find(key);
|
||||
// if(it == end()){
|
||||
// it = this->insert(value_type(key, new GridCell(this, index))).first;
|
||||
// }
|
||||
//
|
||||
// if(!it->second->isDistant(p, squaredSpacing)){
|
||||
// return false;
|
||||
// }
|
||||
//
|
||||
// if(!inner){
|
||||
// for(const auto &neighbour : it->second->neighbours) {
|
||||
// if(!neighbour->isDistant(p, squaredSpacing)){
|
||||
// return false;
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
//
|
||||
// return true;
|
||||
//}
|
||||
|
||||
bool SparseGrid::willBeAccepted(const Vector3<double> &p){
|
||||
int nx = (int)(width*(p.x - aabb.min.x) / aabb.size.x);
|
||||
int ny = (int)(height*(p.y - aabb.min.y) / aabb.size.y);
|
||||
int nz = (int)(depth*(p.z - aabb.min.z) / aabb.size.z);
|
||||
|
||||
int i = min(nx, width-1);
|
||||
int j = min(ny, height-1);
|
||||
int k = min(nz, depth-1);
|
||||
|
||||
GridIndex index(i,j,k);
|
||||
long long key = ((long long)k << 40) | ((long long)j << 20) | (long long)i;
|
||||
SparseGrid::iterator it = find(key);
|
||||
if(it == end()){
|
||||
it = this->insert(value_type(key, new GridCell(this, index))).first;
|
||||
}
|
||||
|
||||
if(isDistant(p, it->second)){
|
||||
return true;
|
||||
}else{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
bool SparseGrid::add(Vector3<double> &p){
|
||||
int nx = (int)(width*(p.x - aabb.min.x) / aabb.size.x);
|
||||
int ny = (int)(height*(p.y - aabb.min.y) / aabb.size.y);
|
||||
int nz = (int)(depth*(p.z - aabb.min.z) / aabb.size.z);
|
||||
|
||||
int i = min(nx, width-1);
|
||||
int j = min(ny, height-1);
|
||||
int k = min(nz, depth-1);
|
||||
|
||||
GridIndex index(i,j,k);
|
||||
long long key = ((long long)k << 40) | ((long long)j << 20) | (long long)i;
|
||||
SparseGrid::iterator it = find(key);
|
||||
if(it == end()){
|
||||
it = this->insert(value_type(key, new GridCell(this, index))).first;
|
||||
}
|
||||
|
||||
if(isDistant(p, it->second)){
|
||||
this->operator[](key)->add(p);
|
||||
numAccepted++;
|
||||
return true;
|
||||
}else{
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
void SparseGrid::addWithoutCheck(Vector3<double> &p){
|
||||
int nx = (int)(width*(p.x - aabb.min.x) / aabb.size.x);
|
||||
int ny = (int)(height*(p.y - aabb.min.y) / aabb.size.y);
|
||||
int nz = (int)(depth*(p.z - aabb.min.z) / aabb.size.z);
|
||||
|
||||
int i = min(nx, width-1);
|
||||
int j = min(ny, height-1);
|
||||
int k = min(nz, depth-1);
|
||||
|
||||
GridIndex index(i,j,k);
|
||||
long long key = ((long long)k << 40) | ((long long)j << 20) | (long long)i;
|
||||
SparseGrid::iterator it = find(key);
|
||||
if(it == end()){
|
||||
it = this->insert(value_type(key, new GridCell(this, index))).first;
|
||||
}
|
||||
|
||||
it->second->add(p);
|
||||
}
|
||||
|
||||
}
|
||||
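Each SparseGrid method above packs the cell coordinates into a single map key with 20 bits per axis, key = (k << 40) | (j << 20) | i, which is collision-free as long as width, height and depth stay below 2^20. A small illustrative sketch of the packing and its inverse; the helper names are hypothetical, not part of the codebase:

#include <cstdint>

// Packs three non-negative cell indices (each assumed < 2^20) into one 64-bit key,
// matching the expression used in SparseGrid above.
inline long long cellKey(int i, int j, int k){
	return ((long long)k << 40) | ((long long)j << 20) | (long long)i;
}

// Inverse of cellKey, handy for debugging which cell a key refers to.
inline void cellFromKey(long long key, int &i, int &j, int &k){
	i = (int)(key & 0xFFFFF);
	j = (int)((key >> 20) & 0xFFFFF);
	k = (int)((key >> 40) & 0xFFFFF);
}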
@@ -6,328 +6,224 @@
#include <exception>
#include <fstream>

#include "AABB.h"
#include "PotreeConverter.h"
#include "PotreeException.h"

#include "arguments.hpp"
#include <filesystem>

#include "Subsampler.h"
#include "Subsampler_PoissonDisc.h"
#include "Metadata.h"
#include "LASLoader.hpp"
#include "Chunker.h"
#include "Vector3.h"
#include "ChunkProcessor.h"
#include "PotreeWriter.h"
#include "ThreadPool/ThreadPool.h"

using namespace std::experimental;

namespace fs = std::experimental::filesystem;

using std::string;
using std::cout;
using std::cerr;
using std::endl;
using std::vector;
using std::binary_function;
using std::map;
using std::chrono::high_resolution_clock;
using std::chrono::milliseconds;
using std::chrono::duration_cast;
using std::exception;
using Potree::PotreeConverter;
using Potree::StoreOption;
using Potree::ConversionQuality;

#define MAX_FLOAT std::numeric_limits<float>::max()

class SparseGrid;

struct PotreeArguments {
	bool help = false;
	StoreOption storeOption = StoreOption::ABORT_IF_EXISTS;
	vector<string> source;
	string outdir;
	float spacing;
	int levels;
	string format;
	double scale;
	int diagonalFraction;
	Potree::OutputFormat outFormat;
	vector<double> colorRange;
	vector<double> intensityRange;
	vector<string> outputAttributes;
	bool generatePage;
	bool pageTemplate;
	string pageTemplatePath = "";
	vector<double> aabbValues;
	string pageName = "";
	string projection = "";
	bool sourceListingOnly = false;
	string listOfFiles = "";
	ConversionQuality conversionQuality = ConversionQuality::DEFAULT;
	string conversionQualityString = "";
	string title = "PotreeViewer";
	string description = "";
	bool edlEnabled = false;
	bool showSkybox = false;
	string material = "RGB";
	string executablePath;
	int storeSize;
	int flushLimit;
};

PotreeArguments parseArguments(int argc, char **argv){
	Arguments args(argc, argv);

	args.addArgument("source,i,", "input files");
	args.addArgument("help,h", "prints usage");
	args.addArgument("generate-page,p", "Generates a ready to use web page with the given name.");
	args.addArgument("page-template", "directory where the web page template is located.");
	args.addArgument("outdir,o", "output directory");
	args.addArgument("spacing,s", "Distance between points at root level. Distance halves each level.");
	args.addArgument("spacing-by-diagonal-fraction,d", "Maximum number of points on the diagonal in the first level (sets spacing). spacing = diagonal / value");
	args.addArgument("levels,l", "Number of levels that will be generated. 0: only root, 1: root and its children, ...");
	args.addArgument("input-format,f", "Input format. xyz: cartesian coordinates as floats, rgb: colors as numbers, i: intensity as number");
	args.addArgument("color-range", "");
	args.addArgument("intensity-range", "");
	args.addArgument("output-format", "Output format can be BINARY, LAS or LAZ. Default is BINARY");
	args.addArgument("output-attributes,a", "can be any combination of RGB, INTENSITY and CLASSIFICATION. Default is RGB.");
	args.addArgument("scale", "Scale of the X, Y, Z coordinate in LAS and LAZ files.");
	args.addArgument("aabb", "Bounding cube as \"minX minY minZ maxX maxY maxZ\". If not provided it is automatically computed");
	args.addArgument("incremental", "Add new points to existing conversion");
	args.addArgument("overwrite", "Replace existing conversion at target directory");
	args.addArgument("source-listing-only", "Create a sources.json but no octree.");
	args.addArgument("projection", "Specify projection in proj4 format.");
	args.addArgument("list-of-files", "A text file containing a list of files to be converted.");
	args.addArgument("source", "Source file. Can be LAS, LAZ, PTX or PLY");
	args.addArgument("title", "Page title");
	args.addArgument("description", "Description to be shown in the page.");
	args.addArgument("edl-enabled", "Enable Eye-Dome-Lighting.");
	args.addArgument("show-skybox", "");
	args.addArgument("material", "RGB, ELEVATION, INTENSITY, INTENSITY_GRADIENT, CLASSIFICATION, RETURN_NUMBER, SOURCE, LEVEL_OF_DETAIL");
	args.addArgument("store-size", "A node is split once more than store-size points are added. Reduce for better results at cost of performance. Default is 20000");
	args.addArgument("flush-limit", "Flush after X points. Default is 10000000");

	PotreeArguments a;

	if (args.has("help")){
		cout << args.usage() << endl;
		exit(0);
	} else if (!args.has("source") && !args.has("list-of-files")){
		cout << args.usage() << endl;
		exit(1);
	} else if (argc == 1) {
		cout << args.usage() << endl;
		exit(0);
	}

	if (args.has("incremental") && args.has("overwrite")) {
		cout << "cannot have --incremental and --overwrite at the same time";
		exit(1);
	}

	///a.source = args.get("source").as<vector<string>>();
	a.generatePage = args.has("generate-page");
	if (a.generatePage) {
		a.pageName = args.get("generate-page").as<string>();
	}
	a.pageTemplate = args.has("page-template");
	if (a.pageTemplate) {
		a.pageTemplatePath = args.get("page-template").as<string>();
	}
	a.outdir = args.get("outdir").as<string>();
	a.spacing = args.get("spacing").as<double>(0.0);
	a.storeSize = args.get("store-size").as<int>(20'000);
	a.flushLimit = args.get("flush-limit").as<int>(10'000'000);
	a.diagonalFraction = args.get("d").as<double>(0.0);
	a.levels = args.get("levels").as<int>(-1);
	a.format = args.get("input-format").as<string>();
	a.colorRange = args.get("color-range").as<vector<double>>();
	a.intensityRange = args.get("intensity-range").as<vector<double>>();

	if (args.has("output-format")) {
		string of = args.get("output-format").as<string>("BINARY");

		if (of == "BINARY") {
			a.outFormat = Potree::OutputFormat::BINARY;
		} else if (of == "LAS") {
			a.outFormat = Potree::OutputFormat::LAS;
		} else if (of == "LAZ") {
			a.outFormat = Potree::OutputFormat::LAZ;
		} else {
			a.outFormat = Potree::OutputFormat::BINARY;
		}
	} else {
		a.outFormat = Potree::OutputFormat::BINARY;
	}

	if (args.has("output-attributes")) {
		a.outputAttributes = args.get("output-attributes").as<vector<string>>();
	} else {
		//a.outputAttributes = { "RGB" };
	}

	a.scale = args.get("scale").as<double>(0.0);

	if (args.has("aabb")) {
		string strAABB = args.get("aabb").as<string>();
		vector<double> aabbValues;
		char sep = ' ';
		for (size_t p = 0, q = 0; p != strAABB.npos; p = q)
			aabbValues.push_back(atof(strAABB.substr(p + (p != 0), (q = strAABB.find(sep, p + 1)) - p - (p != 0)).c_str()));

		if (aabbValues.size() != 6) {
			cerr << "AABB requires 6 arguments" << endl;
			exit(1);
		}

		a.aabbValues = aabbValues;
	}

	if(args.has("incremental")){
		a.storeOption = StoreOption::INCREMENTAL;
	}else if(args.has("overwrite")){
		a.storeOption = StoreOption::OVERWRITE;
	}else{
		a.storeOption = StoreOption::ABORT_IF_EXISTS;
	}

	a.sourceListingOnly = args.has("source-listing-only");
	a.projection = args.get("projection").as<string>();

	if (args.has("source")) {
		a.source = args.get("source").as<vector<string>>();
	}
	if (a.source.size() == 0 && args.has("list-of-files")) {
		string lof = args.get("list-of-files").as<string>();
		a.listOfFiles = lof;

		if (fs::exists(fs::path(a.listOfFiles))) {
			std::ifstream in(a.listOfFiles);
			string line;
			while (std::getline(in, line)) {
				string path;
				if (fs::path(line).is_absolute()) {
					path = line;
				} else {
					fs::path absPath = fs::canonical(fs::path(a.listOfFiles));
					fs::path lofDir = absPath.parent_path();
					path = lofDir.string() + "/" + line;
				}

				if (fs::exists(fs::path(path))) {
					a.source.push_back(path);
				} else {
					cerr << "ERROR: file not found: " << path << endl;
					exit(1);
				}
			}
			in.close();
		} else {
			cerr << "ERROR: specified list of files not found: '" << a.listOfFiles << "'" << endl;
			exit(1);
		}
	}

	a.title = args.get("title").as<string>();
	a.description = args.get("description").as<string>();
	a.edlEnabled = args.has("edl-enabled");
	a.showSkybox = args.has("show-skybox");
	a.material = args.get("material").as<string>("RGB");

	vector<string> validMaterialNames = {"RGB", "ELEVATION", "INTENSITY", "INTENSITY_GRADIENT", "CLASSIFICATION", "RETURN_NUMBER", "SOURCE", "LEVEL_OF_DETAIL"};
	if(std::find(validMaterialNames.begin(), validMaterialNames.end(), a.material) == validMaterialNames.end()){
		cout << args.usage();
		cout << endl;
		cout << "ERROR: " << "invalid material name specified" << endl;
		exit(1);
	}

	// set default parameters
	fs::path pSource(a.source[0]);
	if (args.has("outdir")) {
		a.outdir = args.get("outdir").as<string>();
	} else {
		string name = fs::canonical(pSource).filename().string();
		a.outdir = name + "_converted";
	}

	if (a.diagonalFraction != 0) {
		a.spacing = 0;
	}else if(a.spacing == 0){
		a.diagonalFraction = 200;
	}

	try {
		auto absolutePath = fs::canonical(fs::system_complete(argv[0]));
		a.executablePath = absolutePath.parent_path().string();
	} catch (const fs::filesystem_error &e) {
		// do nothing
	}

	return a;
}
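As an informal usage illustration only: with the options registered above, a conversion could be launched roughly like this. The executable name and the file paths are placeholders; only the flags themselves come from the addArgument calls above.

PotreeConverter -i pointcloud.las -o ./pointcloud_converted --output-format LAZ --spacing 0.5 --generate-page mypage

When --generate-page is given, its value is used as the page name, as read by parseArguments above.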
void printArguments(PotreeArguments &a){
	try{

		cout << "== params ==" << endl;
		int i = 0;
		for(const auto &s : a.source) {
			cout << "source[" << i << "]: \t" << a.source[i] << endl;
			++i;
		}
		cout << "outdir: \t" << a.outdir << endl;
		cout << "spacing: \t" << a.spacing << endl;
		cout << "diagonal-fraction: \t" << a.diagonalFraction << endl;
		cout << "levels: \t" << a.levels << endl;
		cout << "format: \t" << a.format << endl;
		cout << "scale: \t" << a.scale << endl;
		cout << "pageName: \t" << a.pageName << endl;
		cout << "projection: \t" << a.projection << endl;
		cout << endl;
	}catch(exception &e){
		cout << "ERROR: " << e.what() << endl;

		exit(1);
	}
}

int gridSizeFromPointCount(uint64_t pointCount) {
	if (pointCount < 10'000'000) {
		return 2;
	} else if (pointCount < 100'000'000) {
		return 4;
	} else if (pointCount < 1'000'000'000) {
		return 8;
	} else if (pointCount < 10'000'000'000) {
		return 16;
	} else if (pointCount < 100'000'000'000) {
		return 32;
	} else {
		return 64;
	}
}
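To make the thresholds above concrete: a 50-million-point cloud falls into the second bucket and gets a chunk grid of size 4, i.e. 4 x 4 x 4 = 64 top-level chunks, while a 5-billion-point cloud gets 16^3 = 4096 chunks. Note that run() further below currently overrides this heuristic and derives chunkGridSize from a fixed number of upper levels instead.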
#include "Vector3.h"
|
||||
#include <random>
|
||||
future<Chunker*> chunking(LASLoader* loader, Metadata metadata) {
|
||||
|
||||
double tStart = now();
|
||||
|
||||
string path = metadata.targetDirectory + "/chunks";
|
||||
for (const auto& entry : fs::directory_iterator(path)){
|
||||
fs::remove(entry);
|
||||
}
|
||||
|
||||
Vector3<double> size = metadata.max - metadata.min;
|
||||
double cubeSize = std::max(std::max(size.x, size.y), size.z);
|
||||
Vector3<double> cubeMin = metadata.min;
|
||||
Vector3<double> cubeMax = cubeMin + cubeSize;
|
||||
|
||||
Attributes attributes = loader->getAttributes();
|
||||
Chunker* chunker = new Chunker(path, attributes, cubeMin, cubeMax, 8);
|
||||
|
||||
int batchNumber = 0;
|
||||
auto batch = co_await loader->nextBatch();
|
||||
while (batch != nullptr) {
|
||||
if ((batchNumber % 10) == 0) {
|
||||
cout << "batch loaded: " << batchNumber << endl;
|
||||
}
|
||||
|
||||
chunker->add(batch);
|
||||
|
||||
batch = co_await loader->nextBatch();
|
||||
|
||||
batchNumber++;
|
||||
}
|
||||
|
||||
chunker->close();
|
||||
|
||||
printElapsedTime("chunking duration", tStart);
|
||||
|
||||
return chunker;
|
||||
}
|
||||
|
||||
|
||||
future<void> run() {

	//string path = "D:/dev/pointclouds/Riegl/Retz_Airborne_Terrestrial_Combined_1cm.las";
	//string path = "D:/dev/pointclouds/Riegl/niederweiden.las";
	string path = "D:/dev/pointclouds/archpro/heidentor.las";
	//string path = "D:/dev/pointclouds/pix4d/eclepens.las";
	//string path = "D:/dev/pointclouds/mschuetz/lion.las";
	//string path = "D:/dev/pointclouds/Riegl/Retz_Airborne_Terrestrial_Combined_1cm.las";
	//string path = "D:/dev/pointclouds/open_topography/ca13/morro_rock/merged.las";
	//string targetDirectory = "C:/temp/test";

	string targetDirectory = "C:/dev/workspaces/potree/develop/test/new_format";

	auto tStart = now();

	LASLoader* loader = new LASLoader(path);
	Attributes attributes = loader->getAttributes();

	auto size = loader->max - loader->min;
	double octreeSize = size.max();

	fs::create_directories(targetDirectory);
	fs::create_directories(targetDirectory + "/chunks");

	Metadata metadata;
	metadata.targetDirectory = targetDirectory;
	metadata.min = loader->min;
	metadata.max = loader->min + octreeSize;
	metadata.numPoints = loader->numPoints;
	//metadata.chunkGridSize = gridSizeFromPointCount(metadata.numPoints);

	int upperLevels = 3;
	metadata.chunkGridSize = pow(2, upperLevels);

	Chunker* chunker = co_await chunking(loader, metadata);

	vector<shared_ptr<Chunk>> chunks = getListOfChunks(metadata);
	//chunks.resize(2);

	double scale = 0.001;
	double spacing = loader->min.distanceTo(loader->max) / 200.0;
	PotreeWriter writer(targetDirectory,
		metadata.min,
		metadata.max,
		spacing,
		scale,
		upperLevels,
		chunks
	);
	double cSpacing = spacing / 8.0;

	//{ // sequential
	//	for (Chunk* chunk : chunks) {
	//		loadChunk(chunk);
	//	}

	//	vector<Node*> chunkRoots;
	//	for (Chunk* chunk : chunks) {
	//		Node* chunkRoot = processChunk(chunk, cSpacing);
	//		chunkRoots.push_back(chunkRoot);

	//	}

	//	for (int i = 0; i < chunks.size(); i++) {
	//		Chunk* chunk = chunks[i];
	//		Node* chunkRoot = chunkRoots[i];

	//		writer.writeChunk(chunk, chunkRoot);
	//	}
	//}

	// parallel
	ThreadPool* pool = new ThreadPool(16);
	for(int i = 0; i < chunks.size(); i++){

		shared_ptr<Chunk> chunk = chunks[i];

		pool->enqueue([chunk, attributes, &writer, cSpacing](){
			auto points = loadChunk(chunk, attributes);

			auto chunkRoot = processChunk(chunk, points, cSpacing);

			writer.writeChunk(chunk, points, chunkRoot);
		});
	}
	delete pool;

	writer.close();

	auto tEnd = now();
	auto duration = tEnd - tStart;
	cout << "duration: " << duration << endl;

	co_return;
}

//#include "TaskPool.h"

//void testTaskPool() {
//
//	struct Batch {
//		string path = "";
//		string text = "";
//
//		Batch(string path, string text) {
//			this->path = path;
//			this->text = text;
//		}
//	};
//
//	string someCapturedValue = "asoudh adpif sdgsrg";
//	auto processor = [someCapturedValue](shared_ptr<Batch> batch) {
//		fstream file;
//		file.open(batch->path, ios::out);
//		file << batch->text;
//		file << someCapturedValue;
//		file.close();
//	};
//
//	TaskPool<Batch> pool(5, processor);
//
//	shared_ptr<Batch> batch1 = make_shared<Batch>(
//		"C:/temp/test1.txt",
//		"content of file 1 ");
//	shared_ptr<Batch> batch2 = make_shared<Batch>(
//		"C:/temp/test2.txt",
//		"content of file 2 ");
//
//	pool.addTask(batch1);
//	pool.addTask(batch2);
//
//	pool.close();
//}

int main(int argc, char **argv){
	cout.imbue(std::locale(""));

	try{
		PotreeArguments a = parseArguments(argc, argv);
		printArguments(a);

		PotreeConverter pc(a.executablePath, a.outdir, a.source);
		run().wait();

		pc.spacing = a.spacing;
		pc.diagonalFraction = a.diagonalFraction;
		pc.maxDepth = a.levels;
		pc.format = a.format;
		pc.colorRange = a.colorRange;
		pc.intensityRange = a.intensityRange;
		pc.scale = a.scale;
		pc.outputFormat = a.outFormat;
		pc.outputAttributes = a.outputAttributes;
		pc.aabbValues = a.aabbValues;
		pc.pageName = a.pageName;
		pc.pageTemplatePath = a.pageTemplatePath;
		pc.storeOption = a.storeOption;
		pc.projection = a.projection;
		pc.sourceListingOnly = a.sourceListingOnly;
		pc.quality = a.conversionQuality;
		pc.title = a.title;
		pc.description = a.description;
		pc.edlEnabled = a.edlEnabled;
		pc.material = a.material;
		pc.showSkybox = a.showSkybox;
		pc.storeSize = a.storeSize;
		pc.flushLimit = a.flushLimit;
		//testTaskPool();

		pc.convert();
	}catch(exception &e){
		cout << "ERROR: " << e.what() << endl;
		return 1;
	}

	return 0;
}
@@ -1,300 +1,107 @@
#include "stuff.h"

#include <vector>
#include <map>
#include <iostream>
#include <math.h>
#include <string>
#include <fstream>
#include <algorithm>
#include <cctype>

//#include <unistd.h>
#include <sys/stat.h>
#include <sys/types.h>

#include "Vector3.h"
#include "AABB.h"
#include "Point.h"
#include "GridIndex.h"
#include "SparseGrid.h"
#include "GridCell.h"

using std::ifstream;
using std::ofstream;
using std::ios;
using std::string;
using std::min;
using std::max;
using std::ostream;
using std::cout;
using std::cin;
using std::endl;
using std::vector;
using std::binary_function;
using std::map;


namespace Potree{

/**
 *      y
 *      |-z
 *      |/
 *      O----x
 *
 *      3----7
 *     /|   /|
 *    2----6 |
 *    | 1--|-5
 *    |/   |/
 *    0----4
 *
 */
AABB childAABB(const AABB &aabb, const int &index){

	Vector3<double> min = aabb.min;
	Vector3<double> max = aabb.max;

	if((index & 0b0001) > 0){
		min.z += aabb.size.z / 2;
	}else{
		max.z -= aabb.size.z / 2;
	}

	if((index & 0b0010) > 0){
		min.y += aabb.size.y / 2;
	}else{
		max.y -= aabb.size.y / 2;
	}

	if((index & 0b0100) > 0){
		min.x += aabb.size.x / 2;
	}else{
		max.x -= aabb.size.x / 2;
	}

	return AABB(min, max);
}


/**
 *      y
 *      |-z
 *      |/
 *      O----x
 *
 *      3----7
 *     /|   /|
 *    2----6 |
 *    | 1--|-5
 *    |/   |/
 *    0----4
 *
 */
int nodeIndex(const AABB &aabb, const Point &point){
	int mx = (int)(2.0 * (point.position.x - aabb.min.x) / aabb.size.x);
	int my = (int)(2.0 * (point.position.y - aabb.min.y) / aabb.size.y);
	int mz = (int)(2.0 * (point.position.z - aabb.min.z) / aabb.size.z);

	mx = min(mx, 1);
	my = min(my, 1);
	mz = min(mz, 1);

	return (mx << 2) | (my << 1) | mz;
}
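childAABB and nodeIndex above agree on the same bit layout: bit 2 of the child index selects the x half, bit 1 the y half, and bit 0 the z half, so nodeIndex(aabb, p) names exactly the child box that childAABB produces for that index. A small hypothetical round-trip check illustrating this; it assumes Point is default-constructible and exposes the position.x/y/z members used above, and it is not part of the codebase:

#include <cassert>

// Illustrative only: for every child index 0..7, a point at the center of
// childAABB(aabb, i) should map back to index i via nodeIndex().
void checkChildIndexRoundTrip(const AABB &aabb){
	for(int i = 0; i < 8; i++){
		AABB child = childAABB(aabb, i);

		Point p;  // assumed default-constructible
		p.position.x = (child.min.x + child.max.x) / 2.0;
		p.position.y = (child.min.y + child.max.y) / 2.0;
		p.position.z = (child.min.z + child.max.z) / 2.0;

		assert(nodeIndex(aabb, p) == i);
	}
}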
/**
 * from http://stackoverflow.com/questions/5840148/how-can-i-get-a-files-size-in-c
 */
long filesize(string filename){
	struct stat stat_buf;
	int rc = stat(filename.c_str(), &stat_buf);
	return rc == 0 ? stat_buf.st_size : -1;
}


///**
// * from http://stackoverflow.com/questions/874134/find-if-string-endswith-another-string-in-c
// */
//bool endsWith (std::string const &fullString, std::string const &ending)
//{
//	if (fullString.length() >= ending.length()) {
//		return (0 == fullString.compare (fullString.length() - ending.length(), ending.length(), ending));
//	} else {
//		return false;
//	}
//}

/**
 * see http://stackoverflow.com/questions/735204/convert-a-string-in-c-to-upper-case
 */
string toUpper(string str){
	string tmp = str;
	std::transform(tmp.begin(), tmp.end(), tmp.begin(), ::toupper);

	return tmp;
}

// http://stackoverflow.com/questions/8593608/how-can-i-copy-a-directory-using-boost-filesystem
bool copyDir(fs::path source, fs::path destination){

	try{
		// Check whether the function call is valid
		if(!fs::exists(source) || !fs::is_directory(source) ) {
			std::cerr << "Source directory " << source.string() << " does not exist or is not a directory." << '\n';
			return false;
		}
		//if(fs::exists(destination)){
		//	std::cerr << "Destination directory " << destination.string()
		//		<< " already exists." << '\n';
		//	return false;
		//}
		// Create the destination directory
		if(!fs::exists(destination)){
			if(!fs::create_directory(destination)){
				std::cerr << "Unable to create destination directory" << destination.string() << '\n';
				return false;
			}
		}
	}catch(fs::filesystem_error const & e){
		std::cerr << e.what() << '\n';
		return false;
	}
	// Iterate through the source directory
	for( fs::directory_iterator file(source); file != fs::directory_iterator(); ++file){
		try{
			fs::path current(file->path());
			if(fs::is_directory(current)) {
				// Found directory: Recursion
				if(!copyDir(current, destination / current.filename())){
					return false;
				}
			}else{
				// Found file: Copy
				fs::copy_file(current, destination / current.filename(), fs::copy_options::overwrite_existing);
			}
		}catch(fs::filesystem_error const & e){
			std::cerr << e.what() << '\n';
		}
	}
	return true;
}


float psign(float value){
	if(value == 0.0){
		return 0.0;
	}else if(value < 0.0){
		return -1.0;
	}else{
		return 1.0;
	}
}


// see https://stackoverflow.com/questions/23943728/case-insensitive-standard-string-comparison-in-c
bool icompare_pred(unsigned char a, unsigned char b) {
	return std::tolower(a) == std::tolower(b);
}

// see https://stackoverflow.com/questions/23943728/case-insensitive-standard-string-comparison-in-c
bool icompare(std::string const& a, std::string const& b) {
	if (a.length() == b.length()) {
		return std::equal(b.begin(), b.end(), a.begin(), icompare_pred);
	} else {
		return false;
	}
}

//bool endsWith(const std::string &str, const std::string &suffix) {
//	return str.size() >= suffix.size() && str.compare(str.size() - suffix.size(), suffix.size(), suffix) == 0;
//}

bool endsWith(const string &str, const string &suffix) {

	if (str.size() < suffix.size()) {
		return false;
	}

	auto tstr = str.substr(str.size() - suffix.size());

	return tstr.compare(suffix) == 0;
}

bool iEndsWith(const std::string &str, const std::string &suffix) {

	if (str.size() < suffix.size()) {
		return false;
	}

	auto tstr = str.substr(str.size() - suffix.size());

	return icompare(tstr, suffix);
}

vector<string> split(string str, vector<char> delimiters) {

	vector<string> tokens;

	auto isDelimiter = [&delimiters](char ch) {
		for (auto &delimiter : delimiters) {
			if (ch == delimiter) {
				return true;
			}
		}

		return false;
	};

	int start = 0;
	for (int i = 0; i < str.size(); i++) {
		if (isDelimiter(str[i])) {
			if (start < i) {
				auto token = str.substr(start, i - start);
				tokens.push_back(token);
			}

			start = i + 1;
		}
	}

	if (start < str.size()) {
		tokens.push_back(str.substr(start));
	}

	return tokens;
}

vector<string> split(string str, char delimiter) {
	return split(str, { delimiter });
}
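A quick usage note for the two split overloads above: empty tokens are dropped, so consecutive delimiters never produce empty strings. Illustrative values:

// split("1  2 3", ' ')        -> {"1", "2", "3"}
// split("a,b;;c", {',', ';'}) -> {"a", "b", "c"}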
// see https://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
string ltrim(string s) {
	s.erase(s.begin(), std::find_if(s.begin(), s.end(), [](unsigned char ch) {
		return !std::isspace(ch);
	}));

	return s;
}

// see https://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
string rtrim(string s) {
	s.erase(std::find_if(s.rbegin(), s.rend(), [](unsigned char ch) {
		return !std::isspace(ch);
	}).base(), s.end());

	return s;
}

// see https://stackoverflow.com/questions/216823/whats-the-best-way-to-trim-stdstring
string trim(string s) {
	s = ltrim(s);
	s = rtrim(s);

	return s;
}

}
#include "stuff.h"
|
||||
|
||||
#include <iostream>
|
||||
|
||||
using std::cout;
|
||||
using std::endl;
|
||||
|
||||
string stringReplace(string str, string search, string replacement) {
|
||||
|
||||
auto index = str.find(search);
|
||||
|
||||
if (index == str.npos) {
|
||||
return str;
|
||||
}
|
||||
|
||||
string strCopy = str;
|
||||
strCopy.replace(index, search.length(), replacement);
|
||||
|
||||
return strCopy;
|
||||
}
|
||||
|
||||
// see https://stackoverflow.com/questions/23943728/case-insensitive-standard-string-comparison-in-c
|
||||
bool icompare_pred(unsigned char a, unsigned char b) {
|
||||
return std::tolower(a) == std::tolower(b);
|
||||
}
|
||||
|
||||
// see https://stackoverflow.com/questions/23943728/case-insensitive-standard-string-comparison-in-c
|
||||
bool icompare(std::string const& a, std::string const& b) {
|
||||
if (a.length() == b.length()) {
|
||||
return std::equal(b.begin(), b.end(), a.begin(), icompare_pred);
|
||||
} else {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
||||
bool endsWith(const string& str, const string& suffix) {
|
||||
|
||||
if (str.size() < suffix.size()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
auto tstr = str.substr(str.size() - suffix.size());
|
||||
|
||||
return tstr.compare(suffix) == 0;
|
||||
}
|
||||
|
||||
bool iEndsWith(const std::string& str, const std::string& suffix) {
|
||||
|
||||
if (str.size() < suffix.size()) {
|
||||
return false;
|
||||
}
|
||||
|
||||
auto tstr = str.substr(str.size() - suffix.size());
|
||||
|
||||
return icompare(tstr, suffix);
|
||||
}
|
||||
|
||||
static long long unsuc_start_time = std::chrono::high_resolution_clock::now().time_since_epoch().count();
|
||||
|
||||
double now() {
|
||||
auto now = std::chrono::high_resolution_clock::now();
|
||||
long long nanosSinceStart = now.time_since_epoch().count() - unsuc_start_time;
|
||||
|
||||
double secondsSinceStart = double(nanosSinceStart) / 1'000'000'000;
|
||||
|
||||
return secondsSinceStart;
|
||||
}
|
||||
|
||||
void printElapsedTime(string label, double startTime) {
|
||||
|
||||
double elapsed = now() - startTime;
|
||||
|
||||
cout << label << ": " << elapsed << "s" << endl;
|
||||
|
||||
}
|
||||
|
||||
|
||||
void printThreadsafe(string str) {
|
||||
|
||||
stringstream ss;
|
||||
ss << str << endl;
|
||||
|
||||
cout << ss.str();
|
||||
|
||||
}
|
||||
|
||||
void printThreadsafe(string str1, string str2) {
|
||||
stringstream ss;
|
||||
ss << str1 << str2 << endl;
|
||||
|
||||
cout << ss.str();
|
||||
}
|
||||
|
||||
void printThreadsafe(string str1, string str2, string str3) {
|
||||
stringstream ss;
|
||||
ss << str1 << str2 << str3 << endl;
|
||||
|
||||
cout << ss.str();
|
||||
}
|
||||
|
||||
void printThreadsafe(string str1, string str2, string str3, string str4) {
|
||||
stringstream ss;
|
||||
ss << str1 << str2 << str3 << str4 << endl;
|
||||
|
||||
cout << ss.str();
|
||||
}
|
||||
|
||||
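One behavioral detail worth noting about stringReplace above: only the first occurrence of the search string is replaced. Illustrative values:

// stringReplace("a-b-c", "-", "+") -> "a+b-c"   (only the first '-' is replaced)
// stringReplace("abc", "x", "y")   -> "abc"     (no match, input returned unchanged)

The printThreadsafe overloads compose the whole message in a stringstream and emit it with a single << on cout, so concurrent messages are far less likely to interleave mid-line than with chained stream insertions.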
42
PotreeConverter/src/test.cpp
Normal file
@@ -0,0 +1,42 @@

#include <vector>

using std::vector;


template<typename T>
class Structure{

	vector<int> arr;

	T* get(int &x, int &y, int &z){
		int d = 64;
		int p = 2;
		for(int i = 0; i < 7; i++){
			int ix = x / d;
			int iy = y / d;
			int iz = z / d;

			int index = ix + iy * p + iz * p * p;

			int val = arr[index];
			if(val == 0){
				return NULL;
			}

			p *= 2;
			d /= 2;
		}

		return NULL;
	}

};


int main(int argc, char **argv){

}
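The Structure sketch in test.cpp reads like a scratch experiment for a fixed-depth hierarchical grid lookup: each of the seven iterations halves the cell size d and doubles the per-axis resolution p, descending one level of an implicit octree-like grid and returning NULL as soon as a level's cell is empty.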
|
||||
Reference in New Issue
Block a user