diff --git a/.travis.yml b/.travis.yml index 250ca13cb..ab50e27e6 100644 --- a/.travis.yml +++ b/.travis.yml @@ -32,7 +32,9 @@ matrix: - sudo apt-get install -y gnupg mongodb-org=5.0.8 mongodb-org-database=5.0.8 mongodb-org-server=5.0.8 mongodb-org-shell=5.0.8 mongodb-org-tools=5.0.8 - sudo systemctl daemon-reload && sudo systemctl start mongod && echo $(mongod --version) - mkdir testData && cd testData - - svn --no-auth-cache export --username $TESTUSER --password $TESTPW https://github.com/3drepo/tests/trunk/cplusplus/bouncer + - git clone --filter=blob:none --sparse https://$TESTUSER:$TESTPW@github.com/3drepo/tests.git . + - git sparse-checkout add cplusplus/bouncer + - mv cplusplus/bouncer/ . - cd ../ - until nc -z localhost 27017; do echo Waiting for MongoDB; sleep 1; done - mongo admin testData/bouncer/createUser.js @@ -114,7 +116,9 @@ matrix: - export CXX="g++" - export CC="gcc" - mkdir testData && cd testData - - svn --no-auth-cache export --username $TESTUSER --password $TESTPW https://github.com/3drepo/tests/trunk/cplusplus/bouncer/ext_libs/focal + - git clone --filter=blob:none --sparse https://$TESTUSER:$TESTPW@github.com/3drepo/tests.git . + - git sparse-checkout add cplusplus/bouncer/ext_libs/focal + - mv cplusplus/bouncer/ext_libs/focal/ . - cd ../ - echo ============ BOOST INSTALL ============= - sudo apt-get install libboost-all-dev diff --git a/README.md b/README.md index 364966377..d3f905d82 100644 --- a/README.md +++ b/README.md @@ -1,7 +1,7 @@ 3drepobouncer [![Build Status](https://travis-ci.org/3drepo/3drepobouncer.svg?branch=master)](https://travis-ci.org/3drepo/3drepobouncer) [![Coverage Status](https://coveralls.io/repos/github/3drepo/3drepobouncer/badge.svg?branch=master)](https://coveralls.io/github/3drepo/3drepobouncer?branch=master) ========= -3DRepoBouncer is essentially the refactored 3DRepoCore and (parts of) 3DRepoGUI. 
It is a C++ library providing 3D Repo Scene Graph definition, repository management and manipulation logic as well as direct MongoDB databse access. +3DRepoBouncer is essentially the refactored 3DRepoCore and (parts of) 3DRepoGUI. It is a C++ library providing 3D Repo Scene Graph definition, repository management and manipulation logic as well as direct MongoDB database access. ### Latest Releases We always recommend using the [Latest stable release](https://github.com/3drepo/3drepobouncer/releases). However, to access cutting-edge development versions, check out the [tags](https://github.com/3drepo/3drepobouncer/tags). diff --git a/bouncer/src/repo/core/handler/fileservice/repo_file_manager.cpp b/bouncer/src/repo/core/handler/fileservice/repo_file_manager.cpp index e362b7bcc..f783b8b03 100644 --- a/bouncer/src/repo/core/handler/fileservice/repo_file_manager.cpp +++ b/bouncer/src/repo/core/handler/fileservice/repo_file_manager.cpp @@ -18,9 +18,15 @@ #include "repo_file_manager.h" #include "../../../lib/repo_exception.h" #include "../../model/repo_model_global.h" +#include "../../model/bson/repo_bson_builder.h" #include "../../model/bson/repo_bson_factory.h" #include "repo_file_handler_fs.h" #include "repo_file_handler_gridfs.h" +#include +#include +#include +#include +#include using namespace repo::core::handler::fileservice; @@ -46,11 +52,49 @@ bool FileManager::uploadFileAndCommit( const std::string &collectionNamePrefix, const std::string &fileName, const std::vector &bin, - const repo::core::model::RepoBSON &metadata) + const repo::core::model::RepoBSON &metadata, + const Encoding &encoding) { bool success = true; auto fileUUID = repo::lib::RepoUUID::createUUID(); - auto linkName = defaultHandler->uploadFile(databaseName, collectionNamePrefix, fileUUID.toString(), bin); + + // Create local references that can be reassigned, if we need to do any + // further processing... 
+ + const std::vector* fileContents = &bin; + repo::core::model::RepoBSON fileMetadata = metadata; + + switch (encoding) + { + case Encoding::Gzip: + { + // Use stringstream as a binary container to hold the compressed data + std::ostringstream compressedstream; + + // Bufferstream operates directly over the user provided array + boost::interprocess::bufferstream uncompressed((char*)bin.data(), bin.size()); + + // In stream form for filtering_istream + std::istream uncompressedStream(uncompressed.rdbuf()); + + boost::iostreams::filtering_istream in; + in.push(boost::iostreams::gzip_compressor()); + in.push(uncompressedStream); // For some reason bufferstream is ambiguous between stream and streambuf, so wrap it unambiguously + + boost::iostreams::copy(in, compressedstream); + + auto compresseddata = compressedstream.str(); + fileContents = new std::vector(compresseddata.begin(), compresseddata.end()); + + repo::core::model::RepoBSONBuilder builder; + builder.append("encoding", "gzip"); + auto bson = builder.obj(); + fileMetadata = metadata.cloneAndAddFields(&bson); + } + break; + } + + auto linkName = defaultHandler->uploadFile(databaseName, collectionNamePrefix, fileUUID.toString(), *fileContents); if (success = !linkName.empty()) { success = upsertFileRef( databaseName, @@ -58,8 +102,17 @@ bool FileManager::uploadFileAndCommit( cleanFileName(fileName), linkName, defaultHandler->getType(), - bin.size(), - metadata); + fileContents->size(), + fileMetadata); + } + + // If we've created a new vector, clean it up. metadata doesn't need to be + // deleted because Mongo BSONs have built in smart-pointers allowing them to + // be passed by value. 
+ + if (fileContents != &bin) + { + delete fileContents; } return success; diff --git a/bouncer/src/repo/core/handler/fileservice/repo_file_manager.h b/bouncer/src/repo/core/handler/fileservice/repo_file_manager.h index a407624cc..ab1f23077 100644 --- a/bouncer/src/repo/core/handler/fileservice/repo_file_manager.h +++ b/bouncer/src/repo/core/handler/fileservice/repo_file_manager.h @@ -54,6 +54,14 @@ namespace repo { repo::core::handler::AbstractDatabaseHandler *dbHandler ); + /* + * Possible options for static compression of stored files + */ + enum Encoding { + None = 0, + Gzip = 1 + }; + /** * Upload file and commit ref entry to database. */ @@ -62,7 +70,8 @@ namespace repo { const std::string &collectionNamePrefix, const std::string &fileName, const std::vector &bin, - const repo::core::model::RepoBSON &metadata = repo::core::model::RepoBSON() + const repo::core::model::RepoBSON &metadata = repo::core::model::RepoBSON(), + const Encoding &encoding = Encoding::None ); /** diff --git a/bouncer/src/repo/lib/datastructure/repo_structs.h b/bouncer/src/repo/lib/datastructure/repo_structs.h index cc3d8efd4..afaad0ecc 100644 --- a/bouncer/src/repo/lib/datastructure/repo_structs.h +++ b/bouncer/src/repo/lib/datastructure/repo_structs.h @@ -25,11 +25,14 @@ #include "repo_vector.h" #include -typedef struct { - std::unordered_map> geoFiles; //files where geometery are stored - std::unordered_map> jsonFiles; //JSON mapping files +using repo_web_geo_files_t = std::unordered_map>; +using repo_web_json_files_t = std::unordered_map>; + +struct repo_web_buffers_t{ + repo_web_geo_files_t geoFiles; //files where geometery are stored + repo_web_json_files_t jsonFiles; //JSON mapping files repo::core::model::RepoUnityAssets unityAssets; //Unity assets list -}repo_web_buffers_t; +}; //This is used to map info for multipart optimization typedef struct { diff --git a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_asset.cpp 
b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_asset.cpp index c02b61840..5cde3b05d 100644 --- a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_asset.cpp +++ b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_asset.cpp @@ -85,7 +85,7 @@ AssetModelExport::~AssetModelExport() repo_web_buffers_t AssetModelExport::getAllFilesExportedAsBuffer() const { return { - std::unordered_map>(), + repo_web_geo_files_t(), getJSONFilesAsBuffer(), getUnityAssets() }; diff --git a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_gltf.cpp b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_gltf.cpp index c8df7ddc7..d1bef9779 100644 --- a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_gltf.cpp +++ b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_gltf.cpp @@ -564,9 +564,9 @@ repo_web_buffers_t GLTFModelExport::getAllFilesExportedAsBuffer() const return{ getGLTFFilesAsBuffer(), getJSONFilesAsBuffer() }; } -std::unordered_map> GLTFModelExport::getGLTFFilesAsBuffer() const +repo_web_geo_files_t GLTFModelExport::getGLTFFilesAsBuffer() const { - std::unordered_map> files; + repo_web_geo_files_t files; //GLTF files for (const auto &pair : trees) { diff --git a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_gltf.h b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_gltf.h index 8ee677106..1640a296d 100644 --- a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_gltf.h +++ b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_gltf.h @@ -236,7 +236,7 @@ namespace repo{ * Return the GLTF file as raw bytes buffer * returns an empty vector if the export has failed */ - std::unordered_map> getGLTFFilesAsBuffer() const; + repo_web_geo_files_t getGLTFFilesAsBuffer() const; /** * Reindex the given faces base on the given information diff --git 
a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_src.cpp b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_src.cpp index 6d82f5ed5..b53337c1b 100644 --- a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_src.cpp +++ b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_src.cpp @@ -136,9 +136,9 @@ SRCModelExport::~SRCModelExport() { } -std::unordered_map> SRCModelExport::getSRCFilesAsBuffer() const +repo_web_geo_files_t SRCModelExport::getSRCFilesAsBuffer() const { - std::unordered_map < std::string, std::vector > fileBuffers; + repo_web_geo_files_t fileBuffers; for (const auto &treePair : trees) { diff --git a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_src.h b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_src.h index 9338e746a..8892b40c6 100644 --- a/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_src.h +++ b/bouncer/src/repo/manipulator/modelconvertor/export/repo_model_export_src.h @@ -94,7 +94,7 @@ namespace repo{ * Return the SRC file as raw bytes buffer * returns an empty vector if the export has failed */ - std::unordered_map> getSRCFilesAsBuffer() const; + repo_web_geo_files_t getSRCFilesAsBuffer() const; }; } //namespace modelconvertor } //namespace manipulator diff --git a/bouncer/src/repo/manipulator/modeloptimizer/repo_optimizer_multipart.cpp b/bouncer/src/repo/manipulator/modeloptimizer/repo_optimizer_multipart.cpp index 49d5e286a..a35c0d385 100644 --- a/bouncer/src/repo/manipulator/modeloptimizer/repo_optimizer_multipart.cpp +++ b/bouncer/src/repo/manipulator/modeloptimizer/repo_optimizer_multipart.cpp @@ -39,8 +39,14 @@ using Scalar = float; using Bvh = bvh::Bvh; using BvhVector3 = bvh::Vector3; -static const size_t REPO_MP_MAX_VERTEX_COUNT = 65536; +// The vertex count is used as a rough approximation of the total geometry size. 
+// This figure is empirically set to end up with an average bundle size of 24 MB. +static const size_t REPO_MP_MAX_VERTEX_COUNT = 1200000; + +// This limit is used to prevent metadata files becoming unwieldy, and the +// supermesh UV resolution falling below the quantisation noise floor. static const size_t REPO_MP_MAX_MESHES_IN_SUPERMESH = 5000; + static const size_t REPO_BVH_MAX_LEAF_SIZE = 16; static const size_t REPO_MODEL_LOW_CLUSTERING_RATIO = 0.2f; diff --git a/bouncer/src/repo/manipulator/modelutility/repo_scene_manager.cpp b/bouncer/src/repo/manipulator/modelutility/repo_scene_manager.cpp index b36260d4a..2672b084a 100644 --- a/bouncer/src/repo/manipulator/modelutility/repo_scene_manager.cpp +++ b/bouncer/src/repo/manipulator/modelutility/repo_scene_manager.cpp @@ -43,7 +43,7 @@ bool SceneManager::commitWebBuffers( //Upload the files for (const auto &bufferPair : resultBuffers.geoFiles) { - if (success &= fileManager->uploadFileAndCommit(databaseName, projectName + "." 
+ geoStashExt, bufferPair.first, bufferPair.second, {}, repo::core::handler::fileservice::FileManager::Encoding::Gzip)) // Web geometry files are gzipped by default { repoInfo << "File (" << bufferPair.first << ") added successfully to file storage."; } diff --git a/test/src/unit/repo/manipulator/modeloptimizer/ut_repo_optimizer_multipart.cpp b/test/src/unit/repo/manipulator/modeloptimizer/ut_repo_optimizer_multipart.cpp index c9455b441..83da89a4f 100644 --- a/test/src/unit/repo/manipulator/modeloptimizer/ut_repo_optimizer_multipart.cpp +++ b/test/src/unit/repo/manipulator/modeloptimizer/ut_repo_optimizer_multipart.cpp @@ -389,7 +389,7 @@ TEST(MultipartOptimizer, TestSingleOversizedMesh) auto nMesh = 3; repo::core::model::RepoNodeSet meshes, trans, dummy; trans.insert(root); - meshes.insert(createRandomMesh(65537, false, 3, { rootID })); + meshes.insert(createRandomMesh(1200000 + 1, false, 3, { rootID })); // 1200000 comes from the const in repo_optimizer_multipart.cpp repo::core::model::RepoScene* scene = new repo::core::model::RepoScene({}, dummy, meshes, dummy, dummy, dummy, trans); ASSERT_TRUE(scene->hasRoot(DEFAULT_GRAPH));