Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Issue 654 - gzipping asset bundles #656

Merged
merged 14 commits into from
Jan 15, 2024
Merged
Show file tree
Hide file tree
Changes from 3 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 5 additions & 2 deletions bouncer/src/repo/lib/datastructure/repo_structs.h
Original file line number Diff line number Diff line change
Expand Up @@ -25,9 +25,12 @@
#include "repo_vector.h"
#include <boost/crc.hpp>

typedef std::unordered_map<std::string, std::tuple<std::vector<uint8_t>, repo::core::model::RepoBSON>> repo_web_geo_files_t;
typedef std::unordered_map<std::string, std::vector<uint8_t>> repo_web_json_files_t;

typedef struct {
std::unordered_map<std::string, std::vector<uint8_t>> geoFiles; //files where geometery are stored
std::unordered_map<std::string, std::vector<uint8_t>> jsonFiles; //JSON mapping files
repo_web_geo_files_t geoFiles; //files where geometry is stored
repo_web_json_files_t jsonFiles; //JSON mapping files
repo::core::model::RepoUnityAssets unityAssets; //Unity assets list
}repo_web_buffers_t;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -85,7 +85,7 @@ AssetModelExport::~AssetModelExport()
repo_web_buffers_t AssetModelExport::getAllFilesExportedAsBuffer() const
{
return {
std::unordered_map<std::string, std::vector<uint8_t>>(),
repo_web_geo_files_t(),
getJSONFilesAsBuffer(),
getUnityAssets()
};
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -564,9 +564,9 @@ repo_web_buffers_t GLTFModelExport::getAllFilesExportedAsBuffer() const
return{ getGLTFFilesAsBuffer(), getJSONFilesAsBuffer() };
}

std::unordered_map<std::string, std::vector<uint8_t>> GLTFModelExport::getGLTFFilesAsBuffer() const
repo_web_geo_files_t GLTFModelExport::getGLTFFilesAsBuffer() const
{
std::unordered_map<std::string, std::vector<uint8_t>> files;
repo_web_geo_files_t files;
//GLTF files
for (const auto &pair : trees)
{
Expand All @@ -575,10 +575,11 @@ std::unordered_map<std::string, std::vector<uint8_t>> GLTFModelExport::getGLTFFi
std::string jsonString = ss.str();
if (!jsonString.empty())
{
files[pair.first] = std::vector<uint8_t>();
files[pair.first] = { std::vector<uint8_t>(), repo::core::model::RepoBSON() };
size_t byteLength = jsonString.size() * sizeof(*jsonString.data());
files[pair.first].resize(byteLength);
memcpy(files[pair.first].data(), jsonString.data(), byteLength);
auto& buffer = std::get<0>(files[pair.first]);
buffer.resize(byteLength);
memcpy(buffer.data(), jsonString.data(), byteLength);
}
else
{
Expand All @@ -597,10 +598,11 @@ std::unordered_map<std::string, std::vector<uint8_t>> GLTFModelExport::getGLTFFi
//None of the gltf should ever share the same name, this is a sanity check
if (it == files.end())
{
files[fileName] = std::vector<uint8_t>();
files[fileName] = { std::vector<uint8_t>(), repo::core::model::RepoBSON() };
auto& buffer = std::get<0>(files[fileName]);
size_t byteLength = pair.second.size() * sizeof(*pair.second.data());
files[fileName].resize(byteLength);
memcpy(files[fileName].data(), pair.second.data(), byteLength);
buffer.resize(byteLength);
memcpy(buffer.data(), pair.second.data(), byteLength);
}
else
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -236,7 +236,7 @@ namespace repo{
* Return the GLTF file as raw bytes buffer
* returns an empty vector if the export has failed
*/
std::unordered_map<std::string, std::vector<uint8_t>> getGLTFFilesAsBuffer() const;
repo_web_geo_files_t getGLTFFilesAsBuffer() const;

/**
* Reindex the given faces base on the given information
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -136,9 +136,9 @@ SRCModelExport::~SRCModelExport()
{
}

std::unordered_map<std::string, std::vector<uint8_t>> SRCModelExport::getSRCFilesAsBuffer() const
repo_web_geo_files_t SRCModelExport::getSRCFilesAsBuffer() const
{
std::unordered_map < std::string, std::vector<uint8_t> > fileBuffers;
repo_web_geo_files_t fileBuffers;

for (const auto &treePair : trees)
{
Expand Down Expand Up @@ -198,7 +198,7 @@ std::unordered_map<std::string, std::vector<uint8_t>> SRCModelExport::getSRCFile

//Add data buffer to the full buffer
buffer.insert(buffer.end(), fullDataArray.begin(), fullDataArray.end());
fileBuffers[fName] = buffer;
fileBuffers[fName] = { buffer, {} };
}
else
{
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -94,7 +94,7 @@ namespace repo{
* Return the SRC file as raw bytes buffer
* returns an empty vector if the export has failed
*/
std::unordered_map<std::string, std::vector<uint8_t>> getSRCFilesAsBuffer() const;
repo_web_geo_files_t getSRCFilesAsBuffer() const;
};
} //namespace modelconvertor
} //namespace manipulator
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -72,7 +72,8 @@ bool WebModelExport::exportToFile(
FILE* fp = fopen(boostPath.string().c_str(), "wb");
if (fp)
{
fwrite(buff.second.data(), sizeof(*buff.second.data()), buff.second.size(), fp);
auto content = std::get<0>(buff.second);
fwrite(content.data(), sizeof(*content.data()), content.size(), fp);
fclose(fp);
}
else
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -39,8 +39,14 @@ using Scalar = float;
using Bvh = bvh::Bvh<Scalar>;
using BvhVector3 = bvh::Vector3<Scalar>;

static const size_t REPO_MP_MAX_VERTEX_COUNT = 65536;
// The vertex count is used as a rough approximation of the total geometry size.
// This figure is empirically set to end up with an average bundle size of 24 Mb.
static const size_t REPO_MP_MAX_VERTEX_COUNT = 1200000;

// This limit is used to prevent metadata files becoming unwieldy, and the
// supermesh UV resolution falling below the quantisation noise floor.
static const size_t REPO_MP_MAX_MESHES_IN_SUPERMESH = 5000;

static const size_t REPO_BVH_MAX_LEAF_SIZE = 16;
static const size_t REPO_MODEL_LOW_CLUSTERING_RATIO = 0.2f;

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -43,7 +43,7 @@ bool SceneManager::commitWebBuffers(
//Upload the files
for (const auto &bufferPair : resultBuffers.geoFiles)
{
if (success &= fileManager->uploadFileAndCommit(databaseName, projectName + "." + geoStashExt, bufferPair.first, bufferPair.second))
if (success &= fileManager->uploadFileAndCommit(databaseName, projectName + "." + geoStashExt, bufferPair.first, std::get<0>(bufferPair.second), std::get<1>(bufferPair.second)))
{
repoInfo << "File (" << bufferPair.first << ") added successfully to file storage.";
}
Expand Down
23 changes: 19 additions & 4 deletions wrapper/src/c_sharp_wrapper.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -17,10 +17,14 @@


#include "c_sharp_wrapper.h"
#include <repo/core/model/bson/repo_bson_builder.h>
#include <repo/core/model/bson/repo_node_material.h>
#include <repo/core/model/bson/repo_node_texture.h>
#include <repo/lib/repo_exception.h>
#include <boost/filesystem.hpp>
#include <boost/iostreams/filtering_streambuf.hpp>
#include <boost/iostreams/filter/gzip.hpp>
#include <boost/iostreams/copy.hpp>
#include <fstream>

using namespace repo::lib;
Expand Down Expand Up @@ -386,13 +390,24 @@ bool CSharpWrapper::saveAssetBundles(

if (inputStream.is_open())
{
std::ostringstream ss;
ss << inputStream.rdbuf();
auto s = ss.str();
boost::filesystem::path path(assetFiles[i]);
std::string fileName = path.filename().string();
fileName = "/" + databaseName + "/" + projectName + "/" + fileName;
webBuffers.geoFiles[fileName] = std::vector<uint8_t>(s.begin(), s.end());

std::ostringstream ss; // memory stream containing the binary data to write to the database

// All unity asset bundles are gzipped by default

boost::iostreams::filtering_streambuf<boost::iostreams::input> in;
in.push(boost::iostreams::gzip_compressor());
in.push(inputStream);
boost::iostreams::copy(in, ss);

repo::core::model::RepoBSONBuilder builder;
Copy link
Member

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

@sebjf

I wouldn't do it this way. This is quite difficult to read, and it makes the way we store metadata about a file highly coupled with the functions/classes that are simply passing the files in.

I think this could be a lot simpler (unless I'm missing something, which I often am!). I think the file manager can manage the gzipping and it should just be a flag on the function https://github.com/3drepo/3drepobouncer/blob/master/bouncer/src/repo/core/handler/fileservice/repo_file_manager.h#L60

If this is set to true, it would know to append the gzip flag onto the metadata of the file ref.

We could just gzip all the geo files we export, or it could be a flag in the exporter to indicate whether we want to gzip this. (Presumably we benefit from gzipping the SRCs too, but I'm not sure how Unreal handles those requests!)

And it would also make gzipping the JSON caches much easier to do if we want to implement it in the future.

Copy link
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

Hi @carmenfan, this should be resolved now!

builder.append("encoding", "gzip");

auto s = ss.str();
webBuffers.geoFiles[fileName] = { std::vector<uint8_t>(s.begin(), s.end()), builder.obj() };
}
else
{
Expand Down
Loading