diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
deleted file mode 100644
index 83ce2183..00000000
--- a/.github/workflows/docs.yml
+++ /dev/null
@@ -1,54 +0,0 @@
-name: Docs
-
-on: [push, pull_request]
-
-jobs:
-  docs:
-    name: Docs
-
-    runs-on: ubuntu-latest
-    strategy:
-      fail-fast: true
-    container: osgeo/proj-docs
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Print versions
-      shell: bash -l {0}
-      run: |
-        python3 --version
-        sphinx-build --version
-    - name: Lint .rst files
-      shell: bash -l {0}
-      run: |
-        if find . -name '*.rst' | xargs grep -P '\t'; then echo 'Tabs are bad, please use four spaces in .rst files.'; false; fi
-      working-directory: ./doc
-    - name: HTML
-      shell: bash -l {0}
-      run: |
-        make html
-      working-directory: ./doc
-    - name: PDF
-      shell: bash -l {0}
-      run: |
-        make latexpdf
-      working-directory: ./doc
-    - uses: actions/upload-artifact@v2
-      with:
-        name: PDF
-        path: doc/build/latex/Entwine.pdf
-    - uses: actions/upload-artifact@v2
-      with:
-        name: HTML
-        path: doc/build/html/*
-
-    - name: Deploy docs
-      env:
-        API_TOKEN_GITHUB: ${{ secrets.DOCS_SECRET_KEY}}
-      shell: bash -l {0}
-      if: contains(github.ref, '2.2-maintenance')
-
-      run: |
-        ./scripts/ci/docs/deploy_website.sh
-
-
diff --git a/.github/workflows/linux.yml b/.github/workflows/linux.yml
deleted file mode 100644
index 3b03eda5..00000000
--- a/.github/workflows/linux.yml
+++ /dev/null
@@ -1,71 +0,0 @@
-name: Linux
-
-on:
-  push:
-    paths-ignore:
-      - 'doc/**'
-  pull_request:
-    paths-ignore:
-      - 'doc/**'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  build:
-    name: Linux ${{ matrix.type }}
-
-    runs-on: 'ubuntu-latest'
-    strategy:
-      fail-fast: true
-      matrix:
-        type: ['floating']
-    env:
-      BUILD_TYPE: ${{ matrix.type }}
-      CACHE_NUMBER: 0
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Support longpaths
-      run: git config --system core.longpaths true
-      if: matrix.platform == 'windows-latest'
-    - name: Cache Conda Environment
-      uses: actions/cache@v2
-      with:
-        path: ~/conda_pkgs_dir
-        key: ${{ runner.os }}-${{ steps.get-date.outputs.today }}-conda-${{ env.CACHE_NUMBER }}
-    - uses: conda-incubator/setup-miniconda@v2
-      with:
-        miniforge-variant: Mambaforge
-        miniforge-version: latest
-        environment-file: scripts/ci/environment.yml
-        python-version: "3.10"
-        activate-environment: "entwine-build"
-        use-mamba: true
-        auto-update-conda: true
-
-
-    - name: Setup
-      shell: bash -l {0}
-      run: |
-        source ./scripts/ci/linux/setup.sh
-
-    - name: CMake
-      shell: bash -l {0}
-      run: |
-        source ../scripts/ci/linux/cmake.sh
-      working-directory: ./build
-
-    - name: Compile
-      shell: bash -l {0}
-      run: |
-        source ../scripts/ci/linux/compile.sh
-      working-directory: ./build
-
-    - name: Test
-      shell: bash -l {0}
-      run: |
-        source ../scripts/ci/linux/test.sh
-      working-directory: ./build
-
diff --git a/.github/workflows/osx.yml b/.github/workflows/osx.yml
deleted file mode 100644
index 0ab67e5d..00000000
--- a/.github/workflows/osx.yml
+++ /dev/null
@@ -1,70 +0,0 @@
-name: OSX
-
-on:
-  push:
-    paths-ignore:
-      - 'doc/**'
-  pull_request:
-    paths-ignore:
-      - 'doc/**'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  build:
-    name: OSX
-
-    runs-on: 'macos-latest'
-    strategy:
-      fail-fast: true
-      matrix:
-        type: ['floating']
-    env:
-      BUILD_TYPE: ${{ matrix.type }}
-      CACHE_NUMBER: 0
-
-    steps:
-    - uses: actions/checkout@v2
-    - name: Support longpaths
-      run: git config --system core.longpaths true
-      if: matrix.platform == 'windows-latest'
-    - name: Cache Conda Environment
-      uses: actions/cache@v2
-      with:
-        path: ~/conda_pkgs_dir
-        key: ${{ runner.os }}-${{ steps.get-date.outputs.today }}-conda-${{ env.CACHE_NUMBER }}
-    - uses: conda-incubator/setup-miniconda@v2
-      with:
-        miniforge-variant: Mambaforge
-        miniforge-version: latest
-        environment-file: scripts/ci/environment.yml
-        python-version: "3.10"
-        activate-environment: "entwine-build"
-        use-mamba: true
-        auto-update-conda: true
-
-    - name: Setup
-      shell: bash -l {0}
-      run: |
-        source ./scripts/ci/osx/setup.sh
-
-    - name: CMake
-      shell: bash -l {0}
-      run: |
-        source ../scripts/ci/osx/cmake.sh
-      working-directory: ./build
-
-    - name: Compile
-      shell: bash -l {0}
-      run: |
-        source ../scripts/ci/osx/compile.sh
-      working-directory: ./build
-
-    - name: Test
-      shell: bash -l {0}
-      run: |
-        source ../scripts/ci/osx/test.sh
-      working-directory: ./build
-
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 00000000..9373169a
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,77 @@
+name: Test
+on: [push, pull_request]
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  build:
+    name: OS ${{ matrix.os }}
+    runs-on: ${{ matrix.os }}
+    strategy:
+      matrix:
+        os: [macos-latest, windows-latest, ubuntu-latest]
+        shared: [ON, OFF]
+
+    steps:
+    - uses: actions/checkout@v4
+    - uses: ilammy/msvc-dev-cmd@v1
+      if: matrix.os == 'windows-latest'
+    - name: Support longpaths
+      run: git config --system core.longpaths true
+      if: matrix.os == 'windows-latest'
+    - uses: mamba-org/setup-micromamba@v1
+      with:
+        init-shell: bash
+        environment-file: scripts/ci/environment.yml
+        environment-name: "build"
+        cache-environment: true
+        cache-downloads: true
+
+    - name: Setup
+      shell: bash -l {0}
+      run: |
+        mkdir build
+
+    - name: CMake
+      shell: bash -l {0}
+
+      run: |
+
+        if [ "$RUNNER_OS" == "Windows" ]; then
+          export CC=cl.exe
+          export CXX=cl.exe
+          export ENTWINE_LIBRARY_PATH=$CONDA_PREFIX/Library/lib
+          export ENTWINE_INCLUDE_PATH=$CONDA_PREFIX/Library/include
+        else
+          export ENTWINE_LIBRARY_PATH=$CONDA_PREFIX/lib
+          export ENTWINE_INCLUDE_PATH=$CONDA_PREFIX/include
+        fi
+
+        cmake -G "Ninja" \
+          -DCMAKE_BUILD_TYPE=Release \
+          -DCMAKE_INSTALL_PREFIX=${CONDA_PREFIX} \
+          -Dgtest_force_shared_crt=ON \
+          -DBUILD_TESTING=ON \
+          -DCMAKE_LIBRARY_PATH:FILEPATH=$ENTWINE_LIBRARY_PATH \
+          -DCMAKE_INCLUDE_PATH:FILEPATH=$ENTWINE_INCLUDE_PATH \
+          -D_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING=1 \
+          -DBUILD_SHARED_LIBS=ON \
+          ..
+
+
+      working-directory: ./build
+
+    - name: Compile
+      shell: bash -l {0}
+      run: |
+        ninja
+      working-directory: ./build
+
+    - name: Test
+      shell: bash -l {0}
+      run: |
+        ctest -VV --rerun-failed --output-on-failure
+      working-directory: ./build
+
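The consolidated workflow above drives the whole OS matrix through a single configure/build/test sequence. Below is a minimal sketch of reproducing the Linux/macOS leg locally; it assumes the conda environment from scripts/ci/environment.yml is already activated (so $CONDA_PREFIX points at the dependency prefix) and is illustrative rather than part of the change itself.

```bash
#!/bin/bash
# Sketch: mirror the CI configure/build/test steps on Linux or macOS.
# Assumes the environment from scripts/ci/environment.yml is active.
set -e

mkdir -p build && cd build

cmake -G "Ninja" \
    -DCMAKE_BUILD_TYPE=Release \
    -DCMAKE_INSTALL_PREFIX="$CONDA_PREFIX" \
    -DBUILD_TESTING=ON \
    -DBUILD_SHARED_LIBS=ON \
    -DCMAKE_LIBRARY_PATH:FILEPATH="$CONDA_PREFIX/lib" \
    -DCMAKE_INCLUDE_PATH:FILEPATH="$CONDA_PREFIX/include" \
    ..

ninja
ctest -VV --output-on-failure
```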
diff --git a/.github/workflows/win.yml b/.github/workflows/win.yml
deleted file mode 100644
index 33c60884..00000000
--- a/.github/workflows/win.yml
+++ /dev/null
@@ -1,80 +0,0 @@
-name: Windows
-
-on:
-  push:
-    paths-ignore:
-      - 'doc/**'
-  pull_request:
-    paths-ignore:
-      - 'doc/**'
-
-concurrency:
-  group: ${{ github.workflow }}-${{ github.head_ref || github.ref }}
-  cancel-in-progress: true
-
-jobs:
-  build:
-    name: MSVC
-
-    runs-on: 'windows-latest'
-    strategy:
-      fail-fast: true
-      matrix:
-        type: ['floating']
-    defaults:
-      run:
-        shell: bash -l {0}
-    env:
-      BUILD_TYPE: ${{ matrix.type }}
-      CACHE_NUMBER: 0
-
-    steps:
-    - uses: actions/checkout@v2
-    - uses: ilammy/msvc-dev-cmd@v1
-    - name: Support longpaths
-      run: git config --system core.longpaths true
-      if: matrix.platform == 'windows-latest'
-    - name: Cache Conda Environment
-      uses: actions/cache@v2
-      with:
-        path: ~/conda_pkgs_dir
-        key: ${{ runner.os }}-${{ steps.get-date.outputs.today }}-conda-${{ env.CACHE_NUMBER }}
-    - uses: conda-incubator/setup-miniconda@v2
-      with:
-        miniforge-variant: Mambaforge
-        miniforge-version: latest
-        environment-file: scripts/ci/environment.yml
-        python-version: "3.10"
-        activate-environment: "entwine-build"
-        use-mamba: true
-        auto-update-conda: true
-
-    - name: Setup
-      shell: bash -l {0}
-      run: |
-        source ./scripts/ci/win/setup.sh
-
-    - name: CMake
-      shell: bash -l {0}
-      working-directory: ./build
-      run: |
-        source ../scripts/ci/win/cmake.sh
-
-    - name: Compile
-      shell: bash -l {0}
-      working-directory: ./build
-      run: |
-        source ../scripts/ci/win/compile.sh
-
-    - name: Paths
-      shell: bash -l {0}
-      run: |
-        echo "D:/a/entwine/entwine/build/bin" >> $GITHUB_PATH
-
-    - name: Test
-      shell: bash -l {0}
-      working-directory: ./build
-      run: |
-        source ../scripts/ci/win/test.sh
-
-
diff --git a/CMakeLists.txt b/CMakeLists.txt
index 0cc2dd5f..bdcb30be 100644
--- a/CMakeLists.txt
+++ b/CMakeLists.txt
@@ -95,13 +95,16 @@ set_target_properties(
 #
 # Test
 #
-option(WITH_TESTS "Choose if Entwine unit tests should be built" TRUE)
-if (WITH_TESTS)
+option(BUILD_TESTING "Choose if Entwine unit tests should be built" TRUE)
+if(DEFINED WITH_TESTS)
+    message(DEPRECATION "WITH_TESTS has been replaced with the standard CMake BUILD_TESTING variable")
+    set(BUILD_TESTING ${WITH_TESTS})
+endif()
+if (BUILD_TESTING)
     message("Configuring with unit tests")
     enable_testing()
     set(GOOGLETEST_VERSION 1.13.0)
     add_subdirectory("${PROJECT_SOURCE_DIR}/test/googletest-1.13.0/googletest")
-    add_subdirectory(test)
 
 else()
     message("Configuring with NO unit tests")
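The CMakeLists.txt hunk above keeps the old cache variable working while moving to the standard CTest spelling: if WITH_TESTS is defined, its value is copied into BUILD_TESTING and a DEPRECATION message is printed at configure time. A small sketch of both configure invocations follows; the option names come from the diff, and the build directories are illustrative.

```bash
#!/bin/bash
# New spelling: the standard BUILD_TESTING variable controls the unit tests.
cmake -S . -B build -G Ninja -DBUILD_TESTING=ON

# Old spelling still works during the transition: WITH_TESTS is forwarded to
# BUILD_TESTING and CMake emits a DEPRECATION notice at configure time.
cmake -S . -B build-legacy -G Ninja -DWITH_TESTS=OFF
```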
diff --git a/entwine/third/arbiter/arbiter.cpp b/entwine/third/arbiter/arbiter.cpp
index 1ccbfc89..041e21e2 100644
--- a/entwine/third/arbiter/arbiter.cpp
+++ b/entwine/third/arbiter/arbiter.cpp
@@ -353,16 +353,13 @@ std::shared_ptr Arbiter::getDriver(const std::string path) const
 {
     const auto type(getProtocol(path));
 
-    {
-        std::lock_guard lock(m_mutex);
-        auto it = m_drivers.find(type);
-        if (it != m_drivers.end()) return it->second;
-    }
+    std::lock_guard lock(m_mutex);
+    auto it = m_drivers.find(type);
+    if (it != m_drivers.end()) return it->second;
 
     const json config = getConfig(m_config);
     if (auto driver = Driver::create(*m_pool, type, config.dump()))
     {
-        std::lock_guard lock(m_mutex);
         m_drivers[type] = driver;
         return driver;
     }
@@ -1695,6 +1692,8 @@ namespace
     const std::string ec2CredBase(
         ec2CredIp + "/latest/meta-data/iam/security-credentials");
 
+    const std::string defaultDnsSuffix = "amazonaws.com";
+
     // https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-iam-roles.html
     const std::string fargateCredIp("169.254.170.2");
 
@@ -1751,6 +1750,11 @@ namespace
         else if (auto e = env("ARBITER_VERBOSE")) verbose = *e;
         return (!verbose.empty()) && !!std::stol(verbose);
     }
+
+    bool doSignRequests()
+    {
+        return !env("AWS_NO_SIGN_REQUEST");
+    }
 }
 
 namespace drivers
@@ -1780,9 +1784,7 @@ std::unique_ptr S3::create(
         if (auto p = env("AWS_PROFILE")) profile = *p;
     }
 
-    auto auth(Auth::create(s, profile));
-    if (!auth) return std::unique_ptr();
-
+    auto auth(doSignRequests() ? Auth::create(s, profile) : nullptr);
     auto config = makeUnique(s, profile);
     return makeUnique(pool, profile, std::move(auth), std::move(config));
 }
@@ -1953,11 +1955,6 @@ S3::Config::Config(const std::string s, const std::string profile)
             m_baseHeaders[p.key()] = p.value().get();
         }
     }
-    else
-    {
-        std::cout << "s3.headers expected to be object - skipping" <<
-            std::endl;
-    }
 }
 
@@ -2007,6 +2004,12 @@ std::string S3::Config::extractBaseUrl(
     const std::string s,
     const std::string region)
 {
+    if (auto p = env("AWS_ENDPOINT_URL"))
+    {
+        const std::string path = *p;
+        return path.back() == '/' ? path : path + '/';
+    }
+
     const json c(s.size() ? json::parse(s) : json());
 
     if (!c.is_null() &&
@@ -2024,8 +2027,6 @@
         endpointsPath = *e;
     }
 
-    std::string dnsSuffix("amazonaws.com");
-
     drivers::Fs fsDriver;
     if (std::unique_ptr e = fsDriver.tryGet(endpointsPath))
     {
@@ -2033,32 +2034,42 @@
 
         for (const auto& partition : ep["partitions"])
         {
-            if (partition.count("dnsSuffix"))
+            if (
+                !partition.count("regions") ||
+                !partition.at("regions").count(region))
             {
-                dnsSuffix = partition["dnsSuffix"].get();
+                continue;
             }
 
-            const auto& endpoints(
-                partition.at("services").at("s3").at("endpoints"));
-
-            for (const auto& r : endpoints.items())
+            // Look for an explicit hostname for this region/service.
+            if (
+                partition.count("services") &&
+                partition["services"].count("s3") &&
+                partition["services"]["s3"].count("endpoints"))
             {
-                if (r.key() == region &&
-                    endpoints.value("region", json::object())
-                        .count("hostname"))
+                const auto& endpoints(partition["services"]["s3"]["endpoints"]);
+
+                for (const auto& r : endpoints.items())
                 {
-                    return endpoints["region"]["hostname"].get() +
-                        '/';
+                    if (r.key() == region &&
+                        endpoints.value("region", json::object())
+                            .count("hostname"))
+                    {
+                        return endpoints["region"]["hostname"].get() +
+                            '/';
+                    }
                 }
             }
+
+            // No explicit hostname found, so build it from our region/DNS suffix.
+            std::string dnsSuffix = partition.value("dnsSuffix", defaultDnsSuffix);
+            return "s3." + region + "." + dnsSuffix + "/";
         }
     }
 
-    if (dnsSuffix.size() && dnsSuffix.back() != '/') dnsSuffix += '/';
-
     // https://docs.aws.amazon.com/general/latest/gr/rande.html#s3_region
-    if (region == "us-east-1") return "s3." + dnsSuffix;
-    else return "s3-" + region + "." + dnsSuffix;
+    if (region == "us-east-1") return "s3." + defaultDnsSuffix + "/";
+    else return "s3-" + region + "." + defaultDnsSuffix + "/";
 }
 
 S3::AuthFields S3::Auth::fields() const
@@ -2142,7 +2153,7 @@ std::unique_ptr S3::tryGetSize(
             "HEAD",
             m_config->region(),
             resource,
-            m_auth->fields(),
+            authFields(),
             query,
             headers,
             empty);
@@ -2178,7 +2189,7 @@ bool S3::get(
             "GET",
             m_config->region(),
             resource,
-            m_auth->fields(),
+            authFields(),
            query,
             headers,
             empty);
@@ -2196,11 +2207,10 @@
         data = res.data();
         return true;
     }
-    else
-    {
-        std::cout << res.code() << ": " << res.str() << std::endl;
-        return false;
-    }
+
+    if (isVerbose()) std::cout << res.code() << ": " << res.str() << std::endl;
+
+    return false;
 }
 
 std::vector S3::put(
@@ -2223,7 +2233,7 @@
             "PUT",
             m_config->region(),
             resource,
-            m_auth->fields(),
+            authFields(),
             query,
             headers,
             data);
@@ -2373,6 +2383,11 @@ std::vector S3::glob(std::string path, bool verbose) const
     return results;
 }
 
+S3::AuthFields S3::authFields() const
+{
+    return m_auth ? m_auth->fields() : S3::AuthFields();
+}
+
 S3::ApiV4::ApiV4(
     const std::string verb,
     const std::string& region,
@@ -2407,6 +2422,8 @@
         m_headers.erase("Expect");
     }
 
+    if (!m_authFields) return;
+
     const Headers normalizedHeaders(
         std::accumulate(
             m_headers.begin(),
@@ -2900,7 +2917,7 @@ std::unique_ptr AZ::tryGetSize(
     if (m_config->hasSasToken())
     {
         Query q = m_config->sasToken();
-        q.insert(std::cbegin(query),std::cend(query));
+        q.insert(std::begin(query), std::end(query));
         res.reset(new Response(http.internalHead(resource.url(), headers, q)));
     }
     else
@@ -4445,7 +4462,11 @@ int Curl::perform()
 
     if (code != CURLE_OK)
     {
-        std::cerr << "Curl failure: " << curl_easy_strerror(code) << std::endl;
+        if (m_verbose)
+        {
+            std::cout << "Curl failure: " << curl_easy_strerror(code) <<
+                std::endl;
+        }
         httpCode = 550;
     }
 
@@ -4965,9 +4986,25 @@ Contents parse(const std::string& s)
 
 #ifndef ARBITER_IS_AMALGAMATION
 #include
-#include
 #endif
 
+// Various crypto utilities.
+#define ROTLEFT(a,b) (((a) << (b)) | ((a) >> (32-(b))))
+#define ROTRIGHT(a,b) (((a) >> (b)) | ((a) << (32-(b))))
+#define F(x,y,z) ((x & y) | (~x & z))
+#define G(x,y,z) ((x & z) | (y & ~z))
+#define H(x,y,z) (x ^ y ^ z)
+#define I(x,y,z) (y ^ (x | ~z))
+
+#define FF(a,b,c,d,m,s,t) { a += F(b,c,d) + m + t; \
+    a = b + ROTLEFT(a,s); }
+#define GG(a,b,c,d,m,s,t) { a += G(b,c,d) + m + t; \
+    a = b + ROTLEFT(a,s); }
+#define HH(a,b,c,d,m,s,t) { a += H(b,c,d) + m + t; \
+    a = b + ROTLEFT(a,s); }
+#define II(a,b,c,d,m,s,t) { a += I(b,c,d) + m + t; \
+    a = b + ROTLEFT(a,s); }
+
 #ifdef ARBITER_CUSTOM_NAMESPACE
 namespace ARBITER_CUSTOM_NAMESPACE
 {
@@ -5188,9 +5225,19 @@ std::string md5(const std::string& data)
 
 #ifndef ARBITER_IS_AMALGAMATION
 #include
-#include
 #endif
 
+
+// Various crypto utilities.
+#define ROTLEFT(a,b) (((a) << (b)) | ((a) >> (32-(b))))
+#define ROTRIGHT(a,b) (((a) >> (b)) | ((a) << (32-(b))))
+#define CH(x,y,z) (((x) & (y)) ^ (~(x) & (z)))
+#define MAJ(x,y,z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
+#define EP0(x) (ROTRIGHT(x,2) ^ ROTRIGHT(x,13) ^ ROTRIGHT(x,22))
+#define EP1(x) (ROTRIGHT(x,6) ^ ROTRIGHT(x,11) ^ ROTRIGHT(x,25))
+#define SIG0(x) (ROTRIGHT(x,7) ^ ROTRIGHT(x,18) ^ ((x) >> 3))
+#define SIG1(x) (ROTRIGHT(x,17) ^ ROTRIGHT(x,19) ^ ((x) >> 10))
+
 #ifdef ARBITER_CUSTOM_NAMESPACE
 namespace ARBITER_CUSTOM_NAMESPACE
 {
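The arbiter changes above wire two environment variables into the S3 driver: setting AWS_NO_SIGN_REQUEST skips credential lookup entirely (auth is null, authFields() returns empty fields, and ApiV4 returns before computing a signature), while AWS_ENDPOINT_URL overrides the base URL that would otherwise be derived from the region and DNS suffix. A hedged usage sketch follows; the bucket names and the localhost endpoint are placeholders, and only the variable names and the entwine build invocation come from the project.

```bash
#!/bin/bash
# Anonymous access to a public bucket: no credentials are loaded and the
# requests are sent unsigned.
export AWS_NO_SIGN_REQUEST=1
entwine build -i s3://some-public-bucket/input.laz -o ~/entwine/output

# Point the S3 driver at a custom, S3-compatible endpoint instead of the
# hostname built from the region/DNS-suffix tables.
export AWS_ENDPOINT_URL=http://localhost:9000
entwine build -i s3://local-bucket/input.laz -o ~/entwine/output
```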
diff --git a/entwine/third/arbiter/arbiter.hpp b/entwine/third/arbiter/arbiter.hpp
index dcd3dbef..7c6b04df 100644
--- a/entwine/third/arbiter/arbiter.hpp
+++ b/entwine/third/arbiter/arbiter.hpp
@@ -1,7 +1,7 @@
 /// Arbiter amalgamated header (https://github.com/connormanning/arbiter).
 /// It is intended to be used with #include "arbiter.hpp"
 
-// Git SHA: 3e8919a297f4dc357e1ef2473bb295d8d1eac226
+// Git SHA: 5c3f36e86e7a74aadb8a98edb14ad207158aa785
 
 // //////////////////////////////////////////////////////////////////////
 // Beginning of content of file: LICENSE
@@ -3276,48 +3276,6 @@ class ARBITER_DLL Time
 
 
 
-// //////////////////////////////////////////////////////////////////////
-// Beginning of content of file: arbiter/util/macros.hpp
-// //////////////////////////////////////////////////////////////////////
-
-#pragma once
-
-#define ROTLEFT(a,b) (((a) << (b)) | ((a) >> (32-(b))))
-#define ROTRIGHT(a,b) (((a) >> (b)) | ((a) << (32-(b))))
-
-// SHA256.
-#define CH(x,y,z) (((x) & (y)) ^ (~(x) & (z)))
-#define MAJ(x,y,z) (((x) & (y)) ^ ((x) & (z)) ^ ((y) & (z)))
-#define EP0(x) (ROTRIGHT(x,2) ^ ROTRIGHT(x,13) ^ ROTRIGHT(x,22))
-#define EP1(x) (ROTRIGHT(x,6) ^ ROTRIGHT(x,11) ^ ROTRIGHT(x,25))
-#define SIG0(x) (ROTRIGHT(x,7) ^ ROTRIGHT(x,18) ^ ((x) >> 3))
-#define SIG1(x) (ROTRIGHT(x,17) ^ ROTRIGHT(x,19) ^ ((x) >> 10))
-
-// MD5.
-#define F(x,y,z) ((x & y) | (~x & z))
-#define G(x,y,z) ((x & z) | (y & ~z))
-#define H(x,y,z) (x ^ y ^ z)
-#define I(x,y,z) (y ^ (x | ~z))
-
-#define FF(a,b,c,d,m,s,t) { a += F(b,c,d) + m + t; \
-    a = b + ROTLEFT(a,s); }
-#define GG(a,b,c,d,m,s,t) { a += G(b,c,d) + m + t; \
-    a = b + ROTLEFT(a,s); }
-#define HH(a,b,c,d,m,s,t) { a += H(b,c,d) + m + t; \
-    a = b + ROTLEFT(a,s); }
-#define II(a,b,c,d,m,s,t) { a += I(b,c,d) + m + t; \
-    a = b + ROTLEFT(a,s); }
-
-
-// //////////////////////////////////////////////////////////////////////
-// End of content of file: arbiter/util/macros.hpp
-// //////////////////////////////////////////////////////////////////////
-
-
-
-
-
-
 // //////////////////////////////////////////////////////////////////////
 // Beginning of content of file: arbiter/util/md5.hpp
 // //////////////////////////////////////////////////////////////////////
@@ -4320,6 +4278,8 @@ class S3 : public Http
         std::string path,
         bool verbose) const override;
 
+    AuthFields authFields() const;
+
     class ApiV4;
     class Resource;
 
@@ -4330,7 +4290,7 @@ class S3 : public Http
 class S3::AuthFields
 {
 public:
-    AuthFields(std::string access, std::string hidden, std::string token = "")
+    AuthFields(std::string access = "", std::string hidden = "", std::string token = "")
         : m_access(access), m_hidden(hidden), m_token(token)
     { }
 
@@ -4338,6 +4298,8 @@ class S3::AuthFields
     const std::string& hidden() const { return m_hidden; }
     const std::string& token() const { return m_token; }
 
+    explicit operator bool() const { return m_access.size() || m_hidden.size() || m_token.size(); }
+
 private:
     std::string m_access;
     std::string m_hidden;
diff --git a/scripts/ci/build_docs.sh b/scripts/ci/build_docs.sh
deleted file mode 100755
index 4c0e866d..00000000
--- a/scripts/ci/build_docs.sh
+++ /dev/null
@@ -1,15 +0,0 @@
-#!/bin/bash
-
-echo "building docs for $TRAVIS_BUILD_DIR/doc"
-buildpath=`pwd`
-if [[ ! -z $TRAVIS_BUILD_DIR ]]; then
-buildpath="$TRAVIS_BUILD_DIR"
-fi
-
-# osgeo/proj-docs contains everything to build the website and
-# it is kept up to date by the PROJ team
-docker run -v $buildpath:/data -w /data/doc osgeo/proj-docs make html
-docker run -v $buildpath:/data -w /data/doc osgeo/proj-docs make latexpdf
-docker run -v $buildpath:/data -w /data/doc osgeo/proj-docs make spelling
-
-
diff --git a/scripts/ci/deploy_website.sh b/scripts/ci/deploy_website.sh
deleted file mode 100755
index 4ff42cfc..00000000
--- a/scripts/ci/deploy_website.sh
+++ /dev/null
@@ -1,12 +0,0 @@
-#!/bin/bash
-
-builddir=$1
-
-
-echo "deploying docs for $TRAVIS_BUILD_DIR/docs"
-
-export AWS_ACCESS_KEY_ID="$AWS_KEY"
-export AWS_SECRET_ACCESS_KEY="$AWS_SECRET"
-
-docker run -e "AWS_SECRET_ACCESS_KEY=$AWS_SECRET" -e "AWS_ACCESS_KEY_ID=$AWS_KEY" -v $TRAVIS_BUILD_DIR:/data -w /data/doc osgeo/proj-docs aws s3 sync ./build/html/ s3://entwine.io --acl public-read
-
diff --git a/scripts/ci/docs/deploy_website.sh b/scripts/ci/docs/deploy_website.sh
deleted file mode 100755
index 50e09202..00000000
--- a/scripts/ci/docs/deploy_website.sh
+++ /dev/null
@@ -1,27 +0,0 @@
-#!/bin/bash
-
-builddir=$(pwd)/doc/build
-destdir=$(pwd)/../pdal.github.io
-branch="master"
-DATE=$(date +'%y.%m.%d %H:%M:%S')
-
-cd ..
-git clone https://${API_TOKEN_GITHUB}:x-oauth-basic@github.com/PDAL/pdal.github.io.git
-
-cd $destdir
-git checkout -f -b $branch
-
-cd $builddir/html
-cp -rf * $destdir/
-
-cd $builddir/latex/
-cp PDAL.pdf $destdir/
-
-cd $destdir
-git config user.email "pdal@hobu.net"
-git config user.name "PDAL Travis docsbot"
-
-git add -A
-git commit -m "update with results of commit https://github.com/PDAL/PDAL/commit/$GITHUB_SHA for ${DATE}"
-git push origin $branch
-
diff --git a/scripts/ci/environment.yml b/scripts/ci/environment.yml
index 3f09e372..d3e345b8 100644
--- a/scripts/ci/environment.yml
+++ b/scripts/ci/environment.yml
@@ -1,4 +1,4 @@
-name: entwine-build
+name: build
 channels:
   - conda-forge
 dependencies:
diff --git a/scripts/ci/linux/cmake.sh b/scripts/ci/linux/cmake.sh
deleted file mode 100755
index 78839a29..00000000
--- a/scripts/ci/linux/cmake.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/bash
-
-LDFLAGS="$LDFLAGS -Wl,-rpath-link,$CONDA_PREFIX/lib" cmake .. \
-    -G Ninja \
-    -DCMAKE_LIBRARY_PATH:FILEPATH="$CONDA_PREFIX/lib" \
-    -DCMAKE_INCLUDE_PATH:FILEPATH="$CONDA_PREFIX/include" \
-    -DCMAKE_INSTALL_PREFIX=${CONDA_PREFIX} \
-    -DCMAKE_BUILD_TYPE=RelWithDebInfo \
-    ..
diff --git a/scripts/ci/linux/compile.sh b/scripts/ci/linux/compile.sh
deleted file mode 100755
index cfacb049..00000000
--- a/scripts/ci/linux/compile.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-ninja
-ninja install
diff --git a/scripts/ci/linux/setup.sh b/scripts/ci/linux/setup.sh
deleted file mode 100755
index 76a9103f..00000000
--- a/scripts/ci/linux/setup.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-echo "Configuring build type '$BUILD_TYPE'"
-mkdir build
-
-gdal-config --version
-
diff --git a/scripts/ci/linux/test.sh b/scripts/ci/linux/test.sh
deleted file mode 100755
index c27a8879..00000000
--- a/scripts/ci/linux/test.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-ctest -V
diff --git a/scripts/ci/osx/cmake.sh b/scripts/ci/osx/cmake.sh
deleted file mode 100755
index e0bad2ed..00000000
--- a/scripts/ci/osx/cmake.sh
+++ /dev/null
@@ -1,9 +0,0 @@
-#!/bin/bash
-
-cmake .. \
-    -G Ninja \
-    -DCMAKE_BUILD_TYPE=Debug \
-    -DCMAKE_INSTALL_PREFIX=`pwd`/../install \
-    -DWITH_ZSTD=ON \
-    -DWITH_ZLIB=ON \
-    -DWITH_TESTS=ON
diff --git a/scripts/ci/osx/compile.sh b/scripts/ci/osx/compile.sh
deleted file mode 100755
index cfacb049..00000000
--- a/scripts/ci/osx/compile.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-ninja
-ninja install
diff --git a/scripts/ci/osx/examples.sh b/scripts/ci/osx/examples.sh
deleted file mode 100755
index 7a40f295..00000000
--- a/scripts/ci/osx/examples.sh
+++ /dev/null
@@ -1,11 +0,0 @@
-#!/bin/bash
-
-export BASE=`pwd`
-for EXAMPLE in writing writing-filter writing-kernel \
-    writing-reader writing-writer
-do
-    cd $BASE/examples/$EXAMPLE
-    mkdir -p _build || exit 1
-    cd _build || exit 1
-    cmake -G "Ninja" .. -DPDAL_DIR=$BASE/install/lib/cmake/PDAL && ninja
-done
diff --git a/scripts/ci/osx/setup.sh b/scripts/ci/osx/setup.sh
deleted file mode 100755
index 76a9103f..00000000
--- a/scripts/ci/osx/setup.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-echo "Configuring build type '$BUILD_TYPE'"
-mkdir build
-
-gdal-config --version
-
diff --git a/scripts/ci/osx/test.sh b/scripts/ci/osx/test.sh
deleted file mode 100755
index c27a8879..00000000
--- a/scripts/ci/osx/test.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-ctest -V
diff --git a/scripts/ci/win/cmake.sh b/scripts/ci/win/cmake.sh
deleted file mode 100755
index 8890d01c..00000000
--- a/scripts/ci/win/cmake.sh
+++ /dev/null
@@ -1,16 +0,0 @@
-#!/bin/bash
-
-pwd
-where cl.exe
-export CC=cl.exe
-export CXX=cl.exe
-cmake .. -G "Ninja" \
-    -DCMAKE_BUILD_TYPE=RelWithDebInfo \
-    -DCMAKE_INSTALL_PREFIX="$CONDA_PREFIX" \
-    -DWITH_TESTS=ON \
-    -DCMAKE_VERBOSE_MAKEFILE=OFF \
-    -DCMAKE_LIBRARY_PATH:FILEPATH="$CONDA_PREFIX/Library/lib" \
-    -DCMAKE_INCLUDE_PATH:FILEPATH="$CONDA_PREFIX/Library/include" \
-    -DBUILD_SHARED_LIBS=ON \
-    -D_SILENCE_TR1_NAMESPACE_DEPRECATION_WARNING=1 \
-    -Dgtest_force_shared_crt=ON
diff --git a/scripts/ci/win/compile.sh b/scripts/ci/win/compile.sh
deleted file mode 100755
index b1fdea26..00000000
--- a/scripts/ci/win/compile.sh
+++ /dev/null
@@ -1,4 +0,0 @@
-#!/bin/bash
-
-ninja -v
-ninja install
diff --git a/scripts/ci/win/examples.sh b/scripts/ci/win/examples.sh
deleted file mode 100755
index 306b356c..00000000
--- a/scripts/ci/win/examples.sh
+++ /dev/null
@@ -1,13 +0,0 @@
-#!/bin/bash
-
-export BASE=`pwd`
-export CC=cl.exe
-export CXX=cl.exe
-for EXAMPLE in writing writing-filter writing-kernel \
-    writing-reader writing-writer
-do
-    cd $BASE/examples/$EXAMPLE
-    mkdir -p _build || exit 1
-    cd _build || exit 1
-    cmake -G "Ninja" .. -DPDAL_DIR=$CONDA_PREFIX/lib/cmake/PDAL && ninja
-done
diff --git a/scripts/ci/win/setup.sh b/scripts/ci/win/setup.sh
deleted file mode 100755
index d235f68c..00000000
--- a/scripts/ci/win/setup.sh
+++ /dev/null
@@ -1,7 +0,0 @@
-#!/bin/bash
-
-echo "Configuring build type '$BUILD_TYPE'"
-mkdir build
-
-gdalinfo --version
-
diff --git a/scripts/ci/win/test.sh b/scripts/ci/win/test.sh
deleted file mode 100755
index 1f98a07a..00000000
--- a/scripts/ci/win/test.sh
+++ /dev/null
@@ -1,3 +0,0 @@
-#!/bin/bash
-
-ctest -VV --output-on-failure
diff --git a/test/CMakeLists.txt b/test/CMakeLists.txt
index a1ee8cc4..23e08bef 100644
--- a/test/CMakeLists.txt
+++ b/test/CMakeLists.txt
@@ -19,6 +19,13 @@ macro(ENTWINE_ADD_TEST _name)
         PRIVATE GTEST_LINKED_AS_SHARED_LIBRARY=1)
 
     add_test(NAME ${_name} COMMAND ${test-name})
+    if (WIN32)
+        file(TO_NATIVE_PATH ${CMAKE_BINARY_DIR} TESTBINDIR)
+        cmake_path(SET TESTBINDIR "${CMAKE_BINARY_DIR}/bin")
+        cmake_path(CONVERT "${TESTBINDIR}" TO_NATIVE_PATH_LIST native_paths)
+        set_property(TEST ${_name} PROPERTY ENVIRONMENT_MODIFICATION
+            "PATH=path_list_prepend:${TESTBINDIR}")
+    endif()
 endmacro(ENTWINE_ADD_TEST)
 
 ENTWINE_ADD_TEST(initialize FILES unit/init.cpp)
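The test/CMakeLists.txt hunk prepends the build tree's bin directory to PATH for each registered test on Windows, so the freshly built entwine DLLs are found when CTest launches the gtest executables. A rough manual equivalent from a bash shell on Windows is sketched below; the paths are illustrative.

```bash
#!/bin/bash
# Sketch: run the unit tests from the build tree, making sure the just-built
# DLLs in build/bin are found first on PATH (Windows, Git Bash / MSYS shell).
cd build
PATH="$PWD/bin:$PATH" ctest -VV --output-on-failure
```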