Merge branch 'main' into SNOW-1449796-E2E-Cortex
sfc-gh-ralfaroviquez committed Jun 26, 2024
2 parents 3fd5d18 + 9693c6f commit 51b09e6
Showing 8 changed files with 167 additions and 4 deletions.
115 changes: 115 additions & 0 deletions .github/workflows/ci.yml
@@ -0,0 +1,115 @@
name: native-app-examples

on:
  pull_request:
    types:
      - opened
      - edited
      - labeled
      - unlabeled
      - synchronize

permissions:
  contents: read

jobs:
  build:
    runs-on: ubuntu-latest
    strategy:
      matrix:
        include:
          - environment-file: shared_python_ci_env.yml
    defaults:
      run:
        shell: bash -l {0}
    steps:
      - uses: actions/checkout@v4
        with:
          fetch-depth: ${{ github.event_name == 'pull_request' && 2 || 0 }}
      - name: Set up Python 3.10
        uses: actions/setup-python@v3
        with:
          python-version: "3.10"
      - name: Get changed files
        id: changed-files
        run: |
          if ${{ github.event_name == 'pull_request' }}; then
            echo "changed_files=$(git diff --name-only -r HEAD^1 HEAD | xargs)" >> $GITHUB_OUTPUT
          else
            echo "changed_files=$(git diff --name-only ${{ github.event.before }} ${{ github.event.after }} | xargs)" >> $GITHUB_OUTPUT
          fi
      - name: Determine tests to run
        uses: actions/github-script@v7
        id: tests_to_run
        env:
          CHANGED_FILES: ${{ steps.changed-files.outputs.changed_files }}
        with:
          script: |
            const { CHANGED_FILES } = process.env;
            const fs = require('fs');
            const path = require('path');
            // Invoke callback(dir) for every Python file found under dir, recursively.
            function getPytestPaths(dir, callback)
            {
              const files = fs.readdirSync(dir, { withFileTypes: true });
              for (const file of files)
              {
                if (file.isDirectory())
                {
                  getPytestPaths(path.join(dir, file.name), callback);
                }
                else
                {
                  const extension = path.extname(file.name);
                  if (extension == '.py')
                  {
                    callback(dir);
                  }
                }
              }
            }
            // Top-level folders touched by the change set, ignoring hidden folders such as .github.
            const paths = new Set(CHANGED_FILES.split(" ")
              .map(x => x.substring(0, x.indexOf("/") + 1))
              .filter(x => x.length > 0 && !x.startsWith('.')));
            const pytestPaths = new Set()
            const pytestArgs = new Set()
            for (const rootPath of paths)
            {
              let subFoldersWithPythonFiles = 0;
              getPytestPaths(rootPath, x =>
              {
                pytestPaths.add(x);
                subFoldersWithPythonFiles++
              })
              if (subFoldersWithPythonFiles > 0)
              {
                pytestArgs.add(rootPath)
              }
            }
            core.setOutput('pytestPaths', [...pytestPaths].join(' '));
            core.setOutput('pytestArgs', [...pytestArgs].join(' '));
      - name: Setup test environment
        uses: conda-incubator/setup-miniconda@v2
        with:
          environment-file: ${{ matrix.environment-file }}
      - name: Install dependencies
        run: |
          printf "[pytest]\npythonpath=${{ steps.tests_to_run.outputs.pytestPaths }}" > pytest.ini
          python -m pip install pytest
      - name: Run tests
        run: |
          args="${{ steps.tests_to_run.outputs.pytestArgs }}"
          pythonpath="${{ steps.tests_to_run.outputs.pytestPaths }}"
          if [ -z "${args}" ] || [ -z "${pythonpath}" ]; then
            echo "Nothing to test"
          else
            pytest $args
          fi
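
To make the new workflow concrete, here is a rough sketch (not part of the commit) of what the step outputs and the generated pytest.ini could look like for a hypothetical pull request that only touches files under data-mapping/, assuming for illustration that its Python files live in data-mapping/test:

# Hypothetical example: a PR that only changes files under data-mapping/,
# whose .py files are assumed to sit in data-mapping/test.
#
#   pytestArgs  -> "data-mapping/"      (top-level folders passed to pytest)
#   pytestPaths -> "data-mapping/test"  (folders written into pythonpath)

# The "Install dependencies" step would then generate this pytest.ini:
printf "[pytest]\npythonpath=data-mapping/test" > pytest.ini

# and the "Run tests" step would effectively run:
pytest data-mapping/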
1 change: 1 addition & 0 deletions data-mapping/app/manifest.yml
@@ -1,5 +1,6 @@
manifest_version: 1
artifacts:
  readme: README.md
  setup_script: setup_script.sql
  default_streamlit: ui."Dashboard"

18 changes: 15 additions & 3 deletions data-mapping/prepare_data.sh
@@ -1,7 +1,11 @@
# Create a database and a schema to hold the data to look up.
snow sql -q "
CREATE DATABASE IF NOT EXISTS IP2LOCATION;
CREATE SCHEMA IF NOT EXISTS IP2LOCATION;
"
# Create the table to host the data.
# Create a file format for the file
# Create a stage so we can upload the file

snow sql -q "
CREATE TABLE IF NOT EXISTS LITEDB11 (
@@ -27,22 +31,30 @@ COMPRESSION = AUTO;
CREATE STAGE IF NOT EXISTS IP2LOCATION.IP2LOCATION.LOCATION_DATA_STAGE
file_format = LOCATION_CSV;" --database ip2location --schema ip2location

# Copy the CSV file from your local machine to the stage we created previously.
snow stage copy /USER_PATH_HERE/IP2LOCATION-LITE-DB11.CSV @location_data_stage --database ip2location --schema ip2location

# Load the table by copying the CSV file from the stage.
snow sql -q "
copy into litedb11 from @location_data_stage
files = ('IP2LOCATION-LITE-DB11.CSV')
;" --database ip2location --schema ip2location

# Simple query test to ensure the table is correctly filled.
snow sql -q "SELECT COUNT(*) FROM LITEDB11;
SELECT * FROM LITEDB11 LIMIT 10;" --database ip2location --schema ip2location

# Create test database and schema.
snow sql -q "CREATE DATABASE IF NOT EXISTS TEST_IPLOCATION;
CREATE SCHEMA IF NOT EXISTS TEST_IPLOCATION;"

# Create a test table to insert some values into.
snow sql -q "
CREATE OR REPLACE TABLE TEST_IPLOCATION.TEST_IPLOCATION.TEST_DATA (
IP VARCHAR(16),
IP_DATA VARIANT
);"

# Insert test values to use later on.
snow sql -q "
INSERT INTO TEST_IPLOCATION.TEST_IPLOCATION.TEST_DATA(IP) VALUES('73.153.199.206'),('8.8.8.8');"
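
A quick sanity check after the script finishes (a sketch, not part of the committed file, assuming the same Snowflake CLI connection is configured) could confirm the seeded rows:

# Verify that the two seed IPs were inserted into the test table.
snow sql -q "SELECT IP, IP_DATA FROM TEST_IPLOCATION.TEST_IPLOCATION.TEST_DATA;"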
2 changes: 1 addition & 1 deletion data-mapping/snowflake.yml
@@ -7,4 +7,4 @@ native_app:
      dest: ./
  package:
    scripts:
-     - scripts/setup-package-script.sql
+     - scripts/setup_package_script.sql
15 changes: 15 additions & 0 deletions shared_python_ci_env.yml
@@ -0,0 +1,15 @@
# This file is used to install packages for local testing
name: native-apps-examples-testing
channels:
  - snowflake
dependencies:
  - python=3.8
  - pip
  - pip:
      - git+https://github.com/snowflakedb/snowflake-telemetry-python.git
      - snowflake-native-apps-permission-stub
      - snowflake-snowpark-python>=1.15.0
      - snowflake-cli-labs>=2.0.0
      - pytest
      - streamlit>=1.26.0
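
Since this environment also serves for local testing, a typical way to use it (assuming conda or miniconda is installed) might be:

# Create the shared environment once, then run the tests locally.
conda env create -f shared_python_ci_env.yml
conda activate native-apps-examples-testing
pytest    # or point pytest at a specific example folder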

3 changes: 3 additions & 0 deletions spcs-three-tier/app/manifest.yml
@@ -19,6 +19,9 @@ artifacts:
      - /spcs_app/napp/img_repo/eap_backend
      - /spcs_app/napp/img_repo/eap_router

lifecycle_callbacks:
  version_initializer: v1.init

privileges:
  - BIND SERVICE ENDPOINT:
      description: "Ability to create ingress URLs."
17 changes: 17 additions & 0 deletions spcs-three-tier/app/setup.sql
@@ -49,6 +49,23 @@ $$;

GRANT USAGE ON PROCEDURE v1.get_configuration(STRING) TO APPLICATION ROLE app_admin;

-- The version initializer callback is executed after a successful installation, upgrade, or downgrade of an application object.
-- In case the application fails to upgrade, the version initializer of the previous (successful) version will be executed so you
-- can clean up application state that may have been modified during the failed upgrade.
CREATE OR REPLACE PROCEDURE v1.init()
RETURNS STRING
LANGUAGE SQL
EXECUTE AS OWNER
AS
$$
BEGIN
ALTER SERVICE IF EXISTS app_public.frontend FROM SPECIFICATION_FILE='frontend.yaml';
ALTER SERVICE IF EXISTS app_public.backend FROM SPECIFICATION_FILE='backend.yaml';
RETURN 'init complete';
END $$;

GRANT USAGE ON PROCEDURE v1.init() TO APPLICATION ROLE app_admin;

CREATE OR REPLACE PROCEDURE v1.start_backend(pool_name VARCHAR)
RETURNS string
LANGUAGE sql
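
One way to exercise the new callback during development (a sketch, not part of this change) is to upgrade an already-installed instance of the application with the Snowflake CLI used elsewhere in these examples; after a successful upgrade, Snowflake invokes v1.init, which re-applies both service specifications:

# Re-running the app against an existing installation performs an in-place
# upgrade, after which the version_initializer (v1.init) refreshes the
# frontend and backend services from their specification files.
snow app run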
