Skip to content

Commit

Permalink
Merge branch 'develop' into mlir-triage-doc
Browse files Browse the repository at this point in the history
  • Loading branch information
manupak authored Jul 3, 2024
2 parents 8059fa0 + 497c277 commit cab23da
Show file tree
Hide file tree
Showing 33 changed files with 2,480 additions and 18 deletions.
2 changes: 1 addition & 1 deletion CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -362,7 +362,7 @@ rocm_package_add_rpm_dependencies(SHARED_DEPENDS "hip-devel")

rocm_create_package(
NAME MIGraphX
DESCRIPTION "AMD's graph optimizer"
DESCRIPTION "AMD graph optimizer"
MAINTAINER "AMDMIGraphX Maintainer <[email protected]>"
LDCONFIG
PTH
Expand Down
34 changes: 18 additions & 16 deletions src/include/migraphx/op/reshape.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -76,22 +76,24 @@ struct reshape
const bool has_negative_dim_attr = neg_dim_num < dims.size();
// construct output dynamic shape from dims attribute
std::vector<shape::dynamic_dimension> output_dyn_dims(dims.size());
std::transform(dims.begin(),
dims.end(),
input_dyn_dims.begin(),
output_dyn_dims.begin(),
[](auto dim, auto input_dyn_dim) -> shape::dynamic_dimension {
if(dim == 0)
{
return input_dyn_dim;
}
if(dim == -1)
{
return {1, 1};
}
std::size_t u_dim = dim;
return {u_dim, u_dim};
});
// NOTE: input_dyn_dims.size() may not equal dims.size()
for(std::size_t i = 0; i < dims.size(); ++i)
{
auto d = dims.at(i);
if(d == 0)
{
output_dyn_dims.at(i) = input_dyn_dims.at(i);
}
else if(d == -1)
{
output_dyn_dims.at(i) = {1, 1};
}
else
{
std::size_t u_dim = d;
output_dyn_dims.at(i) = {u_dim, u_dim};
}
}

if(has_negative_dim_attr)
{
Expand Down
26 changes: 26 additions & 0 deletions src/targets/gpu/lowering.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@
#include <migraphx/iterator_for.hpp>
#include <migraphx/program.hpp>

#include <migraphx/op/common.hpp>
#include <migraphx/op/dot.hpp>
#include <migraphx/op/if_op.hpp>
#include <migraphx/op/reshape.hpp>
Expand Down Expand Up @@ -99,6 +100,7 @@ struct miopen_apply
add_extend_op("rnn_var_sl_shift_sequence");
add_extend_op("topk");
add_generic_op("contiguous");
add_pooling_op();
#if MIGRAPHX_USE_MIOPEN
add_convolution_op("convolution");
add_convolution_op("convolution_backwards");
Expand Down Expand Up @@ -296,6 +298,30 @@ struct miopen_apply
});
}

// Register the lowering rule for "pooling": use the MIOpen kernel when it
// supports the configuration, otherwise fall back to a precompiled op.
void add_pooling_op()
{
    apply_map.emplace("pooling", [=](instruction_ref ins) {
        auto&& op   = ins->get_operator();
        auto op_val = op.to_value();
        // MIOpen pooling cannot count padding in the average, nor compute
        // LP-norm pooling; both cases go through the precompile path.
        if(op_val.at("count_include_pad").to<bool>() or
           op_val["mode"].to<op::pooling_mode>() == op::pooling_mode::lpnorm)
        {
            return insert_precompile_op(ins);
        }
#if MIGRAPHX_USE_MIOPEN
        // MIOpen path: append the output allocation as the final argument.
        auto args = ins->inputs();
        args.push_back(insert_allocation(ins, ins->get_shape()));
        return mod->replace_instruction(ins, make_op("gpu::pooling", op_val), args);
#else
        return insert_precompile_op(ins);
#endif
    });
}

// use 0 - input to represent neg
void add_neg_op()
{
Expand Down
2 changes: 1 addition & 1 deletion test/api/test_cpu.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -229,7 +229,7 @@ TEST_CASE(set_external_data_path)
migraphx::onnx_options options;
std::string model_path = "ext_path/external_data_test.onnx";
auto onnx_buffer = migraphx::read_string(model_path);
options.set_external_data_path(migraphx::fs::path(model_path).parent_path());
options.set_external_data_path(migraphx::fs::path(model_path).parent_path().string());
auto p = migraphx::parse_onnx_buffer(onnx_buffer, options);
auto shapes_before = p.get_output_shapes();
p.compile(migraphx::target("ref"));
Expand Down
16 changes: 16 additions & 0 deletions test/op_shape_test.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -3270,6 +3270,22 @@ TEST_CASE(reshape_dyn_1in_negative_1_dims_1)
expect_shape(output, migraphx::make_op("reshape", {{"dims", {0, -1, 2, 2}}}), input);
}

// reshape on a dynamic-shape input where `dims` mixes 0 (copy the matching
// input dimension) and -1 (infer from the remaining elements): the two
// trailing {2, 8} dims collapse into a single inferred {4, 64} dim.
TEST_CASE(reshape_dyn_1in_negative_1_dims_2)
{
migraphx::shape input{migraphx::shape::float_type, {{1, 4}, {24, 24}, {2, 8}, {2, 8}}};
std::vector<migraphx::shape::dynamic_dimension> out_dyn_dims = {{1, 4}, {24, 24}, {4, 64}};
migraphx::shape output{migraphx::shape::float_type, out_dyn_dims};
expect_shape(output, migraphx::make_op("reshape", {{"dims", {0, -1}}}), input);
}

// reshape where `dims` has MORE entries than the input has dimensions:
// 0 copies the dynamic {1, 4} dim, and the fixed 24 elements are split
// into 4 x 3 x 2 fixed dims.
TEST_CASE(reshape_dyn_1in_negative_1_dims_3)
{
migraphx::shape input{migraphx::shape::float_type, {{1, 4}, {24, 24}}};
std::vector<migraphx::shape::dynamic_dimension> out_dyn_dims = {{1, 4}, {4, 4}, {3, 3}, {2, 2}};
migraphx::shape output{migraphx::shape::float_type, out_dyn_dims};
expect_shape(output, migraphx::make_op("reshape", {{"dims", {0, 4, 3, 2}}}), input);
}

// note how non-fixed dynamic dimension on axis=0 goes to 2 from `dims` attribute
// code assumes that this will work at run-time
TEST_CASE(reshape_dyn_1in_dyn_to_fixed)
Expand Down
4 changes: 4 additions & 0 deletions tools/model_zoo/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,4 @@
# Model Zoo

- [Test Generator with Datasets](./test_generator/)
- [ONNX Zoo](./onnx_zoo/)
50 changes: 50 additions & 0 deletions tools/model_zoo/onnx_zoo/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,50 @@
# ONNX Zoo model tester

Helper script that uses [`test_runner.py`](../../test_runner.py) to test those [`ONNX Zoo models`](https://onnx.ai/models/) that ship with test data

## Getting the repository

> [!IMPORTANT]
> Make sure to enable git-lfs.
```bash
git clone https://github.com/onnx/models.git --depth 1
```

## Running the tests

> [!IMPORTANT]
> The argument must point to a folder, not a file.
```bash
# VERBOSE=1 DEBUG=1 # use these for more verbose logging
# ATOL=0.001 RTOL=0.001 TARGET=gpu # are the default values
./test_models.sh models/validated
```

You can also pass multiple folders, e.g.:

```bash
./test_models.sh models/validated/text/machine_comprehension/t5/ models/validated/vision/classification/shufflenet/
```

## Results

Results are separated by dtype: `logs/fp32` and `logs/fp16`

### Helpers

```bash
# Logs missing a PASSED marker (something went wrong)
grep -HRL PASSED logs
# Runs that hit a runtime error
grep -HRi RuntimeError logs/
# Runs that failed the accuracy check
grep -HRl FAILED logs
```

## Cleanup

If at any point something fails, the following things might need cleanup:
- Remove `tmp_model` folder
- `git lfs prune` in `models`
118 changes: 118 additions & 0 deletions tools/model_zoo/onnx_zoo/test_models.sh
Original file line number Diff line number Diff line change
@@ -0,0 +1,118 @@
#!/bin/bash

#####################################################################################
# The MIT License (MIT)
#
# Copyright (c) 2015-2024 Advanced Micro Devices, Inc. All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
#
#####################################################################################

# Abort on the first failing command.
set -e

# Directory containing this script; logs and the scratch dir live under it.
WORK_DIR="$(cd -P -- "$(dirname -- "${BASH_SOURCE[0]}")" && pwd -P)"
# Default tester location: ../../test_runner.py relative to this script.
SCRIPT_PATH=$(dirname $(dirname $(dirname $(readlink -f "$0"))))/test_runner.py
# All of the following can be overridden from the environment.
TESTER_SCRIPT="${TESTER:-$SCRIPT_PATH}"
ATOL="${ATOL:-0.001}"
RTOL="${RTOL:-0.001}"
TARGET="${TARGET:-gpu}"

# DEBUG=1 streams helper-command output to stdout; otherwise discard it.
if [[ "${DEBUG:-0}" -eq 1 ]]; then
PIPE=/dev/stdout
else
PIPE=/dev/null
fi

# VERBOSE=1 echoes every command as it runs.
if [[ "${VERBOSE:-0}" -eq 1 ]]; then
set -x
fi

# Recursively walk a directory tree and hand every *.tar.gz archive
# found along the way to process().
function iterate() {
    local entry
    for entry in "$1"/*; do
        if [[ -f "$entry" && "$entry" == *.tar.gz ]]; then
            process "$entry"
        elif [[ -d "$entry" ]]; then
            iterate "$entry"
        fi
    done
}

# Full lifecycle for one archive: fetch the LFS object and extract the
# model plus test data, run the fp32 and fp16 tests, then clean up.
function process() {
    local file="$1"
    echo "INFO: process $file started"
    # Quote "$file" everywhere: archive paths may contain whitespace.
    setup "$file"
    test "$file" fp32
    test "$file" fp16
    cleanup "$file"
    echo "INFO: process $file finished"
}

# Download the LFS object backing the archive "$1" and extract it into
# the scratch directory $WORK_DIR/tmp_model.
function setup() {
    local file="$1"
    echo "INFO: setup $file"
    local local_file folder
    local_file="$(basename "$file")"
    # git lfs pull only works from inside the checkout, so hop there and back
    folder="$(cd -P -- "$(dirname -- "$file")" && pwd -P)"
    cd "$folder" &> "${PIPE}" && git lfs pull --include="$local_file" --exclude="" &> "${PIPE}"; cd - &> "${PIPE}"
    tar xzf "$file" -C "$WORK_DIR/tmp_model" &> "${PIPE}"
}

# Drop the extracted model from the scratch dir and prune the local LFS
# object cache so disk usage stays bounded across many archives.
function cleanup() {
    local file="$1"
    echo "INFO: cleanup $file"
    local folder
    # git lfs prune must run from inside the checkout, so hop there and back
    folder="$(cd -P -- "$(dirname -- "$file")" && pwd -P)"
    cd "$folder" &> "${PIPE}" && git lfs prune &> "${PIPE}"; cd - &> "${PIPE}"
    rm -r "$WORK_DIR"/tmp_model/* &> "${PIPE}"
}

# Run test_runner.py on the extracted model and log the output under
# logs/<dtype>/; emit a WARNING (but do not abort) on failure.
# $1 = archive path, $2 = dtype ("fp32" or "fp16").
# NOTE: this shadows the `test` builtin; the script only uses [[ ]], so
# that is safe, and the name is kept for compatibility with process().
function test() {
    local file="$1"
    local dtype="$2"
    echo "INFO: test $file ($dtype)"
    local local_file
    local_file="$(basename "$file")"
    # Build the arguments as an array so values with spaces survive intact.
    local flags=(--atol "$ATOL" --rtol "$RTOL" --target "$TARGET")
    if [[ "$dtype" = "fp16" ]]; then
        flags+=(--fp16)
    fi
    local exit_code=0
    python3 "$TESTER_SCRIPT" "${flags[@]}" "$WORK_DIR"/tmp_model/*/ &> "$WORK_DIR/logs/$dtype/${local_file//\//_}.log" || exit_code=$?
    if [[ "$exit_code" -ne 0 ]]; then
        echo "WARNING: ${file} failed ($dtype)"
    fi
}

# Prepare per-dtype log dirs and an empty scratch area for extracted models.
mkdir -p "$WORK_DIR"/logs/fp32/ "$WORK_DIR"/logs/fp16/ "$WORK_DIR"/tmp_model
rm -fr "$WORK_DIR"/tmp_model/*

# Resolve each argument to an absolute path, then scan it for archives.
for arg in "$@"; do
    iterate "$(dirname "$(readlink -e "$arg")")/$(basename "$arg")"
done
Loading

0 comments on commit cab23da

Please sign in to comment.