Skip to content

Commit

Permalink
Merge branch 'develop' into ci_cdna_to_mi100
Browse files Browse the repository at this point in the history
  • Loading branch information
causten authored Sep 28, 2023
2 parents ed60630 + e12032f commit e7edd2e
Show file tree
Hide file tree
Showing 22 changed files with 1,007 additions and 98 deletions.
5 changes: 5 additions & 0 deletions .github/workflows/ci.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -227,6 +227,7 @@ jobs:

- name: Free space
uses: jlumbroso/free-disk-space@main
continue-on-error: true
with:
tool-cache: true
android: true
Expand Down Expand Up @@ -305,6 +306,7 @@ jobs:
steps:
- name: Free space
uses: jlumbroso/free-disk-space@main
continue-on-error: true
with:
tool-cache: true
android: true
Expand Down Expand Up @@ -335,6 +337,7 @@ jobs:
steps:
- name: Free space
uses: jlumbroso/free-disk-space@main
continue-on-error: true
with:
tool-cache: true
android: true
Expand Down Expand Up @@ -375,6 +378,7 @@ jobs:
steps:
- name: Free space
uses: jlumbroso/free-disk-space@main
continue-on-error: true
with:
tool-cache: true
android: true
Expand Down Expand Up @@ -493,6 +497,7 @@ jobs:
steps:
- name: Free space
uses: jlumbroso/free-disk-space@main
continue-on-error: true
with:
tool-cache: true
android: true
Expand Down
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -80,3 +80,6 @@ docs/html
cmake-build*/
build*/

# Recommended location to install rbuild dependencies from README.md
depend

9 changes: 9 additions & 0 deletions rbuild.ini
Original file line number Diff line number Diff line change
Expand Up @@ -29,3 +29,12 @@ define =
CMAKE_CXX_COMPILER_LAUNCHER=${deps_dir}/bin/ccache
MIGRAPHX_ENABLE_CPU=On
BUILD_DEV=On

[cibuild]
cxx = ${rocm_path}/llvm/bin/clang++
cc = ${rocm_path}/llvm/bin/clang
deps =
-f dev-requirements.txt
define =
CMAKE_C_COMPILER_LAUNCHER=${deps_dir}/bin/ccache
CMAKE_CXX_COMPILER_LAUNCHER=${deps_dir}/bin/ccache
1 change: 1 addition & 0 deletions src/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -197,6 +197,7 @@ register_migraphx_ops(
reduce_sum
relu
reshape
reshape_lazy
reverse
rnn
rnn_last_cell_output
Expand Down
1 change: 0 additions & 1 deletion src/auto_contiguous.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -25,7 +25,6 @@
#include <migraphx/program.hpp>
#include <migraphx/instruction.hpp>
#include <migraphx/make_op.hpp>

#include <migraphx/iterator_for.hpp>

namespace migraphx {
Expand Down
1 change: 1 addition & 0 deletions src/include/migraphx/instruction.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -81,6 +81,7 @@ struct MIGRAPHX_EXPORT instruction

const std::vector<module_ref>& module_inputs() const;

/// Where this instruction is used as an input to another instruction
const std::vector<instruction_ref>& outputs() const;

friend bool operator==(const instruction& x, const instruction& y);
Expand Down
66 changes: 20 additions & 46 deletions src/include/migraphx/op/reshape.hpp
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
/*
* The MIT License (MIT)
*
* Copyright (c) 2015-2022 Advanced Micro Devices, Inc. All rights reserved.
* Copyright (c) 2015-2023 Advanced Micro Devices, Inc. All rights reserved.
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
Expand Down Expand Up @@ -29,7 +29,8 @@
#include <migraphx/config.hpp>
#include <migraphx/value.hpp>
#include <migraphx/dyn_output.hpp>
#include <migraphx/optional.hpp>

#include <algorithm>

namespace migraphx {
inline namespace MIGRAPHX_INLINE_NS {
Expand All @@ -45,8 +46,6 @@ struct reshape
return pack(f(self.dims, "dims"));
}

value attributes() const { return {{"require_std_shape", true}}; }

std::string name() const { return "reshape"; }

shape dyn_compute_shape(shape s0) const
Expand Down Expand Up @@ -110,27 +109,9 @@ struct reshape
return it;
}

template <class DimIterator, class StrideIterator>
static auto can_strides_merge(DimIterator dim_start,
DimIterator dim_last,
StrideIterator stride_start,
StrideIterator stride_last)
{
assert(std::distance(dim_start, dim_last) == std::distance(stride_start, stride_last));
auto cstride = *std::prev(stride_last);
return std::equal(std::make_reverse_iterator(dim_last),
std::make_reverse_iterator(dim_start + 1),
std::make_reverse_iterator(stride_last - 1),
std::make_reverse_iterator(stride_start),
[&](auto dim, auto stride) {
cstride *= dim;
return stride == cstride;
});
}

// This will reshape the dimensions of the input shape to use the lens of
// `rdims`. If this can't be done without changing memory layout then it
// will return nullopt
// This will attempt to alias the dimensions of the input shape to the lens of
// `rdims`. Unlike reshape_lazy though we can modify memory layout with copies and this
// can remove previous nullopts that were sent back for the alias case
static optional<shape> reshape_dims(const shape& input, const std::vector<std::size_t>& rdims)
{
if(input.standard())
Expand All @@ -155,13 +136,8 @@ struct reshape
{
auto start = idims.begin() + i;
auto it = compute_end_dim(start, idims.end(), rdim);
if(it == start)
return nullopt;
auto n = it - start;
assert((i + n) <= istrides.size());
if(not can_strides_merge(
start, it + 1, istrides.begin() + i, istrides.begin() + i + n + 1))
return nullopt;
i += n;
rstrides.push_back(istrides[i]);
}
Expand All @@ -170,8 +146,7 @@ struct reshape
{
auto start = rdims.begin() + i;
auto it = compute_end_dim(start, rdims.end(), idim);
if(it == start)
return nullopt;

auto n = it - start;
assert((r + n) <= rdims.size());
auto stride = istrides[i] * idim;
Expand All @@ -191,15 +166,11 @@ struct reshape
auto stride = rstrides.back();
for(auto d : range(rdims.begin() + rstrides.size(), rdims.end()))
{
if(d != 1)
return nullopt;
(void)d;
rstrides.push_back(stride);
}
}

if(rdims.size() != rstrides.size())
return nullopt;

return shape{input.type(), rdims, rstrides};
}

Expand Down Expand Up @@ -233,25 +204,24 @@ struct reshape
}

auto s = reshape_dims(inputs.front(), rdims);
if(not s.has_value())
MIGRAPHX_THROW("Reshape on axis that is not packed.");

if(s->elements() != inputs.front().elements())
MIGRAPHX_THROW("Reshape: Wrong number of elements for reshape: reshape has " +
MIGRAPHX_THROW("reshape: Wrong number of elements for reshape: reshape has " +
std::to_string(s->elements()) + " elements whereas the input has " +
std::to_string(inputs.front().elements()));

assert(s->bytes() == inputs.front().bytes());
return *s;
}

shape compute_shape(std::vector<shape> inputs) const
{
check_shapes{inputs, *this, true}.has(1);

auto n_neg_dims = std::count(dims.begin(), dims.end(), -1);
if(n_neg_dims > 1)
MIGRAPHX_THROW("Reshape: Dimensions for reshape can only have one -1 dim");
auto s0 = inputs[0];
MIGRAPHX_THROW("reshape: Dimensions for reshape can only have one -1 dim");

auto s0 = inputs.front();
if(s0.dynamic())
{
return dyn_compute_shape(s0);
Expand All @@ -264,10 +234,14 @@ struct reshape

argument compute(const dyn_output& dyn_out, std::vector<argument> args) const
{
return args[0].reshape(dyn_out.computed_shape);
}
assert(dyn_out.computed_shape.standard());
argument result{dyn_out.computed_shape};

std::ptrdiff_t output_alias(const std::vector<shape>&) const { return 0; }
visit_all(result, args[0])([&](auto output, auto input) {
std::copy(input.begin(), input.end(), output.begin());
});
return result;
}
};

} // namespace op
Expand Down
Loading

0 comments on commit e7edd2e

Please sign in to comment.