Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions tmva/sofie/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -23,6 +23,7 @@ ROOT_STANDARD_LIBRARY_PACKAGE(ROOTTMVASofie
TMVA/RModel_Base.hxx
TMVA/RModel.hxx
TMVA/ROperator.hxx
TMVA/ROperator_Basic_Is.hxx
TMVA/ROperator_BasicUnary.hxx
TMVA/ROperator_BasicBinary.hxx
TMVA/ROperator_BasicNary.hxx
Expand Down Expand Up @@ -67,6 +68,7 @@ ROOT_STANDARD_LIBRARY_PACKAGE(ROOTTMVASofie
TMVA/ROperator_Gather.hxx
TMVA/ROperator_GatherND.hxx
TMVA/ROperator_NonZero.hxx
TMVA/ROperator_Not.hxx
TMVA/SOFIE_common.hxx
TMVA/SOFIEHelpers.hxx

Expand Down
95 changes: 95 additions & 0 deletions tmva/sofie/inc/TMVA/ROperator_Basic_Is.hxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,95 @@
#ifndef TMVA_EXPERIMENTAL_SOFIE_ROPERATOR_BASIC_IS
#define TMVA_EXPERIMENTAL_SOFIE_ROPERATOR_BASIC_IS

#include <TMVA/ROperator.hxx>
#include <TMVA/RModel.hxx>
#include <TMVA/SOFIE_common.hxx>
#include <cmath>

namespace TMVA {
namespace Experimental {
namespace SOFIE {

// Kinds of element-wise classification ("Is...") operators supported by SOFIE.
// kIsInfPos / kIsInfNeg implement the ONNX IsInf attributes detect_positive /
// detect_negative when only one of the two is requested.
enum class EBasicIsOperator { kIsInf, kIsInfPos, kIsInfNeg, kIsNaN };

// Traits describing one "Is..." operator:
//  - Name(): operator name used in diagnostics and generated-code comments
//  - Op(x):  C++ expression string applied element-wise in the generated code
//  - Impl(x): reference implementation of the same predicate
template <EBasicIsOperator Op>
struct IsOpTraits {
};
template<>
struct IsOpTraits<EBasicIsOperator::kIsInf> {
   static std::string Name() { return "IsInf"; }
   static std::string Op(const std::string &x) { return "std::isinf(" + x + ")"; }
   static bool Impl(float x) { return std::isinf(x); }
};
template<>
struct IsOpTraits<EBasicIsOperator::kIsInfPos> {
   static std::string Name() { return "IsInfPos"; }
   // space around '>' so the generated code is conventionally formatted
   static std::string Op(const std::string &x) { return "(std::isinf(" + x + ") && " + x + " > 0)"; }
   static bool Impl(float x) { return std::isinf(x) && x > 0; }
};
template<>
struct IsOpTraits<EBasicIsOperator::kIsInfNeg> {
   static std::string Name() { return "IsInfNeg"; }
   static std::string Op(const std::string &x) { return "(std::isinf(" + x + ") && " + x + " < 0)"; }
   static bool Impl(float x) { return std::isinf(x) && x < 0; }
};
template<>
struct IsOpTraits<EBasicIsOperator::kIsNaN> {
   // fix: previously returned "IsInf", giving wrong diagnostics and wrong
   // generated-code comments for the IsNaN operator
   static std::string Name() { return "IsNaN"; }
   static std::string Op(const std::string &x) { return "std::isnan(" + x + ")"; }
   static bool Impl(float x) { return std::isnan(x); }
};



/// Element-wise "Is..." operator (IsInf, IsNaN and the single-sided IsInf
/// variants). Takes one input tensor and produces a BOOL output tensor of the
/// same (possibly dynamic) shape, each element computed by the predicate
/// selected through the Op template parameter.
template <EBasicIsOperator Op>
class ROperator_Basic_Is final : public ROperator {
private:
   std::string fNX; // input tensor name (cleaned)
   std::string fNY; // output tensor name (cleaned)

   std::vector<Dim> fShapeX; // input shape (possibly dynamic)
   std::vector<Dim> fShapeY; // output shape (same as input)

public:
   ROperator_Basic_Is() {}

   ROperator_Basic_Is(std::string nameX, std::string nameY)
      : fNX(UTILITY::Clean_name(nameX)), fNY(UTILITY::Clean_name(nameY))
   {
      fInputTensorNames = { fNX };
      fOutputTensorNames = { fNY };
   }

   /// Validate the input tensor exists and register the BOOL output tensor
   /// with the same shape as the input.
   void Initialize(RModel& model) override {
      if (!model.CheckIfTensorAlreadyExist(fNX)) {
         throw std::runtime_error("TMVA::SOFIE - Tensor " + fNX + " not found.");
      }
      fShapeX = model.GetDimTensorShape(fNX);
      fShapeY = fShapeX;
      model.AddIntermediateTensor(fNY, ETensorType::BOOL, fShapeY);
   }

   /// Generate the inference code: a flat loop applying the predicate
   /// expression to every element.
   std::string Generate(std::string opName) override
   {
      opName = "op_" + opName;
      std::stringstream out;

      // fix: space after "Operator" so the generated comment reads
      // "//---- Operator IsInf op_..." (consistent with e.g. ROperator_Not)
      out << SP << "\n//---- Operator " << IsOpTraits<Op>::Name() << " " << opName << "\n";
      auto length = ConvertDimShapeToLength(fShapeX);
      out << SP << "for (size_t i = 0; i < " << length << "; i++) {\n";
      out << SP << SP << "tensor_" << fNY << "[i] = " << IsOpTraits<Op>::Op("tensor_" + fNX + "[i]") << ";\n";
      out << SP << "}\n";
      return out.str();
   }

   /// The generated code relies on std::isinf / std::isnan from <cmath>.
   std::vector<std::string> GetStdLibs() override {
      return { std::string("cmath") };
   }
};

} // namespace SOFIE
} // namespace Experimental
} // namespace TMVA

#endif
9 changes: 0 additions & 9 deletions tmva/sofie/inc/TMVA/ROperator_NonZero.hxx
Original file line number Diff line number Diff line change
Expand Up @@ -92,15 +92,6 @@ public:
fShapeY.resize(2);
fShapeY[0] = fShapeX.size();

// identify as -1 since we will declare maximum as size of input
// auto inputLength = ConvertDimShapeToLength(fShapeX);
// // case X is Dim, becomes complicated to know the maximum. Shuld be allocated dynamically
// size_t inputLength = 0;
// if (!model.IsDynamicTensor(fNX)) {
// inputLength = ConvertShapeToLength(ConvertShapeToInt(fShapeX));
// else
// inputLength = static_cast<size_t>(-1); // flag -1 to define shape correctly

// flag -1 to define the shape variable in the constructor code and not in the constructor signature
fShapeY[1] = Dim{std::string("v_NonZero_") + fNX, static_cast<size_t>(-1) };

Expand Down
60 changes: 60 additions & 0 deletions tmva/sofie/inc/TMVA/ROperator_Not.hxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,60 @@
// fix: the include guard previously reused TMVA_EXPERIMENTAL_SOFIE_ROPERATOR_BASIC_UNARY,
// colliding with ROperator_BasicUnary.hxx — including both headers in one TU
// would silently discard one of the two class definitions.
#ifndef TMVA_EXPERIMENTAL_SOFIE_ROPERATOR_NOT
#define TMVA_EXPERIMENTAL_SOFIE_ROPERATOR_NOT

#include <TMVA/ROperator.hxx>
#include <TMVA/RModel.hxx>
#include <TMVA/SOFIE_common.hxx>

namespace TMVA {
namespace Experimental {
namespace SOFIE {

/// ONNX Not operator: element-wise logical negation.
/// Produces an output tensor with the same type and (possibly dynamic)
/// shape as the input.
class ROperator_Not final : public ROperator {
private:
   std::string fNX; // input tensor name (cleaned)
   std::string fNY; // output tensor name (cleaned)

   std::vector<Dim> fShapeX; // input shape (possibly dynamic)
   std::vector<Dim> fShapeY; // output shape (same as input)

public:
   ROperator_Not() {}

   ROperator_Not(std::string nameX, std::string nameY)
      : fNX(UTILITY::Clean_name(nameX)), fNY(UTILITY::Clean_name(nameY))
   {
      fInputTensorNames = { fNX };
      fOutputTensorNames = { fNY };
   }

   /// Validate the input tensor exists and register the output tensor with
   /// the same type and shape as the input.
   void Initialize(RModel& model) override {
      if (!model.CheckIfTensorAlreadyExist(fNX)) {
         throw std::runtime_error("TMVA::SOFIE - Tensor " + fNX + " not found.");
      }
      fShapeX = model.GetDimTensorShape(fNX);
      fShapeY = fShapeX;
      model.AddIntermediateTensor(fNY, model.GetTensorType(fNX), fShapeY);
   }

   /// Generate the inference code: a flat loop negating each element.
   std::string Generate(std::string opName) override
   {
      opName = "op_" + opName;
      std::stringstream out;

      out << SP << "\n//---- Operator Not " << opName << "\n";
      auto length = ConvertDimShapeToLength(fShapeX);
      out << SP << "for (size_t i = 0; i < " << length << "; i++) {\n";
      out << SP << SP << "tensor_" << fNY << "[i] = !tensor_" + fNX + "[i];\n";
      out << SP << "}\n";
      return out.str();
   }

};

} // namespace SOFIE
} // namespace Experimental
} // namespace TMVA

#endif
31 changes: 31 additions & 0 deletions tmva/sofie/test/TestCustomModelsFromONNX.cxx
Original file line number Diff line number Diff line change
Expand Up @@ -2973,5 +2973,36 @@ TEST(ONNX, NonZero_Constant)
EXPECT_EQ(output[i] , correct_output[i]);
}
}
TEST(ONNX, IsInf)
{
// Input with a positive infinity (1./0.) between two finite values;
// IsInf must flag only the middle element.
std::vector<float> input = { 1, static_cast<float>(1./0.), 2.};
std::vector<uint8_t> correct_output = { 0,1,0 };

// NOTE: cannot use input.size() inside the session-argument string because the
// `input` symbol is not visible when the generated inference code is run,
// so the size is baked in via std::to_string.
ASSERT_INCLUDE_AND_RUN_SESSION_ARGS(std::vector<uint8_t>, "IsInf",std::string("\"\", ") + std::to_string(input.size()), input.size(),input);

// Checking output size
EXPECT_EQ(output.size(), correct_output.size());
// Checking output
for (size_t i = 0; i < output.size(); ++i) {
EXPECT_EQ(output[i] , correct_output[i]);
}
}

TEST(ONNX, NotIsNaN)
{
// Input with a NaN (0./0.) between two finite values; the model applies
// IsNaN followed by Not, so only the middle element must be 0.
std::vector<float> input = { 1, static_cast<float>(0./0.), 2.};
std::vector<uint8_t> correct_output = { 1,0,1 };

// size is passed via std::to_string because `input` is not visible inside the
// generated inference session code
ASSERT_INCLUDE_AND_RUN_SESSION_ARGS(std::vector<uint8_t>, "NotIsNaN",std::string("\"\", ") + std::to_string(input.size()), input.size(),input);

// Checking output size
EXPECT_EQ(output.size(), correct_output.size());
// Checking output
for (size_t i = 0; i < output.size(); ++i) {
EXPECT_EQ(output[i] , correct_output[i]);
}
}

11 changes: 11 additions & 0 deletions tmva/sofie/test/input_models/IsInf.onnx
Original file line number Diff line number Diff line change
@@ -0,0 +1,11 @@
 onnx-example:S

inputoutput"IsInfTestZ
input


Nb
output
 

NB
Expand Down
13 changes: 13 additions & 0 deletions tmva/sofie/test/input_models/NotIsNaN.onnx
Original file line number Diff line number Diff line change
@@ -0,0 +1,13 @@
 onnx-example:t

input temp_result"IsNaN

temp_resultoutput"NotTestZ
input


Nb
output
 

NB
Expand Down
6 changes: 5 additions & 1 deletion tmva/sofie/test/test_helpers.h
Original file line number Diff line number Diff line change
Expand Up @@ -45,9 +45,11 @@ bool includeModel(std::string const &modelName)
template <class T>
std::string toInterpreter(T const &ptr, std::string const &className, bool toRawPointer = false)
{
if constexpr (std::is_same_v<T, int>) {
// for the integer arguments (shape values)
if constexpr (std::is_same_v<T, int> || std::is_same_v<T, size_t>) {
return std::to_string(ptr);
}
// for the data arguments
std::string out =
TString::Format("reinterpret_cast<%s*>(0x%zx)", className.c_str(), reinterpret_cast<std::size_t>(&ptr)).Data();
if (toRawPointer) {
Expand Down Expand Up @@ -94,6 +96,8 @@ runModel(std::string outputTypeName, std::string const &modelName, std::string s
auto type_name = []<typename T>() {
if constexpr (std::is_same_v<T, int>)
return "int";
else if constexpr (std::is_same_v<T, size_t>)
return "size_t";
else if constexpr (std::is_same_v<T, std::vector<float>>)
return "std::vector<float>";
else if constexpr (std::is_same_v<T, std::vector<int>>)
Expand Down
2 changes: 2 additions & 0 deletions tmva/sofie_parsers/CMakeLists.txt
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,7 @@ ROOT_STANDARD_LIBRARY_PACKAGE(ROOTTMVASofieParser
src/RModelParser_ONNX.cxx
src/ParseBasicUnary.cxx
src/ParseBasicBinary.cxx
src/ParseBasicIs.cxx
src/ParseBatchNormalization.cxx
src/ParseCast.cxx
src/ParseConcat.cxx
Expand Down Expand Up @@ -76,6 +77,7 @@ ROOT_STANDARD_LIBRARY_PACKAGE(ROOTTMVASofieParser
src/ParseRandom.cxx
src/ParseScatterElements.cxx
src/ParseNonZero.cxx
src/ParseNot.cxx
${PROTO_SRCS}
LIBRARIES PUBLIC
protobuf::libprotobuf
Expand Down
70 changes: 70 additions & 0 deletions tmva/sofie_parsers/src/ParseBasicIs.cxx
Original file line number Diff line number Diff line change
@@ -0,0 +1,70 @@
#include "TMVA/RModelParser_ONNX.hxx"
#include "TMVA/ROperator_Basic_Is.hxx"
#include "onnx_proto3.pb.h"

namespace TMVA {
namespace Experimental {
namespace SOFIE {

/// Parse an ONNX IsInf / IsNaN node into the corresponding
/// ROperator_Basic_Is instantiation. For IsInf the attributes
/// detect_positive / detect_negative (both defaulting to 1) select the
/// single-sided variants kIsInfPos / kIsInfNeg.
template <EBasicIsOperator Op>
std::unique_ptr<ROperator> ParseBasicIs(RModelParser_ONNX &parser, const onnx::NodeProto &nodeproto)
{
   // the input tensor type must already be known to the parser
   std::string input_name = nodeproto.input(0);
   if (!parser.IsRegisteredTensorType(input_name)) {
      throw
         std::runtime_error("TMVA::SOFIE ONNX Parser " + IsOpTraits<Op>::Name() + " op has input tensor " + input_name +
                            " but its type is not yet registered");
   }

   // get attributes for the IsInf operator (IsNaN carries no attributes)
   int detect_negative = 1;
   int detect_positive = 1;
   // fix: loop counter was declared `int_t`, which is not a type and does not compile
   for (int i = 0; i < nodeproto.attribute_size(); i++) {
      std::string attribute_name = nodeproto.attribute(i).name();
      if (attribute_name == "detect_negative")
         detect_negative = nodeproto.attribute(i).i();
      if (attribute_name == "detect_positive")
         detect_positive = nodeproto.attribute(i).i();
   }

   // detecting neither positive nor negative infinities is meaningless
   if (detect_positive == 0 && detect_negative == 0)
      throw std::runtime_error("TMVA::SOFIE ONNX Parser IsInf op has invalid attributes");

   std::unique_ptr<ROperator> op;
   std::string output_name = nodeproto.output(0);

   if (detect_negative == 1 && detect_positive == 1) {
      // default case: both sides detected (also covers IsNaN and an
      // attribute-free IsInf, since both attributes default to 1)
      op.reset(new ROperator_Basic_Is<Op>(input_name, output_name));
   } else if (detect_negative == 0) {
      // only positive infinities requested
      op.reset(new ROperator_Basic_Is<EBasicIsOperator::kIsInfPos>(input_name, output_name));
   } else {
      // detect_positive == 0: only negative infinities requested
      op.reset(new ROperator_Basic_Is<EBasicIsOperator::kIsInfNeg>(input_name, output_name));
   }

   // Register the output type (is always BOOL)
   if (!parser.IsRegisteredTensorType(output_name)) {
      parser.RegisterTensorType(output_name, ETensorType::BOOL);
   }

   return op;
}

// Parser entry point for the ONNX IsNaN operator (registered in the
// operator-name dispatch table of RModelParser_ONNX).
ParserFuncSignature ParseIsNaN = [](RModelParser_ONNX &parser, const onnx::NodeProto &nodeproto) {
return ParseBasicIs<EBasicIsOperator::kIsNaN>(parser, nodeproto);
};

// Parser entry point for the ONNX IsInf operator; ParseBasicIs inspects the
// detect_positive / detect_negative attributes to pick the right variant.
ParserFuncSignature ParseIsInf = [](RModelParser_ONNX &parser, const onnx::NodeProto &nodeproto) {
return ParseBasicIs<EBasicIsOperator::kIsInf>(parser, nodeproto);
};


} // namespace SOFIE
} // namespace Experimental
} // namespace TMVA
Loading
Loading