From 300f21bac8ef1790e3e0f6046d7f13b60cdad01d Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Fri, 11 Apr 2025 14:22:14 +0200 Subject: [PATCH 01/22] Initial implementation for ALPAKA integration to SOFIE --- .vscode/settings.json | 6 + src/SOFIE_core/inc/SOFIE/RModel_Base.hxx | 1 + src/SOFIE_core/src/RModel.cxx | 333 +++++++++++++++++++++-- src/SOFIE_core/src/RModel_Base.cxx | 32 +++ 4 files changed, 344 insertions(+), 28 deletions(-) create mode 100644 .vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json new file mode 100644 index 0000000..eb254be --- /dev/null +++ b/.vscode/settings.json @@ -0,0 +1,6 @@ +{ + "files.associations": { + "*.icc": "cpp", + "limits": "cpp" + } +} \ No newline at end of file diff --git a/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx b/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx index f8a9d34..0a615c5 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx @@ -82,6 +82,7 @@ public: fCustomOpHeaders.insert(filename); } void GenerateHeaderInfo(std::string &hgname); + void GenerateHeaderInfo_GPU_ALPAKA(std::string& hgname); void PrintGenerated() { std::cout << fGC; } std::string ReturnGenerated() { return fGC; } diff --git a/src/SOFIE_core/src/RModel.cxx b/src/SOFIE_core/src/RModel.cxx index e5495ed..e2bc530 100644 --- a/src/SOFIE_core/src/RModel.cxx +++ b/src/SOFIE_core/src/RModel.cxx @@ -594,6 +594,28 @@ void RModel::GenerateInitializedTensorInfo() } } +void RModel::GenerateInitializedTensorInfo_GPU_ALPAKA() +{ + if (!fInitializedTensors.empty()) + fGC += "// initialized tensors\n"; + + for (auto &i : fInitializedTensors) { + if (!fUseWeightFile || i.second.IsConstantTensor()) { + if (i.second.type() == ETensorType::FLOAT) + fGC += GenerateConstantTensorCode(i); + else if (i.second.type() == ETensorType::INT64) + fGC += GenerateConstantTensorCode(i); + + } else { + // case of tensors which are read from a file + size_t length = ConvertShapeToLength(i.second.shape()); + if (i.second.type() == ETensorType::FLOAT) { + fGC += "auto deviceBuf_"+i.first+" = alpaka::allocBuf(devAcc, "+std::to_string(length)+");\n"; + } + } + } +} + void RModel::GenerateIntermediateMemoryPool() { if (fIntermediateMemoryInfo.total_stack.size() == 0) return; fGC += "\n//--- Allocating session memory pool to be used for allocating intermediate tensors\n"; @@ -612,7 +634,7 @@ void RModel::GenerateIntermediateTensorInfo() { tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(ConvertShapeToLength(i.second.shape)) + ");\n"; // No pointer allocation needed for BOOL } - if (fIntermediateTensorFrequencyLookup.find(i.first) == fIntermediateTensorFrequencyLookup.end() && std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == fOutputTensorNames.end()) { + if (std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == fOutputTensorNames.end()) { size_t length = ConvertShapeToLength(i.second.shape); if (i.second.type == ETensorType::FLOAT) { @@ -652,6 +674,55 @@ void RModel::GenerateIntermediateTensorInfo() { } } +void GenerateGPU_ALPAKA_Buffers(){ + if (!fIntermediateTensorInfos.empty()) { + std::string tensor_declaration_block = ""; + + for (auto &i : fIntermediateTensorInfos) { + if (i.second.type == ETensorType::BOOL) { + tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(ConvertShapeToLength(i.second.shape)) + ");\n"; + // No pointer allocation needed for BOOL + } + if (std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == fOutputTensorNames.end()) { + size_t length = ConvertShapeToLength(i.second.shape); + + if (i.second.type == ETensorType::FLOAT) { + tensor_declaration_block += "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," + std::to_string(length) + ");\n"; + } + else if (i.second.type == ETensorType::DOUBLE) { + tensor_declaration_block += "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," + std::to_string(length) + ");\n"; + } + else if (i.second.type == ETensorType::INT64) { + tensor_declaration_block += "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," + std::to_string(length) + ");\n"; + + } + } + } + + if (tensor_declaration_block.length()) { + fGC += "\n//--- declare and allocate the intermediate tensors\n" + tensor_declaration_block; + } + } + // add also the dynamic tensors (only declarations, allocation will be done later) + if (!fDynamicTensorInfos.empty()) { + fGC += "//--- declare the dynamic tensors\n"; + fGC += "using bufDev_float = alpaka::Buf, size_t>;\n" + fGC += "using bufDev_double = alpaka::Buf, size_t>;\n" + fGC += "using bufDev_int64= alpaka::Buf, size_t>;\n" + for (auto &i : fDynamicTensorInfos) { + if (i.second.type == ETensorType::FLOAT) { + fGC += "bufDev_float bufDev_" + i.first + ";\n"; + } else if (i.second.type == ETensorType::DOUBLE) { + fGC += "bufDev_double bufDev_" + i.first + ";\n"; + } else if (i.second.type == ETensorType::INT64) { + fGC += "bufDev_int64 bufDev_" + i.first + ";\n"; + + } + } + } +} + + // generate code for specific operator declarations to be defined in the Session class void RModel::GenerateOperatorDeclarations() { std::string strcode; @@ -665,11 +736,26 @@ void RModel::GenerateOperatorDeclarations() { } void RModel::GenerateDynamicTensorInfo() { + fGC += "//---- allocate the intermediate dynamic tensors\n"; + std::stringstream out; + for (auto & i: fDynamicTensorInfos) { + auto length = ConvertDynamicShapeToLength(i.second.shape); + out << SP << "if (" << length << " > 0) {\n"; + out << SP << SP << "fTensor_" << i.first << ".resize(" << length << ");\n"; + out << SP << SP << "tensor_" << i.first << " = fTensor_" << i.first << ".data();\n"; + out << SP << "}\n"; + } + fGC += out.str(); +} + +void RModel::GenerateDynamicTensorInfo_GPU_ALPAKA() { fGC += "//---- allocate the intermediate dynamic tensors\n"; std::stringstream out; for (auto & i: fDynamicTensorInfos) { auto length = ConvertDynamicShapeToLength(i.second.shape); out << SP << "if (" << length << " > 0) {\n"; + out << "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," + std::to_string(length) + ");\n"; + out << SP << SP << "fTensor_" << i.first << ".resize(" << length << ");\n"; out << SP << SP << "tensor_" << i.first << " = fTensor_" << i.first << ".data();\n"; out << SP << "}\n"; @@ -804,7 +890,7 @@ void RModel::GenerateSessionCode() { // define the Session struct (for GNN this is generated in RModel_GNN) - if (fUseSession && !fIsGNNComponent) { + if (fUseSession) { if (!fIsSubGraph) fGC += "struct Session {\n"; else @@ -814,32 +900,32 @@ void RModel::GenerateSessionCode() // generate code for declaring the initialized tensors GenerateInitializedTensorInfo(); - // evaluate total intermediate memory and position intermediate tensor addresses - std::string intermediate_memory_alloc_string = ""; - intermediate_memory_alloc_string += "\n// --- Positioning intermediate tensor memory --"; - for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { - intermediate_memory_alloc_string += AllocateIntermediateMemory(fOperators[op_idx]->GetOpOutputTensors()); - CheckAndFlushIntermediateMemory(fOperators[op_idx]->GetOpInputTensors(), op_idx); - } - - // to check remaining unused fragments after memory allocation (lesser the better) - // for (const auto &it: fIntermediateMemoryInfo.available_stack){ - // std::cout<<"chunk_idx: "<GetOpOutputTensors()); + // CheckAndFlushIntermediateMemory(fOperators[op_idx]->GetOpInputTensors(), op_idx); // } - // generate the memory pool to be used by intermediate tensors - GenerateIntermediateMemoryPool(); + // // to check remaining unused fragments after memory allocation (lesser the better) + // // for (const auto &it: fIntermediateMemoryInfo.available_stack){ + // // std::cout<<"chunk_idx: "<GenerateSessionMembersCode(opName); + // } + // fGC += "\n"; + // here add initialization and reading of weight tensors + if (fUseWeightFile) { + std::string fileName = fName; + if (fWeightFile == WeightFileType::Text) { + fileName += ".dat"; + } + if (fWeightFile == WeightFileType::RootBinary) { + fileName += ".root"; + } + fGC += sessionName + "(std::string filename =\"" + fileName + "\""; + } else { + // no need to pass weight file since it is not used + // keep passing a string for compatibility + fGC += sessionName + "(std::string = \"\""; + } + // add initialization of shape parameters + // assume all parameters are of type size_t + if (!fShapeParams.empty()) { + for (auto &p : fShapeParams) { + fGC += ",\n"; + fGC += " size_t " + p.first + " = " + p.second; + } + } + fGC += ") {\n"; + + if (fUseWeightFile) { + fGC += "\n//--- reading weights from file\n"; + ReadInitializedTensorsFromFile(fReadPos); + fGC += "\n"; + // fUseWeightFile = fUseWeightFile; + } + + // now we have passed the parameters we can allocate the dynamic tensors + GenerateDynamicTensorInfo(); + + // add here initialization code for operator for (size_t id = 0; id < fOperators.size(); id++) { - std::string opName = std::to_string(id); - fGC += fOperators[id]->GenerateSessionMembersCode(opName); + fGC += fOperators[id]->GenerateInitCode(); } + + fGC += "}\n\n"; + } + // generate the inference code + GenerateOutput(); + + // end of session + if (fUseSession && !fIsGNNComponent) { + fGC += "}; // end of Session\n"; + } +} + +void RModel::GenerateSessionCode_GPU_ALPAKA() +{ + + // define the Session struct (for GNN this is generated in RModel_GNN) + if (fUseSession) { + if (!fIsSubGraph) + fGC += "struct Session {\n"; + else + fGC += "struct Session_" + fName + " {\n"; + } + + // // generate code for declaring the initialized tensors + GenerateInitializedTensorInfo_GPU_ALPAKA(); + + // // evaluate total intermediate memory and position intermediate tensor addresses + // std::string intermediate_memory_alloc_string = ""; + // intermediate_memory_alloc_string += "\n// --- Positioning intermediate tensor memory --"; + // for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { + // intermediate_memory_alloc_string += AllocateIntermediateMemory(fOperators[op_idx]->GetOpOutputTensors()); + // CheckAndFlushIntermediateMemory(fOperators[op_idx]->GetOpInputTensors(), op_idx); + // } + + // // to check remaining unused fragments after memory allocation (lesser the better) + // // for (const auto &it: fIntermediateMemoryInfo.available_stack){ + // // std::cout<<"chunk_idx: "<fName + " fSession_" + graph->fName + ";\n"; + } + + // Generate code for Session constructor + if (fUseSession) { + std::string sessionName = "Session"; + if (fIsSubGraph) + sessionName += "_" + fName; + // add here specific operator code that needs to define session data members + // fGC += "\n"; + // for (size_t id = 0; id < fOperators.size(); id++) { + // std::string opName = std::to_string(id); + // fGC += fOperators[id]->GenerateSessionMembersCode(opName); + // } fGC += "\n"; // here add initialization and reading of weight tensors if (fUseWeightFile) { @@ -885,13 +1080,15 @@ void RModel::GenerateSessionCode() if (fUseWeightFile) { fGC += "\n//--- reading weights from file\n"; - ReadInitializedTensorsFromFile(fReadPos); + ReadInitializedTensorsFromFile(0); fGC += "\n"; // fUseWeightFile = fUseWeightFile; } + MoveInitializedTensorsToBuffers_ALPAKA(); + // now we have passed the parameters we can allocate the dynamic tensors - GenerateDynamicTensorInfo(); + GenerateDynamicTensorInfo_GPU_ALPAKA(); // add here initialization code for operator for (size_t id = 0; id < fOperators.size(); id++) { @@ -967,6 +1164,62 @@ void RModel::Generate(std::underlying_type_t options, int batchSize, lo } } +void RModel::GenerateGPU_ALPAKA(std::underlying_type_t options, int batchSize, bool verbose) +{ + fVerbose = verbose; + fBatchSize = batchSize; + + // session flag is used in operator initialize + if (static_cast>(Options::kNoSession) & options) { + fUseSession = false; + fWeightFile = WeightFileType::None; + } + if (static_cast>(Options::kNoWeightFile) & options) { + fUseWeightFile = false; + fWeightFile = WeightFileType::None; + } + if (static_cast>(Options::kRootBinaryWeightFile) & options) { + fUseWeightFile = true; + fWeightFile = WeightFileType::RootBinary; + } + if (fUseWeightFile && !fUseSession) { + throw std::runtime_error( + "TMVA-SOFIE: RModel::Generate: cannot use a separate weight file without generating a Session class"); + } + + if (static_cast>(Options::kGNN) & options || static_cast>(Options::kGNNComponent) & options) + throw std::runtime_error("SOFIE GPU does not yet supports GNN Inference.") + + // initialize the model including all operators and sub-graphs + Initialize(batchSize, verbose); + + std::string hgname; + // if (!fIsSubGraph) { + // fGC.clear(); + // GenerateHeaderInfo_GPU_ALPAKA(hgname); + // } + + // generate first code for the subgraphs + // for (auto &graph : fSubGraphs) { + // if (fVerbose) + // std::cout << "generate session code for subgraph " << graph->fName << std::endl; + // graph->GenerateSessionCode(); + // fGC += graph->fGC; + // } + + if (fVerbose) + std::cout << "generate Main session code - model " << fName << std::endl; + + // generate main session code + GenerateSessionCode_GPU_ALPAKA(); + + if (!fIsSubGraph) { + fGC += ("} //SOFIE_" + fName + "\n"); + fGC += "\n#endif // " + hgname + "\n"; + } +} + + void RModel::ReadInitializedTensorsFromFile(long pos) { // generate the code to read initialized tensors from a text data file if (fWeightFile == WeightFileType::Text) { @@ -978,9 +1231,9 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { fGC += " throw std::runtime_error(\"tmva-sofie failed to open file \" + filename + \" for input weights\");\n"; fGC += " }\n"; - if(fIsGNNComponent) { - fGC += " f.seekg(" + std::to_string(pos) + ");\n"; - } + // if(fIsGNNComponent) { + // fGC += " f.seekg(" + std::to_string(pos) + ");\n"; + // } fGC += " std::string tensor_name;\n"; fGC += " size_t length;\n"; @@ -1048,10 +1301,34 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { std::runtime_error("tmva-sofie tensor " + tensor_name + " with type " + ConvertTypeToString(i.second.type()) + " cannot be read from a ROOT file"); } fGC += " }\n"; - } - fGC += " }\n"; + } } -} + } + + void RModel::MoveInitializedTensorsToBuffers_ALPAKA(){ + for (auto &i : fInitializedTensors) { + // skip Constant and shape tensors + if (!i.second.IsWeightTensor()) continue; + std::string tensor_name = "tensor_" + i.first; + length = ConvertShapeToLength(i.second.shape()); + std::string slength = std::to_string(length); + if (i.second.type() == ETensorType::FLOAT) { + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(float));\n"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength");\n"; + } else if (i.second.type() == ETensorType::DOUBLE) { + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(doub;e));"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength");\n"; + } else if (i.second.type() == ETensorType::INT64) { + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(int64_t));"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength");\n"; + } else { + std::runtime_error("tmva-sofie tensor " + tensor_name + " with type " + ConvertTypeToString(i.second.type()) + " cannot be read from a ROOT file"); + } + } + } long RModel::WriteInitializedTensorsToFile(std::string filename) { // Determine the file extension based on the weight file type diff --git a/src/SOFIE_core/src/RModel_Base.cxx b/src/SOFIE_core/src/RModel_Base.cxx index d4d1f1c..a3392d8 100644 --- a/src/SOFIE_core/src/RModel_Base.cxx +++ b/src/SOFIE_core/src/RModel_Base.cxx @@ -58,6 +58,38 @@ void RModel_Base::GenerateHeaderInfo(std::string& hgname) { } } +void RModel_Base::GenerateHeaderInfo_GPU_ALPAKA(std::string& hgname) { + fGC += ("//Code generated automatically by TMVA for ALPAKA Inference of Model file [" + fFileName + "] at [" + fParseTime.substr(0, fParseTime.length()-1) +"] \n"); + // add header guards + hgname = fName; + std::transform(hgname.begin(), hgname.end(), hgname.begin(), [](unsigned char c) { + return std::toupper(c); + } ); + hgname = "SOFIE_" + hgname; + fGC += "\n#ifndef " + hgname + "\n"; + fGC += "#define " + hgname + "\n\n"; + for (auto& i: fNeededStdLib) { + fGC += "#include <" + i + ">\n"; + } + for (auto& i: fCustomOpHeaders) { + fGC += "#include \"" + i + "\"\n"; + } + fGC += "#include \n"; + fGC += "#include \n"; + fGC += "#include \n"; + + // for the session we need to include SOFIE_Common functions + //needed for convolution operator (need to add a flag) + fGC += "#include \"SOFIE/SOFIE_common.hxx\"\n"; + if (fUseWeightFile) + fGC += "#include \n"; + // Include TFile when saving the weights in a binary ROOT file + if (fWeightFile == WeightFileType::RootBinary) + fGC += "#include \"TFile.h\"\n"; + + fGC += "\nnamespace SOFIE_" + fName + "{\n"; +} + void RModel_Base::OutputGenerated(std::string filename, bool append) { // the model can be appended only if a file name is provided if (filename.empty()) { From fc9846cfbb5c94b30b35dc91fa03465ac12f79a6 Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Fri, 11 Apr 2025 14:57:12 +0200 Subject: [PATCH 02/22] GPU ALPAKA Support in GEMM --- src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx | 108 ++++++++++++++++++++ src/SOFIE_core/src/RModel.cxx | 8 +- 2 files changed, 112 insertions(+), 4 deletions(-) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx index 046bf56..b6901f0 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx @@ -289,6 +289,28 @@ namespace SOFIE{ return out.str(); } + std::string GenerateInitCode_GPU_ALPAKA() override { + std::stringstream out; + // generate initialization code for broadcasting of bias tensor + if (fShapeC.size() != fShapeY.size() && fNC != fNC2) { + // we broadcast here always C in Y output, so target shape is the one of Y + // no need to call UTILITY::UnidirectionalBroadcastShape. + // here in case of parametric shape we need to assume that the parameters will be defined in the initialization code. + auto targetShape = fShapeY; + // include a separate scope to avoid defining unique operator temp variables + out << "//--- broadcast bias tensor " << fNC << "for Gemm op\n"; + out << SP << "{\n"; + out << " float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_" + << fNC << "," << ConvertShapeToString(fShapeC) << ", " << ConvertDynamicShapeToString(fShapeY) << ");\n"; + auto length = SOFIE::ConvertDynamicShapeToLength(fShapeY); // output size + out << SP << SP << "auto hostBuf_"<< fNC2 << " = alpaka::allocBuf(hostAcc,"+ length +");\n"; + out << SP << SP << "std::memcpy(alpaka::getPtrNative(hostBuf_"<< fNC2 <<"), data, "<< length << " * sizeof(float));\n"; + out << SP << SP << "alpaka::memcpy(queue, deviceBuf_"<< fNC2 << ", hostBuf_"<< fNC2 << " , "<< length << ");\n"; + out << SP << "}\n"; + } + return out.str(); + } + std::string Generate(std::string opName) override { opName = "op_" + opName; @@ -389,6 +411,92 @@ namespace SOFIE{ return out.str(); } + std::string Generate_GPU_ALPAKA(std::string opName) override { + opName = "op_" + opName; + + if (fShapeA.empty() || fShapeB.empty() || fShapeY.empty() || (fNC != "" && fShapeC.empty())) { + throw std::runtime_error("TMVA SOFIE Gemm Op called to Generate without being initialized first"); + } + std::stringstream out; + out << "\n//--------- Gemm_GPU_ALPAKA\n"; + out << SP << "char " << opName << "_transA = " << (fAttrTransA ? "\'t\'" : "\'n\'") << ";\n"; + out << SP << "char " << opName << "_transB = " << (fAttrTransB ? "\'t\'" : "\'n\'") << ";\n"; + // need to consider case A and B have dim > 2 (for MatMul) + int64_t dimA = fShapeA.size(); + int64_t dimB = fShapeB.size(); + int64_t dimY = fShapeY.size(); + if (dimA != dimB || dimA != dimY) { + throw std::runtime_error("TMVA SOFIE Gemm(MatMul) has invalid shape for inputs or output"); + } + auto m = (fAttrTransA ? fShapeA[dimA-1].GetVal() : fShapeA[dimA-2].GetVal()); + auto n = (fAttrTransB ? fShapeB[dimB-2].GetVal() : fShapeB[dimB-1].GetVal()); + auto k = (fAttrTransA ? fShapeA[dimA-2].GetVal() : fShapeA[dimA-1].GetVal()); + std::vector sY = {fShapeY[dimY-2], fShapeY[dimY-1]}; + // extra dimensions in case of stacked MatMul + std::vector sA; + for (int64_t i = 0; i < dimY-2; i++) { + sA.push_back(fShapeY[i]); + } + auto lengthGemm = ConvertDynamicShapeToLength(sY); // size of the Gemm operation + auto lengthExtra = ConvertDynamicShapeToLength(sA); // extra length in case input tensors are of dim>2 (MatMul) + + out << SP << "int " << opName << "_m = " << m << ";\n"; + out << SP << "int " << opName << "_n = " << n << ";\n"; + out << SP << "int " << opName << "_k = " << k << ";\n"; + out << SP << "float " << opName << "_alpha = " << std::setprecision(std::numeric_limits::max_digits10) << fAttrAlpha << ";\n"; + out << SP << "float " << opName << "_beta = " << std::setprecision(std::numeric_limits::max_digits10) << fAttrBeta << ";\n"; + out << SP << "int " << opName << "_lda = " << (fAttrTransA ? m : k) << ";\n"; + out << SP << "int " << opName << "_ldb = " << (fAttrTransB ? k : n) << ";\n"; + + // case bias is present + if (!fNC.empty()){ + if (fNC2 == fNC) { + // add a check in case broadcasting was not needed or done outside of session + // C should have smaller dimension of Y + if (!fIsDynamic) { + if (std::stoi(lengthGemm) != static_cast(ConvertShapeToLength(fShapeC))) + throw std::runtime_error("TMVA SOFIE Gemm Op " + opName + " Bias tensor has not correct size " + + ConvertShapeToString(fShapeC) + " output length " + lengthGemm); + } else { + // add a dynamic check (C should not be a dynamic tensor) + out << SP << "assert(" << lengthGemm << " != " << ConvertShapeToLength(fShapeC) << ");\n"; + } + } + } else { + //in this case fAttrBeta needs to be equal to zero otherwise second time we run we will use + // the previous result + if (fAttrBeta != 0) { + throw std::runtime_error("TMVA SOFIE Gemm Op " + opName + " Bias tensor is not present but beta value in Gemm is not zero"); + } + } + + // include MatMul case where we stack the Gemm operations + // exclude case where we have only 1's in the additional dims + bool doStackMul = dimY > 2 && ( fIsDynamic || std::stoi(lengthExtra) > 1); + if (doStackMul) { + out << SP << "size_t " << opName << "_yoffset = 0;\n"; // needed if we stack the gemm operations + out << SP << "for (int i = 0; i < " << lengthExtra << "; i++){\n"; + out << SP; + } + // in the case of bias + if (!fNC.empty()){ + out << SP << "std::copy(" << "tensor_" << fNC2 << ", " << "tensor_" << fNC2 << " + " << lengthGemm << ", " + << "tensor_" << fNY; + if (doStackMul) out << " + " << opName << "_yoffset"; + out << ");\n"; + } + + + if (fType == "float"){ + out << SP << "Kokkos::View kokkos_dev_"< kokkos_dev_"< kokkos_dev_"< GetBlasRoutines() override { return { std::string("Gemm"), std::string("Gemv") }; } }; diff --git a/src/SOFIE_core/src/RModel.cxx b/src/SOFIE_core/src/RModel.cxx index e2bc530..5b6a793 100644 --- a/src/SOFIE_core/src/RModel.cxx +++ b/src/SOFIE_core/src/RModel.cxx @@ -827,7 +827,7 @@ std::string createOutputTensor(RModel const &rmodel, std::string const &name, bo } // namespace -void RModel::GenerateOutput() { +void RModel::GenerateOutput_GPU_ALPAKA() { if (fVerbose) std::cout << "Generating main inference code for " << fName << std::endl; @@ -871,7 +871,7 @@ void RModel::GenerateOutput() { for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { if (fVerbose) std::cout << "Generating code for operator .... " << op_idx << std::endl; - fGC += (fOperators[op_idx]->Generate(std::to_string(op_idx))); + fGC += (fOperators[op_idx]->Generate_GPU_ALPAKA(std::to_string(op_idx))); } fGC += SP + "return {"; @@ -1092,13 +1092,13 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() // add here initialization code for operator for (size_t id = 0; id < fOperators.size(); id++) { - fGC += fOperators[id]->GenerateInitCode(); + fGC += fOperators[id]->GenerateInitCode_GPU_ALPAKA(); } fGC += "}\n\n"; } // generate the inference code - GenerateOutput(); + GenerateOutput_GPU_ALPAKA(); // end of session if (fUseSession && !fIsGNNComponent) { From 419b3543b49a31939887ca72b44896115f34f99e Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Fri, 11 Apr 2025 16:17:54 +0200 Subject: [PATCH 03/22] fix: errors with the generation function --- .vscode/settings.json | 73 ++- Linear_16.dat | 40 ++ Linear_16.hxx | 658 ++++++++++++++++++++ src/SOFIE_core/inc/SOFIE/RModel.hxx | 15 + src/SOFIE_core/inc/SOFIE/ROperator.hxx | 2 + src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx | 7 +- src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx | 25 + src/SOFIE_core/src/RModel.cxx | 51 +- src/SOFIE_core/src/RModel_Base.cxx | 5 +- 9 files changed, 844 insertions(+), 32 deletions(-) create mode 100644 Linear_16.dat create mode 100644 Linear_16.hxx diff --git a/.vscode/settings.json b/.vscode/settings.json index eb254be..381ce8f 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -1,6 +1,77 @@ { "files.associations": { "*.icc": "cpp", - "limits": "cpp" + "limits": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstddef": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cstring": "cpp", + "ctime": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "array": "cpp", + "atomic": "cpp", + "bit": "cpp", + "*.tcc": "cpp", + "bitset": "cpp", + "compare": "cpp", + "complex": "cpp", + "concepts": "cpp", + "cstdint": "cpp", + "deque": "cpp", + "map": "cpp", + "set": "cpp", + "string": "cpp", + "unordered_map": "cpp", + "unordered_set": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "functional": "cpp", + "iterator": "cpp", + "memory": "cpp", + "memory_resource": "cpp", + "numeric": "cpp", + "optional": "cpp", + "random": "cpp", + "regex": "cpp", + "string_view": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "utility": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iomanip": "cpp", + "iosfwd": "cpp", + "iostream": "cpp", + "istream": "cpp", + "new": "cpp", + "numbers": "cpp", + "ostream": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "cinttypes": "cpp", + "typeinfo": "cpp", + "charconv": "cpp", + "chrono": "cpp", + "condition_variable": "cpp", + "list": "cpp", + "ratio": "cpp", + "future": "cpp", + "mutex": "cpp", + "semaphore": "cpp", + "shared_mutex": "cpp", + "span": "cpp", + "stop_token": "cpp", + "thread": "cpp", + "cfenv": "cpp", + "variant": "cpp", + "format": "cpp" } } \ No newline at end of file diff --git a/Linear_16.dat b/Linear_16.dat new file mode 100644 index 0000000..873ce7d --- /dev/null +++ b/Linear_16.dat @@ -0,0 +1,40 @@ +tensor_8weight 2500 +0.0268758684 0.139096066 0.0821818858 -0.127417535 -0.0831027254 0.109001353 -0.0448572189 0.0432091393 -0.100685023 -0.0782502964 -0.0569691472 -0.0834055692 -0.0914414823 -0.00128868222 0.114371844 0.157571077 -0.0249715224 -0.0275524613 -0.106611423 0.160815567 0.0850525424 -0.0246056858 0.0868391246 0.0197147224 0.0387364663 0.0334140956 -0.0329913124 0.110141195 0.105670758 -0.0897664875 -0.0678865984 0.0182914361 0.146356225 0.0747506022 -0.0347048417 0.0646456406 -0.0683225691 -0.0967762694 0.144724965 0.0968451351 -0.049604129 -0.0246048607 0.0982864648 -0.00104637037 -0.0540190488 0.02299482 -0.0587500408 0.162345782 -0.0178857595 -0.114502899 -0.0277074426 0.0523337275 -0.0407291614 -0.125408962 -0.0477996059 -0.144638136 -0.141282856 0.107945614 -0.0642622635 0.106897406 0.141127169 0.00702024298 0.115400836 0.0949773341 -0.0841375515 -0.029037755 -0.12251503 -0.113417722 0.152951673 -0.052355133 0.125115007 0.11263705 -0.0993821546 -0.100654982 0.13138859 -0.121526435 0.0688993633 0.0602294281 0.0230368655 -0.119217426 -0.131345615 -0.0928916186 0.0589227341 -0.0877812058 -0.0575322062 -0.0479355939 0.119958326 0.0839382187 0.0134669729 -0.120720349 -0.0728492588 -0.0201216638 -0.0426205285 0.0580729693 -0.0317371115 -0.0582037121 -0.058949165 -0.0108661382 -0.0596015975 0.0923921913 0.106575489 -0.00681856275 0.0882440805 -0.0621290579 -0.0726372078 -0.00623785472 0.0285876859 0.0697654709 0.0963460952 -0.0578520186 -0.0386559479 0.0133705661 -0.0272551179 0.0195438117 0.0346884355 -0.00187640428 0.0641605407 0.176762238 0.159317046 -0.0952501073 0.0985514522 -0.0871972367 -0.0842028037 -0.0397452265 0.17574358 -0.0538034029 0.136749208 -0.0399385504 0.068287164 0.0217975918 -0.132272243 -0.0182651877 0.105293095 0.00358554721 0.00108983321 -0.153865114 0.0111923162 0.168782786 0.0969837084 0.0112454593 -0.0346569866 -0.0916731507 -0.00954194739 0.154270783 -0.0877914429 0.0242955964 -0.0126784407 0.121400051 -0.0289624184 0.0689913705 0.0434036702 0.0646613985 0.0640042126 -0.0474287085 0.073149845 0.156802103 -0.041823104 0.0810273662 0.179701 -0.0470410772 -0.0788431466 -0.147018611 0.053253185 -0.0240673199 -0.0210381355 -0.0958639532 -0.0170583278 0.0516901463 -0.111291543 0.00283904956 0.142500415 0.141777232 0.126547039 0.129214615 0.0729232654 -0.0321790762 -0.143716827 -0.00954447314 0.172390178 0.0871036574 0.000518912973 0.103541978 0.00573523017 0.100544035 0.167853162 0.157549649 0.117853075 -0.0903918445 -0.00601014355 0.0462133735 -0.119286336 0.111245058 0.00672465004 -0.035454426 0.184626952 -0.0521864779 0.180116341 -0.0577540956 -0.0600765273 0.151180387 -0.0436708629 -0.119430825 0.163085073 0.0715407208 0.0878540799 0.0810162574 -0.0718293861 0.12325272 -0.0860322118 -0.122137249 0.00682034623 0.158898726 -0.109563902 -0.140805796 0.144035459 0.0911271796 -0.0533853769 0.158740863 -0.12681675 0.0929608271 -0.0734888241 -0.0542239025 -0.0844008029 -0.0349548869 0.0194364432 0.00317873154 -0.0542409308 -0.110601485 -0.0040136571 0.126150146 -0.0695957989 0.135092571 0.0206705686 0.0210149139 -0.175603613 -0.0048725917 0.0448048264 0.0753361583 -0.0960626155 -0.00809389353 0.00274648191 0.118843384 0.0924557075 -0.0390414186 0.104679853 -0.02499073 0.115688451 0.183578849 -0.0963831246 -0.0192914438 0.0245501548 0.132269129 0.0528996326 0.126745895 0.0650902316 0.142014235 -0.109039702 0.110978663 -0.168853745 -0.0998671725 -0.0272130556 -0.0755283609 -0.145343035 0.0856398046 -0.0465832911 0.156254068 -0.00759668648 0.0660862029 -0.128243685 0.123390384 -0.0730970129 0.0211296733 0.172928646 -0.0569610596 0.162485525 0.0430793129 0.148347437 0.0230896771 0.0979775786 0.0892469361 0.114915423 0.102117866 0.114598379 -0.0385860801 0.104682192 0.05711741 0.183385804 0.114157908 0.0803671777 0.121530138 0.0750841424 -0.0201519765 0.0892636031 -0.00840737578 -0.0380099379 0.0670845732 0.173552945 -0.0446153358 0.0231650621 -0.0720840693 0.104690835 0.113704339 0.0918174759 -0.025853835 0.0474199951 -0.0122872479 0.0429795273 -0.0552103594 0.00883762538 0.151448011 -0.0409595668 0.0949078426 0.0689887926 0.107225835 0.0818655714 0.0588729456 -0.0576343685 -0.0948910415 -0.0496109203 -0.10446807 0.187580436 0.165386483 -0.0410638899 0.109590538 -0.0093578482 -0.168643042 0.0453341343 0.0740079209 -0.0932418108 -0.136097178 0.0847565904 0.016970491 -0.196696535 0.0213545114 0.146830887 0.188490406 -0.104602233 0.102813676 -0.020301817 0.0581303798 0.154687196 0.0931936353 0.0230271649 -0.0598398224 0.00981738791 0.00165832066 0.130958691 -0.141922146 -0.0931093395 -0.0452646948 0.0832985789 -0.0752738565 0.108650707 -0.0635550246 0.161715269 -0.048298005 -0.0519500524 0.111362822 -0.0297681484 0.0919308066 0.00461465074 -0.123445027 -0.0583725758 0.0877097845 -0.0828031972 -0.0494801551 -0.0178236663 0.110060342 -0.104191855 0.00495020067 0.190953419 0.175235912 0.0748231113 -0.0354038626 -0.0866233632 0.0114633273 -0.0709704086 -0.0408563316 -0.00744438358 -0.12112467 -0.00320398994 -0.109712012 -0.13769187 -0.0328272358 0.091612272 0.14105013 0.146673918 -0.0514545403 0.202921212 0.0834511071 0.160404101 0.0601333193 0.0581494831 0.083636649 0.0984802917 0.0609594397 0.128031611 -0.0596118644 0.112030312 0.175436005 -0.0210987478 0.100863054 0.141590253 -0.014728453 -0.0364963971 0.0035578683 -0.0124314548 0.0569810681 0.0548986979 0.130580813 0.103102274 -0.0677055791 -0.116339654 0.129263833 0.162418574 0.122197464 -0.0109819323 0.0938043669 -0.0469912067 -0.101457044 0.131629422 -0.0877847597 -0.0320621915 0.0457580797 0.0759001374 -0.0854525268 0.0624620654 0.088354066 0.0471264385 0.122950502 -0.0319194868 0.0971357599 0.144149795 0.0968611538 -0.0324465856 -0.13455525 0.0447516218 -0.0679218769 -0.0809827521 0.0494714826 -0.0949900225 0.0311798677 -0.00109984947 0.174830928 -0.0281612556 0.163149565 0.0736394823 -0.0375521332 0.00539422035 -0.0927275494 -0.0925532579 0.0742847919 0.0994291157 0.127749816 0.0300972443 -0.0191503167 -0.0972991213 0.0944213718 -0.0106646148 0.0151962861 0.00275415881 0.0332029015 -0.0985995755 -0.0955503657 0.0529588386 -0.0463228486 -0.139574915 -0.105905958 0.0530111678 -0.153271616 0.00135927019 -0.018976111 0.0405978933 0.0479904711 0.0545446351 -0.114191957 0.141731873 -0.132812411 -0.0630234033 0.0933084786 0.0396189578 -0.0473725162 -0.0290426835 -0.123914912 -0.0582598001 0.152059436 0.0548362397 -0.0321423411 0.115950227 -0.120880082 0.184676751 -0.0445445627 -0.103703029 0.0245305933 -0.00212677591 -0.0626897737 -0.0121289967 0.0785561725 -0.0832984447 0.0998352543 0.117086425 -0.0671990365 -0.0363239795 0.0353550613 0.114468403 0.143954277 0.105897352 0.0256107412 0.156521618 0.0780752227 -0.0554250963 0.0736213177 -0.10541296 0.0503535867 0.00255402969 0.0666635558 0.129061893 -0.0195398014 0.0478001311 0.065228425 -0.0979058444 0.0814248547 0.108272545 -0.00994789507 -0.0218796581 -0.154623747 0.0106207961 -0.12293978 -0.00427472685 -0.126063108 -0.0116878618 0.100741506 -0.0546985939 -0.0451277271 0.0930468291 -0.0851750597 -0.0140047939 -0.123040549 0.132812724 0.0833404511 0.140870854 -0.120734856 -0.0805390403 -0.0502453148 -0.0170761105 0.00345065887 -0.0480272733 -0.0561171696 0.0876882076 0.0613627955 -0.0316582024 0.0100890994 0.0458408594 -0.135604486 -0.0192864686 -0.01957082 0.0726629794 -0.0564594492 -0.0693246424 0.0831580311 -0.123946451 0.0842915326 0.027340591 0.123881891 0.0634962171 0.0854125172 0.0679267496 0.0966168046 -0.029469654 0.0381903499 0.111267343 0.140807226 -0.13526763 0.0522036403 -0.115617715 0.0543578118 0.0461016595 0.103722617 0.0174795687 -0.136987507 -0.0600835234 0.0538240522 0.0903360397 0.0172370523 -0.0140279233 -0.114186123 0.0943125635 -0.0614755452 -0.0279850513 0.083064124 -0.12880753 -0.13455835 -0.0599042326 -0.0389251933 -0.0604324266 -0.0513332263 -0.0554876402 0.0233900547 -0.0640518144 0.111689016 0.0502607375 -0.00419057906 -0.0730830133 -0.0277305022 0.171559766 0.0534306914 0.00674414961 -0.107875511 -0.0510217324 -0.0838860199 -0.15236254 -0.138948157 -0.125519603 0.0523681492 -0.0187952798 0.114655808 0.0474532545 0.0917048305 -0.0550882407 0.0838057324 0.18853642 0.142427206 0.180868432 0.140680373 -0.0942524076 0.123159751 0.0897716284 -0.0308326464 0.0049529071 0.13588357 0.0297236629 -0.0363686383 -0.0588090122 0.12634854 0.0122025581 0.186096713 0.0920768976 0.0481046252 0.0876177624 0.0250588302 -0.0850643218 -0.0529115237 0.029226495 -0.0699693412 0.150488198 -0.0428842455 0.179663286 -0.0198406726 0.0218468606 0.197559595 0.0729278922 0.0885386169 -0.133979425 0.0167944431 -0.0360915139 0.0497089326 -0.0268492103 -0.0587182194 0.0121284872 -0.00810500979 -0.0885604918 -0.0682897642 -0.109051131 0.101431355 -0.10556107 -0.0689118356 0.0273847431 0.123891041 -0.0328962579 -0.0183387175 0.0236377716 -0.126516774 -0.027949512 0.125757441 -0.137005895 0.0159674287 -0.0881164894 0.0896662176 -0.0548697859 -0.0910438597 -0.126777187 0.0143643618 -0.0796068907 -0.0773626193 -0.0353754535 0.0982186347 0.102850467 0.0936983526 0.0350374728 0.0642853081 -0.0353903249 0.0034533143 -0.0836362615 -0.0474314392 0.137183502 0.00499179959 -0.0352529734 -0.12372198 0.0710547566 -0.0847075656 0.108061433 0.0962944925 -0.0228818804 0.0236922354 0.0593082607 -0.0698251426 -0.0753812417 -0.0950560495 -0.0748883784 -0.139509365 -0.0391269475 0.117235079 -0.0770111158 0.0286441594 -0.0478565544 0.0810799748 -0.0450968295 -0.0848289505 -0.0374233201 -0.0248766541 -0.0257886276 -0.00540667772 -0.131286308 -0.125986263 0.0405903906 -0.0291525051 -0.0074609369 -0.0744228065 0.0589668602 -0.0275227204 0.134439722 -0.116755374 -0.0779221952 0.0212557018 0.126257434 0.105313227 0.120735362 -0.0692541525 -0.0584569424 -0.108607799 -0.0476316065 -0.0588775352 0.0463445932 -0.133415371 -0.128379583 0.121760055 -0.0548802391 -0.0722203329 0.0508697033 -0.079833433 0.119531378 -0.0217971876 -0.105434492 -0.0522313938 -0.0453321934 0.107274927 0.0276630223 0.131097168 0.079335019 -0.114221223 0.0391028263 0.128627002 -0.0898075253 0.0599811226 0.072371535 0.0517965741 -0.0948484987 -0.00232080673 -0.120201647 -0.1168992 -0.163116753 0.101379991 -0.0693345442 -0.0656319857 0.0136408824 -0.0277835261 0.0546165146 -0.00200848537 0.105287716 0.021810092 0.101102382 -0.0842717886 -0.020271264 -0.121380635 0.0648328215 0.0722329915 0.0304982048 -0.0108427657 -0.0313236415 0.0242884308 0.0848189518 -0.00415426493 -0.129282877 -0.0663083941 -0.0568652116 -0.0136977984 0.0484237522 -0.136208966 -0.0747673362 -0.00170940161 0.00959950686 -0.0287488401 0.174732566 0.0778143853 -0.0412021503 0.138728648 -0.12335252 0.0248393398 -0.00735486019 -0.0928628147 -0.0812815279 0.125593081 0.0110786557 0.124990925 0.150338039 0.0616421662 -0.0968330279 0.11320933 0.116342612 -0.00344289024 0.141467705 -0.072798416 0.121146008 -0.0969213247 -0.0562434942 -0.0969665498 0.0179323703 -0.0307174679 0.0410963222 0.0908566862 -0.0271566976 0.181122735 0.176294565 -0.0137444139 -0.152425051 0.0303653441 -0.0654244274 0.103337444 0.161812425 -0.114469662 0.0337155983 0.0851140097 0.0473025665 -0.0455731675 0.0910733119 0.0064521106 0.187958792 0.167304024 -0.127127901 0.00861696992 -0.0400827006 -0.140295923 0.0512709506 0.0780323595 -0.0932431147 0.0957963392 -0.125637099 -0.0162038952 0.00915290881 0.0997759104 0.0987372771 0.16382876 -0.056870617 -0.139243662 -0.071242094 -0.0608208477 0.107261404 0.0251677446 -0.0958002061 -0.0900856256 0.0601827726 0.107842483 -0.0984033346 -0.00783828646 0.0255061835 0.00474396348 -0.0694380254 0.0950763747 -0.0441939719 0.0136436457 0.108352683 0.137562498 0.0213271081 0.0454172641 -0.0874122232 -0.089138791 0.0275239777 -0.0769107938 -0.0700656921 0.131777659 0.175489351 -0.0777074322 -0.00239577657 -0.00230550254 0.167611465 0.0103928242 -0.0727633685 -0.0352996625 0.00823523104 -0.0106461262 0.0824658424 -0.0121006668 -0.0598732941 -0.0662225783 0.0269689541 0.0804088712 0.138990924 0.149531111 -0.0406282917 -0.126480639 -0.025079472 0.0510983169 0.035402365 0.08281295 0.156712428 -0.026096575 -0.0651845187 -0.0323777311 -0.105685644 0.0783127025 0.0188494585 0.0856304839 -0.046786584 -0.0739144981 0.0625574216 0.127959684 -0.0416722037 0.114131734 -0.04018737 0.0335959457 -0.0786943138 -0.0593536906 0.000781891402 0.0818767101 0.07887806 0.0942715183 0.178498864 0.14772743 0.00345369685 -0.0423939079 -0.0205054302 0.123664357 0.0551863275 -0.177257061 0.114078067 0.0455558784 -0.0323475748 -0.112341911 0.0721865445 -0.0341178104 -0.0914598405 0.0694510341 -0.0585612506 -0.0373541526 -0.205118358 -0.0179533362 0.0257616416 0.18985191 0.101283662 0.0620856099 0.163825974 0.150054261 0.0351246744 -0.0134136677 0.0362584144 0.0490719676 -0.0219044462 0.100722261 -0.0236032922 -0.0624775924 -0.18531242 -0.0643399507 0.0405745842 0.0175180174 0.123290591 0.074898921 0.0684316009 -0.0228197258 0.138146341 -0.0247859173 -0.136237904 0.0807761028 0.190366209 -0.0662142709 0.0290480666 -0.0762866884 0.106889285 0.00406613294 0.00212845136 -0.0551334918 0.176173732 -0.000862196088 0.0479077958 0.12893793 0.0908120275 0.0428063385 -0.105808966 0.0208340362 -0.0391079783 -0.17646575 -0.0161272287 -0.0779476464 -0.139349103 0.132013753 0.0993892252 0.064087227 0.131257027 -0.0114984009 -0.120081656 0.0867618024 0.0161269289 0.0568408556 -0.0086016655 -0.00697259605 -0.127590686 0.00164337456 -0.106980473 -0.0617386699 -0.093155548 -0.0321060345 0.0353029482 -0.144390106 -0.041361127 -0.102071285 -0.0588951148 0.0818923414 -0.127334356 0.0141031453 -0.111001149 -0.123913996 -0.0247361958 -0.0820739791 -0.030570088 0.127384081 0.0231190175 -0.10356193 -0.139310062 -0.0380821303 -0.0285825692 0.126087889 -0.066886954 0.0766842216 -0.135645509 -0.0953988656 0.131382018 -0.12620239 -0.0145515203 0.134063303 0.0396169424 0.0967397094 0.119111016 -0.0184818357 0.176523507 -0.0199789405 0.0826793611 -0.110192202 -0.0409205034 0.00472770026 -0.14348729 0.147804692 0.0450261496 0.0670832992 -0.0345766172 0.126415744 0.00601782696 0.104479343 0.0414096117 -0.0710287988 0.112614326 -0.147158608 -0.0370420963 -0.119263552 0.126887798 -0.105801471 -0.00610316033 -0.105143495 0.196164653 -0.0159037225 -0.0744655356 0.132361174 0.0196442343 -0.0159279685 0.126357719 -0.029065378 0.0336539075 -0.168783128 0.0148825208 0.0555515438 0.0461699739 0.102379352 0.155959725 -0.137117967 0.0191216022 0.222972959 0.125019222 -0.0988391787 -0.0180195421 -0.00158139609 0.029359296 -0.142667904 0.103885561 -0.105973668 0.0307869632 0.00780402496 -0.0674499497 0.114393353 -0.127377525 0.00557687134 -0.061907284 -0.124339581 0.0993482098 -0.0195321329 -0.0585047677 0.0953318775 0.0480449647 -0.0227444768 0.0408569276 0.0562379509 0.0313135199 -0.132302389 0.125221208 -0.0873878524 -0.111026652 0.115592606 -0.0906311348 -0.00652401475 -0.127266601 -0.144731417 0.0330261067 -0.135340631 -0.1036596 0.0953472406 0.0560712516 -0.135941952 -0.040515393 -0.0903434008 -0.0961870179 0.119984761 -0.023610061 0.136370555 -0.0166805629 0.127074108 -0.0724001899 -0.0508536957 0.115113258 -0.0258387104 0.0489959568 -0.142882243 0.104940325 -0.109118342 -0.0262665749 -0.139981106 0.15954946 -0.00940728188 0.0964377075 0.0164540596 0.0535212867 0.189132586 -0.035037268 -0.0510806404 -0.0423220247 -0.0387359485 0.0559357852 -0.0354634076 0.174487337 -0.0849912167 -0.124549776 0.0769607276 -0.0557537489 -0.0945133492 0.152356565 -0.0749799982 0.124006495 -0.11373242 0.0692153648 -0.0678370595 -0.0376192741 -0.0456925295 0.0221248977 0.0522562191 -0.098749496 0.0200695693 0.122223869 -0.0449365303 0.0366582051 -0.135746583 -0.0222668201 -0.00500035612 0.0176082794 0.128107294 -0.0137729133 0.0214566886 0.110726222 0.150741264 -0.0353633799 0.159352034 0.0344046839 0.105027668 -0.0436317027 -0.140568197 -0.101473704 -0.032741949 -0.0106838001 -0.0316685364 -0.129210651 -0.0679190904 0.133524075 0.123635188 0.10515888 -0.0406672806 -0.0610394813 0.091179423 -0.110931419 0.079060778 0.116783403 -0.0516342819 -0.13402909 0.0207334459 0.0812019557 -0.0820832253 0.0703516304 -0.084283106 -0.143779725 0.0990532935 -0.0511374213 -0.0828005821 -0.0313504227 0.108964168 -0.0947234705 -0.129062966 -0.0215799771 0.0714171082 -0.013629063 0.100170761 0.121061251 -0.122967482 0.0534396805 -0.104779765 -0.0699278712 0.0862568319 0.0739753321 0.0636002868 0.113115802 -0.0251864307 0.0644432828 0.00374182384 0.00278152619 0.0277899243 -0.0400727838 -0.0934138894 0.0662064999 0.0740315318 0.156935647 -0.125642329 -0.0181016717 0.0717086047 -0.0788133815 -0.127949879 -0.0690763518 0.0889543295 -0.172350034 -0.100477748 0.00906703342 -0.0588162839 0.0321615078 0.143424392 0.0124900788 -0.0917625949 -0.0731047541 -0.0883597806 0.193930492 -0.0296085142 0.153995425 0.056007009 0.0701433122 0.0177569669 -0.0888565779 -0.0714818016 -0.00891659409 0.14154695 0.186709836 -0.0978443697 -0.0666612759 0.00330674648 0.15638712 -0.0387458205 -0.0156392194 0.029518418 0.177299723 -0.00526926899 -0.03754526 0.142723694 0.0702423528 -0.0628082901 -0.0962519944 -0.0736426339 0.00633251155 -0.0707057118 -0.0702924654 0.0923877209 0.00983795524 0.0308573246 0.117168695 -0.112862423 0.142043695 0.0422373824 -0.0367415026 -0.106207736 -0.0756792426 0.117217235 -0.0436312594 0.0131786875 -0.0440221652 0.097473219 -0.0796951875 -0.125066265 0.124969348 0.0678982735 0.139240772 -0.076055415 -0.025149785 0.022765873 0.00105297181 -0.0667744279 -0.142704338 0.0969702899 -0.131543919 -0.0815857351 0.107110865 0.00967122614 -0.0012585416 0.122922324 -0.0521417297 0.0298166722 0.069311209 0.0601125322 0.0429519527 0.0771004632 0.138341069 0.0400493145 0.0989085436 -0.115988038 0.0790331438 0.0363655277 0.0255561695 -0.127918124 0.0311952345 -0.0788384601 0.0947113633 0.144414648 -0.0196468234 0.0516601503 0.0307283401 0.0900717825 -0.0476232618 0.0064008832 0.0990933776 -0.105707288 0.0693743229 -0.124773592 0.0183412433 0.0536187291 -0.0942984521 -0.108814135 0.0376636833 0.0153515637 0.00406998396 0.0265448689 -0.135168463 -0.120080709 -0.121890008 -0.0570892245 -0.124603435 -0.0809690952 -0.0672037601 0.13564612 0.0371975005 0.112579718 0.014319554 -0.108871549 -0.0871257633 0.0682478845 0.0747066289 -0.0636163577 0.0898959637 -0.090092048 0.129719719 -0.0293056145 0.026599288 0.0581899136 0.0231975913 -0.0209574401 -0.00654032826 0.0904182643 -0.114409715 -0.0957838446 -0.044936955 0.0165019929 0.135205165 -0.117184259 0.0401535928 0.134790704 -0.0130638136 -0.0190193728 -0.0656322092 -0.113029599 0.00841842592 0.0140976086 -0.0268416889 0.0387401059 -0.00842970423 0.153014824 0.0431912951 0.0146862119 0.0648952872 0.159487918 -0.107046999 -0.0957565159 0.0974680409 0.0875119492 -0.106558517 -0.00956180599 -0.10792207 0.0139202345 0.0237691645 0.170654655 0.182866856 0.0337778889 -0.122317858 0.104945458 -0.00241611805 -0.0189588871 0.154625118 0.0883154273 0.0683931634 0.0424042568 -0.0296660978 0.0563843139 0.0966898203 -0.00994552113 0.18443881 -0.146624371 -0.0824042782 0.116611265 -0.108186543 0.0102908229 0.103478007 0.0179831069 0.131399289 -0.0788777545 -0.0424850732 -0.00943686068 0.083427988 -0.137433812 -0.0903602764 -0.105054028 0.124498554 0.088755466 -0.0493076742 -0.027949756 0.0751230642 0.115139447 -0.127784625 -0.0843564868 0.1399187 0.077144593 0.0511633307 -0.031733308 0.00182465685 0.149220034 -0.108540453 0.0384725034 0.0657235011 -0.150511175 0.0859548301 -0.031602826 0.153693542 0.100388199 0.153390184 -0.000678598415 -0.0295467041 0.107720926 0.0790676847 0.075719431 -0.0342444293 -0.105272986 -0.101255842 0.17369619 -0.0664026737 0.0892078429 -0.113553904 -0.0292268191 0.180186689 0.109818101 0.0164926779 0.0922102034 0.0830212384 -0.00248041586 -0.0470679849 -0.165345639 0.12901403 -0.0142368376 0.0695406124 -0.0135625293 -0.106628664 0.00303458911 -0.0594627149 0.0922242925 -0.0427582636 -0.0400496349 0.178693265 0.183653072 0.0575503781 -0.0235571191 0.048922874 -0.00795071851 0.0807336569 -0.0162454005 0.0234919712 -0.102429815 0.0907384083 0.126808628 0.0676393136 -0.167194471 0.0440359078 0.137444958 0.196354747 -0.0737531483 0.0279132333 0.123419479 0.058315713 0.0996660143 0.122060843 -0.0147102922 -0.121966586 -0.0944622681 0.142329201 -0.0832371339 -0.0505962893 0.133063897 0.134808093 -0.0476008505 0.0899724364 -0.00881881081 0.129822224 -0.0857772455 0.0220859721 0.128572404 0.0105826855 0.102347367 -0.0834473595 0.0246756226 0.0283253919 0.0684853047 0.101052776 -0.12661618 -0.086164698 -0.0609710813 -0.0304403771 -0.0863657966 -0.155161664 -0.00632416084 -0.00467219949 -0.137039587 0.111212932 0.166964158 0.0581296235 -0.103098728 0.0281474199 0.0514760315 -0.0765168592 0.0823773816 0.112306684 -0.119962633 0.108718097 -0.0627609268 0.0295355972 0.061364796 0.0683022588 0.164325893 -0.112472534 0.164340407 0.0510179065 0.00963465869 -0.0768766776 -0.04377589 0.096517235 0.148181275 -0.0579664111 0.0587554127 -0.0445416085 -0.03864979 0.00753601873 0.100015543 -0.0362141766 0.133224964 -0.0191601235 0.152707025 -0.0183888227 -0.059442617 0.104400992 -0.0854767412 -0.00690022996 0.0886835605 -0.0273776986 0.130314052 0.105323426 0.161530361 -0.108458608 0.0788582712 -0.0838668495 -0.08755178 -0.102095522 -0.096060887 0.169443905 0.0577232093 0.120626166 -0.0495226867 0.096482262 0.0348549932 0.199682817 -0.0202110633 0.0846792087 -0.0826675221 0.168889627 0.0768956468 0.0698982626 0.0966169164 0.14701435 0.00816824846 0.0694516674 -0.0396548584 0.109372504 -0.0630989447 0.117110327 0.0741739869 0.0132170692 0.100324839 0.00397197716 0.00673523871 0.0452416129 0.0100933397 0.181526616 -0.131816193 -0.0729396716 0.0076587908 0.0789732337 -0.0381261818 -0.082727015 -0.112304315 0.0861935169 0.0106273741 -0.114372075 0.0639646724 -0.0495705158 -0.0882112607 -0.0384016633 0.110038161 -0.0208555609 0.0701313913 -0.0773748457 0.078994669 -0.0506972298 -0.126550719 0.0909916982 0.00305084884 -0.128831208 -0.0751241222 0.134015068 -0.0960550979 -0.0882394835 -0.0782357231 -0.0271630995 -0.0772069469 -0.118653722 -0.0367000699 0.121580288 -0.0561355688 -0.138331473 0.125544876 -0.0296058543 0.0746538565 -0.0162153672 0.062437132 -0.0231160969 0.0841860995 0.0602102727 0.124720961 -0.0469560064 -0.140982583 -0.136888638 -0.0804962814 -0.0440254994 -0.107610121 -0.0446921699 -0.0253842529 -0.0920281038 -0.102073133 0.0864460468 -0.0521458536 -0.0281716138 -0.12248721 0.108246624 0.0153880091 -0.00498063862 -0.0892293677 -0.10190247 0.144978091 -0.0280745663 0.0683950707 -0.0395756029 0.0730759278 -0.00125575683 -0.0320034325 0.0139094684 0.127148211 0.00908912718 0.074735418 0.0604887865 -0.0297355298 0.06545984 -0.0896448418 0.131814942 0.0968025178 0.0828945488 0.0279728677 0.0131593319 -0.117338456 0.0965867341 -0.00240561157 0.10527093 0.0450791791 0.020316802 -0.00228017569 0.00896368176 0.0269619301 0.150818229 0.155888513 0.116388619 -0.137444928 -0.0795316845 0.119856015 0.0243596714 -0.113116957 0.0804922581 -0.0587314703 0.169540256 0.139007777 0.139136776 -0.0298109893 0.0328682661 0.0793280452 -0.132538036 0.10372372 -0.0968293995 -0.048270233 0.0598262101 -0.0930386782 0.00116990507 0.0488549769 0.034680441 0.0733197182 -0.0573430657 0.100507811 0.07086052 -0.101341262 -0.0982980579 0.124785289 0.075397715 -0.106355786 0.00812490284 0.118339553 -0.056385763 -0.073536776 -0.091773212 0.137454107 0.0820370913 -0.0450648963 -0.104219824 -0.0738624409 -0.0940235406 -0.0482450277 0.101593941 0.0674851388 -0.088429369 -0.0648159012 -0.0145470053 -0.138597056 0.086754784 -0.0615454912 0.0661340803 -0.0228997692 0.117914066 -0.0457687825 0.0386412889 -0.139924914 0.0699922591 -0.0633266196 -0.0395022333 -0.0829551816 0.123344138 -0.0269175917 -0.112848774 -0.127718255 0.0982895121 0.00638221437 -0.0981410667 0.0559622087 -0.0958316401 0.0377015024 -0.0980365872 -0.0954344869 -0.0140493829 0.0955453813 0.0694573075 0.132618234 -0.00649338961 0.162486911 -0.0644210577 0.0785710365 -0.0324906185 0.0616738945 0.131774053 -0.065818958 0.171095803 0.17064096 -0.0761323273 0.0750825778 0.0011169787 -0.0321993567 0.0106129069 0.0721838102 0.0231869202 0.0562860221 -0.00459621055 0.0595190637 -0.0840448812 -0.081757158 -0.0908710882 0.0917035788 -0.157435328 -0.0259377975 0.137452871 0.0223723184 0.0868041068 0.120598882 0.00937895011 0.155116081 -0.100786671 0.0592786036 -0.133716181 -0.080596447 0.0356213599 0.00931102037 -0.0950432196 -0.0769308135 0.0528798848 -0.120534495 0.00211757421 0.0449208915 0.0501613319 -0.0820226222 -0.0251336843 -0.0144774914 0.0143801719 -0.0881446749 -0.123702742 -0.0779745206 -0.139141038 0.0531492084 -0.12054643 0.0143575966 0.10409309 0.0117436498 -0.0403948873 -0.110459745 -0.0662581548 -0.1114856 0.00284221768 -0.052574873 0.0127746612 0.0721953511 -0.0164361224 0.0638382882 -0.0129706711 -0.0614550114 -0.060835205 -0.0864404589 -0.132438704 -0.108277529 -0.105507694 -0.0632813722 0.0298161656 -0.0744545162 0.0407374054 0.0746406019 -0.106669813 -0.10373731 0.12445356 0.0397888571 0.0220787525 0.0202515423 0.12489415 0.132801518 0.0626152232 0.0729931891 0.0667951256 -0.0493182763 -0.0674306005 0.0432554148 -0.0183124356 -0.104636811 0.063214466 -0.128902912 0.105252452 0.082741566 0.0974095464 -0.0517911017 -0.022460917 0.0845701918 0.00974364486 0.0755192861 0.122211002 -0.0929605439 -0.0323449485 -0.00275745941 0.13703306 -0.126241356 -0.0444845371 -0.0595683604 0.0420802012 -0.121947996 -0.0962189585 0.034678936 0.133177251 0.0845321864 0.0163415857 0.0860773325 -0.0293091722 -0.0457063392 0.116976917 -0.112045035 -0.114811443 -0.0518591814 0.0787069798 0.00974517968 0.11453367 -0.115365967 -0.0442552418 0.0140983164 -0.0719776675 -0.0414564312 -0.00496765925 -0.0418873541 0.0135209961 0.150040343 0.018279193 0.0670056716 -0.0128589002 0.00129946775 -0.0175037291 -0.10695336 -0.0812502131 -0.00151915848 0.105532125 0.156881258 0.107406408 0.0299301185 0.0623822287 0.105002947 -0.00692248205 -0.0561903454 0.0528496578 0.0645767525 -0.0349360071 0.0887037516 0.0392689034 0.160266221 0.0597992055 0.0948610157 0.0702525973 0.0522093065 0.116412245 0.0978843421 -0.148455024 -0.0602231361 0.0339148492 0.0785273239 0.143222392 -0.1303702 -0.0237084106 0.00480483705 -0.00544637674 0.123185195 0.066207394 0.138298839 -0.0356794894 0.176579177 0.0118068606 -0.0892722458 0.110782906 -0.105181009 0.0947949737 0.0951533318 -0.0448544845 -0.0301951393 -0.0468887351 -0.00123819872 0.101715624 0.0727012604 0.0642970651 -0.0423549041 0.0688230544 0.104760513 0.101078875 -0.0486233123 -0.0383191928 0.00411880249 0.110782482 -0.0667161196 -0.115577795 -0.107555278 -0.0455098785 0.0147148855 -0.0387868471 0.119055025 0.113614053 -0.0650238395 -0.116119511 0.0904611796 0.0928509012 0.036130257 0.063363038 0.088962093 -0.0774177462 0.00342554948 -0.0879331529 0.0105287833 -0.150841638 0.0330097973 -0.101222105 0.000547326345 0.0222531687 0.00177719572 -0.168144733 -0.102801181 0.127630353 -0.0044782632 -0.0718901828 -0.0688694715 -0.107435137 -0.00648547709 -0.137246579 0.116464987 0.0476300418 0.0752717406 0.0731578618 0.100494623 -0.0570759401 -0.0219588652 0.060869351 -0.0204062089 0.119302526 0.110057697 -0.0576427504 0.0296158791 -0.0681548789 -0.0114316642 -0.132044569 -0.0581466183 0.00203379989 -0.113137707 -0.0557623059 -0.0389640704 -0.129291847 0.121296927 -0.00863479078 0.109711155 -0.0720649809 -0.114019588 -0.0326023102 0.047520563 0.13592954 -0.136486098 -0.138339326 -0.130544424 -0.0851323009 0.0625912994 -0.0132746696 -0.0394350886 0.106071725 0.0412790775 -0.0212111101 -0.110249251 0.128628239 -0.00126694143 -0.0719275251 -0.0233325437 -0.0285679474 -0.119621359 0.0375062078 -0.0438100025 0.127980903 0.0915731937 0.0225412827 -0.0376331583 0.0345656835 0.135839269 -0.152139679 -0.00529241795 0.0757251382 -0.0507489964 0.0434143096 -0.109213173 -0.0232270882 0.110102899 -0.11542847 0.178933024 -0.146328598 0.080565691 -0.0281426851 -0.0798788965 -0.0825010538 0.102853604 0.176710308 0.105933264 0.142999679 0.0393511392 0.0469196737 0.155381039 -0.0202247016 0.170517668 0.00554223079 -0.067655623 0.128527895 0.00835985132 0.192383677 0.133679509 0.109696992 0.124087319 -0.0682987794 -0.0266768672 -0.0692853928 -0.15578717 0.111135691 0.152784228 0.182785735 0.115072496 -0.0234794691 -0.14098835 -0.0995724574 -0.0710255876 -0.0245003197 -0.121010661 0.214906275 0.126054928 0.0240032822 -0.0867983475 0.0794893727 -0.0287744384 -0.0114687914 -0.0225537177 0.00640312536 0.0122232735 0.148882598 -0.0123748779 -0.0145422816 -0.0797223598 -0.0824621096 0.050172396 0.197323322 0.0408616215 0.165366396 0.141404614 0.135947406 -0.0240413714 -0.115962207 0.193585813 0.0844455436 -0.0809815899 0.17395325 0.0107643139 -0.0946478769 -0.0715151504 -0.0346882716 0.0626753345 0.181155458 0.146124348 0.050194148 -0.016578801 -0.0884145498 -0.119957604 -0.0384309553 0.0239939895 0.071738176 -0.0269928221 -0.0424483791 0.0305357967 0.129883602 0.143514901 0.133759692 0.0695038289 -0.000178681847 -0.0580186956 -0.0775882527 0.136189267 -0.0727865323 0.0678651482 -0.049817346 -0.0649325028 -0.0088552665 0.156783015 -0.048809994 -0.0406761616 0.158696339 0.0890753791 0.136065736 0.160149634 -0.0645535365 -0.111809649 -0.0370648354 0.194843888 0.0213514157 0.102395862 -0.0400028452 0.0761639178 0.0394547395 0.0327902511 0.162416309 -0.00130897725 0.020087108 -0.0961144641 0.0304949749 -0.045121409 -0.0313251726 0.085803628 0.0291031022 -0.0910456851 0.0660064593 -0.068344146 -0.0507363826 0.0779818743 0.14303115 -0.0030358301 0.0674030483 0.147154242 0.0136561031 0.054678835 0.0916109383 -0.108775541 -0.0925002992 0.0767795593 0.0127289426 -0.0589483082 -0.119000398 -0.121735357 -0.0326918289 0.137503535 0.119247034 0.0430034138 0.0618253574 -0.0975219831 0.0736228079 -0.0372737274 0.153161958 -0.0518422537 -0.0204763189 -0.0608311482 -0.0457191877 0.147904009 0.0655161589 0.000597919687 -0.0326539725 -0.155328959 0.167974561 -0.0343649164 0.130426079 0.00636828598 0.141477138 0.057434544 -0.0446176901 -0.0856851637 -0.0112518054 -0.0844905823 -0.0406574272 -0.153341204 -0.0638041422 0.0856886953 0.0646770895 0.130005434 -0.0040921187 -0.0604991764 0.0163501818 -0.0783527344 0.120934926 0.161637381 0.115128227 -0.014278437 0.0813159347 -0.0724511221 0.0282054543 -0.00229437649 0.0406513065 -0.0661629364 0.0403244048 0.0202239044 -0.0395012945 -0.0349203013 -0.054835394 0.0276283957 0.0147262886 0.171100989 0.0777800605 0.113793746 0.029462589 -0.0530196279 -0.115720108 0.167530239 -0.0646177605 0.163341776 0.0519124195 -0.0436448865 -0.00622291025 -0.118472219 -0.11200767 -0.0293926019 -0.0851374194 -0.0335079357 -0.00235709315 -0.114089273 -0.125252411 0.0811661184 0.143217117 0.0657678992 -0.144908518 0.074894011 0.0680066049 0.0251119025 0.0237030108 0.130361617 -0.117028616 0.0458782166 -0.0999599174 0.0947599187 0.125303924 0.124342829 0.0346639715 0.0599663034 0.00829797983 -0.12561053 0.0641372502 0.076463908 0.0719346106 -0.0685527846 0.0804899856 0.133784577 0.0426442474 0.108722381 -0.120919384 0.0425808728 0.0937603563 -0.0493984073 -0.101851352 -0.0743994713 -0.0168575719 -0.0864764303 0.134012744 -0.0345991999 0.0765475258 -0.0502674989 0.148546934 0.154048041 0.112422884 -0.0310489275 0.0740677267 0.124277003 0.11085771 0.0675311983 0.0243612733 0.0620236471 0.0995759219 0.168544546 0.00101517653 -0.0610326529 0.0783934444 -0.0371061936 0.0923965722 -0.0212610923 0.133722678 0.0999392346 -0.089509137 0.0711376369 0.112333678 0.0204899628 0.17973493 0.0402919464 0.126110092 -0.00392504036 -0.000692084432 -0.0994881168 0.053651616 0.00273740292 -0.0712720156 -0.0218583867 0.00874059927 0.0359456241 0.0621751361 0.000342633168 0.0569748171 -0.0946905017 0.00123212801 0.142329782 -0.0667219386 -0.0533551276 -0.0563121587 0.071681805 0.101482138 0.161976591 -0.106341578 0.186979875 0.0426207557 0.140588462 0.0434038043 -0.0569239818 0.164891273 0.140453205 0.0955060944 0.0860626772 0.151230052 0.187594429 -0.18127653 0.131841645 0.15597482 -0.131366268 -0.165060341 0.128387749 -0.0202594791 0.0415013544 -0.0959378406 -0.0706115887 -0.121529371 0.0975343287 0.0213517249 0.183627069 0.0060459557 0.00643412722 0.113227792 0.169127882 -0.109208152 -0.151426241 -0.00370581448 0.0630536079 0.108941384 0.0786992684 0.0706410259 0.0392542407 -0.127687827 0.0440069884 0.0562533028 0.0949133858 -0.0812414587 0.092598483 0.0175310317 0.0891861469 -0.045317024 0.136054009 0.0259930789 0.00634265412 0.0573615059 -0.114974082 0.0966024846 -0.112975411 -0.0831556097 -0.0627890527 -0.00156623824 -0.0914661735 0.0832677707 -0.0836677551 0.0405839272 -0.072140947 0.0206351802 0.0579435751 0.175754473 0.0569373667 -0.0288151708 -0.133179188 0.0659383461 -0.0622974038 0.111952148 -0.0266913269 0.0549159199 0.126251087 -0.0655740872 0.103494681 0.000715725822 0.092549786 0.0262457915 -0.0120499283 -0.134702772 0.0988872126 +tensor_8bias 50 +0.0448136181 -0.0294532757 0.00591958454 -0.0112828789 0.0547700003 0.102279283 0.00554918963 0.0933698788 0.138683245 0.153071642 -0.0246890131 -0.066205956 0.0102847284 -0.0217106864 -0.11153923 -0.0833024532 0.0690509453 0.0574259795 0.0326761454 0.048058711 0.0932174474 0.173286349 0.0437983349 0.0692929476 -0.1425194 0.0164392311 -0.0525733009 -0.0926198289 0.01558726 0.124148585 0.159763724 -0.112289928 0.122134581 -0.0329846852 0.123975173 0.00884330273 -0.125247195 -0.108203024 -0.0963885933 0.12722528 0.105277926 -0.0898397416 0.108396716 0.133004621 0.111592449 -0.0548007637 0.112471558 0.0952548459 -0.0418147035 0.0495906435 +tensor_4bias 50 +0.0420062914 -0.0531011894 -0.0405919701 0.147642136 -0.0448930375 -0.0946018249 0.0368757285 0.0895275325 -0.00135793048 -0.0465053245 0.104558863 0.0464918055 -0.0928135291 0.145776987 -0.0437397324 0.0744188651 -0.0975865945 0.0791935027 -0.0783651695 0.0380954593 -0.0641139522 0.0319918618 0.0519438572 0.00847010501 0.124498516 0.182475775 -0.0537090674 0.0583103821 -0.0401648097 0.0082509499 -0.0618926026 -0.122952975 0.0772916004 0.014789585 0.101875983 0.0958903432 0.064464353 0.0122809373 0.149964184 -0.141134128 -0.0849211961 -0.0111745978 -0.0645377114 -0.0344211683 0.0628582314 0.0434207059 -0.0433468781 -0.0299602263 0.15525946 -0.0448016711 +tensor_2weight 2500 +-0.0597149245 -0.0791020989 -0.00306093879 0.113323435 0.118636928 -0.0843338519 -0.109422937 0.0164578613 0.168519169 -0.0703572854 0.0312314406 0.0899977908 0.0896739215 -0.0900451988 -0.057600379 0.0125688771 0.0722137764 -0.0290169287 -0.0694356412 -0.111381322 0.0917039365 0.00489026168 -0.0580901131 0.183314383 0.195475265 -0.12944217 -0.0534728765 0.074898228 0.104391731 0.123983808 -0.013343907 -0.112780578 0.012140803 -0.086059548 -0.0357166752 -0.0239756703 0.114319615 0.0447655618 -0.0479144566 0.0672920421 -0.039890483 -0.0342019647 0.170793653 -0.0611885674 0.128305733 0.0986138955 -0.0286394898 -0.0084637003 -0.141880184 0.0852712765 -0.0972362906 -0.00365298078 -0.108331524 -0.0803529769 0.179286033 0.0825248212 -0.0778654292 -0.0261579026 0.0222861301 0.199497893 -0.0576646812 0.142493397 0.018432891 -0.0569059029 0.0996442288 -0.0431534536 -0.0794040635 0.136226013 -0.0141376657 -0.0539442487 -0.133499324 -0.0887252018 -0.0284489784 -0.0330936722 -0.03493331 0.0510139801 0.192286044 -0.00151121407 -0.0730649382 0.136111543 0.162208974 -0.115568712 0.176949784 0.0509604737 -0.140759781 0.0942156538 0.15726684 0.0260999966 -0.0726049989 -0.0243513957 0.156701684 0.138213098 0.112526298 0.0941351131 0.104868479 0.105548747 -0.0304395221 0.0303013697 0.162006006 0.100969627 0.145671651 -0.0650625825 0.0855033845 0.0336373001 0.141778961 -0.0337854326 -0.00864057243 -0.0735450611 0.0464367941 -0.0596558116 0.0623771138 0.14349483 0.0591385625 -0.00258940901 -0.0122495294 0.14376843 -0.0750882924 -0.0664319023 0.0305001531 0.0184416007 0.02046955 0.0551448241 -0.0694528297 -0.0207397975 0.154329836 0.0494214594 0.0845211819 0.16324687 0.0757716969 0.0634511784 0.120605588 -0.113957532 -0.0832520127 -0.0171713699 -0.0601701811 0.148658082 0.0899651572 0.118677244 0.0283228904 -0.0590552986 0.0797857642 0.0911054611 -0.022215249 0.176669434 0.000942089071 0.112969555 0.105361097 -0.0645927563 0.103734575 -0.0436463058 -0.0349569395 0.115449831 0.0422306731 -0.0804883987 0.0807694271 -0.0505034067 0.00729625719 0.137707859 -0.0488397889 0.162600547 0.15114215 0.0636213571 0.00903507788 0.128289327 0.163847417 0.000159272255 0.0834238008 -0.104029171 -0.0793354735 0.0541718863 0.00707805855 0.077409409 -0.00238326658 0.125607908 0.0396535546 -0.0790733248 0.0564618595 0.100612111 -0.0357064828 0.117824152 0.132536173 -0.0289113428 -0.014852941 -0.0426625349 0.0135453995 0.103636682 -0.0972069129 0.0516828299 -0.00995481107 0.0232977849 0.0937414765 -0.023261575 -0.0417088531 0.0130363097 -0.14154911 0.0702126473 0.00403433712 -0.0650982484 -0.0789552182 0.216502696 0.122806698 0.027723331 0.063748695 -0.0578081496 -0.0157720149 0.0400142148 0.133040145 0.0334649682 0.0875510424 0.110794596 0.0254984461 -0.0512416363 0.0211649723 -0.143576398 -0.0205686055 -0.111181781 0.0162975509 0.121590719 0.0656936541 0.155964255 0.0245984644 0.0352118239 0.133722454 -0.0262214299 -0.0336278044 0.156469122 -0.13011755 -0.027528204 -0.0602145456 -0.0930233747 0.0099506909 -0.0182043407 -0.118824221 -0.00373798492 0.178733543 0.00827211235 -0.0456761308 -0.0721783042 0.00670965109 -0.0409170277 0.00431948341 0.124081343 -0.0710947514 -0.104117736 0.093746461 0.171907842 0.110169716 -0.070081532 -0.0667723492 0.125274718 -0.0586081445 0.139502883 0.177527696 0.0687526166 -0.0820335746 -0.0490859933 -0.12959671 0.124665432 -0.0872184113 0.0991814062 0.0363627896 0.190564334 -0.0296370834 0.0762037039 0.0642659366 -0.0918578207 -0.054685194 -0.0458993316 0.146039933 0.0528010353 -0.0662797019 0.00561331725 -0.01142208 0.0815358981 0.0418767408 0.110681847 -0.00722674327 0.130719125 0.139407441 0.0292424969 -0.0270317923 0.0958031863 -0.0573824011 0.12932986 -0.043775145 0.059319146 -0.0913528278 0.115791552 0.078004472 0.115792975 0.107448012 -0.0748391598 0.0529222861 0.13462083 -0.141233921 0.166953042 0.168474525 -0.0700130537 -0.117624134 -0.00714296196 0.0268919822 0.163626537 0.0181761291 -0.0640345961 -0.0449223928 -0.141952619 -0.0284713078 0.147408575 0.139610574 -0.0779195204 0.106946297 0.117024481 -0.0941873938 0.09258876 -0.00288540404 -0.0543360636 0.0990853012 -0.0131437555 -0.0769185126 0.0146610877 0.0856351554 -0.090552628 0.124525517 0.072334148 0.00881079119 0.0441620275 -0.0116904415 -0.108310528 -0.0406595394 0.0195690114 0.0474229716 0.08090958 0.0409525596 0.077940464 -0.121437781 -0.0896261111 -0.134390622 0.099559769 0.107502699 0.0738855898 -0.0311849546 0.12491411 0.0958716646 0.048406072 0.0154622868 -0.130314365 0.148058236 0.00762006547 -0.0898886994 0.144507095 -0.0986621678 0.0791233629 0.0717348233 0.137725651 0.0972002074 0.0856728703 0.0490715429 -0.0558436215 0.177653775 -0.0812159926 0.174190253 -0.0374299698 -0.0888636857 0.0568164624 0.0539831966 -0.046500802 -0.088104479 -0.0324098729 0.123006575 0.174390927 -0.0655597001 0.118238717 0.165678978 0.115315504 0.149962306 -0.0967894346 0.0218543001 -0.0471816473 0.136843204 0.0418579951 0.130341902 -0.10788656 -0.0118869822 0.0904047042 0.10771846 -0.0203160401 0.0716004148 0.121576704 0.114085183 0.0813911036 -0.0706418529 0.0724584237 0.0249532741 0.156553373 -0.00865705032 0.134671107 0.0270873979 0.0121872211 -0.000827496988 -0.103484429 0.12091063 0.0684384331 -0.112646192 -0.0716026947 0.0865510404 -0.0961387679 -0.0992462859 -0.014073588 0.0901760384 -0.0329191796 -0.00509604625 0.0300773419 -0.113896236 0.0637915656 0.176874548 -0.0267044064 0.12591213 0.0827189684 0.00802489929 -0.0155225964 0.139007181 -0.0314813517 -0.0244915821 0.0454487316 0.113499463 0.147255525 0.0290668719 0.0196187459 -0.0756559074 -0.0474474952 0.000423966238 -0.125565693 -0.142974168 0.0265704822 0.100150622 0.124454536 0.128189385 -0.125751778 -0.0660192817 -0.0496372506 -0.025079174 -0.0945867226 0.00687600998 -0.108164005 -0.0449875742 -0.0757939294 0.0345570296 -0.0277413465 -0.0288163945 -0.0649622455 0.00885617267 0.0745153949 -0.0630018637 -0.00193145883 0.0763816684 0.156405032 -0.0854697376 -0.0829446241 0.0749762207 -0.0894886181 0.00361103215 0.0892253667 -0.00260828738 -0.0638676211 0.00824388769 -0.0162695311 0.0992859229 0.0285193995 -0.0495389216 0.0868888199 0.0549531169 -0.0304261018 -0.0182636939 -0.0249298904 0.159364238 -0.0837972984 0.11065764 0.0529022627 0.010110856 0.110683426 0.0919133052 0.0737009645 0.0965587646 0.0305129029 -0.0127110174 0.0697814003 0.103699945 -0.0261213128 0.170093238 -0.0687487945 -0.12052843 -0.104825832 -0.126111925 0.142499581 -0.128851101 0.0239339732 -0.0617658421 0.0295549762 0.119156219 -0.0673037395 -0.0500704497 -0.0940866619 0.0919373184 0.146928117 0.0300044753 0.0634653345 0.0144530665 0.0691985935 0.0211127512 -0.0590388924 0.0216479953 -0.0947615728 0.00890090037 -0.143075675 -0.00150912558 0.101439707 0.0146557204 -0.0631864071 0.0695210993 0.159808844 0.0115857897 -0.00928535312 -0.0489135161 -0.0782282799 0.125244364 -0.0499396287 0.140853539 -0.0960367844 0.0661479533 -0.0767967701 0.0877454206 -0.0602071472 -0.00595363509 0.115926109 0.178855419 -0.000521433423 0.0932693109 0.0502367616 0.152228653 0.104619421 0.0170960594 -0.103684276 0.0711491629 0.0488289595 -0.0617828257 0.0788236633 0.163875833 -0.0177440327 0.0156344157 0.109268099 -0.0375487134 0.0692994222 0.0731202066 0.0198084135 -0.0638355985 -0.0859975517 -0.0729697719 0.0573660471 -0.0556606203 -0.0930642337 0.145462662 -0.00594186038 -0.0928620845 0.139376998 -0.0553284064 0.0321234614 0.122701474 -0.123724081 -0.118198179 -0.0378811546 0.0980066508 -0.110459164 -0.0362307765 -0.0317853428 0.0111791994 0.0406676829 0.102611743 0.181697577 0.0510763824 0.123425812 -2.60259403e-05 -0.00361568225 0.153392524 -0.0397593305 0.0637998879 -0.145311564 -0.0472530723 0.0862638727 -0.0162773281 0.14536725 -0.0755254775 0.111187324 -0.0551111922 -0.000961930782 0.0612597242 0.163095102 0.0857004449 0.134386837 -0.0350845531 0.104531094 -0.0771434605 -0.067063503 0.171728879 0.167630181 -0.055467926 0.0409745835 0.177932739 -0.0550457239 0.107211053 0.0066946256 -0.00466190139 0.00958849117 0.156683907 0.111835107 0.0854923576 0.0730453655 0.121291943 0.0563716777 -0.111218229 0.0502161607 0.00677639991 0.18682304 -0.0360405892 -0.0132346814 -0.0414354391 0.0244455282 0.0727593377 -0.0868931487 -0.102708675 0.0923786163 0.154591203 -0.0693407878 0.106593266 0.162805468 0.0318478011 -0.031251967 -0.126320124 0.0780377984 -0.0280229542 -0.0295661092 0.0982864872 0.101671919 0.120140024 0.0414738134 0.105208568 0.0855850428 -0.0743453577 -0.000411789661 -0.0912177339 0.0883763209 -0.0493486412 0.123505704 0.166449651 0.105023161 -0.0776017308 0.162414178 -0.117349826 0.168772966 -0.101528428 0.13711141 -0.0164699852 0.0836634934 0.0919587389 -0.0328455754 -0.0752447918 -0.009731967 0.0349985808 -0.0194250569 -0.0934877768 0.185227469 0.0435012877 0.154062793 -0.0773278996 0.0929438472 -0.00670079701 0.0923984647 0.102851599 0.134395629 -0.120910235 0.170304388 0.0816775039 -0.0626546219 -0.0595025942 0.0244693402 0.0510447063 -0.115126796 0.115374513 -0.0176392663 -0.0923264623 0.122397989 0.0872549042 0.125522628 -0.100656673 -0.00508889835 -0.123501971 0.0617450103 0.139201492 0.051387202 0.00884217676 0.0175981224 -0.0483928584 -0.0360136032 0.05417905 0.022909319 -0.0881463438 -0.0459814519 -0.0131944772 0.0480347835 0.1673228 0.137037218 0.14526543 0.0446226932 0.0413857326 0.0612014905 0.132490978 0.0794302076 -0.0342803597 0.0863904208 0.15610376 0.121424645 0.0110774338 -0.0368165858 0.104494691 -0.0254124962 0.154777497 -0.0138444677 0.118794315 0.0259997863 -0.00128288078 0.142353535 0.155503765 0.0894722044 0.0424166657 -0.0683410317 0.0425889567 -0.10710226 -0.0400536358 -0.000696110365 -0.0677292421 -0.0385467038 0.0813434571 -0.0811068788 0.0311896447 0.0156664345 -0.147901028 -0.000463384727 0.0149115929 -0.112064414 0.0082620522 -0.024438085 -0.0304117016 0.162811249 0.128227949 0.0702825859 0.0863868073 0.0475940667 -0.100322515 0.118678033 0.153219327 0.103472307 0.106240071 -0.00983386766 0.0908779651 0.0990438908 0.0359329022 -0.0689288601 -0.0298974775 -0.115996465 0.0365048237 0.0202663038 -0.133836135 0.0477452688 0.0554565825 0.0893209428 -0.0239705388 -0.0640460923 0.13875863 0.105679706 0.0737722218 -0.0183230489 -0.0404619724 -0.0105633233 -0.0761946291 0.164347902 -0.0172834061 -0.094510898 0.0345971286 0.0106645143 0.194848433 -0.0547695532 -0.106579714 -0.0123255178 0.0403105766 -0.0313294157 -0.00499826716 0.100480273 -0.00637257611 -0.0778858364 -0.0411514193 -0.00478123594 0.0215911381 -0.0732492954 0.194053754 0.0539965741 0.113155119 -0.0752726197 -0.0769620165 0.193490297 0.0789649859 -0.0801189467 -0.0407260284 0.0242670309 0.10401839 -0.0375796929 0.0314083621 0.0724864528 -0.0512620732 -0.137483209 -0.0787761062 0.0968051478 -0.087627165 0.190840423 0.158406734 0.0864097029 0.133482426 -0.0359799229 -0.0242824815 0.0159911942 -0.0485018119 0.144926906 -0.058078561 0.111498684 0.065325208 -0.0478983261 0.0192427151 -0.0443237759 0.0666328892 0.0329897963 0.134647146 0.0964290947 -0.109050713 -0.000148722494 -0.00192280754 0.07619223 -0.203230783 0.0140038347 -0.0237551313 0.113345623 -0.0610194132 -0.123688005 0.00247963867 -0.0892862976 0.0488414988 -0.0904518217 0.174596399 -0.131306589 0.0763920173 0.150487289 -0.153154299 0.0224560183 0.0973761827 -0.0426088274 -0.0505751371 0.104424372 -0.133484393 0.0833508074 0.0194486398 -0.142139688 -0.0637170449 -0.101654164 0.0209246967 -0.140727118 0.00838450529 0.0946883485 0.148535386 0.103271469 -0.114976875 -0.0612382665 0.0309834275 -0.080561161 -0.0438423492 -0.0763120055 -0.0871841311 0.0487271659 0.192025125 0.0274662226 0.0726716295 0.126637235 0.11231558 0.00397039996 0.192436248 -0.0129653281 0.087810427 0.101742446 -0.0811328292 -0.0573779941 -0.00594198145 0.157350421 0.0783605501 0.201680467 0.0806498379 -0.0635789633 0.173802316 0.046798829 -0.111674123 -0.105566561 -0.110239312 0.137469321 0.0206604954 0.190349817 0.169501752 0.126503631 0.167581499 -0.0180790145 -0.0187429003 -0.0419336259 0.0993470997 -0.0918064341 0.110130824 -0.0955291986 -0.0254780296 -0.0506573617 0.0148899863 0.104261681 -0.0428472869 -0.0548303574 -0.0506917909 -0.0156032071 0.0699625984 -0.15484792 -0.0459163263 -0.110004574 -0.0441328883 0.197784573 0.0838625804 -0.0522456057 0.0942399129 0.0829744935 0.057554815 0.109780334 0.10274224 0.154105842 0.10721004 -0.0166563932 -0.0466450788 0.0287705809 0.10081622 -0.0953564495 -0.0839984119 0.101254053 -0.0738965794 0.0948913991 0.167173281 0.0429453701 0.0383497626 0.091592297 0.106202237 0.156739905 0.0656498298 0.13198331 0.147935465 -0.0810967013 -0.018950887 0.00158079178 0.164120257 0.0798614174 0.0186908729 -0.132187113 0.124651186 0.112890542 -0.138516054 -0.0781108215 0.106892236 -0.0400021151 0.121923052 -0.00202938612 0.0550861284 0.115557112 -0.0589016899 0.103443392 0.138156414 -0.156100512 0.141828462 0.163233846 0.185174793 -0.134581283 -0.0946970135 0.0305168517 0.0545697697 0.122422308 0.0272117686 0.092828013 -0.0790654421 0.0789960772 -0.113887571 0.139471903 0.177440643 -0.0422447994 -0.0695037767 0.133454293 0.00472546089 0.0994608104 0.0261894893 -0.0549818948 -0.0645656288 0.0830694512 -0.122029178 0.110560618 0.021865055 0.0957053602 0.153808683 0.153240129 0.00273627671 0.107639149 0.0361639187 -0.0830527321 -0.0444489233 -0.00363346422 0.0640293211 -0.0754873753 0.0189995058 -0.1402542 0.162265539 0.138485089 -0.0446577705 -0.000309297611 0.169430003 -0.0834633186 0.00541008823 0.034290649 0.0778444111 -0.0421900116 -0.0198174808 0.0522686094 -0.0672751144 -0.0208641775 0.151453003 -0.0738410801 0.043029502 0.0127242813 0.0245345235 -0.0372197554 0.090350613 -0.0694454312 -0.045643907 0.0334979966 0.0695154294 -0.0134842489 0.141971424 -0.074017182 0.0237953663 0.106125079 -0.0695564449 -0.145977944 0.166921124 -0.0877014548 0.0716962293 0.0305217579 0.117084034 -0.0790342316 0.0964029655 0.13598761 -0.134147704 0.189042479 0.182969391 -0.107122943 0.0923936591 -0.0212771464 -0.0149747208 -0.0244534928 -0.0772951767 -0.0497068875 -0.0293945558 -0.00921653118 0.105580427 -0.0721728429 -0.0770729706 -0.00264244643 0.00397060299 -0.139186502 0.0292970631 -0.0475326255 0.08476118 -0.0867509693 0.126799867 -0.0671816245 0.160967201 -0.0940391421 0.036259234 -0.0219887402 0.0285151005 -0.0580190904 0.13070558 -0.171060801 0.135117233 -0.0228546057 -0.107383102 0.0618890449 -0.0694213063 -0.0618949234 0.133807048 0.17643562 0.128621712 0.0101680793 0.176956698 0.0840079859 0.097374849 -0.100451432 0.0399295464 0.172356963 0.00101820775 0.145156473 0.104961276 0.0815578476 0.146059379 0.107903466 -0.121531352 0.0570647754 0.047216557 0.170416638 -0.0707143247 0.0478855185 0.0394140966 0.0102794804 0.125966758 0.135744303 0.0133625893 -0.0925729126 0.136732638 -0.0822476298 0.154679909 -0.13795127 -0.0215002652 -0.0249491148 0.0930954218 -0.106826156 0.10373725 -0.0187940467 -0.0534816161 0.134281337 -0.0336386599 0.114718519 0.0787281469 0.0239171404 0.0408289284 0.117535852 0.0759770721 -0.0240571704 0.0102049625 0.0229755491 -0.0571867488 -0.0825752616 -0.0630160421 0.0233204234 -0.0362254977 -0.0341095217 0.110644877 -0.0943035707 0.0922036394 -0.052436009 -0.0474082902 0.0808229521 -0.0361060351 0.0341569446 0.127944812 -0.0520493798 0.00435285084 0.0624745227 0.0890819654 0.120440952 -0.125715539 -0.0429935902 -0.100851558 0.115291968 0.103215486 -0.0138821993 0.114144072 0.144928649 -0.0672504827 0.0337884873 0.178193495 0.0654718578 -0.032493107 -0.0594031401 -0.0149731291 -0.108951643 0.148577735 -0.0590856262 0.123775907 0.129149333 0.120561078 0.0938586891 -0.0787900835 0.157910496 -0.0426145568 0.124586366 0.153994665 -0.0279286914 0.0712008774 0.132654876 -0.058968544 0.152131483 0.0144725023 -0.0846911147 -0.0830136165 -0.0503571592 -0.129175395 0.107455552 0.0278498847 -0.0376918465 0.125603542 0.0866251886 0.0744670108 -0.0176635683 0.081767872 -0.116470791 -0.076551564 0.107822165 0.0519237667 0.169635236 0.000728378771 0.195453733 0.0923824608 -0.00255433074 0.130946428 0.033110749 0.0234523341 0.159311384 0.0584074371 -0.0724054351 -0.00702239107 0.0528859086 0.0255747363 0.123749338 -0.0502231903 -0.127378836 0.000618861057 0.168384925 0.0564158484 -0.0874255598 0.0325605795 0.110717267 -0.0185731165 0.0527723245 -0.0973552689 -0.0553385355 0.099351272 0.126928583 0.037081793 0.159003794 -0.0413037315 -0.0480074212 -0.0216640383 -0.109965399 -0.0768443644 -0.0550187156 -0.02983227 -0.041209314 -0.0762111172 0.00517032761 -0.021049602 -0.082603015 0.128907517 -0.0880745947 -0.101325043 0.0862258524 0.0882336497 -0.0198778603 -0.0331840217 -0.109220311 -0.107734382 0.0400603004 -0.132840812 -0.0447417721 0.00368672935 0.0691269711 -0.0319370776 -0.0310771763 -0.152229711 -0.126779377 -0.0754033923 -0.106641732 0.0897259042 0.0624115281 -0.084738642 -0.0616546944 -0.00815979205 0.0202450287 0.071183376 0.0515766665 -0.0504490845 0.0691114515 -0.121255346 -0.0616305098 -0.121116355 -0.0412869304 0.0541755706 -0.00845611095 -0.019792689 -0.0873068273 -0.1018041 0.00566182006 0.155241832 0.0709863603 -0.0654985607 0.0548714921 0.1288618 -0.0813171715 -0.0274352692 0.050162863 0.0174831059 0.115988865 -0.0983620062 0.00916780252 -0.111271255 -0.0194736812 0.122007161 -0.05491817 -0.155907109 0.0968826488 0.0766369477 0.120036878 0.118291102 0.144479945 -0.109428965 0.0191301908 -0.131886169 0.0119547276 -0.178280339 -0.074061133 0.0725457594 0.047452867 -0.0980938748 0.00940938015 0.17627655 0.0703046694 -0.0134887863 -0.0899318606 0.140372112 0.076489009 0.0844909772 -0.0435512364 -0.0578976758 0.0688769594 0.10411185 -0.114739448 0.11660511 -0.0925834179 0.0873279944 0.175916493 -0.0425273553 0.143908709 0.0721898228 -0.0761375278 -0.11759565 -0.020234637 0.0312824845 0.0598440468 0.110991903 -0.0383540764 0.105179779 0.0467798598 0.167937577 0.0386657864 0.0542986952 0.0948523358 0.0433264002 -0.148534298 -0.0129731102 -0.0280822664 0.0361635387 -0.00415288471 0.138104618 0.10342367 -0.0275076535 0.116774455 0.101908013 0.0884111896 0.0440905578 0.117984377 -0.013649038 -0.126955181 -0.0553081445 0.00625609886 0.133344293 0.00572153553 -0.0223944504 0.177496225 0.0815475732 -0.00271677272 -0.0417993777 0.106240101 -0.0412545837 0.18434307 -0.0274373218 0.178807244 0.0294444654 0.0646818206 0.0889737979 -0.0807103813 -0.0720598325 0.151093379 -0.033041738 -0.117520221 0.126272097 -0.106381506 0.037757419 0.00232348521 -0.0547570363 0.076936692 0.107204638 -0.0481706001 -0.00307636359 0.132705554 0.0537186749 0.0473928303 0.0915753692 0.119206257 0.176839486 -0.155492246 -0.151921168 -0.101832837 0.0815496519 -0.0724380389 0.0371969007 0.0601178631 -0.058801692 -0.0965428352 -0.0115199285 -0.0381022878 0.105424263 0.0742912889 -0.0960293785 -0.0112020867 0.0849209279 0.0520411208 0.122885831 0.151498944 0.122882292 0.153430328 -0.0156365 -0.0119283618 -0.0820145831 0.0155551042 -0.149645686 -0.0800471455 0.094618395 -0.0650205612 -0.0104006175 0.131104678 0.0416966155 -0.0876214504 0.0637880862 0.0821948424 -0.0084727779 -0.0978877619 0.168948174 0.149989381 -0.0299459342 0.0972742289 0.000181726937 -0.0922966674 0.176449746 -0.0968618467 0.0333946943 -0.0902563259 0.16238676 -0.0905583873 -0.03544081 0.152139008 -0.119497493 0.0413080677 0.0570814125 -0.000148650375 0.112468541 0.058299277 0.0766911507 0.0998951718 -0.0232174434 0.167854264 0.168274015 -0.0583992265 0.154771283 0.13650085 0.117386065 -0.0722455084 0.0544443242 0.0949110314 0.143746346 0.108085796 -0.0419899784 0.143864065 -0.025696218 0.133225232 -0.111586809 0.0990424305 -0.0993287787 0.078866601 -0.0784433931 0.132836834 -0.106675968 -0.1069621 0.077828303 0.187477276 0.0277358871 0.0359606817 -0.0598008744 -0.00338539528 0.00320412288 -0.0859301984 -0.0230135676 0.163934514 0.0130302329 -0.0939015523 0.0754140466 0.107570499 0.00613959366 0.135453999 -0.0996567607 0.109139353 -0.100983992 0.0580916367 0.0119609917 0.0797068924 0.151979074 0.16187796 0.075011678 0.0931628644 0.0360457934 0.00347460015 0.0319518261 -0.0867329165 0.0962795615 0.0821009502 -0.0356594585 -0.02422712 0.0152814919 0.0527246408 0.132090867 0.144757852 -0.0431858338 0.139542729 -0.0139607172 0.171679854 -0.0980732143 0.180641383 0.186638147 0.0636721104 0.0896847546 -0.0584419966 0.143411934 -0.0701248869 0.0328124799 -0.0829237774 0.145310253 -0.00750299264 0.118470781 -0.0582295991 -0.0695642605 0.0633131266 -0.0640299097 0.159781903 0.0272349548 0.109332368 0.168833092 0.0473833978 -0.0882677585 0.0282821339 -0.0536800846 0.0558247045 -0.13541919 -0.107939526 -0.0673646927 0.0988901109 0.144200847 -0.0628962666 0.0835133493 0.0278197322 0.112931602 -0.0297801625 -0.00817243289 -0.119054325 0.00599690992 0.16512607 -0.0284597538 0.133023679 0.108261056 0.175182506 0.148225054 0.0877180696 0.0722195581 -0.0575301908 0.0970565677 -0.0798201114 -0.0708039552 0.0288234167 0.155979618 -0.0531227216 -0.0605055392 -0.0713208541 -0.0868417323 -0.0402501673 0.0165239926 0.181012854 -0.160325661 0.0927709043 -0.0364443325 -0.0312899835 -0.109137982 0.111598797 0.11623574 -0.068648465 -0.0206921138 -0.13928856 0.0243328102 0.0555803142 0.132689178 -0.0608246513 -0.0354508683 0.172304466 0.0327581689 0.170413792 0.163448825 -0.0454982035 -0.0583826788 0.0481920801 -0.0540810302 0.182651609 -0.174388662 0.155782059 -0.0293228272 0.015085889 -0.108551085 -0.121923782 0.0719362497 -0.168311208 -0.120032616 0.0659890622 0.115256436 0.131331578 0.119614907 0.178487614 0.09089607 0.00386154489 -0.0554215722 -0.0119620096 -0.0467320494 0.0933647081 -0.0323850662 0.141180277 0.107638344 -0.0253946837 0.173948079 0.137527362 -0.0205912776 0.010461146 0.142752916 -0.0192131344 0.107507631 0.14598392 -0.0370280705 0.0341507122 0.138847277 0.102246776 0.067923449 -0.00280428468 -0.0519020297 0.0715199634 0.151722491 0.00090766669 -0.0237915833 -0.00762919895 0.0348006599 0.0952979177 0.11985556 0.143176049 -0.00832088478 0.0575121641 0.0766030177 0.0191355087 -0.0326572359 -0.104509436 0.0300794542 0.062483415 0.132271856 0.100359082 0.0538696684 0.0351427197 0.071405977 -0.040407598 0.157145143 0.0427171327 0.031071905 0.085857898 -0.140569568 0.0797872916 0.160493046 0.0627081841 0.0700800642 0.0625736564 -0.0243169125 -0.0110870786 0.150555253 0.057589937 0.166157231 0.120003015 -0.0579976961 0.0612958968 -0.0452432111 0.054822579 0.0524013229 0.149773791 -0.0714917257 -0.023807399 0.0454889461 0.0992185473 0.0658304542 0.02966832 0.101745747 0.0872673169 -0.125905886 0.0570483804 0.135766774 0.113360628 0.0364370681 0.0223136339 0.0019436914 0.0164411664 -0.0682152584 0.0921245515 0.0128188692 -0.0173182599 -0.00139826769 0.0518905111 0.0928640962 -0.0397072323 0.103173278 0.00413324265 -0.0670715123 -0.0588557124 -0.0857635513 0.0062935818 -0.0887346044 0.138430178 -0.025497932 -0.0639876872 0.0670730025 -0.0446693785 0.0594656765 -0.0820678324 0.157999322 -0.182115525 0.00614317786 0.0362051241 0.0657482669 0.106698424 0.191083074 0.135481074 0.0106980857 0.00957398489 0.0367676988 -0.0136649683 0.0735901445 0.0689036474 -0.134390131 -0.0718721896 -0.00467563979 -0.00951108709 -0.0722589716 -0.00359070604 0.0947275981 0.126477376 -0.0131597025 -0.12138617 -0.0347730219 -0.00653850706 0.135745063 -0.0925999731 -0.0944521725 -0.0723555461 -0.153894082 0.188464135 0.0043896623 -0.00922763348 0.213275999 0.00525289867 0.0494968928 0.0589311495 -0.07227844 -0.0655579418 0.173283234 0.0376433432 0.156717747 -0.078233324 -0.0844314247 0.0813227743 -0.0925660729 0.124083593 0.153772607 0.068052493 0.0846082121 0.127492517 0.147963956 0.0722059608 0.154211655 0.0819868073 0.111736804 -0.0420656316 -0.156026006 -0.135763094 0.132754937 0.110232912 -0.119001575 0.119631797 0.100629732 -0.0977817997 -0.0254859347 0.0714960396 0.0816458389 0.0694845393 0.107111402 0.0238910895 -0.11218477 -0.117907912 -0.0337541923 -0.114325784 -0.114220396 -0.153953075 -0.0372015573 -0.0811879858 -0.0323405489 0.128496513 -0.0856468379 0.0182948634 0.0260079242 0.0171604026 0.0346086845 0.114011452 -0.0935687795 -0.011811249 0.131105796 0.0234864186 0.0406528525 -0.114756532 0.122138247 0.0470963418 0.0791598186 -0.0303514749 -0.0687026605 0.194362417 -0.022815939 0.0876949206 0.0480690859 -0.0250811335 0.148164272 0.0488567054 -0.0881642401 0.198841885 -0.0379917733 -0.00708210841 0.041188851 0.0292753335 0.0645876899 0.00623326236 0.0477512181 -0.000584310852 -0.106165297 -0.015090609 0.0250555836 0.0323013403 0.041745469 -0.133533582 -0.0977768302 0.0384080522 -0.014046954 0.0909342691 -0.0820525289 0.132567018 -0.0924441591 0.155193165 0.110916458 -0.0571177192 -0.141427621 0.0474443957 0.0907076299 -0.064002499 -0.0244310405 0.0177996214 0.0721451417 -0.00413550809 -0.0516352393 0.0421805531 0.131461561 -0.0123250391 -0.0480676852 0.0910230353 -0.0799057558 0.0509942733 0.112865351 0.104875125 -0.085275501 0.0623678714 0.0686701387 -0.080322735 0.0964362845 -0.0460433923 -0.0657152012 -0.074650757 -0.0327339992 0.16137737 0.0697549805 -0.108303167 0.00211762171 -0.0693195313 0.00135752186 0.013155547 -0.0307769664 0.0750898421 0.0616175942 -0.0536347926 0.0857256502 0.0237529613 -0.021395212 0.00901291613 0.00728149712 -0.113782011 -0.0464270264 0.167675585 0.0525661036 -0.0210970417 0.156918585 0.061230965 0.0992827117 -0.0678927675 -0.151397571 0.075506404 -0.0497730784 -0.0540236272 -0.0624297559 0.0426682606 -0.0725995973 0.076271072 0.116657615 -0.0210639741 -0.0213112682 -0.0862966105 0.0802445114 -0.0198064968 0.176064715 -0.0988808721 0.101349302 0.119555868 0.128017023 0.0522831939 -0.0366001837 0.145147249 0.0257630255 0.0434764959 0.111463912 0.0327721126 0.123577833 0.0872525647 0.162450716 0.199674487 0.164900869 0.0993724763 -0.144233376 0.0194361061 -0.0317708701 -0.0597182736 0.0684302226 -0.0456766337 0.0549333505 0.101966113 0.0526848994 -0.118291982 0.0568728857 0.125453085 0.107232653 -0.0476998277 0.135429114 -0.130028173 0.0840768516 0.158564597 0.0256799646 -0.0523730144 0.031615708 0.0914076194 0.188867077 0.143099532 -0.0071679526 -0.0894442722 0.0995981768 -0.0183322281 0.0598267131 -0.0731880441 0.0917812809 -0.140130281 0.00585151743 0.00310393353 0.152784497 0.158248886 -0.137339488 0.0995804071 -0.0764262974 0.171144649 -0.0672199726 -0.0027869083 -0.102201961 0.107543819 -0.0715040565 0.214907989 -0.0438209847 -0.119756781 -0.00894473586 0.137930363 0.126813769 0.0999665186 0.0293341558 -0.0930798054 0.0649531037 -0.101108015 -0.0516813099 -0.0952921212 -0.0980607495 0.0295815989 0.0134664373 0.0469121002 0.0314295888 0.116126269 0.144171268 -0.109329514 0.102265686 0.0232829526 -0.0721712261 0.0460076891 0.00450235466 -0.078920044 0.120493911 -0.0557029285 -0.0781340078 -0.113349713 -0.146189392 -0.0182035994 0.10833291 -0.0549698845 0.111201309 0.0124426633 0.0892337486 -0.10926117 -0.00912767928 -0.0338271856 -0.105445758 0.211141855 -0.119850591 -0.180531412 -0.00868519023 0.218875796 -0.17652452 0.0990117118 0.0245204382 0.149801061 -0.00586622301 0.0881711915 -0.0257251803 -0.0817466527 0.0768139511 -0.0895878077 -0.107276358 0.0430153459 -0.162487656 0.0267249998 0.130476043 0.0166731309 -0.127173543 0.0398012064 0.0680280626 0.0879124179 -0.0295924786 0.0133784497 0.000692039728 -0.0751812905 -0.0830598176 -0.131929606 0.135406211 -0.112499252 0.0126099214 0.00665883068 -0.00475356216 0.0249228943 0.102894537 -0.0225983374 0.061107967 -0.0330257192 -0.0597277209 0.179125711 0.050645031 -0.0669407696 0.158360988 0.205021739 0.00652983878 0.121627569 -0.0640201867 -0.0522308983 0.0900547057 -0.122924723 -0.114422083 0.0658481047 0.0173800383 -0.0786071345 -0.0717952251 0.0280309897 0.0219677705 0.0759255365 0.084643513 -0.0923913196 0.166452676 -0.0389173701 0.0821120963 -0.113245606 -0.0364514329 -0.0393794179 -0.0335422643 -0.0305757262 0.0866778567 0.127289161 0.0190164503 0.0835306719 0.152247652 0.138753071 0.130225837 0.00651189126 -0.148018956 0.0714074373 -0.10346128 0.143939614 -0.0185987595 0.0229391046 0.101105355 0.0875614211 0.168692231 0.0672137067 -0.116006561 -0.069646351 -0.0440914668 -0.0887304097 0.0605254248 -0.0931111053 -0.0291967671 0.0453826375 -0.0647103488 -0.08282765 -0.0912294909 -0.0972726122 -0.000134341666 0.117167793 0.14967677 -0.103549697 0.070657745 -0.0581128635 -0.1150591 -0.0362361856 -0.00619550841 -0.0881526992 -0.0659523532 0.0312618017 -0.0897310749 -0.0618666895 -0.0287295692 0.176449448 0.159317598 0.0456931591 -0.0967225209 -0.0319313519 -0.0248310566 -0.021381963 -0.0773523748 0.0478850566 -0.0207856018 -0.0701669753 0.147760212 0.0727391243 0.0043316409 0.159602404 0.0948895067 0.0890363902 0.00142308639 -0.0310148094 0.0753311291 0.0981000364 0.0223886538 -0.0931696519 -0.0682313293 -0.0667512491 0.0491678901 0.0186522752 0.137692913 -0.0236726794 -0.00927374046 0.114333265 0.104560494 -0.0608054437 0.204097703 0.205062792 0.094779104 -0.0898220837 0.0206578374 0.142832264 -0.049721241 0.0423907191 -0.0763793141 -0.093291074 -0.0389654711 -0.15401794 -0.00734718097 -0.0189076923 0.170103803 -0.0205226652 0.0636992827 -0.0644718111 0.108284459 0.138413697 -0.0257913806 0.00794647168 -0.0307983607 -0.0936900675 -0.0606061853 -0.154241458 -0.00459569367 0.141765147 -0.117230393 -0.0629995316 0.125530377 -0.0406536944 0.200848967 -0.104435295 -0.123753838 0.0367464684 -0.141358763 -0.0206649359 0.202375904 0.022151649 0.0812491998 -0.131115228 -0.0367442705 0.172151536 -0.0442129597 0.0605035909 -0.082755737 0.0954060331 -0.0230737943 0.0334409699 0.13487561 -0.110517241 -0.0225322787 0.062106967 0.119216867 -0.0953765139 0.14884612 -0.0287193451 -0.0106208287 0.153283879 0.0467924252 0.177715346 0.068970874 -0.0450268574 -0.0325655416 0.0547090545 0.0860708728 0.0107891522 0.0944289416 -0.0758065358 -0.0732419118 0.183351845 0.0771918073 -0.140022755 0.0537439361 0.149029925 0.131033957 0.0027445287 0.107114777 0.0976283476 0.156666949 0.192800567 -0.0668931082 -0.048805628 -0.0531376973 0.00703070173 0.135483757 -0.0116650816 0.0274772495 0.0685842708 0.0897314772 0.168943107 -0.0672629923 -0.132059872 0.120118931 -0.0828481987 -0.0856146142 0.0526181571 -0.0933162645 -0.00942354929 0.169790015 0.0899278149 -0.112179808 -0.0518537723 -0.0197913386 0.107157297 -0.0402358808 -0.0525892675 -0.0279793683 -0.0564811975 0.111123219 0.0285395123 0.103563443 0.0289735198 -0.119780019 -0.138365477 0.0683289915 0.0212225579 -0.136515081 0.178696275 0.0141859809 0.0798230842 -0.0112462975 0.00572248595 0.165446609 0.1772625 0.0877416283 0.0257206522 0.0255760346 0.109550558 0.0173372477 0.0941226184 0.156007677 -0.0609857477 -0.168111339 0.112304233 0.141951621 0.0942777917 0.0506730452 0.0117849326 0.110127218 0.11164064 -0.0365206338 0.17797333 0.0919450074 -0.0340361036 0.0371871293 0.137831658 -0.0145821422 0.171712354 0.0708972588 0.00538893556 0.138060763 0.0720485598 -0.102158196 -0.00157117634 0.161901429 0.164550751 0.0551112816 0.00221332046 0.163098738 0.0364854559 0.137255967 -0.0139900306 0.116951399 0.0309598278 -0.0843858272 0.149897203 -0.0105481371 0.129127055 0.0816876367 0.0559929311 0.109518707 -0.0339205861 0.102632456 0.00931944605 -0.00911251083 -0.0575628281 -0.0238492247 -0.13892445 -0.0204126779 0.140317202 -0.0053701899 0.0747784898 0.0447182879 0.0408651866 0.00653599948 0.0279005133 -0.0747578964 0.0836786777 0.115476467 0.0596954301 0.0199178606 -0.0829307586 -0.0326780863 -0.113550745 -0.0547465198 -0.0249119569 -0.113830656 -0.0401589163 0.093042478 0.0902988166 0.0128500452 0.0456254408 0.0772466585 0.159909874 -0.0500820577 -0.0781232342 -0.051164262 0.0975523815 -0.0487125441 -0.0570325479 0.0562490486 -0.0282622576 0.0981458947 -0.131002113 -0.157566205 0.0727057457 -0.146405846 0.146773353 0.101571307 0.130754113 -0.0140098277 0.119490281 -0.134308934 0.0664061159 0.100537203 0.152347729 -0.066916585 -0.0918895528 0.0884105414 0.150467262 0.190053374 0.0404316857 0.0131604439 0.0616949201 0.0204220749 -0.00819397438 -0.0989773422 0.135392219 0.034526363 -0.0443142466 0.097198464 -0.0308621768 -0.116274439 -0.0293579884 -0.0962141603 -0.0434402823 0.199946746 0.112885021 -0.00315347826 -0.0176970661 0.0463292599 -0.0291460063 0.0120348148 -0.00303878007 -0.0146520687 0.155439973 0.0927805379 0.112616234 -0.10787008 0.199581042 -0.0556919798 0.0174912345 -0.147659183 0.00716985716 0.0517262444 -0.132578805 0.0649443641 0.0491473936 0.0128262425 0.0570766106 0.14404805 0.0125820432 -0.0378993787 -0.0890177265 0.0629585683 -0.0781803727 -0.0468946658 -0.0970080867 0.107453912 -0.0726181269 -0.0156788174 0.109249413 0.179553613 0.0696004778 0.0497379862 0.1262182 0.070380047 0.036693722 0.153491363 -0.0165516399 0.11358387 -0.0461250581 -0.0716388002 -0.063194491 0.182627037 -0.0979600772 +tensor_0bias 50 +0.0687436238 0.121526092 -0.0379632339 -0.0472203717 0.085131444 0.0979627594 0.12071842 0.0766481757 0.111980788 0.0231025815 0.0757955536 0.0592933744 -0.0364503562 0.118033081 -0.0119619071 -0.0852706879 -0.057033807 0.104488559 -0.0188827403 -0.00805457216 0.106946483 -0.0220598206 0.0901777968 0.154047132 0.127174735 0.0643620193 0.119487718 -0.0108337859 0.10897246 -0.0441434234 0.0685871169 -0.0381012894 0.0594230555 0.0113021079 0.163605392 -0.0388617851 0.0634208769 0.104776219 0.0779020116 0.0259756818 0.152426898 -0.0797943622 -0.0156979878 0.161263436 0.0584381446 -0.00747399358 0.0999026075 0.0664042234 -0.0277017541 0.0495129935 +tensor_12bias 50 +-0.0650987327 0.0561347082 -0.0524960756 -0.0606846772 0.0553311668 0.084040381 0.0655064732 0.132255286 0.0351522863 -0.0857200176 0.0463379882 -0.138509437 -0.0309930108 0.0726053342 -0.0611225963 0.040046718 0.0333271585 -0.139368355 -0.115387805 0.035529051 -0.0753710642 -0.108340122 -0.165888399 0.0588014238 0.0744016021 0.0737710446 -0.166636527 0.139449701 -0.107233316 0.166755453 0.111904733 0.142458484 -0.10559769 0.17358239 0.0248687863 -0.00832488388 -0.00902078301 0.0966997072 0.16634649 0.0510999039 -0.11830131 -0.137912169 -0.0549811572 -0.140467361 0.0248682722 -0.0492456779 0.132404506 -0.136645094 -0.00630686618 -0.0665986538 +tensor_18bias 10 +-0.0286833197 0.0315113701 -0.0158580411 0.0455938913 0.0954583585 -0.105117157 -0.0738683939 -0.119185224 -0.0686925054 0.0992293954 +tensor_14bias 50 +0.0160144269 0.0659304708 -0.134516135 -0.125120386 -0.126968578 0.011852067 0.111996368 -0.0366256982 -0.0781780854 -0.00189105608 -0.0748870224 0.11818061 -0.0440538898 -0.0143895745 0.0724157915 -0.0405166261 -0.0633764267 -0.0380873531 0.0670853108 0.0602433793 0.0957999155 -0.0834713951 -0.0430925563 -0.0397071578 -0.0210147407 -0.00462661684 0.0973884314 -0.153825372 -0.147843331 0.0121727288 0.180789441 0.0183312204 -0.130684286 -0.10787309 -0.049283646 -0.0444232263 -0.059754774 -0.0348485857 -0.159336865 0.0452591404 -0.0289487373 0.0982468277 -0.0173284747 -0.102011278 0.0217117742 0.0264923107 0.137902901 0.00199478748 -0.0913077295 0.0711041912 +tensor_4weight 2500 +0.0806722939 -0.0843013674 0.0457266495 0.0729286149 0.077442795 0.0629948378 -0.0479649454 0.18801184 -0.13502112 0.139153555 -0.0434509926 0.146485865 0.113608092 0.0014678 -0.00269440887 -0.0458229147 0.10888987 0.0207153317 0.182204321 -0.0768271685 -0.0549781434 -0.142413139 -0.0730496719 0.179332584 0.0324325487 -0.133773685 -0.0963223055 0.0017872625 0.0347659923 0.125833228 0.0962186158 0.0585900992 0.135499209 0.1581707 -0.0280255843 0.0394914299 -0.136502922 -0.0513676554 0.0243021026 0.13252829 0.0698634982 0.016627552 -0.0407816991 0.0601785071 0.0856543556 -0.0628332347 0.117408261 0.100019909 -0.0541871078 -0.0604645647 -0.10079059 0.129933119 0.0221414883 0.0313292705 0.0785076022 0.0515565313 0.103933319 0.130206779 0.0966112837 0.0613389611 -0.0399818122 0.185782522 0.0632089376 0.203524143 -0.00186416006 0.0119111966 0.175457805 0.0410548635 0.132948205 0.145901531 0.0722996518 -0.0906703025 -0.0737293884 0.114364773 -0.0281930994 0.0720122755 -0.008361076 0.090595901 -0.0460113436 0.0419355966 -0.0804221854 0.0608950555 0.170809716 -0.0476390161 0.169650152 0.0800267011 -0.148617357 0.0836865678 -0.0557933412 -0.0582912862 -0.0435073562 -0.0371096209 0.083936885 0.141528875 -0.0368124209 0.162937 0.182462409 0.0500146635 -0.0173346996 0.00344588282 0.077063866 0.0220477413 0.0806412846 -0.00341189606 -0.101663046 0.0451156981 0.0767283887 0.0141340708 0.039356716 -0.0705280155 0.0963134021 0.0619241111 0.0269989092 -0.021566296 0.145353943 0.0327979699 -0.0733648017 -0.0134483287 -0.0573412068 0.0651545897 0.0168404263 0.0243993159 0.0994131193 -0.0538567007 0.00572972698 0.0229900386 -0.0910499841 -0.105468161 0.0651908889 -0.0938435644 0.0910287648 0.0970317647 -0.0301273968 0.131670371 -0.035016574 0.0178018566 -0.117108293 -0.104218014 -0.0651276186 -0.0432341956 -0.0111995684 -0.113138527 -0.0553993024 -0.0827366859 0.111415848 0.0760915801 -0.036721129 0.0393031836 -0.107385024 0.0509901345 0.137131959 0.101355053 -0.0619110428 -0.0586683974 -0.125746161 0.146015525 0.0182545464 0.101042837 -0.103411034 0.141968071 -0.110162877 0.0819647014 0.151268393 0.181896359 -0.0776448846 0.158234477 -0.0236076955 0.162451014 -0.0745234191 -0.0891344696 -0.0165763423 0.0465359874 0.164738223 -0.0147009594 0.136529386 0.119493932 0.123288825 -0.0328544565 -0.0433127023 0.142125174 0.104210556 0.165809229 -0.0778093338 -0.0373858176 -0.0823435411 0.0155735873 0.103326514 -0.05250616 0.0069106333 -0.0459599681 0.0475454628 -0.0535901822 -0.0878656879 0.188019454 0.0936229303 0.00049323542 0.111949839 0.101932622 0.111278057 0.00650064787 -0.132303327 0.154887334 -0.0368106291 0.185657039 0.078358531 0.0830566436 0.000230199876 0.1663609 -0.110823177 0.0969691128 -0.0866530016 -0.0828108639 0.063297838 0.0365174599 -0.0799051747 -0.00249398779 0.0037801282 0.175893486 -0.0303821024 0.17547828 0.156276211 -0.0808659643 0.0175747536 0.0641925558 0.132568434 -0.107452586 0.0114268949 -0.0109794568 0.105085135 -0.00249309023 -0.105099067 0.11644727 0.00391933694 0.0913905948 0.195951268 0.0354405977 -0.00441903574 0.0888798311 0.0769788325 0.0180195644 0.0349424444 0.00618674606 0.0450688228 0.0371989682 -0.0904219598 0.0357578248 -0.0248050801 -0.019140562 -0.0610508397 0.0400745049 -0.0465866067 -0.0127383219 0.136099428 -0.00988076627 -0.0224271286 0.138736099 -0.0945356563 -0.0887529403 0.00517961383 0.000571548939 0.0760833472 -0.0421531834 0.107855894 -0.12219803 -0.120920695 0.010690853 0.00134639442 -0.0446796417 -0.0908018351 0.0188111514 -0.117398165 -0.00559857022 -0.112447143 0.0241752416 0.051861912 -0.0751599744 0.0727101266 -0.0994263515 -0.116854861 -0.109606072 0.0153764635 0.0296985656 -0.094453536 -0.123156615 -0.0982722938 0.102426931 -0.038060952 -0.127354875 -0.0424764156 -0.141495243 -0.136656374 -0.0849142522 0.100465015 0.0261948798 -0.0149132377 -0.095423542 0.0603073835 0.0239272416 0.0944917873 0.0912092179 0.0132168755 -0.0580653921 0.0647564083 0.0321055204 0.0641190782 0.107408777 -0.0238600653 0.0974822938 -0.151330233 0.0900393799 0.185146719 -0.0562634654 0.19377704 0.0260389671 0.149799034 0.0271496754 -0.0140250009 0.0241740346 0.0609554648 0.0848416314 0.0207572728 -0.107217379 -0.0550570227 0.0541072674 -0.0474922284 -0.0888904482 0.0580282025 0.0328076519 -0.00170895853 0.0491873212 0.090218015 -0.0734803379 -0.0112014636 0.150287092 0.16178152 -0.0178813841 -0.030176945 0.175077632 -0.0382624194 -0.0120699406 0.0354120433 0.0163750257 0.116489731 0.138228595 -0.00104773929 0.02116061 0.195760205 -0.113974452 0.204999462 -0.00420999806 0.0197493862 0.140327349 0.145642623 0.113952592 0.0668037087 0.144647643 0.138338432 0.0301354099 -0.092116423 0.141155869 -0.0757502168 -0.0111120678 -0.019430887 -0.132206604 0.0304258037 -0.00902231503 0.023573963 -0.11457108 -0.0038465804 0.0600269213 0.185435995 0.103536278 0.108110771 0.0262743887 0.092287004 0.0180775113 0.0580065064 0.109430514 0.167516813 -0.0948597863 0.147749871 -0.0377445519 -0.16559723 0.103454545 -0.0619672574 -0.0660705566 -0.0222212803 -0.00183966081 -0.0677803308 0.0315424129 -0.00417117588 -0.144289106 -0.0828239396 -0.146710843 0.0357453451 0.00331253489 0.0235776883 0.0018393771 -0.0240897052 0.112991959 -0.097301051 -0.0531368554 0.102575697 0.224759638 -0.100583948 0.027663447 0.0663552508 -0.0544704907 0.0913643613 0.10431046 0.14408429 0.160526797 -0.0272651091 0.128108725 0.137256622 0.0514451601 0.0290343836 0.0522942841 0.169628382 0.0517538302 0.039717 -0.112903044 -0.0319129899 0.142312348 0.16764465 0.00277794432 0.155595258 -0.016347399 0.0998492464 -0.0829867125 0.0122846849 0.10995502 0.176780567 0.166116044 -0.0651847348 0.0968866721 0.0796400309 0.156421289 0.00979311764 -0.111015052 0.100035012 0.198834509 0.104847461 0.0955422893 0.0701622218 -0.00574288098 0.0388571136 0.0862576142 0.0804817602 0.0700528994 0.0890722573 -0.00526280887 0.0880217403 0.108766705 0.0439562909 -0.136678606 -0.017526824 -0.101755708 0.150479943 -0.0441651195 0.0611818954 -0.010830123 -0.0615075193 -0.0789036453 -0.0960501432 -0.0448041894 -0.114129215 0.157983571 0.0660151616 0.00131378241 -0.0953527689 0.0812098756 0.0714970827 -0.0841728672 0.0815933347 -0.0241262466 -0.0606837049 0.117682979 0.170510948 0.0609742589 -0.0866294503 0.0337947756 0.0836874992 0.128505945 0.0142021542 0.00716301799 0.073032476 0.096828863 0.0873111039 -0.0305738319 -0.017774554 -0.0532108061 0.180189192 0.0185202211 -0.0507842451 0.100284688 0.0385177433 0.0343939774 -0.151525408 0.161509618 0.0328486934 0.0315718576 -0.0216901544 0.011822544 0.0440483205 0.123011395 -0.0785683393 -0.0336200632 -0.0159502272 0.148681283 -0.0669046566 0.185322538 -0.0733356997 0.0739779621 0.0648668483 -0.0301269554 0.156339601 0.0585463084 0.155070648 0.18907924 0.0647668913 0.013078318 0.167015359 -0.0203347579 0.134109989 0.110318691 0.0168762747 -0.00621265173 -0.0438371375 0.0293022711 0.0136860888 -0.0961167067 0.131283402 0.0199183244 0.0874097347 0.109514065 -0.0711590275 0.13801989 -0.100633904 -0.0766485333 -0.0797629207 0.120990887 0.117599219 -0.0800174996 -0.0554481633 -0.0130264247 -0.0096846018 0.0329468772 0.200460345 -0.0798182935 -0.127444193 0.137921482 0.056331329 0.0758561566 -0.0259927046 -0.00583240716 -0.151809007 -0.0616948605 0.0165051967 -0.106223613 -0.107458085 0.0237796139 -0.133243531 -0.0831126943 -0.012560742 0.0367795378 0.00879683718 -0.121515289 0.0290033501 -0.0651801005 -0.0471335575 -0.00044152551 0.112791196 -0.156063318 0.0274669975 -0.00387126207 -0.142437324 -0.133588076 -0.0751931593 -0.0703300163 -0.0889332145 0.0192210358 -0.0770214796 -0.142161205 0.118518829 0.015532054 0.00297008874 0.00619109394 -0.0286132246 0.067848444 0.0446564294 -0.12528789 0.0658862889 0.142027885 -0.0245133974 -0.0243382379 0.0615522414 -0.131100833 0.0117956251 -0.144661099 0.153534442 -0.00748422509 -0.101551078 0.140787482 0.120413505 0.133537158 0.109931737 -0.076232776 -0.0067446162 -0.105740324 -0.0634061843 0.0939473137 0.119690232 -0.0357088707 0.0102475164 0.150871128 -0.000345803623 0.111536011 0.0299190637 0.191872507 -0.00425557932 0.0131858671 0.0683450401 0.011605869 0.0222013909 0.0556304455 -0.0517201163 0.161248505 0.0784498453 0.171983451 0.119539365 -0.0555509515 0.0169317685 0.00468148896 0.0350351445 -0.143832296 -0.123316839 -0.060894113 -0.00792651903 0.165182695 0.13920185 -0.0278753694 -0.0646031126 0.0390878469 0.103839591 -0.0264649615 0.159046769 0.161050528 0.056850709 0.0216505565 -0.019877946 0.0416690223 0.0680721179 -0.00811236072 0.0474281274 0.139705688 0.128775299 -0.134791732 -0.0200266857 -0.0365998596 -0.0124936523 -0.0767151639 -0.042266313 -0.0712475628 -0.0536471978 0.133768514 -0.0192902926 0.105865858 0.113762073 0.104997188 0.0901620463 0.066951476 -0.0920727104 0.150316 -0.115942262 -0.0646094009 0.051550284 0.106786288 0.0553277843 -0.135059014 -0.0844271183 -0.093783997 0.14749904 -0.0715771541 0.104918532 0.169773012 0.030166015 0.0254033525 0.0341539346 -0.0932782665 0.0505385213 -0.13566044 0.172710717 0.181072846 0.0247942675 -0.0522602275 -0.0928869545 -0.0629897714 -0.00787132327 0.160656855 0.119059108 -0.0577676259 0.130579263 0.103787817 -0.0639968142 0.0237170234 -0.0796101764 0.0789383575 -0.11092788 0.0240584183 0.0880425051 0.0497003458 0.0207255501 0.0609250851 0.121555626 0.0423985943 0.169498548 -0.168339416 -0.0737465993 0.00344401528 -0.0481818803 -0.0240785405 0.138308004 -0.0498832725 -0.0870527998 0.135833338 0.0367706791 0.164695784 -0.0926531628 -0.0138947945 0.0515966341 -0.124790356 0.160091609 -0.08937978 0.0392833501 -0.0552154407 -0.0162713174 -0.0258723479 -0.065008454 0.0126740728 0.136108771 0.112826265 -0.0117993969 -0.0383974053 0.13958928 -0.111128941 0.0530200005 0.0264452137 -0.00290334155 -0.0446272232 0.061192058 0.175629675 0.0535970144 0.0791243389 -0.144284248 0.161759198 -0.0264586899 0.170231506 0.0360257179 0.0236983728 -0.0918620229 -0.124832675 -0.129897267 0.0827946812 0.167229131 -0.0483314805 0.0731398612 0.0330644958 -0.0419231206 0.147590339 0.120546743 0.0866150856 -0.0558574684 0.14509137 0.112841494 0.010361298 -0.0738257468 0.062864013 -0.000246174692 -0.0360012166 0.172877163 0.117604062 -0.143552348 0.169368088 0.0966829956 -0.0905596018 0.0989860147 0.143281475 0.0763563141 0.137610212 0.122151025 -0.139252588 0.0253664367 -0.0899616033 -0.0669621378 0.173054621 -0.0460386537 0.0831045434 0.136806592 0.134621754 -0.0229169969 -0.00980438758 0.0452408046 0.0591817014 0.186792865 0.00453559728 -0.0630519763 0.129970819 -0.0708865598 0.0169868432 -0.0856622308 -0.0697054416 -0.00249436265 -0.00108185853 -0.0849266797 -0.0896446258 0.205029503 0.0984538794 0.0920003206 0.0979186818 -0.0760004744 0.0389556028 0.154188663 0.0658197105 -0.0182720162 0.134750709 0.0715288147 0.17903395 -0.133035272 0.200460136 -0.0744331852 -0.0414500348 0.0895937532 0.0214252006 -0.022997003 -0.0672739893 -0.0548784323 0.166354895 -0.0431604087 0.165455922 -0.124262832 0.113898836 -0.166768521 0.00970084779 -0.0567515977 0.0607765876 0.183420077 -0.0727137551 -0.0270099547 0.0259942077 0.00637345994 0.0362093039 0.0634940416 0.22586067 0.0804543719 0.177989498 0.168853745 -0.0725347623 -0.130849689 0.14897649 0.0990756676 0.105376959 0.0459880121 0.037299931 0.0371170193 0.124092944 0.026399713 0.214453608 0.165884897 -0.0445454419 -0.02944877 -0.0441051386 0.0706486255 -0.0643619671 0.107751079 0.0106587159 -0.0160649233 0.0853765532 0.0439129174 -0.0179767329 -0.0966302827 0.153880417 0.158972874 -0.0232971646 0.109733656 0.118792728 0.111737549 -0.0411141589 0.128475308 0.177355379 0.088045463 0.0918510482 -0.0182551499 0.0349350236 0.0172623489 0.0358161516 -0.0553316772 0.00500165345 0.0473173968 0.0152386809 -0.102430955 -0.0801292434 0.142120838 0.182653144 0.0498294421 -0.0422114469 0.0125372913 0.165790632 -0.0877576023 0.0534564219 -0.0601202659 0.04567682 0.0359176025 -0.0493109711 -0.0345178694 0.0486616641 0.179762542 -0.0616127439 0.142689958 0.173905298 0.13620089 0.0958447605 0.0267633125 0.0164805949 0.0387719236 -0.0923323482 -0.025394721 -0.0696693659 -0.0885034949 0.155368611 0.175244749 0.108128108 0.0818990022 0.146583825 0.0607022159 -0.0263073556 0.0531130992 0.0492566414 0.0426749587 0.128532976 0.165642813 -0.0271078423 -0.0249717701 -0.00465310086 0.0946793407 -0.0277413856 -0.0293258466 0.153490797 0.0542890653 -0.121490426 0.0403553173 -0.0457720421 0.0372848473 0.1336312 -0.0569381975 0.0157627482 0.183314934 -0.098882556 0.0972879678 0.133379266 -0.13416934 0.127192289 0.114743538 -0.145406723 -0.141376868 -0.00748612825 -0.0768275931 -0.00242518331 0.0610179976 0.11269661 0.0289160293 0.134316772 0.0604642555 -0.0706829354 0.0602234714 -0.00745525956 0.128726706 0.02637784 0.0765895173 -0.100985415 -0.103230231 0.0484438539 -0.0102437539 0.117040537 0.130066067 0.0934641883 0.035608504 0.080431819 -0.0183086582 -0.0162367485 -0.000762896263 0.091369085 0.0433430262 -0.0226776432 0.0331841335 -0.0515708551 -0.138115823 0.111762553 -0.00960157253 0.0194415804 0.0162233952 0.0687385723 0.00495963311 0.124809526 -0.0617128015 0.128910005 0.124350287 0.123989262 -0.0698941946 -0.0828819647 -0.051271636 0.108241625 0.090747878 -0.0240470748 -0.0150890118 0.159876198 0.111201644 -0.107370481 0.041435346 0.0879196003 -0.073871471 -0.00736038294 -0.0421624519 -0.00446702167 0.0206496771 -0.0786093399 0.00770913251 0.0940739587 -0.143417105 0.161424622 -0.00790184364 -0.0727001727 0.042316515 0.141635984 0.168245554 -0.0294214915 -0.159648478 0.112373084 -0.0489424169 -0.0889650211 -0.136385739 0.0504631549 -0.0346960463 0.0639858767 0.0742279962 -0.0959718451 0.125432774 0.0435161628 0.0543604121 0.12924619 -0.067039676 0.0832744464 -0.119794376 -0.0892888829 0.144032732 -0.13564758 0.0327132791 0.0262426939 0.0729919598 -0.0233827997 0.0430958606 0.103070885 -0.00284027657 -0.131751791 0.0218737386 -0.0669141933 0.0876880065 0.108192131 -0.00722055649 0.0314042829 0.00201363396 0.0693058148 -0.127397463 -0.11308068 0.095366247 0.0613252074 0.0767963976 0.109912105 -0.023631271 9.64457431e-05 0.0741448328 0.0674567968 0.0592180379 0.120218024 -0.138540611 0.186128601 0.0634339973 -0.066954501 0.123150513 0.00262892642 -0.156880677 0.0271030273 0.0389812775 0.163095251 0.0937159061 -0.119892217 -0.10029912 -0.0113538243 0.117122836 0.0958641991 0.0554464087 -0.0611852631 0.0309960768 -0.148533225 0.0168368462 -0.0148341283 0.0199822951 0.0840069354 -0.0250799228 0.000891973905 -0.110700309 -0.0532766916 0.0793971419 0.0984170437 -0.128924787 0.0291142873 -0.0578225479 0.0482807197 0.0368235111 -0.0756311119 0.056336727 0.168977603 0.00820702594 -0.0148615483 0.128323391 -0.0488858111 0.0030394888 0.0392629169 0.0217599515 -0.109780788 -0.124327026 -0.110027082 0.079080537 -0.0519953929 -0.0504782349 -0.00701974798 0.17692171 0.143427163 0.152141586 0.0693683028 -0.115196042 -0.0221416522 0.169055194 -0.0540919825 0.182789385 0.139691234 -0.074015893 0.108184151 -0.0935382247 -0.0598740615 0.0335229784 0.150850862 0.111152209 0.163470238 -0.111606114 -0.0112746516 0.00895981397 0.146160573 0.137397975 0.165998906 -0.0262579694 0.0310111959 0.0895001888 0.0290670171 0.148392752 0.10005831 0.0260470044 0.0666432157 0.14119634 0.145734191 0.140673295 0.0609008037 0.0760866255 -0.0274666939 -0.0857639909 -0.093561694 -0.0673863441 0.06305594 0.183098152 0.0362807289 0.179879576 0.187368259 0.0867011249 -0.00191641552 0.179400802 -0.178453162 0.0443644077 0.14893277 0.130691677 0.190664202 -0.00733991154 0.0375372507 0.139617547 0.0207137242 -0.0660620481 0.121428333 -0.027175935 0.0812105387 -0.0167020112 0.0997308716 0.0814295784 0.00100216595 0.0675137788 -0.0446306355 0.11655578 0.0790163651 -0.0364569351 0.043299146 0.0583344959 0.0861434639 0.0999846533 -0.101646118 0.0544962138 0.111912884 0.13398391 0.192645401 -0.0995487198 0.172990069 0.0454724953 -0.0182189811 -0.0639074966 -0.117950983 0.158961445 0.0991295949 0.00396099035 -0.120374672 0.168334991 -0.0206933524 0.0865943655 0.161322176 0.0555427149 -0.0746511817 -0.0254731867 0.148725659 0.114840917 -0.0996649787 0.0825758129 0.00449527614 0.162873149 0.14936614 0.156525835 0.0766895339 0.118420944 -0.0548048988 -0.167635486 0.0756825805 0.059926942 0.0492656752 0.0400654711 0.0896347836 0.0765077025 -0.0438671187 -0.146087736 0.117657624 -0.0255973134 -0.11745102 0.0930163413 -0.0821457729 -0.115750015 0.0327894762 0.120342232 -0.0219539329 0.190586492 0.0217166767 0.0146391429 0.0616531707 0.103957534 -0.0640848204 -0.0858041495 0.0310945753 -0.102986038 -0.0559266806 -0.000226511125 -0.140096694 -0.119943008 -0.111525618 -0.022964308 0.0660581961 -0.140670016 -0.0539666936 0.0656664073 -0.146068677 -0.0434579179 0.0442539938 0.049366042 0.138595164 0.215925127 0.127935782 0.00820590742 -0.0747593045 -0.0842378289 -0.0417899489 0.0216342304 0.0241500065 0.10414844 0.0635119677 0.109194174 -0.0664032325 -0.0841010138 0.0333074108 0.144626364 0.0835791081 0.0405562595 0.144938678 0.113412 -0.0413297117 -0.0116025591 0.116451755 -0.122508198 0.115656048 0.118746422 0.149316311 -0.0756765008 0.162036806 0.136063144 0.0960770398 0.0914931074 0.00234524277 0.141637772 0.0776848495 -0.103856735 0.0223964415 0.0540647469 0.154280543 0.0363733396 0.020892188 0.0519513749 0.00717404438 -0.0717171952 0.0605637506 0.123303227 -0.0711054057 -0.0547375344 0.0238987729 0.122411825 0.0981374756 -0.0796336755 0.181836978 -0.0139241079 0.0208457373 0.0578660555 0.115007117 -0.152290791 -0.116023742 0.125889778 -0.0744427964 0.173140392 -0.0230522808 0.0991717577 0.0317968801 0.107756197 0.164815009 -0.0174482651 0.0639693215 0.0523474552 0.0462639593 0.140508741 0.0506025292 -0.0438129827 0.0276643373 0.0874049738 0.138593227 -0.0969195291 -0.0404044203 -0.0224188063 0.108781926 -0.100896388 -0.0309308395 0.125772789 0.028097406 0.0634060204 0.0683069155 0.145577833 0.183691531 -0.0496028848 -0.0776938125 -0.060156174 0.0218149051 0.197596177 -0.0154853165 -0.0412122235 0.0939057693 -0.118166968 0.1283319 0.111681804 -0.143919662 0.00493258471 0.126385331 0.151842475 0.186866894 0.0665669069 -0.0903968439 -0.0313272439 -0.0247976556 -0.169458717 0.054224968 0.0968870521 0.139724314 0.0172182582 0.103909202 0.155110002 -0.0126233418 0.190851286 -0.0473378785 -0.106794864 0.0950474441 0.108153269 -0.00215253839 0.0927259997 0.0746136159 0.000158840179 0.151475027 0.112737246 0.0532431304 0.054269813 0.129630253 0.164428711 0.0523424074 0.0814517438 0.016212143 -0.0117008882 0.0680367425 -0.0788285807 -0.0789092779 0.172827527 0.119092286 -0.0780554339 0.148280904 -0.0357619487 0.0404880531 0.139303714 0.152678803 -0.0313055441 -0.0433851704 0.145161822 -0.157154232 0.0209975056 -0.0296302848 0.020536093 0.0674732029 -0.0705216452 -0.0414924286 0.057908535 -0.169467628 -0.0330408588 0.182830229 0.0190448835 0.13370271 -0.0713856667 -0.0501033887 0.0232970063 -0.0963442922 0.0447021201 -0.118378267 0.189147756 -0.0498844683 0.0468240269 0.00958443806 -0.109032102 0.108971842 0.00711469864 0.00700109964 0.112477995 -0.138462275 -0.114364117 -0.0077861608 -0.143087372 -0.0425114706 -0.137003303 0.00309556606 -0.056427639 -0.084226869 -0.0175813958 -0.120090812 -0.131012186 -0.136584803 -0.0965648219 -0.0900525004 0.0984451473 -0.0295922905 -0.0215709601 -0.157052785 -0.0698363632 0.128503025 0.0812449306 0.044423122 -0.0622849166 0.126199692 -0.0240686592 0.110058717 0.0588081293 -0.0629438162 0.0803009868 0.0551974401 0.00367958308 0.0953964517 0.096288465 0.0765576512 0.134714395 0.128183305 -0.0794131979 0.158218175 0.0334427804 0.103117377 0.118011713 -0.0353304408 0.0812821984 -0.0924249962 -0.0943661332 0.0499824919 0.172689974 -0.015012878 -0.00591862109 0.000791038619 0.0393676013 -0.087351352 -0.00550199067 -0.131391063 0.110083923 -0.0942325443 0.151406854 -0.0441909246 0.125668615 0.113547325 0.104156129 0.0539508313 0.04261766 0.0514821857 0.120158657 0.157996073 0.00543851778 0.145554841 -0.141709834 -0.0490901694 -0.0447609834 -0.0976633877 -0.0905513093 0.142786831 0.134974882 -0.0384936519 0.124396443 0.0711318851 0.0364370346 0.0655808076 -0.111105889 0.177323133 0.0175751373 -0.00530883716 0.0142328804 0.0944742784 -0.104841068 0.0137548354 0.072433494 0.0684480369 -0.0881074294 -0.0346771851 0.153584346 0.127561867 -0.036190562 0.0799537897 -0.0859517306 -0.0208766013 0.0334149264 0.0264191292 0.0374499112 0.142725974 0.0713385791 -0.0855041817 -0.0318115205 -0.070518069 0.0307250991 0.13150534 -0.072534509 -0.104207613 0.189150855 0.0493283272 -0.0275112167 0.166747719 -0.0543703809 0.0186180789 0.165644959 -0.0484347753 -0.101127744 0.1094006 0.185057849 -0.0443641394 -0.144589871 -0.0198352784 0.0630682558 0.152583927 0.0218491945 -0.122168265 0.0245404653 0.0799318552 0.0951262489 0.122852422 0.0709591582 0.147164628 0.0161824599 0.137200028 0.0640827939 -0.0755483732 0.0245481338 -0.0100152371 0.14329806 0.0801420659 0.0691645741 -0.0589840487 0.130164921 0.00311033521 0.0268984325 -0.0349549092 -0.00415431196 0.0867199227 -0.0820708722 0.164873272 -0.169460997 -0.0531368032 0.152747095 -0.165769219 -0.0328237601 0.208387405 0.047868643 -0.137711033 0.091696687 0.0407248922 -0.0470836647 0.145012021 0.0215788931 -0.0970180035 -0.0877238438 0.172740042 -0.0380049273 0.0389408693 -0.0466512367 -0.0354783237 0.0639048144 -0.0723189265 0.104144089 0.0859282464 0.13152495 -0.128844962 -0.0214285571 0.115861677 -0.0982220173 -0.0596799552 -0.146179408 0.0965073407 -0.0615618378 0.0918795392 -0.0798900872 -0.160657704 -0.126372367 -0.0579259321 0.0660004467 0.161658168 0.104445606 -0.0138723087 -0.0934201628 -0.0294323321 0.00640385225 -0.0297847576 0.00359103805 0.0525301509 0.043393068 0.157615259 0.180452317 0.194085628 0.0691384748 0.134813935 -0.0515557639 -0.0708269849 0.088720344 0.0964737907 0.155151665 -0.0886232555 0.0660808086 -0.0829776451 0.101593263 -0.0553871915 -0.0182833746 -0.0800305083 0.178971991 -0.118555464 0.000954513147 0.00644796155 -0.0530595593 0.0575908013 -0.118949205 0.0164483711 -0.0739891082 0.157318026 0.0651573166 -0.0650493428 -0.0733206868 -0.0576313995 0.164077073 0.145226628 0.162652925 0.0094029773 -0.0282768738 0.00458042137 -0.0341153368 0.0680479035 0.00607198244 -0.00773193361 0.0329170078 0.0389629118 -0.162258938 0.0433447473 0.04725869 0.0874198675 0.0951827168 0.171968609 -0.0408742838 0.0696654394 0.0726477876 -0.0282292832 0.13275665 -0.0873321742 0.118847266 -0.094043538 -0.00783028547 -0.110805973 0.0371340886 0.011893562 0.0594444014 -0.0330062397 0.103927121 0.0566597134 0.070883058 -0.0379756056 -0.0413067117 -0.0425783545 0.111014336 0.0684353039 0.0692486465 0.0570905209 -0.0403401703 -0.0371561572 0.0209832303 0.115475081 -0.0771651715 0.1641756 0.132279456 0.11953865 0.152888119 0.161066189 0.0236473735 0.00623266771 0.0192979313 -0.0633362159 -0.0164176226 0.112811953 -0.0552775189 0.104069315 -0.00800814759 0.142864808 -0.119347326 0.0983854905 0.0419207662 0.172624946 0.13796024 0.14337796 -0.0860052034 0.0162426792 0.189442113 0.152368501 -0.0791355148 0.191711664 -0.045804102 0.0885845646 -0.0440708026 0.0840106755 0.142962575 -0.109646082 0.00183130568 -0.0525661372 -0.063833341 0.135730505 -0.0724909231 -0.0305184722 -0.0249376651 0.121382438 -0.053534802 0.0984386578 -0.0753171369 0.0939152837 0.00636771461 -0.049575828 -0.0524887219 0.168064952 -0.160115361 0.164482102 0.0465373471 -0.0193462316 0.189078987 -0.0162147954 0.0448620357 -0.0965996385 0.029319942 -0.00714721577 0.113784157 0.0306320339 -0.162794113 0.036773555 0.132374078 0.127610669 0.050170999 -0.0961276665 0.119895853 0.139223352 0.0842405856 -0.0275717005 -0.048406817 0.0329983979 0.0270373188 0.0343525745 0.143185422 0.126393601 0.117207043 -0.111426808 0.00335491286 -0.0176657494 0.00746619329 0.0158684719 0.0560003184 0.162911817 0.169606015 -0.00238073198 0.110828638 0.170307085 -0.00346783875 0.0999374315 0.120749101 -0.0795692578 0.00236885715 -0.0515253469 0.150424793 0.105336741 0.0109604793 0.173142359 -0.0780003294 0.0945810005 -0.00646437472 0.180615485 0.165021613 0.0602739379 0.187165871 0.0765264705 0.127765179 -0.176980063 0.0931098312 -0.0693295747 0.00362776732 0.0865088329 -0.122369155 -0.0243129283 0.0619697198 -0.104171418 -0.0679299384 -0.0262254607 0.0717521831 0.177063763 -0.0804491788 0.0471047014 0.130668938 0.0129235433 -0.044484172 -0.110434927 -0.0484529473 0.056467887 0.161255106 0.0596067756 0.00202068407 -0.0361751877 -0.0691773742 0.13520807 -0.076868318 -0.00055724883 0.136181444 -0.144539505 0.0790223703 0.104204692 0.113801822 0.079020001 -0.00296835252 -0.0561508648 -0.0639912412 -0.096902214 0.063912116 0.0158059336 0.0301315952 -0.0454157777 0.0474643707 -0.0998102129 0.0738191977 0.104636416 -0.0670538545 -0.0157381035 0.0721107796 0.145137876 -0.0157069545 0.0764504448 -0.102792904 -0.0242816862 -0.0148483599 0.164760023 0.0949771851 0.0556184649 0.159742668 0.11568401 0.133465067 0.0253654663 0.0464388952 0.1051047 0.168289691 0.0721212029 0.0222112965 0.177081063 0.0332994349 -0.0857983083 0.168268591 0.0322159566 0.109546766 -0.0359725878 0.173063204 0.114196211 -0.0472102724 -0.0865413472 0.0990323052 0.106711067 0.124012247 -0.0643537641 -0.0489022098 0.0309291538 -0.139787465 0.154954955 0.158387884 0.175662607 0.0474990308 -0.0359640867 0.0122991987 -0.0742847919 -0.116437078 -0.022644069 0.0906722546 0.109451734 -0.0519334488 0.0178270023 0.166177243 -0.0722740144 -0.00594325503 -0.054272633 -0.142582893 0.0954159126 -0.00533125736 0.0493725352 -0.146273687 -0.00553551223 0.126585066 0.078031756 0.0408783071 0.0403124169 0.0905888006 -0.0999932885 0.0324288867 -0.048167184 0.048798237 0.0910838693 0.010453077 0.0368673541 0.126243964 -0.10025917 0.119546175 0.0917273164 0.00480829086 0.150867537 0.0249657575 0.0332049243 0.0880667567 -0.0586667955 0.072079584 0.175612509 0.0517335869 0.0897404104 0.0503287949 -0.133238509 0.0293708127 -0.0389682427 -0.14583306 -0.0541342646 -0.145008922 -0.00405186322 -0.0991181433 0.149902388 -0.079027079 0.13217856 0.152729511 -0.00680424459 0.0569330305 0.0193487108 -0.0548162982 0.0513189137 -0.0619835034 0.0174638182 -0.102416456 -0.0279327556 -0.129593804 -0.0595460832 -0.0616615489 -0.0346394479 0.108837441 0.0645946115 -0.11461664 -0.00987040997 -0.0194515288 -0.0440613478 -0.0762307048 0.113881603 0.0807152838 -0.0215207562 0.053498432 0.00671930611 -0.0759949684 0.078516528 -0.129376277 0.00994156301 -0.0961488858 -0.0222117975 -0.0067448318 0.109046414 -0.118271597 -0.0475344136 -0.00401996076 -0.0324587896 0.111761943 0.0669000298 -0.140469015 0.0275926143 -0.0115185082 -0.111927435 -0.131411597 0.0218255222 0.0812726617 -0.0837273151 0.0129608214 0.133702025 0.0800562501 -0.0325852484 0.0149642564 -0.0189071596 -0.173735201 0.0099428352 -0.00971476547 0.0206104293 -0.113343984 -0.107871518 -0.120784573 -0.0340123661 0.12200997 0.0924243927 0.134061486 0.00931480248 0.00610988587 -0.143206105 -0.013435632 0.157742649 0.0986237824 0.031523902 -0.109645322 0.101808242 -0.0647404119 -0.0524963662 0.0176734496 0.0557880327 0.108553298 -0.0902532712 -0.130619377 0.0632717982 0.104041591 0.0543672703 0.0434634201 0.147163749 -0.114743508 0.158163086 0.147472963 -0.0521723554 -0.100233488 -0.0301290527 0.022240812 -0.0719027072 -0.0280963797 0.0667710602 0.00854949374 -0.0772623569 0.0685823038 0.00593935698 -0.0318717696 0.0944193527 0.0372171178 0.0526549183 -0.101998597 -0.0764912069 -0.118265085 -0.0155763114 0.0363124497 0.0786181912 0.0214089006 0.00902846642 -0.0222423617 -0.0114359492 0.016868338 0.0275472291 -0.0955874622 0.068063274 0.0991849825 -0.129994661 -0.14134939 -0.0891924128 0.0620854646 -0.147031859 -0.0616799332 0.125663459 0.0466651432 0.049083516 0.0202748105 -0.0635501817 -0.0811214596 0.0342678167 -0.144643277 -0.0279157292 -0.0520310104 -0.0900295675 -0.0991411358 -0.00983341318 0.0775161907 -0.108855121 0.0795320719 0.0555958673 0.047831919 0.116747767 -0.0178907923 0.00232720398 0.0800028816 0.0948506668 0.109556422 -0.139458165 -0.0708387718 -0.0218770187 0.0945127904 -0.15837191 -0.0499448627 -0.148272514 -0.0720821992 0.0286393929 0.166063771 -0.0910914093 0.0242926553 -0.12178494 0.0714246258 0.0331623964 0.141669735 -0.0356313661 -0.113949567 -0.107093729 0.0499410294 0.178825215 0.0778585151 -0.0158302784 -0.104186572 0.0341806933 0.00881133415 -0.0617542751 -0.136974439 0.141358063 0.00945444964 -0.0606168583 -0.0930097848 0.191351295 -0.0328337252 0.0643470958 -0.0714427084 -0.0224459022 -0.0216223132 0.0666145608 0.158240885 0.197071999 -0.00105335366 -0.130007252 -0.105515987 0.138388366 -0.137579709 0.114597313 0.189100042 0.142671525 0.17301853 -0.134293392 -0.0512899421 0.0793258399 -0.0749241337 -0.0476800092 -0.0704626963 0.188314125 -0.0592299625 0.13017118 -0.101025827 -0.0467180312 0.0082515683 -0.112287328 -0.062346559 0.013593995 0.0616131909 -0.078616567 0.0776763111 0.0905192047 0.0462051481 0.161554873 0.105334468 0.13517189 -0.115146726 -0.143860593 0.0162272323 -0.0732620955 -0.0247567333 0.0928674936 0.115267269 0.0816683248 0.00596335484 0.0484995171 0.0975567997 0.055007495 0.194354102 -0.00516810175 -0.00678860582 0.0775851458 -0.0466337353 0.106064767 0.143327415 0.0536226183 0.0567489788 0.205744937 0.0850102827 0.066885747 0.0559313521 -0.0664127171 0.157319784 0.0911302492 0.168385208 -0.0391344093 -0.0564815253 0.098924838 0.0328915305 -0.0534631759 0.0365355276 -0.0136043811 0.0482157357 -0.00801647455 -0.0435665064 -0.0428646132 -0.0644146577 -0.049035199 -0.0692589357 -0.113157302 0.0250543877 -0.0696002543 -0.0740747377 -0.0703184903 -0.00694498792 -0.0684268475 0.149164468 0.153054193 0.0761677772 -0.0204661116 0.12485972 -0.126783043 -0.00187381369 0.0541003644 0.107983328 -0.0837595835 -0.0870729461 0.0846110657 -0.0927637219 0.0266203284 -0.00495085446 0.0371818319 0.148703456 0.0929978117 0.00649248715 -0.052705083 -0.00607873127 -0.114258632 0.0115164211 -0.0972977728 0.084398061 -0.103781044 0.042367924 -0.0428892151 0.0323127471 -0.00776143046 -0.0386278778 -0.0703245178 -0.0709099472 0.16123727 -0.0346258432 0.0354511216 0.0405629389 -0.0782804564 -0.038936574 0.0836522579 0.176592737 -0.00653237011 0.0523184314 0.0602646545 0.0936931893 0.161708683 -0.0552729927 0.113780089 0.150944054 -0.0067132581 0.033431489 0.134626687 -0.0877803564 0.0627585277 0.159808293 0.0874273032 -0.0712592527 -0.0547058992 -0.0712833256 -0.0840452388 0.164110661 0.0463254526 0.11971055 -0.0787557662 0.0414926298 -0.0132505866 0.143975362 -0.0445205495 0.0115820579 0.0280404091 -0.0880124941 0.0269049294 -0.151099011 -0.0708077624 0.1746151 -0.0408792794 0.170332685 0.147516906 0.16913189 -0.0805162191 -0.0267642699 0.00414879527 -0.138525307 0.00516474945 -0.0246851854 -0.0470002219 -0.0861195773 -0.100950107 0.0405560918 -0.0432230942 0.104467802 -0.0955900922 -0.0287470184 0.13435027 -0.0651793703 -0.0681400895 -0.105308339 0.105755769 -0.130883738 0.0873673931 -0.0740140676 0.0563389994 0.0325795859 -0.0656638816 0.0513560064 -0.0224221945 -0.117003471 0.0642713904 0.0981090814 -0.0234821606 0.00352106593 0.0429167375 0.0492503606 -0.0412014462 0.00425704801 -0.0755375251 0.0191665534 0.001770781 -0.110787489 -0.1348757 0.106690206 0.00657321559 -0.0908673182 -0.108731627 0.0566626191 0.0443638451 -0.0703660399 0.0542139225 -0.00493319333 0.12348906 0.00126835168 -0.00761680584 -0.0449741632 0.128203496 0.0697475076 0.156427085 0.111904904 0.0363090038 -0.069633007 -0.124383852 0.0528828725 -0.0260626376 0.146264195 0.202404305 0.123526029 0.113593549 0.063587226 0.115930393 -0.0480900295 0.0870323107 -0.072083123 -0.0411015637 0.1239696 -0.0247502401 0.101827934 0.0338473544 0.179488152 0.0185336322 -0.0024687883 0.193507329 0.0594470054 -0.122646861 0.0367143154 0.0131789902 -0.00824107043 -0.0438461341 0.0322048962 -0.0372958966 -0.0309159439 0.112236492 0.104765609 0.0508270562 -0.134375989 0.0461360626 0.161751613 -0.0832926556 0.0294436906 -0.0715111569 0.151263878 -0.165225923 0.0409720019 -0.0970856622 0.118698254 -0.0376353674 0.0590884909 0.172025725 -0.0388665274 0.0342746116 0.0503573269 -0.0705714077 0.143638507 0.0997425094 -0.0244571362 0.126339301 0.00611212337 -0.0767538771 0.135550186 -0.0817491114 0.0428956598 0.053690739 0.0867463723 -0.120110415 0.092980817 0.0624419227 0.171113074 0.071235843 0.0251063108 0.147427261 0.0600847751 0.147185415 0.0782428235 -0.0334974229 0.140428677 -0.0508178994 -0.0417781211 0.096801661 -0.0446150079 -0.0366388001 -0.0328694917 0.111436456 0.0238367319 0.0866102576 -0.151921302 -0.0687064454 -0.104527332 -0.0853670314 -0.000437619659 0.126783535 0.0569791384 -0.0517665707 +tensor_10weight 2500 +-0.0212358683 0.122197703 -0.0510823093 -0.0501324013 -0.0548678078 0.0356177911 -0.00784289744 0.133274257 -0.113695405 -0.0432012156 0.0939747244 0.0988038033 -0.0639443696 0.0290857553 0.0895108432 -0.0702648386 -0.103292271 -0.0736945942 -0.00376112177 0.0998725593 0.104668014 -0.105342008 0.0656864345 -0.114149243 -0.00571362674 0.0216598436 -0.0114243887 -0.0504532829 0.0596787184 0.00372021808 -0.15211682 -0.0136099635 -0.0886892602 0.0599286295 0.111488119 -0.0207552537 0.0917330608 -0.0306493416 -0.0526035354 -0.0849622265 0.101938419 0.0732950419 -0.117307015 0.0721970722 -0.0825721473 0.0530262738 -0.0570005514 -0.14794296 -0.125646636 0.0978401229 -0.0572635084 -0.0405422673 -0.0372838974 -0.0117440075 -0.0372347534 0.05405204 -0.125367016 0.00642648339 0.0431452096 -0.0818922222 0.0333031267 0.0492147207 -0.108358391 0.0211769491 0.0296456665 -0.115399122 0.0998793691 0.0635934472 -0.0597816631 0.135748908 -0.0291152298 -0.0674216747 -0.0520641059 0.13197051 0.112524465 0.0249439776 0.0945808142 -0.104327582 0.131835803 -0.0558281392 -0.104002684 -0.0600294694 -0.0934771448 0.035828352 -0.00607830286 -0.0175107867 -0.126915321 0.10744977 0.0146200657 0.080092743 0.0436066091 -0.0399526656 0.00826710463 0.0102796853 -0.138014555 0.088743791 -0.0517612249 -0.103482887 -0.0803165734 -0.126166776 0.0944030806 0.117681094 0.109592296 -0.0782303661 -0.012482455 0.0995225459 -0.139412105 -0.085186258 0.148377135 0.150794506 0.110053003 0.0709926337 -0.000629723771 0.013662681 0.0823172182 -0.0580728464 0.0406894386 0.0878868401 -0.0696239024 0.180716202 -0.0156155387 0.102927946 -0.0455422476 -0.0267394036 -0.105474576 -0.0847397819 0.0854922086 0.0470506549 0.0965595171 0.127182499 0.137551412 -0.0368003063 -0.0720289052 0.0134783518 0.0268878676 0.0088609159 -0.0511660501 -0.0823307037 0.0519733205 -0.0623468719 0.0798326582 0.0710632354 -0.00685403682 -0.137981296 0.011066772 -0.0105396928 -0.0804577619 0.111978434 0.125334308 0.00390364812 0.0692017078 -0.0330482721 0.163387418 0.0201984197 0.151748836 -0.0597846881 0.0121450806 0.0635938272 0.143580437 0.120768994 -0.0914445743 -0.0753249824 -0.0758429915 -0.0557716116 -0.0532640293 0.0682220832 -0.0222889148 -0.0228032283 -0.0421909615 0.0669203699 0.0878867134 -0.10618075 0.0309686121 0.0602300242 -0.115028903 -0.104819998 0.0828765184 -0.0887905657 -0.128947496 0.0665918365 -0.0184224322 -0.0902371928 0.0826666802 -0.118614137 -0.0974627435 -0.126331478 0.121555597 0.0860794485 0.087571308 0.10830126 0.000543156988 0.0120290369 0.119321413 -0.0516519211 0.0992750004 -0.0404139012 -6.53001553e-05 0.0228800084 0.142584652 -0.112774611 -0.0440897308 -0.101826452 -0.0968946069 0.110168286 0.0165652726 -0.0190366097 -0.00230144663 0.165133551 0.000178731847 0.163377017 0.0187537577 -0.111495733 0.00262274873 0.0370500833 0.111238286 0.0467720181 -0.112312317 0.0970760286 -0.00430823164 -0.18515943 0.0749811605 -0.108619854 -0.115121402 -0.127359807 -0.139737591 0.144899085 0.125063911 -0.0695543438 -0.0871604905 0.0424468778 -0.0412323475 -0.023522187 0.0755847916 -0.00548974751 0.12203481 -0.135008246 0.133453161 -0.0179671869 -0.0897851288 0.0138236731 0.0413174592 -0.130779296 0.0947689861 0.145704255 0.0761682168 0.0180642232 -0.0403898954 0.0231717415 0.0988920107 -0.116821639 -0.000677700795 -0.0828759521 -0.0409976803 -0.0652928352 -0.00307619479 -0.0512737185 0.0509406962 0.164199054 0.0935533643 0.0614940412 0.0182006471 -0.136083275 -0.036729455 -0.0655212551 -0.000610545278 0.110906526 -0.0215685032 0.0942183807 0.091754362 0.0299259573 -0.0927302539 -0.0870193392 0.041432485 -0.0959858447 -0.0521472655 0.133616418 0.00140008167 0.0231243372 -0.0265129507 -0.0465798788 -0.0466384888 -0.115962021 -0.0177416876 0.0583319366 0.0504825823 0.0262723565 -0.00666236039 0.0547430068 0.176730543 0.0766595826 0.0228095483 -0.0677164495 0.142202839 0.0357140489 0.152742878 -0.138932645 -0.0411144495 0.046292562 -0.0618947372 0.129624233 0.0132857962 0.13160333 0.0381516591 -0.00642365264 0.146867096 0.172721684 0.0822038203 -0.170568198 0.104037531 0.0469250493 0.149638385 0.0287801176 -0.105733179 -0.060603328 0.104576632 -0.109430753 -0.0709009469 0.142124668 -0.0206337441 -0.0685040206 -0.00922098476 0.154050708 0.140051425 0.0351035632 -0.049322959 -0.133136809 -0.00690346071 -0.126874417 0.0869078487 0.163344264 0.177283853 0.0478345305 0.0679927543 -0.122830227 -0.0329710469 -0.138243169 -0.12043523 0.018810302 0.0234610289 0.085149698 0.122265451 0.0947110653 0.0231434219 -0.135353088 0.102692701 0.0495786704 0.0994817838 -0.00882655289 0.0960466415 0.139429167 -0.00701248366 -0.0530242138 0.13024801 -0.0270533189 0.0678792298 -0.0942333192 -0.0897237882 -0.116916768 0.128777713 -0.0864267498 0.000630012888 0.0271891207 0.0435388982 0.0202370584 -0.101047307 0.0206810199 0.17194964 0.148503706 0.0857690498 0.161830828 -0.135013863 0.118901089 -0.0623394176 0.144353598 -0.0805446953 -0.166448697 0.0953875184 -0.0034776032 -0.108664006 0.0685736537 -0.0221380815 -0.0657745823 0.00281999423 -0.107203327 -0.0475207977 -0.055595018 -0.110597998 -0.000231489539 -0.0365380459 -0.0790299848 -0.0336767174 0.00941203535 0.0301933419 -0.0706446469 0.0102126878 -0.0478211716 0.0370407067 0.106186956 -0.0224919319 -0.0260020383 0.0235891566 0.0859296694 0.108447783 0.00656368863 -0.0794644728 -0.075159736 -0.110706359 -0.0758301392 0.0576422177 -0.0484935977 0.0564662404 0.137293592 -0.1790566 -0.00773862004 -0.0347555578 0.0215124693 0.0171802938 0.176061988 0.0460711457 0.147571266 -0.102327831 0.195341617 0.058222834 0.13799569 0.118976817 0.0509692021 0.050737951 -0.0811304599 -0.0793619528 0.0404427722 0.166059211 0.0436591581 0.0677934214 -0.103122085 -0.0477355011 0.0969016552 0.0731616244 -0.0612649173 0.0483927317 0.101544008 -0.0105341347 0.0251087993 -0.0852457061 0.0189878102 -0.00559592852 -0.0792983919 0.0710483044 0.0725165606 0.0998317599 -0.00865345914 0.0574824326 0.00925513823 -0.124299236 -0.152690053 -0.0688084438 0.0785533562 0.0506814644 0.147026345 -0.020497581 0.0864086375 0.0827063099 0.0358608812 -0.0127497688 -0.051433742 -0.03434515 -0.128280848 0.0762891397 0.0711952001 0.0842626169 0.116974868 -0.13133204 -0.0335378908 0.0899138004 -0.133157939 -0.120754138 0.122247837 0.144659519 0.0242889076 0.0994777754 0.06880299 -0.0276972707 -0.113097489 -0.0623187982 0.0822641999 -0.119825244 -0.103717044 -0.0319737606 -0.135691062 0.152195513 -0.0339170918 0.14714168 0.0540374666 0.133274242 -0.0455024727 0.113066867 -0.0257611051 0.0637556389 0.0359611772 -0.0776446313 0.0980009288 0.00792387128 -0.113957405 -0.0919724554 0.0144144921 0.138423935 0.073610343 -0.143383607 0.0898004025 -0.0354972184 0.0135471914 -0.157295063 0.0106644779 0.0293142907 -0.00285607576 -0.0387508944 0.0805381238 0.134696633 -0.0486889333 0.115237691 -0.114024878 0.0478819907 -0.204706341 0.0640740022 0.0566777252 0.127831176 -0.0553595684 0.111671567 -0.09231098 0.0911259577 0.000540402718 0.048529759 0.0324980132 0.131618008 -0.0291069895 -0.117312111 0.119812474 0.0617211722 -0.0828384385 0.103557624 -0.102094062 -0.165967241 -0.0316339955 0.114424214 0.0979985967 0.106774256 0.10326688 -0.0260687321 0.0577113181 0.107890628 -0.207330927 0.13924247 -0.108645178 -0.006839226 -0.0576377921 0.086189121 -0.0478715226 -0.0539538078 0.0736430809 -0.113258503 -0.144859955 0.00832846761 -0.139867589 -0.0278512705 0.111798391 -0.128404155 0.0160521064 0.0621018 -0.0222936384 0.067758739 0.151578188 0.0899427235 -0.0596541949 -0.107079484 0.0257597771 0.10411863 -0.0212498736 -0.106785044 0.102427348 -0.037666291 -0.0371498428 0.12570031 0.0917552412 -0.0337271765 -0.00665520132 0.0846108422 -0.137335181 -0.0166137852 0.0288310051 -0.0332151465 0.129317015 0.00994049478 0.0253879577 0.0699467286 0.0467383862 0.00212879106 0.0807025656 0.104752108 -0.0590361021 0.0355920568 0.144212484 -0.0798209384 -0.119622223 0.129336998 0.144326404 0.140256554 0.130315661 -0.0842513517 0.0202817731 0.0428673401 0.0224014688 0.0633894131 -0.121904492 0.0456793755 -0.119502507 0.0149252117 0.0991675705 -0.00801187102 0.0127708912 0.146813497 0.0297204014 0.0190939084 -0.0910222307 0.0188052319 0.158833399 0.0565757714 0.106845409 0.126346767 0.0609186888 -0.0289904978 0.176135257 -0.0152246384 0.00210902141 0.170274019 0.147236124 0.100339673 -0.0750769973 0.126910731 0.0639681518 -0.141766325 0.00575648621 0.00252318289 -0.132266387 0.0917782336 0.165258139 0.159399614 0.110745199 -0.0729087219 0.00930848531 0.129214033 -0.0562379323 0.114632159 0.0686002523 -0.0798907951 -0.0475209691 0.135875911 0.0170198567 -0.116139926 0.00134182745 0.119676389 0.00874867849 0.044542592 0.0469349325 -0.171083689 0.137197331 0.0741593838 -0.0608005896 0.142061442 -0.012179587 0.0524498336 -0.0355517454 -0.093820259 -0.116283298 0.0172735397 0.112878129 -0.133789182 -0.0446203165 -0.056491144 0.0793790519 -0.0914917514 0.102299117 -0.0138002355 0.162907958 -0.0748615712 0.138051897 0.106378302 0.00609137118 0.122283094 0.139686123 0.0373347066 0.0692594871 -0.09532848 -0.0478848442 0.125734076 0.0966270939 0.079935506 -0.010005963 -0.062777698 0.0338496156 0.0195008758 0.0981275588 0.161850542 -0.0924032331 0.132830709 -0.061219655 -0.112690888 0.121970147 -0.0678780898 -0.0108335214 -0.026362868 -0.0930077806 0.0707007274 0.0775098354 0.0764014944 -0.0804891065 -0.113752075 0.00710404944 0.0588292368 -0.0711446628 0.0660018176 -0.0639827028 -0.0958132148 -0.123145066 -0.0116624041 -0.0329767279 0.0308814552 -0.138171315 -0.126469448 -0.0836871266 -0.0362357274 0.0118497657 0.0553193018 0.162871584 0.00862812717 -0.0775254369 -0.0664473996 -0.0720814988 0.0207482781 -0.000734820962 -0.0429652929 0.156263977 0.0950327292 -0.131239817 0.0131368376 0.0467994325 0.152589336 0.0339371823 0.0687561333 0.0528964065 0.165966034 -0.124363385 -0.0527783372 -0.0752571672 0.0272485688 0.19544439 -0.0526332743 0.11452125 0.06950555 0.0388930514 -0.105637603 -0.031256184 -0.143096924 -0.131879777 0.0679151788 0.0898881108 0.138390079 0.0468003303 0.00110050617 -0.166997537 0.148328051 -0.135715783 -0.0184072368 -0.0204313342 0.0777179599 0.0854007453 0.0669743344 0.0512876213 0.0736838058 -0.0702814385 -0.153760359 -0.0127334371 0.0702296048 0.0945134461 0.113579147 -0.045809608 -0.0650008023 0.0018505156 0.0389033966 -0.0164941698 0.0427322201 0.0889017582 0.0659029856 -0.0811767578 0.0873623267 0.18029575 0.16048792 -0.112073712 -0.134867206 0.0882760212 -0.102488875 -0.175802514 -0.0120754875 0.0511610173 -0.0656074211 0.0839222074 0.132837757 0.19432193 0.0962905958 0.11927399 -0.0800096765 0.0369717441 0.172430858 -0.124649733 -0.0634947792 -0.0230292752 0.0867050886 -0.0332086422 0.0130523248 0.1857972 -0.0377311036 -0.0985669419 -0.0854148418 0.0455307364 0.11115057 0.0544578135 0.0360678583 0.0398137569 0.0514812209 0.0629364699 0.157751113 -0.0630130768 -0.0467872992 0.104552008 -0.00756954914 0.128961414 0.152928904 0.0538875759 -0.0584964044 0.000610977411 0.106909499 0.167631388 0.0725584775 0.129740968 -0.0139085511 0.102276772 0.0839342475 0.0176016726 0.0198159665 -0.0576478094 -0.0319991671 -0.0245914981 -0.0934077576 -0.00444747973 0.0162859596 -0.12718007 0.137412518 -0.0712438971 0.0353779495 -0.00824063458 -0.0929021388 -0.0211561657 0.00593618024 0.0581243485 -0.0115015805 -0.117641151 0.0745487809 0.100706778 0.101716518 0.180298716 0.0615622588 0.190501958 0.0251207165 0.161565259 0.0331474617 -0.049629122 -0.0350433774 -0.177207738 -0.0606218576 -0.135168135 -0.0552285984 0.0633888692 0.169191226 0.181376621 0.123600326 0.0766487271 0.0379917473 0.0728779733 -0.0138807297 -0.126598462 0.0824816525 -0.0179684516 0.0607266538 0.127557591 0.130710021 -0.0497126617 0.097561419 0.0444984324 0.00906473491 -0.107714869 0.109389283 0.0244740434 0.0977818221 0.0717759356 0.146900296 0.0456320979 0.0432121679 -0.068095766 0.035988193 0.16688697 0.015179188 0.193963483 0.030294802 0.0684091449 -0.154407337 -0.0253650546 -0.1035157 -0.123490132 0.160163686 0.000832796795 0.0173495747 -0.103751779 -0.114844963 -0.0266452879 -0.10764344 0.057379473 -0.0908356607 -0.0109218499 0.0242156517 0.107376277 0.00434230454 -0.0106718605 0.126825973 -0.076470606 0.082616128 0.043287307 -0.0409609638 -0.0944999158 -0.102769725 0.0562386662 0.110875674 0.075079143 -0.0874663591 -0.087224707 -0.0344407968 -0.032368429 0.139833078 0.138399825 0.0838530734 0.145841986 -0.0942437425 0.0514989197 0.106409363 -0.105719045 -0.0327457897 0.0023922089 0.0723857582 0.115679517 0.0641390905 -0.0389708839 0.064969562 -0.11146944 -0.0840641856 -0.00882211328 -0.0985186249 -0.0100410283 -0.140838861 -0.0820496976 0.126737103 0.0637906492 0.185262144 0.0401138254 -0.0199363641 0.0733033046 0.0741309002 -0.0405171663 0.119358622 0.131731167 0.00470401347 -0.0874768347 -0.0030607495 0.154426917 0.142330453 0.0776753575 -0.0136618298 -0.0559839308 0.117518134 0.0162534118 0.105629325 0.0662130266 -0.0396728814 -0.0103532532 -0.0827013478 -0.0040447861 0.113319181 -0.0071705617 0.118163139 0.0378845818 0.0424246453 0.148900077 0.0901416466 -0.131045297 -0.0908931792 -0.00706362771 0.118041806 0.0408021808 0.0160984807 0.114142895 -0.0700615197 -0.0494136475 -0.0595068522 -0.0467700139 -0.169063121 0.0931548029 -0.0394937769 0.0162656307 0.10976477 -0.124994159 0.152687699 -0.161219954 0.0349471532 0.00292883557 0.0899900794 0.0686580092 -0.0421929248 -0.0581205003 0.015063826 -0.0568712726 -0.147363365 0.0534492135 -0.0798066407 -0.0166791826 0.0615924746 0.0440535769 0.157255575 0.0443781093 0.107450068 0.0306068957 0.111347824 -0.0176973268 0.0151626179 0.083994858 0.00934020989 -0.0776927471 0.142738372 -0.0590152331 -0.0490930192 -0.103396282 -0.113522559 0.0619038753 0.14320752 -0.162481412 0.0684234798 0.019900918 0.0254009217 0.104212388 -0.034442611 -0.0442203879 -0.0163948387 0.00209845603 0.00254264474 -0.0859125108 0.0658564866 0.0487911701 -0.10677994 -0.0656869113 -0.0264248922 -0.102019623 -0.102298513 -0.138458312 -0.119788498 0.0699746907 -0.127610922 -0.029055886 0.0234031677 -0.0221559554 -0.0785237625 -0.079463318 -0.00504159927 -0.0934635699 -0.128190622 0.116880015 -0.131823063 -0.0321453363 0.00674818456 -0.118852653 0.0121976882 -0.124695078 -0.0894826874 -0.0633899048 0.0750036389 -0.0270189941 0.0216782093 0.119181171 0.139441684 -0.0148586482 0.0138805658 0.0725070536 0.0154770464 -0.126432493 -0.0768562183 -0.0687850416 -0.0399501249 -0.0991529524 -0.0160450432 -0.0420119353 -0.0326361097 -0.0698527694 0.0993200317 0.205627039 0.177876145 0.158109769 -0.0378685482 -0.0387157574 0.0318316072 0.0931719393 -0.0390335843 -0.093457289 0.158116326 -0.0205905512 -0.0280272551 0.0694964528 -0.00315693673 0.100278348 -0.115536571 0.134927243 -0.0945299864 0.157936096 0.0946900696 0.0962090343 -0.0132327564 0.0670364797 0.112801351 0.0329531841 0.00913766772 -0.157800838 -0.0242684614 -0.062707752 -0.111074157 -0.0969263613 -0.0453660265 0.0788833573 -0.10100504 0.0351482034 0.00255969632 0.146189213 0.176710948 0.076782994 0.0267225392 -0.0470640622 0.0270443261 -0.0651312843 -0.0475523248 -0.0839515477 -0.0822535306 0.0876104087 -0.0853568986 0.0850623995 -0.0537006631 -0.00454986189 -0.115827605 0.076602146 -0.0653875545 -0.0943169966 0.0738318786 0.00882227719 -0.105202228 0.0500551313 -0.0072239181 -0.095633924 0.0686312243 -0.155843362 -0.10586188 -0.0515479743 -0.0627304092 -0.119748496 -0.055315733 -0.034655381 -0.011634198 -0.0673866794 0.0497374982 -0.0230727922 -0.117348522 -0.0596318021 0.124789402 -0.100009322 0.125387162 0.125093237 0.11934261 0.122768745 0.00161180296 0.0404978395 -0.0511606485 0.025636822 0.0928659737 -0.119610175 -0.0845185071 -0.0342509151 0.0703661814 -0.0939814001 -0.0344806798 -0.0912777558 0.00682032388 0.121527597 0.152524486 -0.013022732 0.0765541866 -0.0932440087 -0.0872863084 -0.0211912636 0.0815037265 -0.0279196408 0.0894725025 0.0345439613 0.00409509987 -0.0809944794 -0.1393179 -0.125571921 -0.00232244516 -0.149977431 -0.000756583293 -0.107811421 -0.13097401 -0.124444515 -0.016273234 -0.0980345458 -0.0679222867 0.145107448 -0.0677620098 0.11634396 -0.150404945 0.0630804896 -0.0887408033 0.0502750538 0.0362975895 -0.000291265926 -0.00586632686 -0.0945255011 -0.106234178 0.140956268 -0.0830846429 -0.0355807208 -0.0545149483 0.0505581349 -0.00131463038 0.158409923 -0.0294266306 -0.075270161 -0.0552593507 0.110549971 0.0572320521 -0.139074802 0.079300411 -0.13124457 -0.0520654507 0.0863010138 -0.0589499101 0.0994329005 -0.0107528744 0.133117393 0.135243297 -0.100229755 -0.064115867 0.117229715 -0.0909496993 -0.123401761 -0.118980557 0.0693805069 -0.0614001341 -0.103551611 0.0602181554 -0.0570658669 0.0473706871 -0.115162462 -0.00543242693 0.0720573142 -0.0638199747 0.163928419 0.15177232 -0.107249737 -0.150279045 -0.124889374 -0.0754014552 -0.117463201 0.0907788277 -0.137130409 -0.112758525 -0.0632348582 -0.085662134 0.074386403 -0.0294712894 -0.103990085 -0.0954368263 -0.116161741 0.139300272 0.109896317 0.0364004523 0.0969301388 -0.0921835527 0.0688580126 0.0143036414 0.135515511 -0.10856048 -0.128424734 0.00355436816 -0.0129383691 -0.0427853577 0.0744294003 0.120447546 0.0322268978 0.111017898 -0.148288384 -0.0120353373 -0.0698817894 0.123846047 0.0900507122 0.0200054049 -0.135218769 -0.0566676334 0.000346322719 0.138647377 0.133002952 0.0393482894 0.0430621244 0.123535052 -0.0048245755 0.0777058303 0.105987355 0.0330017395 -0.0667480379 -0.0552513823 0.119488135 -0.0444536582 -0.0245578699 0.10310775 0.135614321 -0.088050127 0.101776421 -0.106960148 -0.0866610706 0.0718072876 -0.126476645 -0.124185599 -0.0585252866 0.0773498043 0.0260625742 -0.0329737999 0.050648436 -0.0158751626 -0.11962828 0.119259298 0.074308984 0.138976827 -0.159304231 0.0172711946 0.164110437 0.100464553 0.10389293 -0.0240411293 -0.0973320231 -0.119676456 0.00236897776 -0.0644378364 0.0506922454 0.19321233 -0.000225052238 -0.0394313931 0.110896409 0.132451773 -0.0674725696 0.106891982 0.0839640722 0.00246000011 -0.128063992 -0.106095694 -0.0592876561 0.0409409404 0.0347136185 5.23093404e-05 -0.0714386553 -0.00371642876 -0.109261386 0.150267109 -0.0990683362 0.151943013 -0.0184463765 -0.0465980396 0.0613862872 0.136278436 -0.00558510423 0.100865357 0.101894312 -0.0244332775 -0.125516385 0.0207252149 0.101681627 0.145940647 0.0516951084 0.16129303 0.178174466 0.067602627 -0.0624158829 -0.0751179382 -0.0219047312 0.052100759 -0.0685305297 0.0354404449 0.145376444 0.18646054 0.178686082 -0.0324649215 0.186916694 0.0407279916 -0.130371153 -0.0193989351 0.0320602059 0.0507811755 -0.113682859 0.0327428654 0.00616077147 -0.0258325189 -0.143563882 -0.00310279964 0.185173586 -0.0143889384 -0.0377997085 0.102268487 0.193651006 -0.0199139044 0.0859818161 -0.12391866 -0.0357881412 0.117275149 -0.00614784472 0.157511353 -0.0342883319 0.0726206675 -0.140140399 -0.0890319422 -0.084323287 0.181992263 0.0513049066 0.0995599255 -0.0906688347 -0.103707798 0.189509571 0.0850646123 -0.160007775 -0.125715733 0.119538076 -0.119608335 0.0870004892 -0.0289871693 0.0428543352 0.0881076306 -0.0739037469 0.150500178 -0.0330894664 0.120570533 0.0367856883 0.0812487155 0.0561989732 0.0205095038 -0.0794103295 -0.149736494 0.0125511289 -0.117065132 -0.107071228 0.0454606973 0.0796560869 -0.101957068 0.147831231 -0.0325231701 0.102706663 -0.00391825195 0.164131463 0.113403954 -0.121139087 -0.124457628 0.00445246696 0.040741276 0.135801449 -0.00480739959 -0.078587763 0.0388003998 0.0373185351 -0.0382047556 0.123435661 0.0475043617 0.00739693642 0.0977098569 -0.0144506181 0.113023907 -0.134157673 0.115016133 0.0749712959 0.122992218 0.128705814 -0.0182231162 -0.0774768889 -0.100664325 0.0511561967 -0.0247491226 -0.0616593063 0.0696846992 -0.131600708 0.128312185 -0.0420887545 -0.0552831143 -0.132332042 0.0888988078 -0.0928973109 -0.0273143947 -0.00685594976 -0.0243961141 -0.0604439601 0.127762869 0.0312011689 0.0182463527 0.0971829295 -0.0156243443 -0.110507861 0.118185356 -0.0953080505 0.0230623633 -0.139256597 -0.0295829773 -0.00552763045 0.02986148 -0.0686590225 -0.092862606 -0.1297746 0.0351940989 -0.0635817125 -0.00291644037 -0.0894218385 -0.081991896 -0.0788865611 0.123435721 -0.07980977 0.127385929 0.0179787278 0.14100419 0.0584084392 -0.11898651 0.0776305497 -0.0203871056 -0.0342618525 -0.0426038876 0.0608503371 -0.0924751759 -0.110731475 -0.0939228088 0.0693204403 -0.0963335559 -0.136444792 -0.0746414661 -0.0768960863 -0.0328734815 0.121303841 0.0307305455 -0.0240531266 0.0461567193 -0.112294145 0.0924198776 0.0726142228 -0.0232265219 0.0855844915 -0.0904331505 0.00588195026 -0.0725407153 0.0315330774 0.0881674886 -0.0255857036 0.00970177352 -0.0484308302 0.0706667453 -0.0180258229 -0.0909893364 0.0912033021 -0.0541153103 0.118749335 -0.000519677997 -0.12067198 5.66840172e-05 -0.0196669474 -0.0159197301 -0.120875023 0.0988306701 -0.0475680456 -0.0561974943 0.0541714281 -0.138189748 -0.00213968754 0.0791497976 0.0153562725 0.102331981 -0.02512566 0.0891461223 -0.100474566 0.0814501047 -0.0774096027 -0.0491994061 -0.0873599797 0.125588104 0.194157138 0.021343857 0.153216049 0.153526738 0.0659974441 0.00557104684 0.00512425043 0.0593339801 -0.0341665149 0.1543639 -0.000350087881 -0.0057605654 -0.0569509007 0.0794611201 -0.0399132632 -0.053406354 -0.021354856 0.0188272204 0.161608189 -0.124593504 0.0364353582 0.0259574521 0.0545204356 0.123703361 0.0302464068 -0.180545017 -0.0880055279 0.0392708704 -0.0457140729 0.0489293262 -0.0629897118 0.0735282153 0.0903664082 -0.0868221596 0.0773085654 0.0447509512 -0.0666414499 0.10026215 0.0482167453 -0.0492483862 0.194690838 0.0770569816 -0.142567039 0.114170618 0.0906017646 -0.0648275763 -0.0698935315 0.0423483737 0.0252110269 0.0347312838 -0.0560424551 0.155089974 -0.0118696066 -0.154669881 0.122313514 -0.0593367517 -0.133725002 0.0684605017 -0.086332038 0.15027903 0.00808770955 0.0856792182 -0.103054002 0.144213781 0.0522497185 -0.012934139 -0.090749681 -0.111638978 -0.15779312 -0.120520085 0.00437208824 -0.0104053728 -0.0667165816 -0.126051918 -0.0548642427 -0.104130857 0.0505597442 -0.103951007 0.0535476543 0.126115173 -0.126277968 -0.0643455386 -0.0392601117 0.0434453227 0.0778148323 -0.0601691268 -0.0518789664 -0.104312316 0.0010818392 0.0459540337 0.0427468866 -0.053878624 0.139487013 0.0611597076 0.0600839928 0.11611075 -0.0964443162 0.0624526814 -0.120941721 0.132605001 0.0708762035 0.163541496 -0.00394374598 -0.166257143 -0.203483716 0.0616421737 0.116930254 -0.0280899294 0.0485812873 -0.219869539 0.00187383592 0.0102478564 0.108631112 0.063601993 0.0346559994 0.0576099493 0.129458979 0.0913215131 -0.0457242802 -0.128046185 0.0772113949 0.0461370759 -0.16218935 0.031650953 -0.0577221215 0.060324017 0.11425402 -0.0406978093 0.181004599 0.122253641 -0.0693835318 0.092224963 -0.00997300446 0.137522176 -0.100897603 -0.0125190523 0.0379933044 -0.0922655612 0.0624521673 -0.0938842148 -0.0446401648 -0.0100792432 0.153803915 0.114698537 -0.0192592535 0.0803508684 -0.0886118263 -0.0518604182 0.0633063093 -0.051035013 -0.110467285 0.0447706729 -0.00330133177 -0.0912512243 0.00899628736 0.165726572 -0.0454223789 0.101888083 -0.0568365306 -0.076063931 -0.0752097219 -0.0845429227 0.0309042297 0.0270464383 0.124918491 -0.0662075952 -0.108895019 0.139057159 0.140161321 -0.119610265 0.136644498 -0.0796718523 0.00887751393 -0.178879619 0.0901582688 0.138248637 0.0798882693 -0.0129555576 0.0835241526 0.112452 -0.0344961025 0.00462428341 -0.129168198 0.120342299 0.0168783814 0.0924949348 0.104666054 0.0520613231 -0.048391819 0.0304543953 -0.0920903161 -0.0853118896 0.0968189314 0.144676819 0.0184051823 0.129063278 0.158408046 -0.0152725829 -0.0893933401 -0.0740989223 -0.0408062041 0.000471571781 -0.126842275 0.0290431026 -0.00776752643 0.0354271829 0.137151405 0.0211606342 -0.075007841 -0.0753669515 0.046267733 -0.09437415 -0.137393638 0.113058127 -0.026870288 -0.0587519705 0.117264286 0.00866104662 0.0201778039 -0.0951031819 0.018844882 0.0590609238 -0.0709743276 0.0873017982 -0.105027102 0.0530570112 -0.133408979 0.0909369588 0.0397072695 -0.126006484 -0.0984181166 0.130100951 -0.0197343528 -0.14802596 -0.0323335156 0.0627914593 -0.115520857 -0.0452975444 -0.012140125 0.0625165701 0.12852025 0.137867913 -0.0958045647 0.0133615816 0.107896157 -0.0483738929 0.129055023 0.116109088 -0.077403754 -0.0633423328 -0.0807941109 -0.0901692063 0.135869384 0.115573078 0.0980086252 -0.0180905815 0.00574294198 0.0841204077 -0.114126891 -0.0070350226 0.12609975 -0.12341994 -0.0101188719 -0.0531954169 0.0926595107 -0.14045234 0.0385289043 -0.00951111317 -0.130858913 -0.0142143071 0.106903538 -0.108551912 -0.125400409 0.058905676 -0.117564946 -0.0387115702 -0.120323576 -0.00476152822 -0.117001377 0.0521154404 0.0897176862 -0.143047303 0.0822072178 -0.0936514139 -0.0156190991 0.0703094602 -0.109612264 0.10537225 0.10860981 0.0861182958 0.0131997541 0.0654514953 0.0550837517 0.0355030596 -0.127356902 0.0202751011 0.0479572453 0.0155448811 0.0376787409 -0.104008965 0.138826773 0.128562942 -0.0176690705 0.00616015308 0.0954742804 0.168244436 0.0313249305 -0.00562194875 -0.013728708 0.0743624717 -0.0685951263 -0.118008956 -0.0987153798 -0.0248185005 -0.140825942 0.0919594541 0.0747423917 -0.0905265957 0.129529849 -0.124772102 0.0051275813 0.0195230469 -0.146458313 -0.0511724278 -0.00252351165 -0.0710987002 0.164692074 0.0975965038 0.0976115763 -0.017378984 -0.0336411037 -0.00410315767 -0.111187756 0.0705890879 -0.0971891209 -0.170355156 -0.0228465442 -0.000340677885 -0.111434825 0.0483258702 0.0879553556 0.0596619062 0.0922146142 -0.0954236314 0.100458361 0.0672615394 -0.0278729852 0.0144202998 -0.0134725468 0.00500577223 -0.0834878609 0.0704662427 0.0280400775 -0.173883215 0.0107927518 0.145330369 -0.00825130939 0.0123181194 0.104186043 -0.00961343665 -0.0443730503 -0.12468195 -0.127869114 0.0712913722 0.0349406078 -0.0263537578 0.140321776 0.0573507696 0.0558672026 0.0422607958 0.0942431912 -0.134432197 -0.0805723518 -0.117681436 -0.0301251039 0.0112107592 0.0862491578 0.0813791007 0.15245752 -0.104849078 -0.1278539 -0.0234719608 -0.0375554711 -0.00391989155 0.0951362252 -0.124997646 0.101080559 -0.0823273435 0.114658192 0.0812243894 0.0979240239 0.0534090586 0.11992234 -0.0614060089 0.0227183215 -0.0045140041 -0.0936731175 0.164146602 0.109354012 0.0560285486 0.0972351655 0.0452851653 -0.0221107267 -0.143891752 -0.0367256775 -0.100730948 -0.0520177893 -0.0628575087 -0.0123458058 -0.144427627 -0.0617014915 -0.0792786926 0.105423264 0.118998893 0.065015249 0.103419602 0.078821741 0.11923746 -0.0886161029 0.0995480195 -0.00256725191 0.00349370553 -0.0529144071 -0.0110389693 0.137066856 -0.177615538 -0.0219782609 0.0117756883 -0.0622062907 -0.031752415 0.109670192 0.00176507875 -0.128339231 0.0967004448 -0.0175355524 -0.0179675464 0.00220880075 0.0363271013 -0.0472624972 0.0390423499 0.0431711748 -0.0524883382 0.137009606 0.032474678 0.0447325036 -0.109758742 0.0141501743 -0.0879177302 0.0324584693 0.0984169021 0.0776358694 -0.0186196659 -0.0178685524 0.136504993 -0.130911753 0.120253332 -0.00131494773 0.0500290655 -0.0261307955 -0.115568407 0.0599436909 0.0793855786 0.013578156 0.164593741 -0.0608182624 0.00377323222 0.144239753 -0.0365212336 -0.00670494884 0.113193937 -0.0333096795 -0.0992462263 -0.0314201638 0.121462323 -0.00150117278 0.152934536 -0.0595164932 0.155643508 -0.0155162774 0.113217972 0.0924211666 -0.0591561124 0.119596377 -0.148361132 0.0774345249 -0.0543995574 -0.0430051051 -0.0587892085 -0.0859734565 0.0093586091 -0.139499143 0.144523352 0.0625231117 -0.0972201005 -0.0138707748 0.105667144 0.0957431421 -0.0930925608 0.114664152 0.0465527698 -0.0664715692 0.103240147 0.116609365 -0.192610845 -0.152009219 -0.0707181469 -0.123133332 -0.0165981147 -0.119874239 -0.123395704 -0.134802729 -0.0183263794 -0.00162356615 0.0698149651 -0.0478172004 -0.0292479843 0.0124004614 0.0228632949 0.125499591 -0.128627047 -0.0394836068 -0.0904139578 0.0717693791 -0.0241678786 -0.0282474756 0.066885747 0.172793537 -0.0325833708 0.134693041 -0.0837737098 0.028767975 0.149519458 0.10379082 0.134075478 -0.00300905108 -0.11726708 0.00835976377 0.0115354434 -0.104070403 0.0869700015 0.0493195616 0.173674643 0.136860088 0.133943602 0.0349466503 0.0715380386 0.0455492027 -0.0717399567 0.0613892823 -0.071349673 0.103834003 -0.0662872195 0.110759154 -0.0086634336 0.11282818 -0.0787081271 0.0525955185 -0.115244508 0.017306909 -0.148093 0.11725767 -0.0918413401 -0.0415653959 0.0675327182 -0.142755657 0.122713141 -0.0754952356 0.119042411 0.120726988 0.0600856133 0.0850800574 0.042756021 -0.0042983531 -0.0966215879 -0.109561965 -0.121179365 -0.121256597 -0.0916649252 -0.0139565729 0.0183815174 0.010678432 -0.0070024007 -0.10860841 0.123465493 -0.0865222588 0.0993401259 0.0353338942 -0.122070476 -0.00103206933 -0.147107095 -0.0779001042 -0.0985540375 -0.0854975283 0.0685408339 -0.065476723 -0.0296396669 0.0578391589 0.0765029639 -0.0989598259 -0.0449470505 0.0685051531 0.0537158102 0.0139079243 0.0583296567 -0.0594620258 0.0189818889 -0.0988531634 -0.00361028314 -0.0409312546 -0.0480199158 -0.0213640556 -0.126316875 -0.118182555 -0.134643987 0.0825530589 -0.0812493861 -0.00928412378 0.0827149451 0.014478147 -0.124131575 -0.13848491 0.113321409 -0.0203420967 0.132658973 -0.0168279931 0.0254632235 -0.0577695444 -0.125243694 0.0185761452 0.125729159 -0.0470989868 0.137575284 0.00578674674 0.125307932 0.13194342 -0.110660031 -0.108901128 0.0635878146 0.139960542 -0.0849670395 0.0299605131 -0.0711956099 0.0486410856 -0.0859787986 -0.125518829 -0.0788395777 0.113387808 0.172473475 0.125997916 -0.03521844 0.00814832374 -0.0874923393 0.0611119755 -0.103455245 0.0358751714 0.0404794477 0.116629399 0.107773907 0.0883051604 0.155530751 0.0984854996 -0.0652066395 0.0862129629 0.0566241778 0.0940288976 -0.0396610685 -0.054708723 -0.0403634794 0.0204263702 -0.0830316693 0.175091997 0.0323943421 -0.0326925032 0.187265396 -0.0557819456 -0.149056599 0.161892369 -0.0292865653 -0.106854089 0.113595374 -0.0478481576 0.0871280357 -0.0546426699 0.109687231 -0.0883881673 0.152132541 0.0782102272 -0.0743416622 -0.0343025103 0.151285902 0.0897574127 0.0613243282 -0.0363124833 -0.0416660458 0.0365589708 0.00544850901 0.0712229908 -0.0174090713 -0.00367248501 0.100005753 -0.0259860251 0.0931316465 0.0641765073 -0.0520533472 -0.000773876556 0.113493264 -0.0614270456 -0.126095414 0.11269249 0.00773805752 0.0544681847 -0.0156176239 0.0190839916 0.00293012918 0.0550577864 0.0748387203 -0.0842898712 -0.0772737563 -0.157246128 0.00333786267 0.0245691296 -0.0825911462 0.0128215477 -0.0858282223 -0.132421732 -0.0927961841 0.144973248 0.0598813556 -0.0359893106 0.000982378377 0.100832321 -0.106284365 0.0759487748 0.124556273 0.105182365 0.00680022268 0.0535307154 -0.0955361351 0.0177737772 0.0095509449 -0.00627064146 0.033846356 0.168117985 0.0789029747 0.152832642 0.175960913 -0.00220050896 -0.0862830505 0.106605045 -0.0212400369 -0.0751578733 -0.0164428316 0.0700538829 -0.0992731154 0.129148081 -0.0179386213 0.0452752709 -0.0637316629 0.0592104197 -0.128685504 -0.0348353833 -0.087284103 -0.0663602129 -0.0031849267 -0.000142063553 -0.0840348825 0.0924766883 0.0673238337 0.167500377 -0.0353617668 -0.0333009921 -0.112182476 -0.04946943 0.134662643 0.139826789 0.156980231 0.0751472116 0.0695004016 -0.0762207955 0.156616062 -0.0323073752 -0.074322626 -0.0840249732 -0.0568689369 0.0383799225 0.12136513 0.0337613001 0.150748312 -0.0912267268 -0.0950863957 -0.0684669167 0.0555381961 0.00922425464 0.0324769616 0.154439181 0.00271727936 0.0754308924 -0.137119815 -0.0415384322 0.107144803 -0.125383273 0.053086549 -0.0475495011 -0.00223423541 -0.128673628 -0.0492692962 -0.0113832206 -0.0116038918 0.104703367 0.0881138444 0.137240604 -0.005506441 0.00293928385 0.0956130475 -0.0784134567 -0.021586366 -0.0949726552 0.109912977 0.0546887219 0.0080575645 -0.0473530963 0.101009175 -0.0167142078 0.0431778133 -0.0919825733 0.0523650348 -0.136390731 -0.0124782622 -0.0131750405 0.0803216249 -0.135611996 -0.139048174 0.121725962 0.0170050114 -0.0948430598 0.126605704 -0.12657319 0.0280110091 0.0484481603 -0.101917908 -0.131059453 -0.00414025784 -0.000507161021 -0.0291253105 -0.0539416969 -0.132729664 0.118548885 -0.119953021 -0.106853649 -0.0724168047 0.0265188962 -0.0701930001 -0.0523263291 0.0513340086 0.0227334052 0.134236738 0.00679840147 -0.0369084999 0.101770133 -0.0643866509 -0.0391958281 -0.11996039 0.0662431717 -0.0149399638 0.113427982 0.00900928676 -0.115386441 -0.0923655182 -0.125217244 -0.105257966 -0.127515703 -0.115915604 -0.117300279 -0.0824699104 -0.00401163101 -0.0243823603 -0.071768783 -0.0823556334 -0.116988376 -0.0874817073 -0.0767723396 0.0525470376 0.0697348416 0.0105717331 0.116433874 0.119623169 -0.0428111032 0.0553773344 -0.107416034 0.131372139 -0.0292251855 -0.064143002 -0.129900947 0.0492804348 -0.0171842128 -0.13053751 0.0456082523 +tensor_6bias 50 +-0.130149469 -0.0166715905 0.0930550545 -0.00245699566 -0.106651746 0.0490312241 -0.022929607 0.0280555151 -0.149067715 0.102508798 0.00938428845 0.150981218 -0.114400074 -0.0645027235 0.016750779 -0.0841871202 -0.140838712 0.0354661271 -0.127782494 -0.107863024 0.0691528246 0.0463019311 0.0961098671 0.0680775866 -0.118705533 -0.0768498629 0.17632094 0.119572431 -0.0184698328 0.0619278774 0.0973391309 0.0654409006 0.0840726122 -0.0982169956 -0.0271483194 0.119829573 -0.0558238514 0.0868603587 0.109693505 -0.126328036 0.169493452 -0.153565153 -0.0748259053 0.0136530614 0.00735191396 0.121958517 0.00247201324 -0.0304538812 -0.0688641742 0.0533529967 +tensor_18weight 500 +0.116833255 -0.0357756764 -0.131794453 -0.0954782292 -0.0199575797 -0.0554031618 0.0123106642 -0.0748193115 -0.138343185 -0.0409799181 0.00820469763 -0.000622143503 0.103817098 0.133209154 0.108685024 -0.0963258296 0.133699819 -0.0743798465 0.105768584 0.101406492 -0.0850842893 0.088313885 -0.0203158874 0.0809838101 -0.00829087198 -0.00621365244 0.12420509 -0.0834524781 0.013258785 0.000458776922 -0.0094107436 0.121913455 -0.112981685 -0.0701991916 -0.0657723844 -0.0241640378 -0.111835979 0.0305915046 0.0958627611 -0.123314679 -0.0531556047 0.0353454947 0.122457325 -0.188562363 -0.0805713162 -0.0883217677 0.137407482 -0.0494341888 0.08294186 0.02592903 -0.102925614 0.112401806 -0.0666541457 -0.0743903071 -0.124930732 0.00989535823 0.0654063374 0.0936208814 0.00587140396 -0.133220345 -0.0864736214 0.129711837 0.00181314978 -0.11009489 -0.142534941 0.112804607 0.0828809589 -0.0675114542 -0.0837594494 0.080936946 -0.0578315705 0.000171717635 -0.115505785 -0.00781203434 -0.044651553 0.0236158818 -0.0261275116 0.0364638008 -0.0263226833 -0.0818104967 -0.0312857106 0.0161777474 0.0981943533 -0.142439932 0.130368665 -0.148819655 -0.0904635265 0.142308936 -0.0558655635 0.119650826 -0.123948567 0.071270369 -0.0475144461 0.0499968566 0.0238669682 0.0490803383 0.102086172 0.0440850668 -0.0912592411 -0.124338679 0.0205278974 0.0803509951 -0.127337903 0.0695622861 -0.0565674454 -0.0611764155 0.0246511605 0.147374704 0.0300100464 0.031006122 0.0128832478 -0.134186521 -0.0788531825 0.0231011659 -0.077104196 -0.0899467021 0.083257556 -0.016190676 -0.15599066 0.0265589394 0.0970405489 -0.101687469 -0.119606331 0.101642758 0.0926929563 0.0224016327 -0.118740149 -0.0145147676 -0.121801361 0.0961833745 -0.0375055596 -0.0604088642 -0.0904211402 0.0308890697 0.0637984425 0.0605207421 0.0125890784 0.101664178 0.0726759285 0.0591030382 0.0281341467 0.0179437492 -0.0888262913 0.0439237058 0.0959485695 0.0477892607 0.138858929 -0.0815726668 0.0842805654 -0.0488859788 -0.127596661 0.0637440085 0.0945658982 0.0379101187 -0.133902624 -0.0156785361 0.153879091 -0.0837965533 0.112203002 -0.109656185 -0.121323109 -0.0539827608 0.157840356 -0.140984669 0.103252746 0.0117826462 0.0724756718 0.1193185 -0.0168017652 0.105931647 -0.15716891 -0.0412402935 -0.182400733 0.175201252 0.0334252864 0.190847382 -0.0840521902 -0.102074817 0.0166378226 -0.070603177 -0.0926473141 0.0471240357 -0.0813179836 -0.0973169506 0.049886927 0.107353233 0.0245145429 -0.122061022 0.0877110511 0.0779518932 0.181554541 0.00650133053 0.148282856 -0.167027533 0.0817222595 0.166063353 -0.071306996 0.0401937515 -0.0894560814 0.0982646197 -0.0373243652 0.0289797336 0.0392166823 -0.108381942 0.119098619 -0.0920399055 -0.0729553699 -0.124035373 -0.0866058767 0.132396668 -0.0131686293 -0.0622706711 -0.115531176 -0.129241109 0.0608382747 0.0826291889 -0.0870855898 -0.153687358 -0.00150228257 0.114700183 0.093991451 0.0560563877 -0.0242470428 -0.03554409 -0.0501358062 0.0884645432 -0.0462445691 -0.160832793 -0.0499034822 -0.0424251109 0.15960142 0.00967518892 0.013454861 0.0476650223 -0.0562032312 0.00298618712 0.113581337 0.0738084391 -0.137588665 -0.0807389989 -0.0702914745 0.0433799401 0.130559713 -0.0844176263 0.00879538152 0.0190126356 0.0929833725 -0.0415338278 -0.0416321158 0.0581194386 -0.106194869 0.0854856074 0.105097309 0.0867655277 -0.110841736 0.13861914 0.00394579815 0.0424983464 -0.0553477593 0.0576893315 -0.0487310477 0.00869362801 -0.0946266651 0.0686361194 0.0094735641 -0.0982639343 0.0865717679 -0.0489508957 -0.0480820388 0.0424392 -0.0348532163 -0.145720199 -0.0116074253 -0.0465409979 0.0409410596 0.137870952 -0.141640723 0.133919835 0.0830813125 -0.0514443479 -0.144528806 -0.0606587119 0.0772298053 -0.000756907742 0.0295330584 -0.00934765488 0.0772825181 -0.120189674 -0.0941238254 0.110182583 0.020738909 -0.110578194 -0.170464888 0.135421559 0.0186651032 0.157670006 0.106957033 0.113828443 0.032888636 -0.085790351 -0.102287576 0.10205999 0.11301367 -0.0841406286 0.0869232267 -0.0806331262 -0.0432705954 0.0882454589 -0.127744198 0.0729970783 0.183013499 0.0513928235 -0.160567686 -0.0651886687 0.0733133778 -0.140486658 -0.119877644 -0.0233747195 0.0682742521 -0.0181181505 0.0523737594 -0.114034481 0.178959042 -0.108594783 0.0531802028 0.00544614438 0.122413933 -0.107881032 0.00437956769 -0.0753047615 -0.0751520917 -0.0669195428 0.140085652 0.107123025 0.0215605646 -0.0108890682 0.126112461 -0.0248530898 0.0944449008 -0.11579188 0.0103179337 -0.147988439 0.0894878879 0.155256197 0.0593105108 0.0212335344 0.108353369 -0.0329438969 0.0173103362 0.113536589 -0.0336008444 -0.0386447273 0.0362053365 0.0621379763 -0.0990284234 0.00793749839 -0.0617283881 -0.0743452683 0.179765821 0.114959568 0.136922091 -0.1003832 -0.0692859069 -0.139016584 -0.13847138 -0.0461068004 -0.0357935503 -0.175952822 -0.0971040502 0.0884984359 0.00901553407 0.173032984 -0.0787108466 0.0656532124 -0.0365875959 -0.0772555619 -0.101957574 0.177314684 -0.184264794 0.0541368276 -0.173181415 -0.0607216991 0.0584572963 -0.0959036872 -0.0192932636 -0.140759885 -0.0871745721 -0.0414703935 -0.128599197 -0.134148136 -0.0330905467 0.086126022 0.0754621923 -0.0512827821 0.0647300407 -0.0423469543 0.103672571 -0.10058222 -0.0269276202 -0.0845367238 0.148252413 -0.127518728 0.120773628 0.0699376613 0.085009709 -0.0772422925 -0.00348520023 -0.0487586632 0.0242007188 0.0718890578 0.0988076255 0.0587318242 0.0960896015 -0.0790796131 0.0568241999 0.0869796574 0.0882544219 -0.0609133728 -0.193863526 0.117342524 -0.0339369737 0.115510337 0.0176041406 -0.134604976 -0.0149109662 -0.0460692905 -0.0518316031 0.154208392 0.116131343 0.000284732843 -0.124516778 0.00545642432 -0.1934973 -0.0553306863 -0.114772283 0.0383958407 0.065391317 0.0921707079 -0.112537354 0.138822451 -0.112784393 0.163916111 0.141898572 0.144761667 0.043000266 -0.0156551208 -0.13070862 0.0155345816 0.0829150677 -0.15498811 0.0502647795 0.106921747 -0.0415367335 -0.101776689 -0.0270393789 -0.150163233 -0.00523975072 0.12342082 0.030272549 -0.0634030774 0.110089242 0.078154169 0.141164288 -0.0665735304 -0.133532166 0.0205077082 -0.0799736828 0.00293931179 -0.0775768757 -0.0609018579 0.104522519 -0.0473734476 -0.000608845323 -0.139011964 -0.12750718 -0.113618098 0.0852759406 0.0522349291 -0.011812062 0.103033014 0.116778359 -0.0851101875 0.0191278923 -0.138369411 -0.0144041777 -0.033769384 0.0952177495 -0.184691101 0.00995114446 0.0508734547 -0.161694378 0.103683837 0.106144048 -0.0914210454 -0.10774231 -0.0468717627 0.0950566381 0.0999391824 -0.0267179832 0.114936009 +tensor_0weight 5000 +-0.0508145355 0.0380447619 0.063999176 0.0305916369 0.0178726781 -0.0604492612 -0.0922251716 -0.0409420505 -0.0806181803 0.0253173634 0.0200065672 0.025728466 -0.0917467773 0.103931934 -0.0364619642 0.0943374634 -0.00828016549 -0.00586269284 0.0867723376 -0.0814111456 0.031561438 0.0127995471 -0.0175799523 -0.0762633756 0.0174684227 0.0652227029 -0.0757252946 -0.0175922979 -0.0378516056 -0.0103352945 0.0671745986 -0.00129433826 -0.0402865373 -0.0769478306 0.136584729 -0.00320164161 0.0293592662 -0.131899893 -0.00832800474 -0.0754740536 0.0523712561 0.00832175463 0.0861478224 -0.0513150692 0.0660690591 0.0819229931 -0.061753273 -0.0584596395 0.00335742347 0.0352997482 -0.0355550982 -0.0571611226 0.0267518349 0.00366023136 0.0501797497 -0.0911384001 -0.0553132854 -0.0707477331 -0.00369775807 0.0324063897 0.0952493548 -0.0701338053 0.0869731754 -0.0122592403 0.0579397976 0.0811071023 0.0882389173 0.00392138492 -0.0282733813 -0.07796707 -0.0237656292 -0.0976018459 -0.0302748028 0.0959793851 -0.0774898157 0.0740917549 0.0638118461 0.078004308 0.0606729016 0.00807148404 0.122843958 -0.0412954316 -0.00570290349 0.048803661 -0.0925534815 0.0642913654 0.0318598822 0.0163798314 -0.128930375 0.10925965 0.0126452744 0.0192803536 0.0565545857 0.0464702807 0.0887314975 -0.0057315547 -0.0403685123 -0.0338817462 -0.048792094 -0.0464581065 -0.0035396677 0.00428326242 -0.00884059165 -0.0119973514 -0.000725717517 -0.00789349712 0.118645795 -0.0580181517 0.0644906759 0.00912526064 0.0169304255 0.124032162 -0.0209737495 -0.0644200072 -0.00610294472 0.0443046205 -0.111826301 0.0924093947 -0.0439966656 -0.0174338557 0.0350687169 0.00473201321 0.0256146453 -0.0102232145 -0.0740443543 -0.0562746376 -0.014960424 0.0814036652 -0.116801761 0.100059807 -0.103689127 -0.0274910927 0.0271945633 0.0108639002 0.10785304 0.119615123 -0.0933286771 -0.104783587 0.0308681801 -0.0486758314 -0.109117366 0.0569621176 -0.0481622331 0.00365207065 -0.0219343584 -0.0347234085 -0.0862182751 -0.0892119408 -0.0102323415 0.0456935875 -0.0520036221 0.0333958827 0.0777817443 -0.0517655611 -0.0659507513 -0.0258211251 0.0767518133 -0.0409224853 0.0908992663 -0.02203927 -0.0486994721 -0.0106065404 0.0736719891 -0.0421070047 0.00211916398 -0.0954323933 -0.0184283517 0.0945980549 0.0464413576 0.0754498392 -0.037175186 -0.00513185887 -0.0806778446 0.0640999004 0.0459177829 -0.0637460798 0.050385993 0.0309800953 -0.0565693192 0.123325884 -0.0541292951 -0.103475936 0.131021678 0.0500673242 -0.0464580432 -0.0415508747 0.0403000787 0.0370427035 -0.0539194234 0.115986116 -0.0146925198 0.0295080431 -0.0609974898 -0.0897742435 0.0866930038 -0.0403378308 0.0923621878 0.0294523649 0.0361655056 0.0205234103 0.120485581 0.0700325444 -0.0497162156 0.043903362 -0.123235166 -0.00563619565 0.0633756816 0.0214825068 0.0333110169 0.0104085831 0.00706916722 0.0849041864 0.110511042 -0.0315861739 0.0258784778 0.0600173473 0.0242875703 -0.0698327497 -0.0123227434 0.014183823 0.0316864885 -0.0905837119 -0.166954413 0.0110382149 -0.0523379371 0.113217518 -0.0749479383 -0.0839105621 0.06123152 -0.123284116 0.00467087328 -0.0400639065 -0.139171645 -0.0208464172 0.0824865252 0.0881877691 -0.0531907566 -0.0462760702 -0.110711597 0.0575970635 0.0207520071 -0.0592331663 -0.0876662433 0.0696238056 0.0587312393 -0.146805972 0.0465192385 -0.0876265243 0.0274952278 -0.0920811594 0.0425466485 0.138803825 -0.11095649 0.035078045 0.0631543472 0.0814108402 -0.0799154192 0.0390180871 0.076176472 0.0414959937 -0.0411920138 -0.013391098 0.00715481211 0.0667355806 0.0209038183 -0.0535365157 -0.000487437297 0.0508622043 -0.0740626454 -0.0971553922 -0.00739993062 -0.11939621 0.0771144629 -0.0776564106 0.00858938135 -0.103641413 0.0731763914 0.00908944476 0.0822138935 -0.0353183523 0.0420758463 0.00362044154 0.107373729 0.0974787101 -0.101556815 0.0841847733 0.0912442133 0.125659168 0.0618594773 -0.0642373115 0.0193936136 0.101438187 0.0530573241 0.0676667318 -0.00218354817 0.0432167687 -0.0871621072 -0.0426511392 0.0288807489 0.0790897906 0.0490392447 -0.0205203649 -0.0993364006 0.00468417443 -0.0547306687 -0.0277090929 0.00697927317 -0.0244437791 -0.0157332867 0.106168769 -0.0359168798 0.114783011 0.152111039 -0.0253520384 -0.0015796381 0.03375398 -0.104952097 0.0092763612 0.101215295 -0.00308345142 -0.0368209258 -0.0473044775 -0.00817228947 -0.109114319 0.105732635 0.0244474001 -0.0146100083 0.0529635809 -0.00840851665 -0.0632252246 -0.0520593971 -0.00865435693 0.0344991274 -5.80968299e-06 -0.133371904 -0.151106805 0.0796020627 -0.00727936905 0.0341606252 -0.0332682915 -0.121840335 -0.152285203 -0.0688880011 0.0230131447 0.000283442176 0.0609894954 -0.004379577 0.0477737971 0.044799611 -0.132041544 -0.0921159685 0.0794112161 0.0724173859 0.0694021881 -0.0325237289 -0.0596630126 -0.128212959 0.0867897272 0.0183652658 0.067165792 -0.0221667513 -0.0792030767 0.00673970953 0.0961405337 0.11915601 0.0162419658 -0.0958381593 -0.0221719481 0.066273272 0.0103854984 0.0839003772 -0.0880922079 0.0691054389 -0.0436538383 -0.0678017363 -0.0862348899 -0.0580505431 0.0340274572 -0.0189464837 -0.0844241232 0.077873528 0.07532157 0.0911468565 0.130459666 0.0642754659 0.103514485 -0.0523621738 0.0538226627 -0.00427193614 -0.0198291782 0.0464040674 -0.0794681758 -0.0358173288 -0.0710784718 0.108343065 -0.0409613326 0.0320960544 0.053875234 0.00935616158 0.0279227011 -0.0595730767 -0.0895934626 -0.054435689 0.0687097013 -0.0623276383 -0.0781896859 0.0710855275 -0.0379823111 0.0614629425 0.107129268 -0.0969881415 -0.116216652 0.104508013 -0.0730313659 -0.0942338929 -0.124592021 -0.0121723814 0.0757561401 0.00725453952 0.027494695 -0.0790883899 -0.0104121519 -0.0122909518 0.0885993391 0.00961995777 -0.0863305554 0.0516466871 -0.00846583862 -0.137650937 0.053744074 0.0191885531 0.099622637 0.119871758 -0.0234789476 -0.0225552637 -0.0628033355 -0.061706692 0.00870011281 0.0219527185 -0.113005184 0.0864791349 -0.0586110726 -0.0858683884 0.0617091358 -0.0387163647 0.0250992496 0.0188102666 -0.0987309664 0.0387692712 -0.0278170835 -0.0702976808 -0.036741849 0.0252645276 0.0743944049 0.0373597182 -0.0650147647 0.0886150151 -0.0378745385 -0.0721595287 -0.100263052 -0.024431048 -0.00138329086 -0.0156793948 -0.108034611 0.00560034066 -0.10105747 0.10377124 0.108116172 0.106484957 -0.00357731106 -0.103540003 -0.162499279 -0.0292240772 -0.13454926 -0.0578239672 0.0473558456 -0.0877546594 0.0137864761 -0.0327536836 0.0507721342 0.0252886489 -0.092969656 -0.046330668 0.0129592251 0.0318424702 -0.0836347714 -0.00133580307 0.0577662215 -0.130686596 0.0925165117 -0.0197680425 -0.0594279207 0.081254214 -0.025833251 0.106694445 0.103731573 0.0476561114 -0.0746863931 0.0867198333 0.0718293041 -0.0795527846 0.0907836407 -0.0875569582 -0.0436345451 0.0336315818 -0.101902887 -0.112922281 0.0268265437 -0.0239662174 0.108922079 0.041044455 -0.00692772307 0.0482088998 -0.0167634431 -0.0713200569 -0.131396279 -0.0818632841 -0.0646765679 -0.00467614038 -0.068184495 0.0581986308 -0.12290591 0.0856338814 0.0330237187 -0.113662779 0.0768672228 -0.0640005991 -0.102779485 -0.0699071512 0.109070554 0.0373121388 0.00894282851 0.0210740287 0.102456108 0.00209105411 0.0643166751 -0.0748509914 0.0103702946 -0.00475171115 -0.124941736 0.0627204254 -0.110363327 -0.0701798648 -0.00204091449 -0.0581695102 0.0710774362 0.0637491271 -0.112383977 -0.0604935288 -0.0444468074 -0.0884831399 0.0787647441 -0.057589367 -0.0092884656 -0.0068281414 -6.82513783e-05 -0.00189695833 0.0291572809 0.0887888893 -0.10821224 -0.0333320834 -0.0257991888 0.0457047522 0.0474029109 -0.0698928088 -0.12633343 -0.0517579019 0.0700997636 -0.0429069959 0.0199789684 -0.0108658681 0.00652803527 -0.00151343702 -0.0620038249 0.0806239918 -0.0406728946 -0.0541682765 -0.0738097504 0.106633566 0.118564427 -0.0846382231 -0.0406942107 -0.0214116126 0.021105893 0.00434125355 -0.0575985499 -0.0204750691 -0.0223995987 -0.108478487 -0.0789667591 0.00276051858 -0.0364289954 0.0240258034 -0.00772039779 0.0677978322 0.0111008026 -0.0301737618 0.129933059 -0.0297325328 -0.121423602 -0.00256420486 -0.0767344758 -0.0345042236 0.0232742243 0.0518034101 0.0377323851 -0.0785427988 0.0944864005 0.0168189276 -0.0450433195 -0.0179200061 -0.0332794897 -0.115497865 -0.079460144 0.0748219565 -0.0902453661 0.0155278947 -0.0175510496 0.095810093 0.081910409 0.0412754081 -0.124072641 -0.0311682243 0.0492392965 -0.0202937964 0.0275281016 0.0286301002 -0.0205335319 -0.06336198 -0.00144966797 -0.0174041037 -0.116765253 -0.0784229636 0.0826164782 0.0420248657 -0.0860279575 0.0445345417 0.0170288365 -0.0538485646 0.0218434893 -0.126645058 -0.081135571 0.100772187 0.074350059 0.0520832427 0.0342816785 -0.0332369693 -0.00494507421 0.0594232231 0.0195060819 -0.0653662607 -0.0566894747 -0.049552016 0.0946275666 0.0667197555 -0.0115687326 0.0609822571 -0.0733285174 -0.00757924188 0.128872409 0.131651253 0.0883550048 0.00453105802 -0.0755265802 -0.0273298975 -0.075551331 0.0423105136 0.103586905 -0.103956595 -0.0853345916 -0.0501238741 0.0979430974 0.0415611826 0.0830030888 0.026601227 0.0730280057 -0.0635615513 0.0683744699 -0.0418914109 -0.0580942109 -0.0672050193 0.000723240606 0.0774079859 -0.0247790851 0.0417027809 0.0464081317 0.0150757832 0.0990941375 -0.0262722876 0.0383368991 -0.0866433606 0.053820353 0.000491182785 0.0509168692 0.0996452123 -0.109139279 -0.0142310057 -0.0809439868 0.0759590417 -0.0160349142 0.0490121357 -0.096037291 0.0512820296 0.0240419395 -0.0778301433 -0.00461494876 0.0313165486 -0.0526363626 0.0636812896 0.0332127437 -0.0893361941 0.107702576 0.0753764287 -0.134968281 0.154497638 -0.0106210969 0.0807469338 0.0397658274 -0.0412878655 0.0725319758 -0.075096637 0.0352239423 -0.156253964 -0.105903931 0.00186598103 0.0204177406 0.0137510747 -0.0850754306 -0.0996614769 -0.137975514 -0.0964332893 -0.0970748141 0.0658250079 -0.0284603387 -0.0586091056 0.0547327399 -0.0909201056 0.0791378096 -0.135371462 0.0970040932 -0.0691698939 -0.0478290841 -0.091066964 -0.10993892 -0.0587934963 -0.149786964 -0.0152382096 0.104548037 -0.0258558169 -0.144351274 -0.0335272104 0.0226420816 -0.0596394576 -0.0499719083 -0.0401447415 -0.137909144 -0.0354104489 0.0517158546 -0.00912801269 0.100223139 0.0372407772 -0.0557585917 -0.00336286239 0.0683526322 -0.040298298 -0.00263097975 -0.0374882258 0.0522439405 0.0507735275 0.105002061 0.0763192996 -0.0607291535 0.0252055712 -0.00846379343 -0.0764852315 -0.0580886006 0.0643623322 0.0687340647 -0.0175391026 0.0497902818 -0.0412122346 -0.0626358017 -0.0582311451 0.126354679 -0.118473426 0.151346073 0.0104045104 0.0904658511 0.0403522402 0.00989431608 -0.104035281 0.0668206066 -0.0350457989 0.0594084747 -0.0234730225 0.0567279682 0.0705103427 0.0155637255 0.00617892295 0.0591375902 0.10290321 0.0125923716 0.0783741623 0.00137256691 -0.00307283737 0.0503848121 0.10381522 0.118850879 0.128660917 -0.053519316 0.0977203473 -0.0134721575 -0.0389264151 -0.00525255827 0.0452782214 -0.0551993214 -0.10694126 0.027695125 0.0864779726 0.0454558991 -0.0506804623 -0.0287189651 -0.0546144284 0.120786496 -0.0527668484 -0.0774859414 0.102176331 0.0673900619 -0.0448943712 -0.0719371215 -0.0406077392 0.0517419763 -0.133232012 -0.0570902154 -0.09013246 -0.0748804808 0.00943455193 0.0882416517 0.000705939427 0.0691983029 -0.0305666197 0.0502307639 0.0774589181 0.0290872231 -0.103126198 0.0543247163 0.0888695046 -0.0432999581 -0.0238669831 -0.0651162937 0.0898748636 -0.0334561318 -0.0923917145 0.00535089429 0.0831253678 -0.032534346 -0.103109762 0.0489915684 -0.0154016791 -0.0483072698 0.0992657989 -0.0456443615 0.0638154149 -0.00041857746 0.0412595235 -0.0256175622 -0.0011343424 0.0302553996 -0.0492172204 0.0441855341 0.0358452648 -0.125684917 0.0641204044 -0.101313218 0.0406814888 0.0231520366 0.00894289184 -0.0159130525 -0.0403623842 -0.0126857739 -0.0646654069 -0.0864315107 0.0479207449 0.0227875356 0.0891341716 0.0144964764 -0.128592268 -0.0647967756 -0.0491824746 -0.122899771 0.0843127072 -0.0399818346 -0.0702486336 0.0469990969 0.0585947372 0.00991726387 -0.0545531549 0.121398546 0.0267390348 -0.0134512298 0.0329682231 -0.0672333017 -0.0224784035 -0.00435023708 -0.0271258652 0.0712630972 -0.0160659477 0.0995363668 -0.0256949402 0.103435107 0.109910071 0.00650324021 -0.0404900536 -0.0908767134 0.0118982857 0.00520248339 0.0329482853 0.0144852586 -0.0797013938 -0.0785156786 -0.114622436 -0.0149816191 -0.0634922013 -0.0747183189 0.0377447829 0.00633793836 0.0832202658 -0.0870476142 -0.0112469308 0.051385209 0.00177763787 0.0805689245 0.0667984635 0.119763464 -0.0189604852 -0.0689202473 -0.00829955377 0.0841114894 -0.0166632887 -0.101568498 0.0870780572 0.0787321255 -0.101076506 -0.0728867874 -0.0815497339 0.059538722 0.0476107113 -0.0611895993 -0.055862911 -0.00502554746 0.0184646137 -0.0100589432 -0.141160175 0.0608552545 0.0207750183 -0.0828769058 -0.0782217011 -0.0249421597 0.0649304986 -0.0759224221 0.0226793531 0.0345480256 0.101637982 -0.0291147213 -0.020399509 -0.0961149037 0.0607593879 -0.0901033953 -0.00980376825 0.0093408674 0.0903950557 -0.0326510593 -0.0616331063 -0.0332476608 -0.0641225353 -0.0496507026 -0.058669664 0.117607869 -0.0409576073 -0.00356686814 -0.105142437 0.0766613707 0.0395114012 0.0188095663 0.0634850636 -0.0737257972 -0.0844153538 0.118897498 -0.000630921393 0.080352664 0.00662139896 -0.0893025771 -0.0714181289 0.081619963 0.0111359404 -0.0571513996 0.0548180155 -0.0636223927 0.125711203 0.0851431414 0.130260974 0.0836031362 0.061986275 -0.028846303 -0.0287329499 0.0502533987 0.115427487 -0.0506522879 0.127979293 0.119268231 0.0850080177 0.0331578441 0.0409094281 0.0090124933 -0.0136618558 0.0948067382 -0.0672471449 0.0505564883 0.032799989 0.0633241385 -0.0469509736 0.0506216548 -0.0372176617 0.0645158365 0.149505928 0.0132820019 0.0121845976 0.0295179803 0.0295598768 -0.130403206 0.0423673615 0.0379888043 -0.0185889266 0.0913859308 0.0504159145 -0.0536566004 -0.064247027 0.0357843451 -0.00891068671 0.0950773582 -0.116974562 -0.0360760242 -0.121625684 0.103534453 -0.110155627 -0.109326176 -0.0306907389 -0.124297231 0.0215684474 -0.107538059 0.115768477 -0.0631534979 -0.107648998 0.00836135633 -0.0481221005 -0.0226832405 0.00744933914 0.0239705071 0.00856848713 -0.0518919192 -0.0672201142 -0.0423557498 0.0152753228 0.0322034582 -0.0436891429 -0.0355248898 -0.0221560691 -0.0227099117 -0.0872905031 -0.074751161 -0.0961238891 0.0214987211 -0.0765815899 -0.101568431 0.0197522994 0.0158146303 0.0358287059 -0.0310186576 0.054503344 -0.0471081249 0.00175969047 0.0102003291 0.0548275784 0.0608831719 0.00927542709 -0.00995576289 -0.00546212913 0.124199063 -0.0787529647 -0.107979171 0.0664112717 0.00175410474 0.0996535346 0.053399168 0.0650362223 0.0413330421 0.059269011 0.00307723135 0.0968322679 0.0304244794 0.0847681016 -0.0587718002 -0.0920936614 0.0963051766 0.0730310529 0.075302057 -0.101675689 0.00834253523 0.0334894434 8.25827228e-05 -0.0221394673 0.0630398169 0.0403992832 -0.0181807 0.0733471513 -0.0055750059 0.0181060694 0.1169772 -0.00306291087 -0.0245710369 0.0374747738 0.0355481431 0.127457261 0.06669911 0.033534728 -0.0313876085 0.058364775 -0.0700249672 -0.0348450616 -0.0787659734 -0.111428857 -0.0750032812 0.1067295 0.0295113139 0.022092022 0.0761882439 -0.0214715526 -0.0454636477 0.0440265127 -0.0405539833 0.0178954173 -0.0918944478 0.0349099524 0.0980099589 -0.0593721792 -0.0717693344 0.0749724507 -0.10822311 -0.10527648 -0.0456449613 0.00945392437 -0.113418877 -0.0248292517 -0.151771814 -0.0317451209 0.00303221145 0.0136932479 0.0757390037 -0.0645068213 0.110142581 0.0331983566 -0.0726855695 0.0410212204 -0.0837602541 0.00736812409 -0.0960764661 0.0659725666 -0.0506423712 0.108358607 0.0074415463 -0.0579753295 0.0222589932 0.0219781511 -0.0769435242 -0.00365759665 0.0658315271 -0.0195193309 0.0876873434 0.0829789042 0.030799007 0.0445269085 -0.087823227 0.0490200967 0.0495685935 -0.0617967919 -0.00453193625 0.103787176 -0.0256911721 -0.0746461451 0.149433792 -0.00853996538 0.0359981731 -0.0535804741 0.10725081 0.0878978521 0.0258817542 -0.0147519195 -0.0875247493 0.0177521463 0.0229451209 0.0438379906 -0.0674143359 0.0837276876 0.0518606342 0.0602514297 -0.0148247061 0.0175807085 0.0104981009 0.0398374051 0.016279107 0.0897895545 -0.010169927 0.12621972 -0.152914077 0.102994591 -0.00934717152 -0.0707922205 0.088611111 0.106939681 0.112134047 -0.0540277697 -0.054023616 -0.0951209962 0.0558281131 -0.0773286074 -0.000430493499 -0.0220108796 0.00224742503 -0.042162884 0.0229496341 0.000386319705 0.0903915763 -0.0727334097 -0.0566792227 -0.0469854027 0.0666792765 -0.0901913494 0.0639531165 0.00190761709 0.0819069371 0.0437930077 0.073981382 -0.10753461 -0.0635947138 0.0397201367 0.0639339834 0.0106142825 -0.095933184 -0.0258502234 -0.151261196 0.0201133601 0.0523358956 -0.113775507 -0.0635734051 -0.0209280569 0.0180092286 -0.0952379927 -0.0805232748 0.0792436674 0.116160475 0.0405516624 -0.0603361167 0.0921702161 0.0638613254 -0.0797907561 0.0562291071 -0.0404303297 0.0192060955 0.0931882188 -0.0454974994 -0.063482672 0.0183900204 -0.0941224843 0.0311275516 -0.0276973266 -0.0228528716 -0.0103476569 -0.0013043856 0.0595675893 -0.0146932686 -0.0967626795 0.0205185581 0.0111512868 0.0304273423 0.0346512347 0.0115508316 -0.0314554684 0.0335132703 -0.0399859101 0.0783086121 0.0110251317 0.050887987 0.0386743098 -0.018033972 0.0640587211 0.0695254728 -0.0274955798 0.0315612257 -0.0987086147 0.0660334751 0.108960167 0.0362012573 -0.0556706525 0.0763316229 0.0343016721 0.0549547151 0.0566200167 -0.00617094245 0.104899995 -0.0148995249 -0.0461326651 -0.052078858 0.113826625 0.042423591 0.0696527734 0.0174295567 0.0255777556 0.0323791206 -0.085186027 -0.0352433883 0.0130573669 0.116727203 0.0527772866 0.0953754038 0.0984134078 0.0301339664 0.0283296034 0.0112838252 -0.041340284 -0.106966309 0.0208709706 0.0510318168 0.0411410108 0.0704910904 0.113985598 0.0480646491 -0.101439185 -0.101105615 -0.0554792546 -0.0963118672 -0.0833592713 0.0804136619 -0.0818424746 -0.0130467992 0.0993848965 -0.0514523238 0.0999550074 0.102077879 0.00966593996 -0.00935996324 -0.0179428924 -0.0360591672 0.00619822368 0.0243546553 -0.142853007 -0.0114681982 -0.0543433689 0.0218674429 -0.0637027845 0.0662505031 0.11204917 -0.0893480182 0.0857268497 0.103264339 0.0781002343 -0.0893782303 -0.0274790041 0.0431495346 0.0856630653 -0.12378367 -0.0509530865 -0.0479679741 -0.0808392987 0.0511769354 -0.00993785262 -0.0495909974 0.00732931681 0.107190818 0.0212429408 0.0919175819 -0.0032403795 -0.0621873438 -0.0842421055 0.0878323093 0.00147393253 0.0229070615 -0.0386694148 -0.0345502682 -0.0645541772 0.14156653 -0.0889476463 0.0902122259 -0.0681383684 -0.0405545346 -0.0987435952 0.0225519631 -0.118827663 -0.111183643 -0.0112256492 -0.00946287438 0.0775573999 0.0200256836 -0.0373974107 -0.078532733 -0.108547017 0.0992447287 0.0162392482 -0.0711892024 0.0401137359 0.0209429767 0.000362629071 0.0647842437 -0.0358259976 0.00750721199 0.0491359942 0.0709332824 -0.105451792 -0.0134563902 0.0872533396 -0.0307084043 -0.118991949 0.0960125998 0.0121480636 -0.036713779 0.0374878086 0.0718258396 -0.0660520568 -0.00429979758 -0.055313319 0.018989075 0.0844045654 0.0639191419 0.0425145887 -0.0436811857 0.0248131063 0.0507366285 0.00984115712 0.0211421121 -0.0417334475 0.127702236 -0.142305464 0.038462583 -0.100248791 -0.0598390587 0.0798201784 0.0749086887 -0.0129145803 0.0493668057 0.0832006335 -0.00326930895 0.0621138252 0.116234139 -0.0619600303 -0.0258555952 0.00560154766 -0.00271366001 -0.0680233538 0.039063748 -0.114920385 -0.0542362481 0.0695442334 0.0281284824 0.0585357882 0.125471935 0.0688281953 0.0719351396 -0.0179130882 -0.0254238006 -0.00948760845 -0.0995621756 -0.0127528915 0.0291331895 -0.0169871729 -0.00137848861 0.126049355 0.0243894756 -0.00514754048 -0.0438758358 -0.070057936 0.00142127706 0.0820695385 -0.0231800079 -0.0708072856 -0.0734865814 -0.114026025 -0.0061859726 -0.0585030317 0.0943298936 -0.0582126155 0.064423196 0.0419933088 0.0116295256 0.0170936771 0.0498891808 0.0110197524 0.0411308594 -0.0257459451 0.0114618847 0.0878219977 -0.0317848064 0.0811458603 0.01887214 0.00988883246 -0.0506531522 0.0625907555 0.0145452367 -0.112982243 0.0802996382 -0.0328567512 0.0700641721 -0.00277703465 -0.0246732663 -0.0414474681 -0.0930275917 0.0817583874 -0.0246985424 -0.0693705902 0.0860790238 0.0245301407 0.028220322 0.0357720293 0.0410393327 0.0705156475 -0.063267082 0.050686691 -0.0218410157 0.0550663397 0.0759022906 -0.0350831598 -0.0160008334 -0.115162447 -0.0647135377 0.0396890379 -0.0345642604 0.0103187198 -0.0589025803 0.0834977105 -0.0107147945 0.0380949751 0.0866653398 -0.0723311082 -0.0372112989 -0.000454910012 0.0213319007 -0.00432507833 -0.0310348179 0.0425887331 -0.0940774977 -0.0323967934 -0.0242477451 0.117995851 -0.0160061121 0.0213480443 -0.0668758824 0.114949614 0.0316681191 -0.0759480372 -0.0610279627 0.0633142143 0.0236565657 0.0845542625 0.00935758371 0.0250929277 -0.0281674396 -0.0359582417 0.0694757774 0.056437064 0.016289724 -0.043686077 0.0887322947 0.000600125699 0.0521455668 0.0419055298 -0.0610189848 -0.0224667937 0.0316987857 -0.0323978439 -0.0178262964 -0.0366154872 0.0907478258 -0.0856860802 -0.110066622 -0.0315983742 -0.0946494043 -0.0222084317 -0.0352201238 0.0455912501 0.0811657757 -0.0895951316 0.0279459916 -0.0952548608 0.113056384 0.00558312191 0.050939288 0.124181278 0.0341638587 0.00255426345 -0.033172816 0.0153816594 0.104887553 0.0244834907 0.0457413457 -0.0520596057 0.095031105 0.0351452902 -0.11665196 -0.0497119017 0.050630711 -0.0861758068 -0.0872503743 -0.0496218018 -0.00381143531 0.109498873 -0.0175776016 0.00528071402 0.0259748194 0.0909558833 0.0579428524 0.139967725 0.0764526948 0.00463831052 -0.0771861747 -0.144396409 -0.104723662 -0.0137682576 0.0223192684 0.0313319825 -0.019306751 0.0563000366 -0.00657232618 0.034467455 0.0391030945 -0.0310320668 -0.0617044605 -0.126123548 0.0184416007 -0.051189024 -0.0356684178 0.0274483245 -0.0450351276 -0.0650538877 -0.000781424344 -0.0433340222 -0.0677636564 -0.0434984639 0.0460208468 -0.0124574052 0.045673795 0.0250319857 -0.0097975824 -0.000288532581 -0.0693829432 0.0817056447 -0.0367149822 -0.0803469568 -0.0272673164 -0.0125355599 0.108721487 0.0555210412 -0.0362726599 0.00188711134 -0.0225252602 -0.0467403233 0.0223246478 0.0924254134 -0.00607204111 0.0348412544 -0.0419691056 -0.0352974981 0.120286591 -0.0532627963 0.0599474981 0.117449939 0.0250896253 -0.0453546159 0.0333019607 0.0678343773 0.0618110187 0.0790082738 -0.013288267 0.0124899093 -0.0627008379 -0.0927575454 0.0872658491 -0.0858765841 0.0804509819 0.115199946 -0.116047971 0.114141606 -0.000370875583 -0.0547132045 0.0655369386 -0.105160132 -0.0587072149 0.00996344257 -0.0588719957 0.143944472 -0.0636086613 -0.0625388771 0.050297644 0.01688735 0.0503490344 -0.0187371671 0.0263831038 0.0351513959 -0.0622758158 -0.0289025009 -0.00445907749 -0.0819463283 0.0783530101 0.00172135397 -0.0108682076 0.053622894 0.0898650363 -0.125441834 -0.0469661765 -0.0740193054 0.0146140624 -0.074739024 -0.124391489 0.000910399249 0.100878544 -0.00938480534 0.10746365 -0.10205555 0.0822874457 0.117170572 0.065103583 -0.0468601808 0.0443411134 -0.0220601298 -0.0349924974 0.0565963052 0.024442032 -0.000596265076 -0.0503311418 0.0310966447 -0.0986445844 -0.0381193534 -0.107975848 0.041932255 0.0274684485 0.060344439 -0.0951578543 -0.00214851787 -0.0242667589 -0.00569425896 0.058796335 0.106092222 0.0197916087 -0.0124082975 0.0247668065 0.0231674556 0.0468560532 -0.000621114043 0.0964491889 0.0251123365 -0.0552343167 0.119750619 0.043985635 0.00931171793 -0.0136433262 0.091603227 -0.0712718666 0.0786479861 -0.0408394635 0.0966731384 -0.0660808533 0.0769225433 -0.0086235553 -0.105327964 -0.017321486 0.0972045138 -0.077172406 0.0514651127 -0.0781937093 -0.108713485 0.112203546 -0.0802456141 0.121202722 0.11901883 -0.0931790471 -0.0164292976 0.0312756896 0.105686158 -0.0783906654 0.0468474701 0.0110720228 -0.00267141312 0.0711446702 -0.0328070559 0.0175967477 -0.0356302932 -0.0124149965 0.0686402246 -0.0505678542 0.129400566 0.0214219112 0.0196880996 0.104359493 -0.0519865453 0.052242592 0.00997835957 -0.0990768448 -0.0456322841 0.0230734646 -0.0203887951 -0.0376775041 -0.07378342 -0.0275467373 -0.0691813529 -0.0821307749 0.0593699105 -0.0246762205 -0.101402849 0.0678628758 -0.00588039402 0.106908754 0.00991031248 -0.115229808 0.0721573606 -0.0429049321 0.0701546818 -0.0851534382 0.0652838498 -0.0788848251 -0.0332299247 -0.0408851914 0.00320880138 0.0196518935 0.0832488984 -0.0366014726 0.01875652 0.053820096 0.0153092891 0.0467731841 -0.0368329771 0.0111917052 -0.0291276965 0.0631685331 0.0357577875 -0.0179604348 0.00486189499 0.0305900779 -0.0209680013 0.0740240738 -0.0749756619 -0.121885069 0.032916151 0.00262444629 0.00849013589 0.0662304983 0.0742840394 -0.125835225 -0.0522070974 -0.113991506 0.0744321495 0.0978048667 -0.00498304795 -0.0719037652 -0.0263758246 0.0775782466 0.0118285939 -0.0350849591 -0.0356183834 -0.0106398668 -0.0223848727 0.0236225128 -0.120488241 0.0512224138 -0.041531492 0.0656389818 0.0879166201 -0.0274794661 0.0920548141 0.0264546964 0.0468961522 0.0466408059 0.0399114974 0.0449604504 -0.0700372905 0.0427690521 0.100792646 -0.0325727239 0.0341325775 -0.114281707 -0.00742708845 0.0433078147 -0.10298638 -0.104879826 -0.0632601455 -0.0102007883 -0.0802601725 -0.00294449297 0.00281117624 0.104717933 0.0612074584 -0.0467934757 0.118006982 0.117171124 0.106652826 -0.0225161687 0.0956271738 0.0270829834 0.0200848412 0.0168362167 0.0149010466 -0.0440483093 0.117436014 0.0664134845 0.0302254353 0.0535751954 -0.00848081987 0.0130929723 -0.0325898565 0.128652498 -0.0109613249 -0.00605653459 0.0190438107 -0.0259028617 0.0401356928 -0.00171622215 -0.104202524 0.0845206603 0.12883538 -0.0919445157 -0.00195987965 0.0736826509 0.0480313748 0.0636631101 -0.0408567712 0.0155776199 0.0579566024 0.134289131 0.0326339938 0.0374059007 -0.0366481617 -0.0377640799 -0.0160350259 0.0126323858 -0.0398559012 -0.0692037791 0.00400359975 -0.106625289 -0.0896666497 0.119961634 0.129376546 -0.0542201884 -0.0679891706 -0.0174552612 0.0752889439 0.116622798 -0.122125328 -0.0475201905 0.0421101414 0.00309556024 0.0322735868 -0.097082302 -0.0326796286 -0.0467596054 -0.0276475735 -0.088789694 0.0212633777 0.0486687906 0.108680114 -9.16656572e-05 -0.0739132911 -0.00859406963 -0.0290659312 -0.0827777684 0.151205987 0.0135993576 0.0095570432 -0.142162323 0.00178637984 -0.0175982956 -0.00341362623 -0.116277464 0.127863139 0.155240506 0.0902651772 -0.0665329844 -0.0343229175 0.00592056988 -0.0689622238 -0.0882099047 0.0423216335 -0.0418753251 0.0649065152 -0.148054436 -0.126529023 -0.107144743 0.048180446 0.0964411348 0.0283801127 -0.147997066 0.0730884373 0.0641160384 0.103855938 -0.0519286469 -0.0627045557 -0.123388886 0.106936358 -0.100495324 0.036348857 -0.0662566945 0.0681579113 0.0571975075 -0.0754548088 -0.0399843156 0.0368984528 -0.0353834778 -0.0517093278 -0.123962395 0.0263090748 -0.130231589 0.0996464565 0.0178089179 0.0458062775 0.0963696018 0.0762125254 0.0340860561 0.109043621 -0.0622775555 0.113345571 0.110095598 -0.100886367 0.0148935774 -0.123453058 0.0149683403 0.0882795379 -0.0308197234 -0.00579763902 -0.0442597829 -0.0558761358 0.00445035286 0.0967673883 -0.0510170944 0.1171581 0.0859833658 0.00351574784 -0.0282143541 0.0262071025 -0.0563719533 0.0486262627 0.0520373955 0.0674998388 -0.0777793005 -0.019211974 0.0729704723 0.0623332597 0.0411960185 -0.00379213877 -0.0160260908 0.101385273 -0.07236664 0.118454322 -0.0279059727 -0.00111757044 0.108530454 0.0313402973 -0.109885067 -0.00746698584 0.0517579988 -0.102587014 -0.062045224 -0.0723215193 -0.00753403036 -0.0194992591 -0.055590637 -0.110146999 0.0563573688 0.000793169835 -0.0437380224 0.037614204 -0.103893019 -0.11840263 -0.0892521739 0.0177615266 0.0299307667 -0.0603615977 0.0103125488 -0.0940437391 -0.0742155388 -0.00279134372 0.116551526 -0.0507049747 -0.0112416446 0.0206989124 -0.0475890413 0.00135824515 0.0360365659 -0.0638581216 -0.110917598 -0.0285419766 -0.0785639212 -0.00673839869 -0.072663039 -0.0943017006 0.0209225155 0.0458435677 0.06708619 -0.00773984846 0.130092591 -0.0302911103 -0.094658874 -0.106029265 0.0598360002 0.0363203026 -0.0587395169 0.0218387116 -0.00121726026 0.0923015103 0.0268146414 0.00547261769 -0.0118542481 -0.0527351797 0.0381134599 0.0549164079 -0.0742723569 -0.00661152741 -0.0885568187 -0.146828458 -0.145066977 -0.0526844971 0.0974245518 0.0119285621 -0.148422763 0.138095111 -0.0548562445 0.0224515107 -0.033984974 -0.0918067098 -0.0412526764 -0.129727185 -0.091969721 -0.0195525698 -0.0304857362 -0.114038028 0.123106226 0.0100026429 0.0864370763 0.0507619679 0.00284729549 -0.00105298625 0.0372510068 0.0406655185 0.035562437 0.0691316351 0.0814873502 -0.0958798006 -0.0228097066 0.0195914619 -0.027246682 0.0997626036 0.0630631745 -0.125293136 0.0745200738 -0.0511293188 0.0464217141 -0.067329675 0.018248735 -0.10921976 -0.0231118333 0.0425507538 0.0270119589 -0.0716171041 0.0184950344 0.0490458496 0.0652568191 0.0191503335 0.0326661766 0.00589203555 -0.0994252041 0.0639910772 0.0935874581 -0.0105717117 0.0074147 -0.0258986168 0.0828858837 0.00915369298 -0.031141039 -0.0613915138 0.0385154858 -0.098638989 0.0779575929 -0.0105742011 -0.0758871809 -0.0109963436 -0.00425893022 -0.0998037308 0.0786853656 -0.00168336509 -0.0925417468 -0.125538707 -0.122153223 0.0821714997 0.124297135 0.0863585621 -0.0707112625 -0.0507845916 0.0522913001 0.0209270567 0.0663688928 -0.0528395213 0.120615751 0.0676541924 -0.024552837 -0.0572560132 -0.0213594604 0.10777957 0.101967193 0.13353315 -0.0903856754 0.000669586763 0.0565047972 -0.0825800672 -0.062248636 -0.014425775 -0.0738483593 -0.00286239828 0.0884365365 -0.105007574 -0.0629888326 0.0934715867 0.110788323 0.0860318914 0.00210579997 -0.0725004748 -0.127494186 0.0919124186 0.110653833 -0.0781571791 0.00416795025 -0.110430084 -0.0848361403 0.00444030436 0.116966464 -0.0922116861 -0.0228395946 -0.00113955385 0.00864992663 0.0542619862 0.0738494545 0.0541707687 0.0777184516 -0.0970832705 0.0126359928 -0.0184956864 -0.0622084662 0.0451156609 0.079605639 0.012876017 -0.0658479407 0.0148149095 -0.116397806 -0.084576413 -0.100534178 0.0414143018 0.0586240441 0.0751088932 -0.0757061094 -0.0277423412 0.134908676 -0.033364512 -0.0541506857 -0.0223149844 0.0424581356 -0.0582377762 -0.0225137156 0.0737239867 0.0508049503 0.0808460936 0.0816969201 0.0865024626 0.0526327603 0.00781754963 0.0650917143 0.0286054742 0.0803678334 -0.0147822825 0.0908693671 0.107531264 0.0507709011 0.0359385237 0.0059293001 -0.00380560011 -0.00963426009 0.0474996306 0.0564068668 -0.0188577659 0.0513748489 0.0650842935 -0.0713231862 0.0369577892 -0.0133927027 -0.0248449575 0.0464835763 0.0194541477 0.0589518249 -0.0366000384 -0.00920657907 0.0802554563 0.107316345 -0.0738855079 -0.041999802 -0.061523933 -0.0305218492 0.156987384 0.0346348062 0.0126057826 -0.0152376043 0.0502441861 0.0664728656 -0.0426487103 -0.0210334025 -0.0939696729 -0.0341476351 0.0202106778 -0.0405531637 -0.0474039763 -0.122032635 -0.0849575177 0.0358551703 0.0997392237 0.0705704316 -0.0954606384 -0.0823383406 -0.0335653499 -0.121191315 0.013300227 -0.0529904217 0.0114150038 -0.0746903941 -0.03891664 -0.0819836557 -0.0729287639 -0.0179754216 -0.0102402242 -0.0607133955 -0.0891000032 0.00797273777 -0.022605991 -0.0809449404 0.0195738394 -0.134260491 0.0545250103 -0.0856008008 0.0678676516 -0.0582477748 0.0922091454 0.0154068666 0.0158708468 -0.12760596 0.0564289317 -0.0613937639 -0.0670805797 -2.70218661e-05 0.0686709732 -0.0105099222 -0.0782819167 -0.126448721 -0.0540316962 -0.00444437843 0.0426672585 0.0511715114 -0.0664667189 -0.117823824 -0.0813703611 0.0417935252 -0.075663574 -0.0227859747 -0.0927959681 0.00258619245 0.0769350082 -0.0262786634 0.15649274 0.00548388064 -0.0489241667 -0.028820714 -0.11300534 0.0169768985 0.0593335219 0.107664488 0.0454386249 0.127441257 0.0917166471 0.118455522 -0.089602381 -0.00173335895 0.039862778 0.030519424 0.0692663789 -0.0869349092 0.0913121849 0.0777036697 -0.103544652 -0.0235558059 -0.00459441263 0.0603882074 0.092305325 -0.0165223666 -0.0444068499 0.0616826303 -0.0684993789 -0.0543850996 0.02927945 0.0985921547 0.0364675447 0.0572966635 0.130570352 -0.109130152 0.0495605171 -0.0749507546 0.0501365066 -0.119342029 0.0564106107 -0.0683375373 -0.10465467 -0.10835091 -0.0725807771 -0.061554186 0.0320482478 0.0128553994 0.0586562306 0.0384311117 0.0629496649 -0.0201265886 0.00698905718 0.0514870435 -0.100664191 -0.0316986516 0.00712753553 0.0836560577 -0.129708961 -0.0514545329 0.127032861 0.0874415487 0.0538004041 -0.12563847 0.0540097728 0.0164953042 0.0776124969 0.0573409572 0.0104391398 0.0330088995 0.0108658904 -0.0540808886 -0.0545005389 -0.0912899747 0.039707467 -0.0845631137 0.00952329952 -0.132783711 -0.00215431862 -0.00721834227 -0.0975917131 0.0458213203 0.0200680383 0.0877264142 -0.037273623 0.0161861442 0.110501729 0.0362346061 -0.0118159521 0.06564527 -0.0172634032 -0.0569092482 -0.0533066876 0.049190376 -0.0601144843 -0.0810378492 0.0477174371 0.0174570587 -0.0335904099 -0.133734539 0.0280133393 -0.0691580176 0.0767375976 0.138999552 -0.120073155 -0.0186303947 -0.0229670238 0.110347301 -0.0162759181 -0.125030503 -0.0610758066 -0.0921585709 0.0763650015 -0.0880154371 0.0487306491 -0.0280137099 0.013374065 -0.106856197 0.0210058708 -0.0297262985 -0.100724012 -0.0436595678 0.0109883668 -0.0853584632 0.0911873952 0.0432055667 0.00453142403 0.0127071068 -0.0517662428 0.0676092654 -0.0881505087 0.0098452922 -0.0823895931 0.026693739 -0.0401734523 -0.0659064725 0.0980170965 -0.0178305618 0.0141226156 -0.0129480297 -0.0210044179 -0.052819252 -0.00334462686 -0.0292001851 0.102371179 0.0348540843 0.0185807701 -0.0405740775 0.0680675134 -0.0530634858 -0.0756660923 0.000960345787 0.0799955055 -0.00204092567 0.106765963 -0.00071719382 -0.0700320154 0.0350265689 -0.0876352489 -0.0223301966 0.0634540915 0.046514608 0.00813020207 -0.0424726084 -0.0671557188 -0.0563678183 0.00290626287 -0.0465417765 0.0410110131 -0.0751074255 0.0387596823 0.0907988921 -0.0267684907 0.0381636135 -0.0729710087 0.111388907 -0.00196558004 -0.0716333836 0.00468423311 0.0505745411 -0.0488672592 -0.0919072255 -0.0417954288 0.0445425287 -0.133938208 0.0828562379 -0.0701478645 -0.086428687 0.031541761 -0.0582731441 0.0219943449 0.0624012351 0.0130704865 -0.0401517116 0.0552026331 0.0567754321 0.0710184798 0.00758881867 -0.0124910641 0.0352796912 -0.0257061403 0.0308687277 -0.0213920511 9.48071975e-05 -0.0383036099 0.0287866332 -0.0110332398 0.0567016639 0.0603689998 0.00948372204 0.0733415633 -0.057091359 -0.0315212272 0.0307328366 -0.047477711 0.0429863371 -0.0265197102 -0.0657150894 0.0344378874 0.00179678167 0.010823926 0.0920330659 0.0393380597 -0.117785789 0.0551497154 0.0594726615 -0.0610899888 0.0493407547 0.0192096289 0.018806411 -0.0479772612 0.084946245 -0.0363575257 0.0180488937 0.0628424212 0.047983963 -0.00609665224 0.0490711816 -0.0524433553 0.0299023781 -0.134415284 0.0352817513 0.112402901 -0.00522935297 -0.108842514 -0.100159943 0.0536962375 -0.0482212268 0.033140216 0.0652505681 -0.0243575778 -0.00743648084 0.109651551 -0.0922743604 0.00548604131 -0.00986081176 0.0220172554 0.0101632392 -0.0740579367 -0.127274752 0.123685867 -0.03647862 -0.0687318668 0.00219690241 0.0450170375 -0.115679674 0.0136389351 0.103024855 0.103029318 0.110668465 0.00388179533 -0.0727895275 0.0803632215 0.0280900523 0.0280285254 -0.0467234924 -0.0731821135 -0.130512729 -0.0447000824 -0.107333377 -0.0769113675 -0.0171804633 0.0346588232 -0.0489323661 -0.0676056147 -0.105796985 0.0340438746 -0.0489065722 0.0679880902 -0.138374269 -0.10602235 0.0318014435 -0.116686605 0.0709820092 -0.0534674339 -0.00963871367 0.117157362 -0.0614339933 -0.0393194258 -0.0876544267 0.00427854154 0.0532199927 -0.044476755 0.000165691803 -0.0572972745 0.0502985567 -0.0602571145 0.12552923 0.0100918449 -0.0934075043 -0.0230401549 0.0295511037 0.0494234338 -0.0848071203 0.0259556789 0.079481706 -0.070821397 -0.0908804163 0.0531463176 0.0720867738 -0.0683894381 0.0868811682 -0.0569350533 0.0212230869 0.0892836973 0.0608907081 0.0264557358 0.117087588 -0.0282123219 0.0711553991 0.10003607 0.0398537852 0.0925926194 0.0799961835 0.106195562 0.0835256651 0.0742167607 -0.1288362 -0.0119199455 0.0209458005 -0.0807825178 -0.090900667 0.0211038124 -0.0566857532 0.031491559 0.0864643902 -0.151924461 -0.0301744267 0.0409735925 0.111272551 -0.0702080205 0.0960132927 0.108262971 0.0476099811 -0.00588667253 0.0851650238 0.010426431 -0.0125091802 -0.101221651 -0.0594466254 0.0449221432 0.0909607708 -0.0161867402 -0.0196655095 0.0530647635 0.0182610322 -0.0709991604 0.00761050964 0.0762544423 -0.0591728203 -0.0941646695 0.024339376 -0.0626933351 -0.103930928 -0.0321634226 0.0614973754 -0.118808359 0.0537795126 -0.0431177206 -0.116265662 -0.0131834941 -0.00236911164 0.0999807268 0.110797547 0.0114548178 -0.0998885259 0.145602047 0.111171857 -0.0988338813 0.0828444064 -0.00907499064 0.00587745896 0.071559459 -0.13120684 0.060727559 0.0683118403 -0.0836969465 -0.0657875538 -0.0527593195 -0.115409821 0.0975560099 0.0509091392 -0.0899974853 0.0956521481 0.0244503263 0.0126064662 0.114975713 -0.0405928865 -0.0483787097 0.0214200635 0.00839174818 -0.046142336 0.0722193867 0.0103485761 0.107258148 -0.0378706008 -0.0450719856 -0.0208163224 -0.00753858453 0.0927842855 -0.0761416107 0.0222469252 0.0221501626 0.103600152 0.00779794529 -0.0233703442 0.012515164 0.0401047952 -0.0354013927 0.0374246277 0.0105700931 0.0782879516 0.0528782457 0.0609663725 0.121859848 0.0579176694 0.0506275222 0.089213632 -0.118543468 0.037998043 -0.0670093521 -0.13410008 0.0411561765 0.0282420814 0.0700198412 0.0285238512 0.00444778334 -0.0674335882 -0.0760352165 -0.0417404994 -0.00990704726 0.0137323095 0.0390490703 -0.05264882 -0.0782701373 0.123745263 0.0234257653 -0.0788318142 0.024272114 -0.0218193699 0.0922509953 -0.00793454051 -0.0463180654 -0.113730133 -0.0577475242 -0.00455238903 0.0886773691 0.0554309674 0.0941009894 0.129545763 -0.021742925 -0.081449911 -0.0626695529 -0.0395893045 -0.036148537 -0.0715967566 0.00607152935 0.0371897854 -0.0568351522 -0.0322895311 -0.104940452 0.0889158696 0.0984952897 0.0380211174 0.109986477 0.00241002371 0.0807410851 0.0208322443 0.0305939745 -0.109555371 0.0584459454 0.0174565129 0.0446334742 -0.00203529699 -0.0710110068 -0.00075355859 0.00535989506 -0.00548237702 -0.0412316248 -0.10585098 -0.0147320451 -0.000358470366 -0.0302088298 0.0840577036 0.0291063283 -0.0138413198 0.0101552876 -0.0291738547 0.0731139556 -0.00815887749 -0.117785364 -0.0678437576 0.0784235671 0.0205320921 0.0843389407 0.0271183364 0.000885073736 0.0346361846 0.0954925418 0.130852431 -0.0751837641 -0.0253316611 -0.0976730809 -0.0676677674 0.0476101562 0.0662705749 -0.0301036816 -0.049003385 0.0106137209 -0.11252144 -0.0744634122 -0.0980421826 -0.0166393481 0.0481715277 0.0189415459 -0.0880238637 0.104637556 0.10711097 -0.0564402714 -0.0601721779 0.0159785729 0.0403848588 0.0139113516 -0.108862996 -0.00510752294 -0.0115773957 -0.0455890708 -0.0705545172 0.0851743072 -0.0565754622 0.0915891081 -0.0679899007 0.140255541 0.0228361152 -0.0286569875 0.0132950023 0.0172942225 -0.0447521694 0.0719969049 0.109762378 0.0197884869 -0.0228357762 -0.0507639088 0.0672996938 -0.0723399743 -0.00699901069 0.103675373 0.00931620412 -0.0457025245 0.0969348028 -0.0169527791 0.0237304047 -0.0895861909 -0.0194170661 0.0993531495 0.00229117088 -0.0562044792 0.0336305238 0.0490789376 0.0386500955 0.0127196591 0.00393643929 0.00232719886 -0.0829996243 -0.121718653 0.0753233731 0.0201976635 0.082195513 0.0600713976 0.0539501272 -0.0779756531 0.0625429153 0.00583441136 0.00761622144 0.115591303 -0.0334634334 -0.0245005973 -0.00851792749 0.0614887588 -0.0673773736 -0.0610792227 -0.0935594514 0.0109041268 0.0797310621 -0.054385256 -0.0951922908 0.0503217317 -0.105528668 -0.111157358 -0.0676904768 0.0699280798 -0.0395813137 0.0542365499 -0.0423914567 0.0401284434 -0.108586438 0.106151514 0.0741012841 0.0261538271 -0.0234557595 0.025597224 0.128329813 0.075639084 -0.0326176621 0.0483325124 0.0441246293 -0.00211445754 -0.0714289173 -0.028952891 -0.0652696118 0.10838379 0.0806302279 0.0361339003 0.000588985044 -0.0501025841 -0.0767339468 0.0711772069 -0.109505966 0.0615409054 -0.00328358519 -0.0599714369 0.0810240135 0.0322509259 -0.105258301 0.00425739167 -0.0571172498 0.011786839 0.0552534238 0.0161224175 0.00839114189 -0.0516668111 -0.0663074031 0.0675223991 0.00408511516 -0.0934429094 -0.00482452614 -0.0146074528 0.116948992 -0.0514550433 -0.0092106685 0.0192382932 0.0676550567 -0.107134365 -0.0554183982 -0.0378831327 -0.00111221685 -0.111972146 0.0370764211 -0.025367327 0.0421501771 -0.034930028 -0.133550882 0.0460590795 -0.0408849232 0.0172165278 -0.0828626677 -0.142174855 0.0368294381 -0.0315607302 -0.0813754499 -0.0344028175 0.024872696 0.115811288 0.0284592416 -0.0183729436 0.0680122226 0.096686542 -0.0504275933 0.0555682927 -0.0117671303 -0.106590241 -0.0779706761 -0.0753313005 -0.01689367 -0.0120249027 -0.121162862 0.0904329047 -0.0305291414 0.115454301 -0.0964366719 -0.00301298662 -0.0242908541 0.0638555288 0.130841374 -0.10721194 -0.108135305 0.0741211176 0.0291729122 -0.113760702 -0.0604273416 -0.0709419549 0.139797956 -0.0342961662 -0.0201933645 -0.112033077 -0.0611329861 0.0254594646 0.0681489855 0.0438128486 0.0619565509 -0.11234884 0.0307241976 0.0981715992 0.00958520174 -0.0568824336 -0.0283987094 0.0636719465 0.0751391798 -0.114547461 -0.0748261958 -0.0586201847 0.000263712311 0.00999936834 -0.0185962841 -0.08149115 0.105373196 -0.00710947951 0.000694327406 0.0461735427 -0.0157357287 -0.0240037851 0.0580982715 -0.100919247 0.111699469 -0.0659800097 -0.00773917325 -0.0232742541 -0.0999335274 0.0753451958 -0.051228717 -0.0892171562 -0.0358730741 -0.0996832997 0.0939411744 0.00339663634 0.0281091705 -0.0894726738 -0.00704634562 -0.07571394 -0.0520362742 -0.0228929147 -0.0211675484 0.0902847946 -0.0659550056 0.11681138 -0.0221742485 0.0613481849 -0.0157842189 0.0737548843 0.00965575501 -0.0838649422 0.00177340093 0.0528827235 -0.11357832 0.100755192 -0.0534734391 -0.0358427912 0.144596636 0.0647218004 -0.0423597619 0.0605341755 0.0732165053 0.0211333074 -0.132864833 -0.0364422947 0.0478251725 0.0119115161 -0.00161979138 0.0571164563 -0.0192272216 0.00423192605 0.0322174877 -0.0759943277 0.0137448525 0.00333114085 0.0397117473 -0.0238687452 -0.0832956731 -0.0279709753 0.0938207209 0.0343491249 -0.0871218666 0.0408064276 -0.116285831 0.0795068964 0.0848761573 -0.046004314 0.0709750503 -0.10165219 0.113097928 -0.0288509876 0.0324093103 0.120680496 -0.0231527574 0.0672659576 0.0315056667 -0.0114693092 0.0518258587 -0.0120203597 0.0803814754 0.034682408 -0.0337615535 0.0386820808 -0.0665814355 -0.0311845411 -0.0300064813 0.000839714077 0.0999553874 0.0476576835 -0.0808109865 -0.0593067668 -0.0203568004 -0.0215366837 -0.0306633245 0.0436415672 -0.0116597321 -0.0583168492 -0.0336035974 0.0782463998 0.019496616 -0.113910265 -0.100025997 0.0590599142 -0.0404247232 0.0250362512 -0.114405021 0.0491880253 0.0765769333 -0.0574881397 0.0377197377 0.0666196123 -0.0882478282 0.12657164 -0.072820656 0.0688454062 -0.0431665219 -0.0397588946 0.0174000375 -0.0294575971 -0.113417856 0.0438238829 0.0227670204 -0.0407737307 0.00265934458 0.0681627318 0.0419177152 -0.0144360522 0.0160792209 0.0458587408 0.0618349649 0.0176743343 -0.0137938112 -0.0633563772 0.0310720019 0.0681380481 0.0212434102 0.0676606894 0.041460555 0.110738382 0.0473630205 -0.0378823988 0.0510912016 0.00387570239 0.0408548079 -0.0602159686 -0.129621208 -0.00288634188 0.00880218763 0.091095008 -0.132332921 0.0251508802 -0.0712097511 -0.103759281 0.0195465796 0.0568811819 0.0538945012 0.140887022 -0.0393595546 -0.00443352573 0.0130884098 0.0407299697 -0.0129098492 0.120743103 0.02760542 -0.129898503 -0.00857560523 -0.0720292479 0.0491493791 0.102326475 0.0921848789 0.0479474291 0.0605254434 0.0442970507 -0.0229930989 -0.101877302 0.120247759 -0.0461608209 0.0228956696 -0.0468554012 0.0931479931 0.0344555005 0.01930671 -0.0351826884 0.0757239461 0.0770438984 -0.0225172918 0.0203145165 0.0127433063 -0.0512304567 0.070727922 0.011212891 0.0631789118 0.061186403 -0.0620558858 -0.0242845789 -0.0357322469 0.0807544664 0.0694836825 0.0275604576 0.048231598 0.0312998705 0.0098650381 -0.0849438533 0.00338348607 -0.0562642589 0.062496379 0.00795244705 -0.00989409816 0.0154066579 0.0518637821 -0.103680417 0.00535006076 -0.104885489 0.0388466492 -0.0735442638 -0.0991858095 -0.0114390533 -0.024811225 0.0624354333 0.106003806 -0.0726236701 -0.106910177 -0.0611559413 0.0201094151 -0.052110929 0.0187292732 -0.0280273762 -0.101260066 -0.143803522 -0.1252902 0.0355423726 0.041179236 -0.126104265 -0.0216143429 0.0806514397 0.00608616043 0.0657909364 0.0178345367 -0.0923066512 0.0481731519 0.145438254 0.0159616042 -0.0456462018 0.0941475853 0.0632876828 0.0367626883 0.0247407742 0.0601012856 -0.0355465524 -0.0118422816 0.0488038473 -0.0545568913 0.0373688899 -0.0515505187 -0.0396510959 -0.0605122671 -0.085121952 0.0596127883 0.105902717 0.0220958665 -0.0255203731 -0.0148762362 -0.0768131837 -0.0578792021 -0.0949795991 0.0773940459 0.0814553499 0.137127966 -0.116018936 0.0563674271 0.0888326541 0.0284422096 -0.110917278 0.0353827216 0.0380769633 0.12019825 -0.00778515963 0.0705309212 0.00951496419 0.0804332197 0.0100569949 -0.0600129589 0.0635915622 0.0929165035 0.0890567824 -0.0334398523 0.0183780789 0.0171072353 -0.0848544464 0.0573717169 -0.0625854135 0.00517629972 -0.0316587314 -0.00222206302 -0.139464319 0.00396719109 0.119108282 -0.0407875292 -0.0145511776 0.0634373575 0.0286066066 0.0339107104 0.0838994458 0.156156093 0.0932729319 -0.067164138 -0.0970614329 -0.000510855229 0.108286127 0.151279747 -0.153703973 0.035030935 0.0742894635 -0.0494455397 -0.0341568068 -0.0577272587 -0.0769041032 -0.0278417245 -0.0176225342 0.0874658376 -0.0257863011 -0.0709038004 0.0103997458 0.105501436 -0.132797644 0.0622315481 -0.0974398479 -0.0254051387 0.0495131202 -0.111515976 0.103808023 -0.101846233 -0.0294793397 -0.131958127 -0.0533692092 0.109044902 -0.0826396644 -0.0222143289 -0.0188204758 -0.0809235647 -0.0443305187 0.0731882006 -0.0607132837 0.108671054 -0.099729836 0.0198555607 -0.14591822 0.0282850396 -0.0493627414 -0.0674319044 -0.132230341 -0.0341024846 -0.0808820575 0.0899107382 -0.0263775121 -0.103454776 0.0222094338 0.00426623598 -0.084072873 -0.0383605286 -0.0547198616 0.0559252352 0.0470217839 0.0677336454 0.0497331806 0.0631156936 -0.00398747297 0.112397343 -0.166542813 0.137340739 0.0250983089 0.0890974551 0.119683884 -0.0544718653 0.0518688969 -0.0565655194 0.0104118874 0.0981469452 -0.0510016531 0.00272554019 0.0927296504 -0.0194716733 0.110565722 0.0546717308 0.0536676086 0.064474754 0.0118900863 -0.11696136 0.0142176412 0.0417189002 -0.0839173347 -0.0281918701 -0.0403215103 0.02901816 -0.0981693715 -0.0701962784 -0.00782805495 -0.0462877564 -0.0264666826 0.0648322478 0.0252208374 -0.0529760942 0.0255852453 -0.0645134747 -0.0154027175 -0.0532422848 0.0832066536 0.0396291837 -0.0148703549 -0.00148318615 -0.0615470037 0.0724665746 -0.0584392287 -0.026484957 -0.0585906915 0.0238307714 -0.0229600184 -0.0852913335 0.038450405 0.103452124 -0.033543352 -0.101024874 -0.0779693052 0.0964624882 -0.0119416546 -0.0178287029 -0.0222403612 0.0204786509 0.0163842663 -0.045447167 -0.0765725672 0.0971594155 0.041313909 0.0470899418 0.00305234478 -0.119385377 0.0464745872 0.0310937278 -0.131466374 0.0710817575 0.0100257266 0.068332687 0.0689313188 0.0125061376 -0.0347266309 0.0975375995 -0.00572358584 -0.0388996676 0.0531310216 0.137647584 0.0694899485 0.00679214392 -0.00121487537 -0.0537946038 0.0458747223 0.0680655316 -0.0407916345 0.0546781644 0.0273533594 -0.12560609 -0.0675602481 0.0385174453 -0.0973455235 -0.00201383908 0.0485716909 -0.0049632024 0.00582174305 0.0641960278 -0.0848527774 -0.0219417512 -0.15150407 -0.0783323124 -0.00965964142 -0.0608184524 -0.0130811296 0.0480553694 -0.0386784896 0.0999142677 0.0546924099 -0.1222317 0.0141203087 -0.0226628929 -0.0910275429 0.0631851926 -0.0745201185 0.0876418352 0.00886597019 -0.047717195 -0.128835618 0.0425309427 -0.0195610169 0.0638543293 -0.129029542 -0.0131098628 0.0650595948 0.0454248711 0.0853424594 0.0140957199 -0.0202541035 0.0317879245 -0.143378869 -0.00423101289 -0.131989092 0.0190712065 0.0510258228 -0.0865119174 0.0910336599 0.0548940673 -0.105332822 -0.0292321127 0.00560635095 0.0586736389 0.00505677052 0.110722467 -0.107276112 0.110618874 -0.0691331774 0.118226469 -0.0460344777 0.0930353999 0.0951236412 -0.0392442942 -0.112528846 -0.0722931251 -0.0136691937 -0.0806170553 -0.0411446244 -0.041987583 -0.00134162803 0.0213002544 -0.0701876432 -0.0885930806 0.0525745451 0.0624400154 0.0732306167 -0.0200780723 0.0234855395 0.00717696035 -0.147869304 0.0787881762 -0.0716938302 0.0857125446 -0.0352597944 -0.0805057809 0.0154540623 -0.00925941207 0.0756853744 0.0621306412 0.00842974614 -0.0371917672 0.108534172 -0.035098426 0.0504793301 -0.020088356 -0.0706505328 0.0162423179 -0.0752581954 -0.134370595 -0.015700357 -0.0898832977 0.0254374146 -0.0247301795 0.130348459 -0.035687875 -0.0680520609 -0.0444837064 -0.0601570755 -0.0513698831 0.111533344 -0.113860339 -0.0939767584 -0.0477326587 0.0567156076 0.0808228627 -0.0376587808 -0.114645995 0.0915196016 -0.019314684 0.0117936404 0.0774793029 0.0794394761 0.0432011634 0.0209889244 0.0314350612 0.0340264812 -0.104788385 -0.00981875602 0.0270214248 -0.0868451148 -0.0589688234 -0.0642679632 0.063923806 0.0117191905 0.0869612917 -0.0395875722 -0.0766618028 0.0934183374 -0.0996760055 0.0946377516 0.0345660634 -0.0784306899 0.07895834 -0.0232259352 0.0685211644 -0.0316710509 -0.0698057264 -0.011367701 -0.0852755904 0.0756148174 0.112687804 -0.0221658461 -0.0638062581 -0.0995947495 -0.148020685 -0.130782247 -0.0682474449 -0.0904296935 -0.0312870853 0.00539993821 0.0133634834 -0.0529328249 0.11267703 -0.0565492623 0.00755324587 -0.13352786 0.0963837281 -0.0480984338 -0.0886128098 0.0665832683 -0.109329402 -0.0235391576 -0.057248909 0.0689797029 -0.06789276 0.055123087 -0.0196565576 -0.0988758132 -0.0760087073 -0.0214166064 -0.0119032441 0.0697430596 0.00237821974 -0.0490270369 0.117983244 0.0743466169 -0.0153463781 0.101554446 0.103069983 0.0187342204 0.06316486 -0.117018297 0.0541630685 -0.100673176 0.0376137197 -0.0421548113 0.0645036548 -0.0168326087 0.0727319941 -0.0281656329 -0.117367707 0.116008684 0.0423757844 0.0988826826 0.0217613652 -0.0824515149 -0.0351628885 0.0812880173 0.147239089 0.0299862716 0.0364563912 0.145076081 0.105126604 -0.0210315958 0.0879319981 -0.0583813041 0.0593444556 0.106604464 -0.0204098541 0.0887133107 0.0528428815 -0.0444989093 -0.157084122 0.054473713 0.150896385 0.028199533 -0.0832022205 0.0864810869 -0.0489188135 -0.00317808706 -0.138337359 -0.0614818409 -0.0958974063 0.16130729 0.0542741828 -0.0278767291 -0.0963605344 0.132825524 -0.0308959335 0.0545662679 -0.0319377147 -0.0980552807 0.0630095378 -0.0429899767 -0.0260642897 -0.0517068692 -0.0561511219 0.11457511 -0.060042996 -0.0527282394 0.0207744949 0.023834521 0.0296360757 0.0525115617 -0.114589319 -0.147910029 -0.0456607491 0.0715667382 0.0993826538 -0.0107942242 0.117987439 -0.0284947716 -0.0709881261 -0.0903323144 -0.0224822853 -0.157054216 -0.0552059412 -0.0338664018 0.0750938728 -0.065085113 0.0211203843 0.118827477 -0.0167396851 -0.0932219103 0.0603475198 -0.0151796769 0.00819401443 -0.103917979 -0.0764359087 0.0163631905 0.0167195648 0.10436935 0.031059213 -0.010204182 0.0322529972 -0.0338583738 0.0547566526 0.0993093476 -0.0449988134 0.0820005462 0.0658240914 -0.0828819126 -0.0934411883 -0.00780287059 0.0153802652 -0.0455549546 -0.021676302 -0.00396145368 -0.121144004 0.0972423553 -0.0255077239 -0.0110973027 0.120320976 -0.0416977331 -0.0117513239 -0.105017632 0.0385619588 -0.0398330316 0.0233246256 -0.0271476638 -0.0675703511 0.0597647466 0.0618401542 -0.0964857414 0.060786169 0.00302257249 0.0607231446 0.0319902562 0.0811921582 -0.01984399 -0.00189587893 -0.139521733 0.0102705099 0.0954400972 0.0966984481 0.0283194389 -0.045280274 0.0892768875 -0.052237168 0.0279194918 0.0737474114 -0.000261810783 0.112771511 -0.00738663413 -0.0538329072 0.0805022269 0.0928170681 0.0922827125 -0.12284258 -0.0546792485 0.0152943293 -0.0549117215 -0.0439201444 0.148618817 -0.0444157384 -0.014640267 -0.0463561974 -0.141159236 -0.0240516476 -0.0309760477 0.103102759 -0.0665320605 0.0416155756 0.0295819342 -0.072761029 0.106963806 -0.0282385554 -0.0605240837 0.0600927584 -0.0168974958 0.0207078587 -0.0489886738 0.0462408178 0.0563473664 0.0989545807 0.0286699794 0.00622357149 -0.0955138803 -0.089370057 0.0498380885 0.111254118 0.0486598499 0.0335665718 0.00651514437 -0.137515217 -0.00872137025 -0.0768070891 -0.103112787 -0.0138399014 0.0693487599 -0.000350349292 -0.132112339 -0.054441724 0.0118631627 0.0867957175 0.0417998731 -0.125937298 0.0554638319 0.0775117502 0.0927276611 0.0790062174 -0.107764006 -0.00206389814 -0.0824461728 0.0240072217 0.0872223303 0.0261813533 0.0419610031 -0.110041335 0.0405180678 -0.0859660432 0.0439100154 0.0232283361 -0.0124879908 0.0721851513 -0.078385748 -0.0744791776 0.0467836894 0.0291622393 -0.00720773824 0.0299567468 -0.0575182885 0.0597681552 0.0921448171 -0.0661888644 -0.00482166698 0.0675290599 0.0166998263 -0.0955132842 0.085087046 0.102876 0.0426338613 0.0119831273 0.0855897442 0.0225691516 0.0364397429 0.0160135124 0.00990157295 0.0420151539 0.0316322856 -0.0585764237 0.0824364498 -0.0413012579 0.135568514 0.034757603 -0.0277830604 -0.034982793 0.0369454138 0.0193050615 0.0799474046 -0.0846258327 0.0366695002 0.0598423779 0.0799307451 0.136157006 0.128120825 -0.0201004725 0.0452948473 -0.06387043 -0.0197872147 -0.0240808073 0.0412721485 -0.0553675219 -0.106740355 -0.035156589 -0.0776401386 -0.104280807 0.0636275262 0.143016845 0.0158824641 0.0104194768 -0.0614338666 0.0736046582 0.0777402669 -0.0251369067 0.08835724 0.0629755557 0.100526057 0.130498186 -0.0605841354 -0.107173443 0.0618615188 -0.0027110891 0.0448608994 0.0324240513 0.13815707 0.0885208547 -0.0184885561 0.0110004703 0.0750818923 -0.123291738 -0.0332586765 -0.00227115862 0.0191539656 -0.00745699275 0.0960062817 -0.00826996565 -0.11104311 0.00985418726 -0.00825903285 0.0663968921 -0.022775976 0.120236516 -0.0306508504 -0.126447007 0.0266145803 0.0663856491 0.0159932058 -0.00567367487 0.0217537843 -0.129739061 0.0966290981 -0.120227985 -0.0869013295 -0.0486435518 0.145977855 0.0870844871 0.08520028 -0.0570347048 0.0191150215 0.0577304959 -0.0748146251 0.138433784 -0.00482775643 0.0784191266 -0.00595876481 -0.089486897 -0.0807763785 0.0747055635 0.0123625547 -0.0272405632 0.0675494596 0.0216415282 0.0251738597 -0.055193793 0.00323623535 0.104679525 0.00744761759 -0.0563779734 -0.00321181351 0.0755025595 0.0668580309 0.0714727044 0.0588193573 -0.0533336736 -0.027081253 0.0995806679 -0.00321418745 -0.0934964344 -0.0121698389 -0.0306112021 0.049315121 -0.0717256963 0.0284800846 -0.0465604663 0.0592573173 0.0975120962 -0.0522723123 0.0236058217 0.03004965 0.0192594938 -0.0153996143 0.0517514087 0.0202556662 -0.036583852 0.105843432 0.0923823789 -0.108679689 0.104115218 0.0757252499 -0.0786331147 -0.108693816 -0.0475629792 0.0984940901 -0.014999046 -0.0789110363 -0.0634896383 -0.124007449 0.0513020195 0.0337021165 0.00936586969 0.0974761024 -0.0039249598 -0.0539403148 0.0093635805 0.064329423 0.00841173995 -0.0369432382 -0.0830086768 0.0733837709 -0.0366025865 -0.0928544104 -0.0144924261 -0.0935278535 -0.0608592965 -0.102837108 -0.105442159 -0.0516982377 0.0344825126 -0.0649234951 0.0406593382 0.0177465007 -0.129067734 0.0989128351 -0.00798356999 0.0440664552 0.0535025857 0.0184675008 0.0339980274 0.0178082474 0.0744322464 -0.0364990495 -0.00785736833 -0.0367429368 0.110767066 0.043253459 0.0398505144 0.0362781025 -0.0534571707 -0.0138610825 0.0870700777 0.0741645619 0.0842578635 0.142304018 -0.0709979832 -0.0997136533 -0.0118433814 -0.0195060018 -0.0260943621 -0.0851690397 -0.0610457137 0.0346727297 0.038758263 -0.026763279 0.00496497378 -0.0248329956 -0.0392976888 0.0639327541 -0.0116053829 0.0389414802 -0.0765374303 -0.00860751234 0.0580505244 0.058850836 0.0729101896 -0.0168091431 0.0493836068 0.0378085151 -0.0690903589 -0.0735144988 0.0763928369 -0.0370460264 0.0515179113 -0.0130856326 0.0213577785 -0.0343368538 -0.0334647931 -0.0814459473 0.0176412053 0.0479053147 0.00514184404 0.0467168912 -0.114557423 0.0326536633 -0.122538239 0.0797366053 -0.0220797621 -0.0939437151 -0.0356741399 -0.129558548 -0.0728810504 0.0334232114 0.0454140641 0.0641237572 0.0639395788 0.0806245655 -0.0568198524 0.113629669 -0.0269122235 0.0319497921 0.0329489671 -0.0914393291 -0.0630809143 0.0387656465 -0.0879159197 -0.0275259484 0.0394459814 -0.0404246971 0.0497982427 0.0524061657 0.00315544894 0.0564953573 0.126382247 -0.0345166884 0.0698444024 -0.0992106721 -0.0982451588 -0.107451998 -0.0527341142 0.123538248 0.024919359 -0.0131305484 -0.0894226953 -0.115056708 -0.0378506444 0.0621916279 -0.122512206 -0.0171847306 -0.0232702196 -0.103842773 -0.100498989 0.0790945068 0.0964731276 -0.0845638365 -0.0501652695 0.0643050224 -0.0128263319 0.0967387334 0.0712623745 -0.0615100749 0.0906366855 -0.0492125414 0.0326652825 0.126826495 -0.0253421534 -0.0376052111 -0.0545951948 0.0150278658 -0.120675765 0.0287395269 0.0592594892 0.0814532936 0.102257699 -0.00316688977 0.0232737195 -0.0206201728 -0.106220126 0.0464446917 0.0267721917 -0.0964898616 0.000449200714 0.058107879 -0.0119522484 0.119053274 -0.0157440305 -0.0229303446 -0.0538970679 0.0736326724 0.0061632446 0.084165886 0.0724296197 -0.144264609 0.0839208364 0.0139587959 -0.0285230391 0.0120399361 -0.11818894 -0.104979657 0.0247651879 0.0166639592 0.00610556966 -0.0571998879 -0.127792791 -0.0762260929 -0.0623565726 -0.0835529417 -0.0365828983 0.0787034184 0.0618906133 -0.0250335261 -0.0716648474 0.0972160697 0.0754901916 -0.0548195727 -0.0320330486 -0.0405605473 -0.0662167147 -0.0892478526 -0.0222243164 -0.0625544339 -0.0201768409 0.118638895 -0.0409025624 0.0845576376 -0.107005633 0.0590842962 0.034769319 0.00361982756 -0.0245255139 0.0547625758 -0.115161017 -0.0553721972 0.0778890774 -0.041641593 -0.0323726982 0.0310737193 -0.0700510889 0.015053235 0.160222828 0.10772761 0.037959829 -0.0358525217 -0.111562051 0.0241074972 -0.12357261 -0.12483231 -0.0926482156 -0.0271823723 -0.113680638 0.108206771 -0.116128989 0.00913964305 -0.0308017898 0.0424275286 0.122627713 -0.0544019043 -0.132333323 -0.046340026 0.139717042 -0.0182990991 0.068831861 0.0823961869 -0.043938648 -0.038030766 -0.0475954749 -0.0332984775 -0.058541622 -0.0173970181 0.00698842388 -0.00505919755 -0.0264520328 -0.075296253 -0.0592983365 0.0703864917 0.0332557037 -0.083748579 0.0185417943 0.0330124721 0.081373781 -0.0521305203 -0.0828755274 -0.0191443413 -0.0275393687 -0.104940005 -0.0373899266 -0.129006848 -0.0350307822 -0.0594692267 -0.086783044 0.0255106967 -0.112269998 -0.0306032244 0.0352828279 -0.0759110004 0.0436218269 0.0371037386 -0.0105048856 -0.0273989253 0.0408369228 -0.0481947623 -0.0490162857 0.0253327843 -0.0900856555 -0.10654892 0.0296242032 -0.0569313392 0.00557792746 0.0511694998 0.0238375813 -0.0494668148 0.00665589096 -0.0544650368 -0.0221331436 0.122494169 -0.00346396537 -0.03750135 -0.0583185181 -0.0697296709 -0.00882709585 -0.0139009692 -0.0581789836 0.0511611365 -0.0665547177 0.0625669286 0.0951525047 0.0109070055 -0.0445890985 0.0510924347 0.0549049266 0.076408051 0.0454293936 0.00382474251 -0.0298265554 0.00967820082 -0.0501072742 0.0321529955 0.0609736592 0.0241174586 -0.0162564274 -0.0707081556 0.144424051 -0.0120626008 0.0290390942 -0.0303947851 -0.0576401316 -0.0760076717 0.0552736968 0.0407108702 -0.0295744073 -0.097608991 0.0594237335 -0.0941474885 0.0194068011 -0.0656058192 -0.0635675117 0.0444326811 0.0656519532 -0.0508928746 -0.0729632974 0.0443925709 0.0649101809 -0.0789977983 0.0154495686 0.0545210727 0.0447072089 0.126279771 -0.0358525813 0.0973265097 0.0260252561 0.0660820976 0.103842169 0.0981507078 0.0491054058 -0.0901521593 0.00591290556 0.0812497959 0.00227644946 -0.0607588552 0.0970650539 -0.0110606086 0.0776812136 -0.0386007279 0.119623892 0.0970067903 0.0679384917 0.05710252 0.0563185252 0.120259158 -0.0155343693 0.11131572 0.0304788649 0.0158111248 -0.0598068163 0.0854219869 0.0570583344 -0.0570600703 0.0287855826 -0.0342741273 0.036824815 -0.0501024202 -0.0268743541 -0.0634012967 -0.0412885621 0.128790557 0.00406311126 0.042762816 0.00955149718 -0.0193585306 0.0519001707 -0.039887663 -0.0505587868 0.0825586244 0.159575224 -0.039045386 0.0544076897 -0.0779607669 0.0380125828 0.0408898331 -0.0760996863 0.0313064456 -0.0805607736 -0.0574796721 -0.0846826658 0.113500386 -0.0871631727 -0.117222093 0.0810274109 0.051653102 -0.0653802082 -0.00290928991 -0.0630526915 -0.119209491 0.0347142629 0.0174591336 -0.0608103834 -0.0927200988 -0.0334013142 -0.0835639536 0.11650601 0.0233004745 0.0682244599 -0.0430421382 0.028754117 -0.0900809765 0.012021089 0.0624547713 -0.105882496 0.0918491483 -0.0683888867 -0.0233582761 -0.0216962695 -0.0297207572 -0.0362710096 0.0270867273 0.0460449308 0.0642470419 0.0419084579 0.096854955 0.0901691094 0.0210975073 0.0876087993 -0.0793926194 -0.0800184235 0.0771968812 0.00599401817 0.0235112216 -0.124454454 -0.0479293279 0.0157109667 0.0773659572 0.00740274787 0.0359256268 0.0233147927 0.0827361718 0.0162823889 0.0475280918 0.0248643626 -0.0826426297 0.0601135641 0.119723722 -0.0729700252 -0.115519248 0.0721688643 0.105871052 -0.0132921757 0.08133322 -0.0257968605 -0.0480753519 0.0262065995 0.0138876187 0.0765962973 -0.0963439941 0.0358153284 -0.0679376945 -0.0568544529 -0.0514913723 -0.0568194948 -0.0500201248 0.040343143 -0.0777348354 0.0369281098 0.0772028044 0.00108942436 -0.00182866259 -0.0662001669 0.0198386908 0.045538079 0.0673875585 0.0710163489 0.0381334536 -0.0855338573 0.0286879074 0.151627332 -0.0448625647 0.0633926764 0.0938242897 0.0526457354 0.0111530349 0.097349681 0.0770600736 0.0608735308 0.0969019234 -0.0362519659 0.00380012137 -0.0779155269 0.00992168486 0.0114733698 -0.000200923663 -0.0576153658 0.083698988 -0.147824839 0.0438186601 0.0710815191 0.028712105 0.04132507 -0.0103248488 -0.000444060774 -0.0523407757 -0.0500133485 -0.0338968448 0.0263319649 -0.04361872 -0.0277711656 -0.0168558471 0.0467232168 0.0177872274 0.0654023588 0.0379033573 -0.0607685857 0.00964797754 -0.120628364 0.0780472904 -0.0843662843 0.030455105 0.118488185 0.0485005565 0.0704616383 0.100928433 0.0408400409 -0.0665229484 -0.00970364176 -0.0212464705 0.0161203556 0.147231802 -0.0107072778 -0.0776233077 -0.0784361213 -0.0594038591 0.0418931209 -0.000451631407 0.112979718 -0.0695450008 -0.0122077055 0.00301642111 0.0691217184 0.0310368631 0.0258781705 -0.0280865338 0.0467126593 0.00575158047 0.00231607817 -0.0486097038 -0.119848073 0.0345518552 -0.043744415 0.0102821859 -0.0540690646 0.138281018 -0.00140106888 -0.0396243855 -0.0496274233 -0.0120704891 -0.0222738367 -0.0162870642 0.014834349 0.00513116037 -0.0692124441 -0.0776496753 -0.120830476 0.0161017329 -0.0370132066 -0.0627025366 -0.108544096 -0.0967517719 -0.0203821119 0.0064773499 -0.13926363 -0.0478015207 -0.00240246905 0.0692687705 -0.0158551876 -0.0689087138 0.104410864 0.0010006387 0.0992425233 0.0829349607 0.0481005087 -0.0365891643 -0.150370061 0.0162225049 -0.0743764937 0.0161576103 -0.0280919597 0.0834880769 0.0755537376 -0.0628848672 0.0377435535 0.0462861024 0.0946150273 0.037716087 0.0241654012 -0.0828214586 0.0492958799 -0.0326795466 -0.0770705715 -0.140277237 -0.0159451012 -0.109213002 0.0240502506 0.0428965315 0.0193452798 0.0742191896 0.0848924294 -0.0308462307 0.0752010345 -0.0391890407 -0.00412439182 0.0942446142 0.00469598826 -0.0910378024 0.000205826393 -0.0885702595 -0.10040123 -0.0408986025 -0.0709256157 0.0822851807 -0.0281461775 -0.0398267582 0.0393168256 -0.112278141 -0.0479413383 -0.0573235005 -0.0405642055 0.122419089 0.0270299502 0.00841662008 -0.0121508595 -0.0198617335 -0.000879280851 0.00153166568 0.0554407202 0.00480276532 -0.0682227761 -0.0328659527 -0.0611312203 -0.142565057 -0.00181693793 0.0861305967 0.0628089532 -0.0740653574 -0.0160195958 0.0974677652 -0.101770975 0.0444948077 0.0841920972 -0.0720307156 -0.00300135929 -0.069788307 -0.0935182795 -0.0931349918 -0.10534066 0.00375205046 0.0325018056 0.0886859596 0.0366864353 -0.0217052139 0.0339041911 -0.0782768726 0.00892924238 -0.0385166742 0.0129920086 0.0826299712 -0.044218149 -0.0138300406 0.00623118551 0.0313761942 0.126124471 0.0703845546 0.0405096114 0.0779507384 0.056386482 0.0744233653 0.0530518629 0.0425919332 -0.0413660035 0.0031353673 0.0289649554 0.0164382402 -0.011526701 0.0496848971 0.119253859 -0.111141384 -0.036666058 -0.0288353041 -0.00853391178 -0.0851149112 -0.0335021652 -0.0633720756 -0.0867637545 0.0132676009 0.0459979139 0.0367925242 -0.0876119509 -0.0333126523 -0.0919199139 0.0108301183 0.117218599 0.107971512 0.0295869391 0.0267735161 -0.0267074816 0.0445350818 0.111544669 0.0392179638 0.0937596411 -0.00281381886 0.0201951191 -0.0501904786 0.0335953049 -0.0029760797 -0.0398072712 -0.0350824408 0.0377969109 -0.0179890636 0.0425337292 0.0354793109 0.0247561317 -0.0294101313 -0.0284981932 0.028481964 -0.0429043695 -0.053075958 -0.126678079 -0.085693188 0.0219913088 -0.103943169 -0.0727316067 -0.0124961985 -0.126258418 -0.0539888591 0.0462416373 0.0975957662 -0.00795640703 -0.021866478 -0.0801899433 0.0211574696 -0.00211753859 -0.0770760551 0.106369033 0.0529551283 0.00306034461 -0.153457433 0.0278290957 -0.0596790686 -0.04930925 0.072257936 -0.067070365 -0.0302931052 -0.115538754 0.138532385 0.0228340477 0.154491559 -0.0212814286 0.000630576746 -0.0118962303 0.0624279119 0.0366596803 -0.104068108 -0.0649591386 0.0323766321 0.0730391443 0.0661889538 -0.0391814969 0.0793058872 -0.0105679389 -0.0682836398 -0.0303012673 0.122320741 -0.00334193907 -0.107540131 0.0638230518 -0.119354151 0.108916059 0.0184885114 -0.0397466794 -0.074820742 0.0587450974 0.111003347 -0.0857884958 0.0496451035 -0.175095469 -0.0324501954 0.0114386939 -0.083123289 -0.0846996307 -0.0342796296 -0.0571025424 0.126033574 0.0537063144 0.0963928178 -0.0292254034 0.0303790402 0.0882642195 0.0210448559 0.0844297558 0.0784011334 -0.067198731 0.0598029867 -0.115000091 -0.0233332999 0.11517673 0.0587579273 0.0726984292 -0.0598884225 -0.0596332885 0.10184852 -0.00164783257 0.0644008815 -0.0344729498 0.00754436292 -0.133005932 0.0759031922 0.00654394785 -0.00963001978 -0.172101706 0.0105558978 0.176982358 -0.0497983247 -0.0037973139 -0.0655243993 0.0364305004 0.0520126633 0.016448427 -0.00652270019 0.068141371 0.0190387368 -0.0741908997 -0.0276863649 -0.0545798913 0.0041190316 0.0235041492 0.0122208726 -0.0720304623 0.0136137297 -0.0269483216 +tensor_10bias 50 +0.12787357 0.017543152 0.122975975 0.0730041191 0.0510178655 -0.00993559696 0.139933825 0.15092434 0.0684130192 -0.0333705768 -0.184260622 -0.13440612 0.109378524 0.111376524 -0.10483826 -0.0250708181 0.120549299 0.0411001481 0.183845177 0.135748357 -0.00771392835 -0.12025056 0.085442692 -0.0513125733 0.136845529 -0.0145230526 -0.0895486251 -0.0252410602 -0.00896273553 0.0933182612 -0.108676046 -0.104239464 0.170086652 -0.0341263078 0.0728005916 -0.0453254506 -0.100045033 -0.110129185 -0.00771265198 -0.119152002 0.1214706 0.101130307 0.0332861841 0.0142126186 -0.010599345 0.109234303 -0.0182705577 0.177162722 0.0691059828 -0.0739419758 +tensor_2bias 50 +-0.0447338857 0.0537877791 0.0785957575 -0.0634338111 0.153481558 0.148676842 0.0265698414 -0.0261984505 -0.0751923025 -0.0352455713 0.0932889804 0.113871664 -0.0193461645 0.175267622 -0.0770687833 0.157511786 0.0196232703 -0.0737266392 0.0872744098 0.116388358 0.168398216 0.0425802097 -0.102230035 0.0693789497 -0.0855393335 0.126388997 0.0205914602 0.140580684 -0.00234525092 -0.0295791756 0.0197821874 0.0661892593 0.166472748 0.149337456 0.0513125136 0.00068877294 -0.0757507607 -0.0540507101 0.134943455 0.0256511811 -0.0943378955 -0.0261238459 0.0309584048 0.111188456 0.169084176 0.136096522 0.0985386074 0.0480017625 -0.0471420884 0.122215845 +tensor_6weight 2500 +0.0649253875 0.129901871 -0.0820776671 -0.0164463595 -0.0272229239 0.0591965616 -0.118314907 -0.037768431 0.0372078121 -0.105141595 -0.140254259 0.0649844706 -0.112917937 0.141195908 0.140458569 0.0553426445 0.0367731303 -0.0505450144 0.0507215112 0.114758804 0.115806922 -0.0424669459 0.0370975286 -0.14095898 -0.104349688 -0.007835567 -0.0608764365 -0.0330444127 -0.12756449 -0.104601666 -0.0191679522 0.00627362728 -0.0662557259 0.0937368721 -0.101459384 -0.0692796931 -0.0512177646 -0.126805127 0.0393478721 0.0119376034 -0.0574386194 0.100259379 -0.10315454 0.109866068 -0.02667135 0.130284503 -0.127174616 -0.0201597661 0.0414076746 -0.122587755 0.126039341 -0.115497321 -0.126209974 -0.00932627916 0.0310982913 0.0501976013 -0.0105512738 -0.117707536 -0.116891071 0.117860749 0.0559653193 0.0531298667 -0.0543823317 0.106951609 0.0151336193 -0.0444077402 -0.112000868 0.0114103854 0.0838644654 -0.012747705 -0.0791340023 -0.0889710411 -0.0655299723 -0.0225159228 -0.00320497155 -0.0662335902 -0.0993035883 0.137778953 0.105412766 -0.116872713 0.0578503758 0.0725949556 0.0382958353 -0.0512723327 -0.00722907484 0.0786679238 -0.116880074 -0.0138037503 -0.0500161424 -0.133497417 0.0958063304 0.0558829457 0.0326671302 -0.0238390192 0.0845869035 -0.0934950113 -0.0433793738 0.0942181498 -0.045510605 0.0947668105 -0.106258683 0.0446187519 -0.0900780708 -0.0834366232 0.191142887 -0.100739747 0.171907842 -0.0254000407 0.138836846 -0.0700232163 0.114825904 -0.143776864 -0.0321323685 0.0355321914 -0.178224027 0.119957708 -0.0752720386 0.127894193 0.164032444 0.065395847 -0.063121289 -0.0970638469 0.102740057 0.0505844206 0.0253012329 0.0821145922 0.180317059 0.136325151 -0.103746325 0.126737922 -0.0877246112 -0.0697940513 0.0607301034 0.0686804578 -0.0175086763 0.0285665393 0.147603065 -0.159169093 -0.058806546 0.101134196 -0.0185775906 -0.113093227 0.0278050229 -0.0363715962 0.123531096 0.105049185 0.0325903893 0.101475507 0.175050184 0.0439927392 -0.0129783954 -0.103368133 -0.094232142 -0.133218303 -0.10637027 -0.126878336 0.100644603 -0.0823836327 -0.0993345156 -0.0921484306 -0.00233977009 0.0756816864 -0.0497992188 0.044235874 -0.100462228 0.0119998753 -0.0844490379 -0.0331858918 -0.0446389243 0.042482052 -0.126429394 -0.105036467 0.0468023382 -0.0696351752 0.0628612116 0.0562251285 -0.0864542499 -0.0504873767 -0.057342425 0.107809477 0.103574097 0.0706402957 0.0782148615 -0.112125456 0.0768203884 0.0012682596 -0.124097727 0.114557318 -0.0111420928 0.0438492894 -0.0157870948 -0.129962921 0.115011618 0.0792783797 0.0613046065 -0.000343024731 -0.0795636103 -0.0708794519 -0.0101428293 0.0629758537 -0.0162976906 0.111654803 -0.134260848 0.00456416048 -0.129808471 -0.0437678993 -0.0731499866 -0.156290948 0.176469311 -0.134536281 -0.0936101675 0.094726339 0.129458129 -0.00281856535 -0.0142846275 0.00348282605 0.129408911 0.125073373 0.153636366 -0.0143775577 -0.013238579 -0.0172810107 0.0421338268 0.116808861 0.0514435619 0.13204819 0.0942413136 -0.012623366 -0.0874075145 -0.0010379689 -0.162753403 -0.0148045626 -0.0110199554 -0.0829107389 -0.0709493682 0.162264898 -0.0466960482 0.115680397 -0.0569904298 0.0977253392 -0.0407817513 0.163954467 -0.0335706919 0.145685494 0.122499764 -0.0530293435 0.160302415 0.00654218439 0.0903446525 -0.0116685461 0.0239315517 -0.0313074701 -0.102479123 0.0804489553 0.0174044427 0.0801673904 -0.0707507953 -0.0458744019 0.0368017294 -0.158817649 0.0533084273 0.0464035608 -0.0136327893 -0.026964413 -0.0722962692 -0.0277424678 0.193694353 -0.00919557363 -0.0336900316 -0.00418696925 -0.0529568717 -0.00187929883 -0.00698451232 -0.0436371192 0.0323710404 -0.019839149 -0.0511180982 -0.110972911 -0.0133787924 -0.00690555479 0.104938939 -0.038326323 0.0560517721 0.138403684 0.143514618 0.199766785 0.14532347 0.0941502005 0.0855569765 0.0256890338 0.0689958632 -0.0572427884 -0.00418164022 0.0580582805 0.150297597 -0.122072354 0.176015973 -0.120600596 0.119270205 0.106842689 0.108840823 -0.0772350207 0.128743961 -0.0015650976 0.0175431371 0.053713128 -0.117410287 0.0328807086 0.0287136007 -0.104569376 0.0721085593 0.0677165911 -0.0558042675 -0.0673747733 0.115988277 -0.122426286 0.0186466724 0.101494573 -0.029576974 -0.115950264 -0.0865741 0.0563799553 0.107808612 -0.0450687222 0.0710128173 -0.0514423363 0.0430348404 -0.0574421734 0.0800841525 0.0757694393 0.10702318 -0.0222116411 -0.0559151433 0.0379136428 -0.0136397472 -0.125272736 -0.12881507 0.0900285095 0.0889691934 0.121225074 0.0771746784 -0.0660418868 -0.044440113 -0.122758932 -0.109487474 -0.0582289658 -0.104467168 -0.00918032415 -0.0209672842 -0.0869374499 0.168161795 0.108111799 -0.0880761966 -0.0135405827 0.178589284 0.003923479 0.0852129236 0.161241695 0.00760242762 0.06472487 0.0908324644 -0.109867044 0.13713856 -0.0345446207 -0.144439176 0.0468028821 0.136207759 0.122578613 0.0340208001 -0.105203725 0.0250524748 0.106788099 -0.121437281 0.181704462 0.11812605 0.0816245601 -0.101409554 0.158797711 -0.0405994244 -0.106579058 0.0417435579 -0.0245459247 -0.00784720015 0.0369141363 -0.070102796 -0.0140520735 0.180028707 -0.0340496227 0.0966045856 -0.0815079585 -0.0375775248 -0.173401833 -0.0957172289 -0.189357907 0.0151246237 0.0324664675 -0.0768369883 -0.106799647 -0.0305638388 0.0201060958 -0.053941071 -0.0226951279 -0.0301792286 -0.0753694102 0.106556229 0.00628629327 0.0264616497 0.115733989 0.0310344063 -0.0524785519 0.0871863812 0.1207719 -0.0298178941 0.152269572 -0.13239485 -0.0819777101 0.0469505712 -0.0912657976 -0.111869723 -0.0653776079 0.10464593 -0.0256920718 0.12280155 -0.143135741 -0.00778760947 0.03016074 0.0972794741 -0.0641395524 -0.0162782986 0.0504767261 0.0761293843 -0.0471233875 -0.0866800919 0.0426621437 0.0164198168 0.111198299 -0.150085554 0.0248084236 -0.0389914848 -0.0365719572 -0.138500616 -0.0784377009 -0.107607454 0.0207631979 0.0907824636 -0.0914271027 0.0534422696 -0.112685621 0.0665683895 -0.0469377451 0.0247338824 -0.0177221745 0.118170217 0.113025144 0.0234410614 0.104385503 0.0654341355 -0.10872592 0.128927425 0.196715385 0.0276464783 -0.0738130882 -0.081564039 -0.00269559864 -0.126407489 0.015476441 -0.045586586 0.0332736522 -0.0798867643 0.135938272 -0.162508756 0.0983785167 -0.0764289424 -0.0560759567 0.0814144537 -0.031941954 -0.121607453 -0.0935366377 -0.0972638801 -0.0318852663 0.134761959 0.00468478957 0.0771510676 0.0787510574 0.164012611 -0.0312081948 -0.0129511952 0.0929201245 0.128727853 -0.00758869387 0.0151306689 0.0861001238 0.106875338 0.0643666014 0.153492779 0.0107787019 0.0601070002 -0.0477736481 -0.131303728 0.00165647722 -0.159763634 0.0611100607 -0.0269413907 0.0301383473 0.118319333 -0.114341162 -0.143750668 -0.106911905 -0.0885151848 0.160572648 -0.0470729731 0.0245884079 -0.0456172712 0.0757794902 0.0562509894 0.0297678653 0.0527246483 0.0166134071 -0.108542152 0.142919838 0.127158552 -0.0228688288 0.00676658237 -0.03869633 -0.0931294337 -0.00328914542 -0.0614178069 -0.0198070854 0.145518914 -0.0294807851 0.0692162439 0.15985842 0.0560066774 -0.0942831039 0.0402628109 -0.118215956 -0.116073422 0.0202833321 0.117826007 0.122413464 -0.0271829292 0.0389408059 0.0934228823 0.0398765206 -0.00495207263 0.0981794819 0.115069546 0.0594924539 0.0624140352 0.0753316805 0.0130726891 0.00351743586 -0.118038118 0.133946255 -0.0532785915 -0.111061007 -0.0136450082 0.0968498662 0.133393183 0.149615765 -0.126794592 -0.107227415 0.167891011 -0.0144322244 -0.181450546 0.0244579148 -0.0923274755 0.157411754 0.050326366 0.143469214 0.00917230081 -0.0694648325 -0.0583085977 0.100404061 -0.0703162327 -0.132603139 0.0277496353 0.182791844 0.0298265126 -0.14978756 -0.0095058633 0.177655354 -0.0389893278 -0.0960298106 0.055750493 -0.0944034085 0.175231501 -0.151938185 0.0563026294 -0.126313433 -0.137585253 -0.11282815 0.0335017443 -0.016390631 0.0258972906 0.149925053 -0.0161783621 0.132413134 -0.129700065 -0.0751069337 -0.0137014491 -0.126565307 -0.0802877396 -0.127848729 -0.0448123366 0.00571359694 -0.0442490689 -0.0026283646 -0.0133119607 -0.117010497 -0.032991223 -0.0752329901 -0.0423538461 0.0337411128 -0.101852775 0.102702036 -0.113081135 0.128210023 0.0527718291 0.0711361766 0.046200335 0.112589262 -0.0602141693 -0.124360792 -0.049823273 -0.140881091 0.116494343 -0.137485832 0.0550901145 -0.0324928425 -0.101916127 -0.0462415516 0.0865442455 -0.119312339 0.0382132456 -0.0243112519 0.101194464 -0.10621307 -0.0587359108 0.107364364 -0.0826650411 0.112274796 0.0253867805 0.0701454431 -0.043696586 -0.0748712718 -0.0725907981 0.0644025356 0.0884814113 0.0663292259 -0.129587308 -0.0319217071 0.0338242948 0.115189984 0.0245237201 0.0201187134 -0.0739658847 -0.0454444066 -0.0267900527 0.0743228644 -0.134670675 0.0116872936 0.123525247 0.00718687475 0.139177337 -0.0978305936 0.0739517361 -0.0291812122 0.0807204247 -0.140549108 0.00840865076 -0.0133223087 -0.0685992762 -0.0170855597 0.060691461 0.0238291025 -0.141307816 0.0849160701 0.0482466817 -0.0244439244 0.0211740434 0.0507029444 -0.069623448 -0.0391115323 -0.045335494 0.105534464 -0.0210918859 0.0410889536 -0.119236276 -0.0102088749 0.0296808928 -0.111803085 0.0251688212 -0.0522222742 -0.134250998 0.112514332 -0.0292918608 0.114655808 -0.115933761 -0.0447240621 -0.0562940501 0.115107387 -0.0417959876 -0.0358452164 0.128562316 0.123079613 0.0867616385 0.0504442304 0.085063085 -0.0750186294 -0.0415927172 0.0159885045 0.0309951119 0.0242125411 0.0228883941 0.128811404 -0.0658345073 0.0893866047 -0.0262501985 -0.0197901707 0.0398271419 -0.00843849033 0.0776178464 0.0806626081 0.168270662 0.015441413 0.0647286773 -0.0716274977 0.0758225247 0.114696413 0.142221808 0.103615619 0.0212591253 0.140274763 0.00738972286 0.159614474 0.11493472 -0.0833858475 0.0133725926 0.0502345115 0.138931051 -0.0143997408 -0.135814145 -0.0122304466 0.157529533 -0.150415257 -0.0632499009 -0.0106943063 -0.0938702598 0.163158879 0.13341108 0.106037788 0.1496768 0.177437797 0.087329708 0.137258947 -0.0137388939 0.0762795284 0.0370195433 -0.0747531578 -0.092746526 0.0398157351 0.0443542562 0.0983223766 -0.00542128552 0.0799729377 0.168658942 0.125600606 0.150951058 0.117274851 0.0738498569 -0.0982450694 -0.011585433 -0.00457595475 -0.0337975733 -0.0616223812 0.0883765817 0.146805629 0.0442404337 -0.101139419 -0.059554819 0.0444233194 0.0295815617 0.0203016624 0.0470338352 0.00290740328 0.0758937672 0.0288642086 -0.0832545534 0.0548138246 -0.00573976338 0.0907851085 0.0382896215 -0.137567922 -0.0848902464 -0.0355325341 0.0280306078 0.0849616677 -0.0109465634 -0.0933749229 -0.0489923954 0.131554142 -0.0105491728 -0.0911042765 0.0896382779 0.107579067 -0.029194802 -0.118035324 -0.0691957697 0.0260686129 -0.117240146 0.0314605832 0.10417594 0.0173794031 -0.10924159 0.00410650671 0.12347053 -0.021081768 -0.0583038926 -0.076368995 -0.0559989214 -0.12317574 0.126255885 0.124372408 -0.139102474 -0.127438575 -0.0832829475 -0.0507567972 -0.0409637913 0.0168262422 -0.109306589 0.0518526733 0.0749200583 0.00206166506 0.0649633855 -0.0586098135 -0.00433701277 -0.140350699 0.0938716233 -0.089609772 -0.0619740263 -0.0610454977 0.0776864439 -0.0440377593 -0.0523070544 0.136881992 0.111145541 0.0935858637 -0.130629882 0.0228392035 0.0660683215 0.0564527586 -0.0145275388 -0.056871783 0.140726104 0.0382112935 0.0346260034 -0.0959678069 0.145820111 -0.0788428187 0.130337492 0.106305443 0.186199993 -0.0118903993 0.114453636 0.0458821617 -0.0491925776 0.0321561061 0.0618102029 -0.16807498 0.146204278 -0.0881870687 -0.169820085 0.0581149757 -0.0209829025 0.000727858045 0.0668258667 0.0809662268 0.0593013167 -0.154004052 -0.0266895164 0.131010324 0.0933532268 0.136942223 0.0960304737 0.127566546 0.128763124 -0.129231334 0.0490520634 0.0179415178 0.035261184 -0.179191247 0.134654313 -0.191801935 -0.076531738 0.0557464883 -0.0514609776 0.030970484 -0.0304086115 -0.058471296 -0.107087307 -0.0737263411 0.0960866362 0.0616026595 0.00334342872 0.0160897672 0.115088649 -0.129959434 -0.0453715175 0.106996052 0.0485980026 -0.0609982088 0.0606777444 0.0854022726 -0.0109910937 0.0280183572 -0.106572933 -0.00772281922 -0.0217049569 0.142191678 0.078674458 0.068385914 -0.0397756584 -0.0448649749 0.0790037736 -0.0683723092 -0.134903669 0.0462144762 -0.0944194347 0.14962922 0.0367264152 -0.075939849 0.151242435 -0.0653834939 0.0671074167 0.0147493538 0.13696453 -0.0275645163 -0.0429917164 -0.0180217978 0.0253212303 -0.0417146906 0.0207910389 -0.0281672105 0.130631521 -0.109785154 0.0733767524 -0.109265648 -0.0798736662 0.0224359911 0.208666578 -0.0645421147 0.0355885737 -0.073725976 -0.0510966443 -0.0937370732 0.173772439 0.0993817151 0.00306298863 -0.195579961 0.052579727 0.127555981 0.0955225378 0.0206778944 0.0144746751 0.130441144 0.0313935652 0.00892100483 0.080054298 -0.128953949 0.0751526803 -0.0949046835 -0.153239205 -0.0463347062 0.016422227 0.0674657375 -0.0140186697 0.064172186 0.202651188 -0.165430844 0.0656619221 -0.0430362485 -0.197136238 -0.0389609933 -0.12942259 0.0315187573 0.0998861641 0.0155031411 0.0358207226 0.168374822 0.0940297097 0.0293072574 -0.0722433701 -0.0128252115 -0.0433789827 0.059830334 0.167342469 0.05525738 0.00795800146 0.177529857 0.0210485943 0.047749389 -0.0363491178 0.168270051 -0.100355022 0.0292338673 0.175140589 -0.127292693 0.162490025 0.0100361016 0.154595226 0.0616088361 0.136025682 -0.00410753815 0.0369135141 -0.143811956 0.0958657786 0.144568652 -0.00905292854 0.130941108 0.0106995432 0.0483372957 -0.0231650397 0.036639642 -0.0617889985 0.0236214604 0.0238810871 -0.0795606971 -0.110024542 0.174338296 -0.0911057219 0.0656976923 0.0863363743 0.0683924854 0.134093165 0.145337448 0.116067648 -0.0847840905 -0.0767683238 -0.0150442421 -0.0229843333 0.0828322992 0.0535647161 0.0319587328 0.068530798 -0.0646711886 0.197244614 0.0427581631 0.0388010144 0.162918717 0.136511028 0.0195802618 -0.0968718901 0.167434052 -0.0834559351 0.0702522248 0.163521126 0.110413931 0.161692828 -0.0881290808 0.148896158 -0.128931329 0.0255813021 -0.0889823139 0.157743439 -0.0732447058 -0.0442789234 0.0533142164 0.133719116 -0.116840921 0.0800347999 0.189877659 -0.135516554 -0.0575624406 -0.0097662257 0.119637571 -0.074548699 -0.0714714378 0.126038283 0.1195461 -0.09768942 0.0303867999 -0.123445861 -0.0530549176 0.107548378 0.106309928 -0.0313007124 0.183906198 0.0751518011 -0.0633003265 -0.0617225319 -0.0701497793 0.0320757441 -0.0290392973 -0.0253149793 -0.0470200963 -0.0478345975 -0.120073162 0.201239541 0.142304704 0.0925019607 0.148831651 -0.167674646 0.123002127 0.106455177 0.0328564122 0.18806994 -0.116831504 -0.00451909332 0.108785309 0.157465339 -0.00134878256 0.168126434 0.0580710471 0.0837541148 -0.0657100528 0.158608526 -0.0463683493 0.0946896747 -0.104266793 0.0244341511 -0.0714015439 -0.0990499556 -0.0860033333 0.145062909 -0.0333383344 0.142448917 -0.00225598761 -0.0131941633 -0.149845496 0.00207266607 0.0925255567 -0.182044104 0.00203921902 0.178830191 0.135419115 0.127062351 0.119150542 0.120787822 0.0427289233 -0.102054872 0.0916266441 -0.0503866151 -0.0314327143 0.113203667 -0.14366518 0.12766479 0.0501433946 -0.0380674638 0.132927895 0.147104084 0.129884318 0.0988519117 0.0387863517 0.0734434873 0.0411540642 -0.027659202 -0.13669847 0.083362028 -0.0450929962 0.145056829 0.0885054395 -0.0165824685 -0.0861969367 -0.0862592608 -0.160450995 0.0212117564 -0.104402281 0.143013418 -0.0506607853 0.121090904 0.00905802753 0.111442901 -0.143552661 0.0210310649 0.0612097643 0.00359729188 0.0227075666 -0.0815051943 0.155096367 -0.0119450046 -0.0233580228 -0.0038536794 -0.0880303755 0.164003551 0.1600402 -0.016360864 -0.0836358368 0.0851199031 0.0105815725 -0.121088877 0.161806434 -0.0379569791 0.0800513998 -0.0538180247 0.153429583 -0.0247538723 -0.00772412121 0.120341845 0.0548929647 0.114107296 0.0127800889 -0.0710391551 0.134522244 -0.0879234001 -0.0632987469 -0.0650375783 0.0809550807 0.137545347 0.0396288 0.186278701 0.110111617 0.143173963 -0.176478416 0.160997689 0.0144827925 0.0872319192 -0.0407468043 0.114270978 0.0436847992 0.0258595552 -0.0514572188 -0.0362136886 0.130494818 0.126685143 -0.0894779786 0.117681846 0.173565581 0.174748227 -0.15385066 0.149053425 0.160555586 0.0397729799 0.156005859 0.110312633 0.104156397 0.161141351 -0.0919019654 0.015511048 0.0107473964 -0.0837544352 -0.0176889747 -0.10078945 -0.0619383864 0.160746276 -0.087044619 -0.0232165866 -0.0215495545 0.0582484603 0.0864141285 0.175924376 0.0442700647 -0.0247930624 0.0347629003 -0.161288068 -0.0290379301 0.170908287 -0.117735907 0.110525407 -0.115487754 -0.000686930609 0.130876914 -0.0291782003 -0.192795917 0.127867773 0.126315489 -0.07262256 -0.098871097 0.0209841039 -0.19527556 0.116880774 -0.02486692 -0.00237640645 0.143660888 -0.016016813 -0.0697216764 0.175688595 0.0232482143 0.0199046992 -0.103963897 -0.0378533229 0.0388961881 0.00533542689 0.0628525913 0.159435913 -0.0747304037 0.0978682712 0.164278746 0.077385895 0.109259471 -0.0799139515 -0.0421864092 -0.0443351157 -0.133975893 0.0834283531 0.093928501 0.00520775095 -0.0434011891 -0.0435828492 0.138147533 0.106794529 0.093232654 -0.077764377 0.16267027 0.051492583 -0.0966648981 -0.0458262265 -0.0408286341 0.0238162875 -0.00872587226 0.153415054 -0.0966666192 -0.0194769856 0.151141167 -0.132202849 0.17568706 0.0875745118 -0.00695692096 0.0846608803 0.0842222869 -0.00846964866 -0.133651823 0.0813971162 0.0544089861 0.101662867 -0.166373864 -0.112454981 0.137616843 0.140390456 0.0915882215 0.10989771 -0.0496877804 0.154562473 0.0789823458 0.0279520545 -0.0192710813 0.025512537 -0.00114545715 0.0528355576 -0.0804974213 0.130488142 -0.0450717099 0.00189470535 -0.126931518 0.00184863445 0.0691755414 0.0959887952 -0.00365662854 -0.0239975173 -0.000226317745 0.162838191 0.110088706 0.103135742 -0.0143095907 0.0685937479 0.039006602 0.181053951 0.0662889108 0.142534971 -0.0225376673 -0.0523421951 -0.0925690904 -0.00610838691 -0.0569295287 -0.0691444948 -0.0351942256 -0.0200236402 0.0384809263 -0.00329685421 -0.15174298 0.1632265 -0.191212401 -0.169024199 -0.093971774 0.115878142 0.0936368257 -0.0726782456 -0.0567203537 0.127668455 0.0460995883 -0.0191945117 0.18582131 -0.171271384 0.0437021852 0.062035732 0.0159470849 0.0150196124 0.00918887649 -0.0672063157 0.0613921694 0.0558371395 -0.172685817 0.0529843457 -0.179647043 -0.00943551958 -0.0415023826 -0.0244376082 -0.0472054332 0.153094694 0.143580258 0.0942730904 0.156098858 -0.00754955551 0.0512687974 0.138893977 0.0646209419 0.00226254459 0.133554146 0.0259827524 0.110805348 0.0725759491 -0.131094366 0.12708883 0.0314303264 -0.0524304323 -0.032248389 0.163754046 0.0906126276 0.00314503536 0.103355683 -0.022527555 -0.1250837 -0.143783137 0.0596455783 0.0511251315 -0.0954806134 0.17346862 -0.00509193866 -0.0772540048 -0.0803210288 0.173364595 0.167615995 -0.129515707 0.0145245409 0.0466810837 0.0946052521 -0.0887519196 -0.0918630585 0.154023126 0.182059482 0.122924969 -0.0969166085 0.0428368933 -0.0473706648 0.0871873423 0.0173784196 -0.0468124636 0.130918413 0.115169801 0.106101029 0.0267140083 0.171541661 0.117503718 0.0674298778 0.0793930814 -0.0995452777 0.0986198336 -0.0477845483 -0.0891349018 -0.110497288 0.149275228 0.0541292913 -0.0509323142 0.00657417579 -0.00849667098 0.0782996938 -0.000425429258 0.0927700475 0.0596327335 -0.0792194828 0.048249729 -0.125496924 -0.119564533 0.0140337572 0.154170945 -0.175600752 0.0509903021 0.0491141193 0.151463166 0.0498116091 -0.0577821173 0.0124854716 0.0519152619 0.000966675114 -0.0199240129 -0.0589309931 0.000340196391 -0.0851683021 0.0118466569 0.109990321 -0.0261993259 -0.0374022834 -0.0214411858 -0.077557683 0.0687204972 0.0663195103 -0.0442392081 0.0338341743 0.13567619 0.180690661 0.19239752 0.107011527 -0.0798124969 0.0309492871 0.0260094907 -0.125474811 0.0975558758 -0.171736181 0.121255443 -0.0812420845 0.174648881 0.0337508172 -0.0655879006 0.168462068 -0.123068273 -0.14526248 0.1509289 0.149049625 0.0172713008 -0.0775876939 0.125850379 0.0576170236 0.0959700122 -0.0350637622 -0.0413426161 0.198388338 0.06012512 -0.18112573 0.0456633084 0.123411685 -0.135381892 0.0592928678 0.0492700674 0.192084178 0.00668479549 0.00347893289 -0.0798124969 0.160300285 -0.0158643834 -0.097056821 -0.00595153868 0.193585619 0.129847378 -0.0445784479 0.154722676 0.0128285876 0.114035919 0.0366068296 0.0581881292 0.0526427999 -0.0453962088 -0.0249866024 0.147942722 0.048362948 0.0718180016 -0.129099786 0.0695572644 -0.040164955 0.151449472 -0.107218184 0.0813755468 0.0611639321 0.120362371 0.00170552568 0.0150107937 0.0923141465 0.179166928 -0.0595131889 0.0748501047 0.024664795 -0.072850123 0.0498956218 -0.118837982 0.11913538 0.1241147 -0.0298356991 -0.0732461885 0.137327462 0.150715679 -0.144629672 0.0296867546 0.0185879748 -0.158391654 -0.0696423948 -0.0815559775 0.120456815 0.174756512 -0.0714245588 0.100912079 0.109141059 -0.0181489885 -0.189933077 0.0589498132 -0.146864727 0.0246144049 -0.0326956324 0.0814622864 0.044614289 0.0344069004 -0.0722796917 0.0347998254 0.00988415256 0.074375473 0.0236355383 0.186613545 -0.0229948368 -0.0627373829 0.058446534 0.0801035017 -0.0250811949 0.0163063705 -0.0360587984 -0.0412077829 -0.131415576 -0.14121896 0.183651194 0.0538982339 -0.0825245678 0.0530949496 0.129799366 0.077988103 -0.163070917 0.131275401 0.115696557 0.0255096387 -0.0695977584 0.149488509 0.110933349 0.0595859699 -0.105136663 0.139630318 0.13104403 0.140138745 -0.101875864 0.0968326181 -0.0490331948 0.0320329145 0.0932519361 0.111740142 -0.0153519753 -0.0669102296 0.0104083447 -0.0649985299 -0.154267743 -0.0946161672 0.139226034 0.107407138 -0.0765753686 -0.0474209748 -0.111844584 -0.0410924852 0.00278180838 0.108596429 0.014437899 -0.120850071 0.101168439 0.0475970656 0.110533401 -0.0760123357 -0.0803952068 -0.0215338543 0.109282747 0.0477782488 0.0887209475 -0.0328624696 -0.0277395248 -0.131564692 0.0674616843 -0.0144642591 0.13782452 -0.0827166885 -0.0459428355 -0.0465939641 -0.0978194177 0.137472615 -0.0644845441 -0.093579635 -0.079621926 0.0540327132 -0.0426073149 -0.0682768524 0.140229478 -0.0689926222 -0.116822943 -0.0883634388 -0.0420724526 0.0797011107 0.134867147 -0.0124301612 -0.0311987475 0.076223284 0.0785176903 -0.0510006249 -0.08932513 0.0967391878 -0.136143774 0.124550089 -0.119794972 -0.106707312 0.0434878916 -0.000768460974 0.083400093 0.123351663 0.154955849 0.0239652898 -0.00470558135 0.0155227007 -0.155885831 -0.0280565098 0.128090873 -0.0347218178 0.0469225496 0.066305764 -0.0798357874 0.0677081048 -0.153243482 0.0412665345 0.15851365 0.0430604853 -0.0530885011 0.131436363 -0.0623488314 0.0265644994 -0.130693406 -0.0925032496 0.167786196 -0.00228108512 -0.051090654 -0.129197508 0.125834614 -0.0826043189 0.0495859832 0.13765806 0.140279785 -0.100200407 0.078553237 0.102651939 0.0530582368 -0.105640791 -0.0712560862 0.0563652664 0.0500995256 0.110330448 0.0879531652 0.0794132054 0.0128588937 0.139566243 0.00905480981 0.0769669786 0.0616210736 0.0383987278 0.024789568 -0.0215452202 -0.0754719898 0.103158571 0.021371033 0.157727793 -0.168305516 0.041431915 -0.205217093 0.0685112029 0.11518427 0.0901029781 0.0836623907 -0.00306673371 -0.078299813 0.0937599093 0.0358634107 0.150480777 0.017379215 0.0400344506 -0.0467984006 -0.0435465574 -0.0746275187 -0.12713474 0.110726796 0.163420781 -0.100556083 -0.00550368195 -0.10327252 0.044071883 0.0337789692 -0.0129005229 -0.0913272351 0.132832885 0.147079349 0.100901216 0.134497017 -0.0322105363 0.133053601 -0.0325982273 0.141311869 0.0040314584 0.151371911 0.181470856 0.0484154783 0.164058596 -0.0128529146 0.0409421511 0.159602627 0.101342879 0.149882555 -0.0492368788 0.172365248 0.124329507 0.0683217645 0.0930551067 -0.0814763457 0.147788212 0.00853961147 0.0389146842 -0.000336691737 0.163021743 -0.084802106 0.0986582115 -0.0116979126 0.0385086611 -0.0496010855 0.0737678558 0.103331998 0.161403298 0.0173213035 -0.103028946 -0.0950937942 -0.0377868973 -0.0620894209 0.13404268 0.0146548431 -0.0653266087 -0.0033960822 -0.112761199 0.0226024743 -0.177061707 0.109000698 0.045506943 0.101955965 0.0158496425 -0.0690437183 0.104365595 0.0359109081 -0.122470014 -0.0659879521 0.0467208475 -0.0753396451 0.0523877777 -0.0585377291 0.100402929 0.119433776 0.0242477674 0.0617414936 0.182905495 0.157282576 0.0866737887 0.107341088 0.114345349 0.00848616753 0.0763099417 0.0206906293 0.0617443733 0.0259690173 0.107850946 -0.111641936 0.133501753 -0.169169813 -0.0887352601 0.089083977 0.156513289 0.0230403095 0.000902002619 0.0383367911 -0.0300379787 -0.146975219 -0.00701804645 0.131880164 -0.0454387777 0.0733794197 0.173866943 0.0410080142 0.19769071 0.0897455812 0.0198194478 0.00869395584 -0.0264868997 0.0861539766 0.123009734 -0.0185853429 0.16686818 -0.0672833547 0.0305484533 0.132848471 -0.127947524 -0.1613774 0.0643686131 0.0070268726 0.0036489605 -0.189245149 -0.0304792393 0.113094799 0.130098417 0.118080013 0.127857327 0.0940245837 -0.165752977 -0.0374614373 0.109492496 0.0428666584 0.170740604 -0.115685873 0.0148922838 -0.116838083 0.111455843 -0.0632996783 0.0108929574 0.0726874396 0.0742699429 -0.0629896522 0.113437019 0.199758425 0.0475728512 0.137889087 0.19803226 0.0400452688 0.0794214979 -0.108014926 -0.00188282889 0.111494496 0.0771949738 -0.116306648 0.0865728483 0.0771485493 0.0915202647 -0.0908453912 0.0911061615 -0.0472535603 0.136873767 -0.037476372 0.129080757 0.173227653 0.176956236 -0.117876649 0.0886662453 -0.0194631983 0.140326738 0.0929994658 -0.0285486728 -0.123725995 0.0545314588 -0.132062644 0.196129248 -0.0776121169 -0.0292998273 -0.0817124322 -0.123064265 0.0644138977 -0.0409719124 0.0910102725 0.0774317682 0.0588561557 -0.0303226635 -0.114509314 0.00717926025 -0.0146975368 -0.0139649464 -0.0769111067 -0.0884687155 -0.0844886228 -0.0546910986 0.0992946401 -0.0306005422 -0.0368665494 0.0252984539 0.0552819744 -0.0180559643 -0.0461472273 -0.059688963 0.0529744141 0.105257906 0.135227516 -0.136654019 0.0433159433 0.0750075579 -0.143905401 0.0697793365 0.0171793997 -0.0880545825 0.0440685079 -0.135759518 -0.0708841234 -0.137341917 0.0642284378 -0.0825591236 0.0998160243 0.104954824 0.0703029931 -0.00554473838 0.0652662367 -0.137622833 -0.0849017501 0.0079975808 0.0469577163 0.0332614519 -0.0239423085 0.0571367703 0.125478789 -0.0188843291 0.0104151899 0.0507268719 0.0427310057 0.182245687 -0.0464136638 -0.0774840489 0.0790423155 0.0158217624 0.174919963 -0.167722598 0.00393518014 -0.141249925 0.0320646316 -0.0712961331 0.18195422 -0.101946741 0.205521435 0.0143015096 0.190244779 0.0565855652 0.143080652 -0.0879745483 0.0268129539 0.0264821127 -0.0976307765 -0.0719135925 -0.0931720287 0.0752973855 -0.0940701365 0.0943753496 -0.00160595321 0.00526125729 -0.0494134016 -0.0277267974 0.114400044 0.0121099204 0.0469762683 -0.0178804994 0.175322458 -0.0936195925 -0.0206507854 0.0129827568 -0.127164483 -0.0533081368 0.0902868807 -0.089850314 0.0812181607 -0.0252427552 -0.0260248482 -0.0263420995 -0.128954813 -0.144561514 -0.0969642028 0.0840708092 0.105219595 -0.0315751806 -0.133927286 -0.0635263324 0.0815265328 -0.103957005 -0.0656396598 -0.0624658093 0.027983008 0.0192227215 -0.0915314779 -0.0996872336 0.0151820509 0.00491440995 -0.0790896341 -0.148336604 0.106279097 0.057419382 0.034870699 0.100479744 0.00237061502 0.0768525749 -0.12644136 -0.125458911 0.112800233 0.00162200222 0.0578222498 -0.056215629 -0.0922449976 -0.158906817 -0.0518889986 -0.100536995 -0.0453334972 0.00646515191 0.0148057342 0.0331344642 -0.00636346964 0.0370892994 0.00641168654 -0.0307880603 -0.0186160952 0.0293306652 0.0952301919 -0.12958698 0.117998272 -0.0704888254 0.00443183212 0.147841737 -0.0992462039 0.0764997005 -0.0257688798 -0.0460406169 0.0839670599 0.120056614 -0.0614700243 0.113699906 0.0346624181 0.180427715 -0.0145217599 0.168693572 -0.00197043363 0.191886678 -0.0972156301 0.0206416119 0.0345100351 0.0903015509 -0.100287922 0.0303347614 0.136919394 -0.0126191778 -0.115950003 -0.0293597691 0.0265962426 0.00261192676 0.0278086904 0.158691257 0.0234635379 0.117342651 0.0816714615 0.0948666632 -0.0849409848 -0.114143133 0.0362917073 0.070062004 -0.0524370112 0.167162567 0.104840927 0.124661915 -0.138633773 0.19061929 0.0486695245 -0.00107917748 -0.0845123231 0.046763584 0.0243339688 0.0911204591 -0.113943458 0.00347187044 0.0777205974 0.095806241 0.0292435624 0.127136692 -0.0182037577 -0.0450141095 -0.0123331165 -0.0598197915 0.152919352 -0.131715685 0.164068297 -0.0793498456 0.00121658249 -0.0503176786 -0.0856561065 0.0431076214 0.0459455065 0.167714477 -0.00190150819 0.0480769761 0.0142310113 0.0774440318 0.0504581034 0.197599128 0.172974482 0.0960050672 -0.0724410191 -0.0655787885 0.0426691361 -0.0474077053 0.0671926141 -0.0111915339 -0.0694714338 -0.0728770122 -0.0334699675 0.0879241973 0.0191930141 -0.0492004342 0.170004874 -0.136069939 -0.0839288905 0.121699564 -0.0373032577 -0.0790554881 0.0212189052 -0.0723486841 -0.0706750974 0.164014727 -0.1265852 0.180671826 -0.0538335219 0.135076165 -0.082566984 0.00627529481 0.0355592817 0.146791458 0.0428247713 -0.0218269154 0.113299571 0.15928854 0.109753877 -0.0433866642 -0.0531712547 0.121344112 -0.0599708892 -0.140954524 -0.0652005821 0.107553594 -0.0420940556 0.0391891636 -0.0892334729 0.0449264199 0.038767308 -0.00515921181 0.00227128062 -0.0260546599 0.145359293 0.0675093085 0.165128261 0.107131146 0.145455942 0.150093794 0.182555065 0.0683342069 -0.0751166418 0.00099511235 0.136942998 -0.0637786239 -0.118969493 0.0861738697 0.121482879 -0.0593939386 -0.0681066886 0.137257427 0.142178074 0.110687025 0.104999736 0.00519723399 -0.0884702951 0.0194963887 0.146859735 -0.00592712127 0.0192816481 -0.0416031592 0.10512694 0.0102964779 -0.0616582707 0.13753584 -0.00928659178 -0.0823482201 0.114266947 0.0528262816 -0.0983823165 0.0188455041 0.0611194335 0.100222267 0.00196855632 0.0390211269 -0.139956653 -0.0327276476 0.156153634 0.0125370612 0.0344246514 -0.0589949451 0.0921387449 -0.144850284 0.0212448426 0.144581348 0.0431137607 -0.0885965675 0.0854236633 0.00550921075 0.172450885 0.123434886 -0.185906976 0.132565111 -0.181601852 -0.10362874 -0.184760764 0.130670205 -0.0472870953 -0.10729944 0.132553771 0.0137786418 0.0876799598 -0.0260619633 -0.0263571106 0.123666577 0.123572513 0.00393577246 -0.0911321938 0.149641573 0.0127057144 0.040043395 0.0847804174 -0.0459438674 -0.177438155 0.00510600302 -0.00431553461 0.138086572 0.116237916 0.168006837 -0.0157650076 -0.00970370602 0.0802516192 -0.0514502637 0.132704586 -0.0417737029 -0.0193822831 0.166242853 0.0419458486 0.15848121 -0.00174845546 -0.0805547163 0.00697086425 -0.171336144 0.0593196638 0.0595933609 0.110469177 -0.0882590115 0.050680656 0.0118347788 -0.0319386683 -0.0632662848 -0.0177531485 0.0838051289 0.0470289141 0.178859159 -0.0346439704 0.0504389554 -0.0520837195 0.00219774805 -0.0491009764 0.0503517203 0.107992731 0.0384831354 0.0872439444 0.167825118 0.0623564459 0.0223074984 0.0821516067 -0.00865145214 -0.0457197949 0.0970128179 0.0120575717 -0.0556218661 -0.116809532 0.074401699 0.0880089849 -0.0123710101 0.007505944 0.135203391 -0.146449044 0.0852448419 -0.132378265 -0.109845184 0.149791658 0.015171879 -0.158416107 -0.0637820587 0.16275458 -0.0364229716 -0.143408865 0.127545208 -0.0622910671 -0.139478207 0.0405872539 0.0932571068 0.0956263393 -0.00292709633 -0.100080743 0.137279779 -0.0495060496 -0.0749291778 -0.0744291395 -0.0862122774 0.0235699266 0.109829761 0.0802345648 -0.123428002 -0.135655686 0.115854591 0.186653689 0.104481116 -0.0934653729 -0.107345767 -0.0480583683 -0.112480521 -0.0674405769 0.0481690913 0.0844945163 0.102531567 -0.132132486 0.137842521 0.00775253773 -0.0610849336 -0.032591112 -0.0524423793 -0.0668133944 -0.113737375 -0.000165238976 0.114906386 -0.1328713 -0.0835750252 -0.088781476 0.018294096 -0.0263542235 -0.0792298913 0.0685598254 0.0419423133 -0.0260287449 0.13109158 -0.138066247 -0.0322780311 -0.0882859379 0.0807678401 0.0512416959 -0.123070188 -0.00298701227 -0.0796232373 -0.104369447 -0.117494076 0.00122408569 0.0332989395 0.0664115399 -0.0739870965 0.0106086135 0.109527692 -0.0934588537 -0.0895289928 0.0728636533 -0.0278315544 0.0639105886 0.0930453986 0.0494588055 0.017094126 0.112311125 -0.00386948418 0.0680094063 0.0254231635 0.0913507342 0.16692546 -0.0122418981 0.108312286 0.0950310752 0.101287387 -0.105884947 0.030036103 0.00558372587 0.109446019 -0.0987028182 -0.0895694122 0.118367992 0.0662995502 0.114169754 0.0966514125 -0.0286930036 -0.0851531997 0.128677562 0.124861382 -0.100621521 -0.128018498 0.0673300773 -0.0310823116 -0.0784357563 0.0379403606 -0.0306251384 0.0655758083 0.0960387737 -0.152080312 0.136492133 -0.101761207 -0.0275989529 0.0933943838 0.0766497627 -0.0804210976 0.143909901 0.143697292 -0.0849372372 -0.10959392 -0.0742666796 0.125293195 -0.0966164172 +tensor_14weight 2500 +-0.0543760806 0.0856281444 0.0533403084 0.0177523084 -0.0268334541 -0.0549559146 0.159062862 0.172800139 0.109722741 0.0875528008 0.0125674438 -0.0810011849 -0.068577148 0.170207128 -0.135173365 -0.0806247443 -0.0548967347 -0.0452914089 0.0365853943 0.129278928 -0.0377073251 -0.17943646 -0.00266921567 0.0811229944 0.0154373068 -0.0359650813 -0.0855926052 0.127574399 -0.1265679 0.04885903 0.0561187416 -0.112507693 -0.139889583 0.170207158 -0.097494632 -0.0187973343 -0.0904997438 -0.0484883524 0.031168703 -0.055549074 -0.0741278306 -0.002624318 -0.117438287 -0.0157258548 -0.0880523771 0.114648446 0.0272049736 0.103814438 -0.0217095967 -0.140518233 -0.0760676265 -0.110887714 -0.0115829725 -0.00750160404 0.0959720686 0.0384376198 0.061359182 0.0955482125 0.101260468 -0.0115174651 -0.013766964 0.0398462117 0.166129872 -0.0850986466 0.140506133 -0.103672192 -0.154903129 0.0968019962 0.066429466 0.0431276001 0.147400737 -0.00412948243 -0.0342022404 -0.0535201877 -5.71517012e-05 0.0244176984 0.0832642242 0.176724657 -0.0719986036 0.172275752 -0.114797458 0.0914949924 0.0334078744 0.0464251973 -0.00394226797 -0.0035392812 -0.0278604105 0.0514154881 -0.0311339442 0.021156881 -0.0213947129 -0.0683914274 0.0251719803 0.0944593325 0.12849097 -0.049127765 -0.0469818637 -0.0983457267 0.13893728 0.0303975027 -0.0299507454 -0.0138533115 0.139151528 -0.135961041 -0.0921831578 0.0593009293 -0.0144180804 -0.0136186779 -0.0715967119 0.0790341347 0.00953520834 -0.0408776402 0.101040825 0.00309920311 0.0447804034 0.0982600003 -0.0721947402 -0.118167073 0.0333673507 -0.0950507745 0.10244967 0.08306925 0.0455361456 -0.122597888 -0.0647362471 0.00561864674 -0.136176527 0.0647586584 -0.122481212 -0.0205618665 -0.094566375 0.0131596476 -0.117649406 -0.110489279 -0.0717473105 0.103288978 0.0714375228 -0.0784455761 -0.105901703 0.0811899006 -0.131345108 -0.0233812556 0.102898851 -0.00886622071 0.0682659149 0.129993364 -0.0971994996 -0.0193270147 0.00360363722 0.121052161 0.00784411095 -0.123100765 -0.0609981082 -0.135147735 0.0461434908 0.117215686 -0.0296066701 -0.0148467962 0.054072503 -0.118035108 -0.13138777 0.0103239622 0.0106298085 0.00161406794 0.121524885 0.106862329 -0.0696737245 0.122207746 -0.129250824 0.0716361329 0.117990665 0.0917533413 0.0275282189 -0.124964394 0.123115174 0.0490060188 -0.0750153661 -0.0502910502 -0.0452317111 0.101086549 -0.101995051 -0.112885557 -0.0476158895 -0.0509889536 0.0219939649 0.00387603301 -0.0764786229 -0.0421580113 -0.0788122267 0.084515363 0.0346965827 -0.01090011 0.0382516384 -0.00645032525 0.129111394 -0.0737728179 -0.00789030734 0.11321111 0.00651154015 0.000951979193 -0.0776003599 0.0253983736 -0.0880478546 0.111969553 0.0747581348 -0.0281555094 0.0477269702 -0.0837645158 -0.00260412018 0.0995940417 -0.0253548026 0.0838286281 0.037731003 0.0643470585 0.0464969426 -0.102055438 0.00463358313 0.0325008184 -0.00370962941 0.0821173638 0.0869908333 0.032846041 0.0313670263 -0.148173332 -0.177075326 0.014351381 -0.0347749256 0.0631445199 0.0138477925 0.141796917 0.0031752775 -0.0240941141 0.115030944 0.0497418977 0.0109222829 0.0674659908 0.010190879 -0.0980509967 0.107191958 -0.0665694326 -0.0149048567 -0.135567963 -0.0943998545 -0.0724455938 -0.108684249 -0.117758349 -0.0431607552 -0.0478789434 0.0548663996 -0.0874581188 -0.12479274 0.0178123116 0.070239827 -0.0386666693 0.134508371 -0.0741510987 -0.045267418 -0.104734987 0.0435491502 0.0155023336 0.062136706 0.103647709 0.00290234643 0.064395614 0.0177004337 -0.0480007231 -0.110428169 0.0850054473 -0.0885846689 -0.0500162207 -0.0616900064 0.148497447 0.0951149315 -0.0552124381 -0.14905256 -0.0889345855 0.0241270382 -0.0488678627 -0.00426269416 -0.0119903926 0.141092837 -0.0797038823 0.0120936269 -0.0693103597 0.0249975473 0.145910755 0.0371512882 0.117824383 -0.0573362373 0.0323375016 0.10749159 0.0636148006 0.0273176879 -0.0183407739 -0.115713961 0.00984863937 -0.042138014 -0.118756019 0.0761800632 -0.132937029 0.119471751 -0.0354485847 0.127721861 0.121748939 -0.0444656201 -0.116685092 0.0304207485 0.0655196533 0.0813344195 -0.107156277 -0.0844279304 -0.0894685909 -0.127893046 -0.0939210355 0.0843395889 0.0614806749 -0.0614846796 0.0341131836 0.0421788543 -0.105776869 -0.122536495 -0.129194289 0.12389411 0.0393402874 0.0495846197 -0.119962715 0.106891051 0.0449610613 -0.145153821 0.0629368573 -0.108572282 -0.0517346151 -0.0622508824 0.0323888771 0.0450324118 0.0440010354 -0.0938819498 0.0203166902 -0.122573078 0.0831483901 -0.0707751289 0.0610891283 -0.0958172753 -0.0424687862 -0.107412554 0.126818612 -0.0554413795 -0.122475646 0.0243445728 0.0513011068 -0.0796121135 -0.0414110497 0.0280717909 -0.0394670665 0.0598106235 0.0484230518 -0.126170114 -0.0951998904 -0.078391239 0.0402923077 0.0229541957 -0.0937744156 -0.0477972776 -0.0179067627 0.0602110922 0.0934107453 0.139090851 0.0272798836 -0.0181408152 0.0339401662 -0.0496698096 0.123755589 0.077883482 0.0388832986 -0.119654641 0.140062913 -0.02580522 -0.0365974084 -0.0948570818 0.0476925224 -0.106507264 -0.0877594203 0.0167225003 0.0365579128 0.0707214922 0.0943449885 -0.0219054744 0.0260573626 -0.0470213518 0.139553711 -0.0147360563 0.0706477165 -0.078761287 -0.0111072361 0.0549765974 -0.0400532633 -0.0153049231 0.0315274298 -0.0705541149 0.0631048977 0.117633738 -0.0514981188 -0.134041414 0.0551473498 -0.0794123039 0.00670406362 -0.0401185192 0.00911470596 -0.0211872291 0.116496786 0.155161962 0.0629097223 0.138448417 0.0936189666 0.0627936721 0.0648671389 -0.129390776 0.0583335906 -0.0174725447 0.0610876642 0.163619712 0.0462206006 -0.0404846109 0.0467165858 -0.149703398 0.0884451717 0.0297990069 0.0904366821 -6.66035776e-05 0.089948453 -0.164717227 0.0440124683 0.0429885276 -0.0889559984 0.0580933429 -0.0497451164 0.0140721994 0.123201773 -0.0521491505 0.0792684183 -0.116658807 -0.0728405491 0.138154134 0.0858280063 -0.0885532424 -0.0259025618 0.143468827 0.11027436 0.130449191 0.0466446765 0.0738923401 -0.0459300056 0.135415688 -0.0519030988 0.0363911055 0.100617178 -0.0735667422 -0.056867335 -0.100770339 -0.0581379086 -0.0582638234 0.057107687 -0.0833413973 0.117787801 -0.121084802 -0.0609023273 0.0458093919 -0.0386206284 0.120702438 0.152693301 -0.0653539896 0.00243751518 -0.116231412 0.129159972 0.123322234 -0.00450206548 -0.0729444399 0.0853474438 -0.145202518 0.0369620174 0.0304967947 -0.0492551252 -0.130056858 0.0221843477 0.0469832569 -0.056170959 -0.146923915 0.0244862288 -0.0021409702 0.0949956179 0.134217575 -0.0556118563 -0.106579103 -0.0108840466 -0.147231668 -0.0594046339 -0.0605274215 0.00136603415 0.127606124 -0.115885407 -0.00178258657 0.0252946466 -0.0912591442 -0.0576305799 -0.0229029693 -0.085684374 0.165293708 -0.0522565134 -0.0692233294 -0.0375391915 -0.0102695916 -0.104804181 -0.0526487827 0.150056034 0.0551703274 -0.0257776212 -0.0228184611 0.073696211 -0.0467144549 0.0735779107 0.0948753133 0.144433752 0.166210935 0.150882557 0.0348055102 -0.0166522712 0.161574543 -0.0576791242 -0.0115333898 0.0468233787 0.0915934965 0.0572047532 0.00290581165 -0.0225567296 -0.0246865228 0.0868225098 -0.00762603246 -0.11816176 -0.12570864 -0.0243588239 0.0893646181 0.0507476032 0.0117150992 0.10665486 0.121998012 0.0696426779 0.0685170516 0.025851354 0.142432615 0.146065772 -0.0138009675 -0.129955053 -0.000132796747 0.0203777198 0.181162477 0.0261075366 0.168559924 -0.0609995425 0.168947399 0.0293546468 0.0149049358 0.0582519248 -0.04978792 0.103416584 0.0590672493 -0.0700641274 -0.14282304 0.0278612077 -0.131275356 0.14719297 -0.0653766692 -0.0751730502 -0.0065545626 -0.0380778089 -0.157771811 -0.0827088878 0.0777130723 0.198460281 -0.0469098203 0.134435102 0.14349848 0.0417049713 -0.167793706 -0.0996251702 0.0797272176 0.122879468 0.179209173 -0.0471446738 0.168509901 -0.0920644701 0.000451093569 -0.129801482 -0.0594977811 0.00711449794 -0.102573976 -0.109216064 0.148805737 -0.0950382799 -0.00647751195 -0.109716304 -0.055815164 0.0108515322 0.0906130522 -0.0650295168 0.0893351659 0.114572234 -0.106268756 -0.0118306447 0.113306493 0.0420926064 -0.143582255 -0.11182075 -0.0272862986 0.0896898583 0.0240881741 -0.0932913795 0.0246650521 -0.138226554 0.0727393776 0.0228461325 -0.16916123 0.00246544858 0.0807503536 0.0973562822 0.0370443426 -0.135211006 -0.0732924193 -0.00324719655 0.00122735673 -0.0426253006 0.0464077778 0.0109117776 0.0653063208 -0.0427299105 0.0784161389 -0.0572868735 0.105860651 -0.097189337 0.000937802775 0.0585776716 0.141582102 0.0805247277 0.0409072042 0.00662690401 -0.0858124942 -0.0805532038 0.0404491127 0.124266788 -0.106811218 -0.0971105546 -0.0997086912 0.0074750483 -0.108586416 0.0607502013 -0.054395549 -0.0843265578 -0.0900614634 -0.0676774904 0.0206092894 0.099438563 -0.0570041686 0.0200342685 0.0236357749 0.0572907329 0.0950599462 -0.125204116 0.0311794877 -0.0876096636 -0.136037469 -0.0375309587 -0.0594457537 -0.113332778 -0.0978064537 0.0964330435 -0.0265298411 -0.00542576611 -0.0438456684 -0.0523090437 0.0630306751 0.124709442 0.0266276151 0.0232248306 -0.0171631426 0.106016204 -0.110088825 0.08274737 -0.110974953 0.0963929445 -0.0833926201 0.0303138644 -0.0306184739 -0.00307349861 -0.11504256 0.1075629 -0.131726444 -0.0421231985 -0.138194129 -0.113239586 0.0452417433 -0.0149982423 -0.0436716527 0.0988965183 0.0192198902 0.0135216201 0.00940239057 -0.11294537 0.0857888535 -0.120034076 -0.108530715 -0.101037055 0.0667439774 0.0855601728 0.0677483305 -0.0134334378 0.120776698 -0.116636701 0.0875215456 -0.0949789584 -0.0514179617 -0.115705922 -0.0596454814 0.0422541201 -0.0999356657 0.0502830669 -0.0702968836 -0.123627275 0.106365606 -0.0744836628 0.0104168141 -0.0793894753 -0.114666551 -0.00283100014 0.140118852 0.0356186256 0.054495573 0.102265559 -0.0458586551 -0.0236852318 0.106435075 0.00892684981 0.11433281 -0.115178108 -0.131405771 0.117208794 0.0424666107 0.134797171 -0.143703952 0.0569373965 -0.0665611774 0.0933629125 -0.0201621354 0.111485049 0.00233875564 -0.0812246799 -0.0269328542 0.0232816096 0.0677310228 0.117872521 -0.141205952 -0.00658942759 -0.0309162736 -0.144758567 -0.057528194 -0.0684359372 -0.0633766428 -0.0364208929 0.152422816 0.0387307405 0.0868177786 0.0241200123 -0.0501802117 -0.03670137 0.0153368488 -0.0590804406 0.0290142465 0.100567661 -0.0803031549 0.037419185 -0.0459126569 0.0960116088 0.116186179 -0.0661039278 -0.0716232583 0.0593420751 0.0900740027 0.132992968 -0.025843842 -0.062321458 0.0100088529 0.00727820396 0.0946147069 0.111916468 -0.0648906529 -0.0192210414 -0.10582228 -0.052964583 -0.0713335574 -0.100110069 0.0656400323 -0.0618378446 0.0341230631 0.105089828 -0.0501025058 -0.138664886 -0.115556583 -0.105643809 -0.0142834401 -0.0338118225 0.0103544462 -0.0796577036 -0.00744031509 0.0366418958 -0.13744548 -0.12394321 -0.133015111 0.139994159 -0.096086286 -0.0779372826 0.0771979392 0.127238646 -0.0140574072 -0.0333673917 -0.0900884196 0.0653517544 -0.0381353125 0.124770477 -0.12113288 0.0970005691 0.107545584 0.114952408 0.0286091883 -0.109182179 -0.0630336329 0.0627928153 0.0373910069 0.110793836 0.0872234032 -0.110715158 0.0479132868 0.068171978 0.0977038071 -0.0969489664 0.0069321245 -0.138224244 -0.1087984 -0.0156357884 -0.0806711093 0.0635136664 -0.088648513 0.0915248096 -0.0295681208 -0.0889791846 -0.0202619806 0.0473107845 0.0719934851 0.0844703317 -0.0181293488 -0.0325784534 -0.0223496631 0.0800980031 -0.0469706431 0.12284486 -0.0163284689 -0.0720243454 0.0100721121 0.012039721 0.0458173305 0.00532619655 -0.0372635648 -0.0938430429 -0.0808144957 -0.0140093267 -0.139559567 -0.0209952146 -0.0627007261 0.0597438067 0.105400652 -0.034525536 -0.0433830321 0.00658106804 -0.113124847 0.039323777 -0.0219132751 -0.0291076973 -0.0714975595 0.0354093611 -0.0999722928 -0.0223256275 -0.00125360489 0.131300226 0.0749686807 -0.103176132 -0.00177618861 0.100545034 -0.0482359231 -0.130312055 0.0585651398 0.0481558293 -0.050323084 -0.0714227259 0.114079475 0.0674445853 0.0338538028 -0.0207888857 -0.0817157254 0.0414048955 -0.00719799427 0.000449810963 -0.0368338116 -0.000197023153 -0.128918022 -0.120564923 0.00234631728 0.017666148 -0.0760105997 0.129530162 0.0278998706 0.0785642117 -0.0846611708 0.142189592 -0.0866099969 -0.034735851 -0.0374385677 -0.141838074 -0.0461979173 0.0792662352 0.0883275494 0.0821309313 0.0135414349 0.0762536079 0.162841812 0.11422585 0.0459163897 -0.101808973 0.124757119 0.10234201 0.121778518 0.124219798 0.15815866 0.120062478 -0.0342520848 -0.0277299657 0.00312125683 0.0146831786 0.0419388674 0.12670289 -0.0661896765 0.00485484302 -0.0807016641 -0.102789596 -0.10495542 -0.0415844247 0.117631674 0.0792787224 0.121481225 -0.121911712 0.0956676602 -0.13807556 -0.071042493 0.10285683 0.134201437 0.0446345471 -0.0243865289 -0.0965441614 -0.0743445978 0.0868661553 0.0650995299 0.0439964831 0.0520170368 0.122856326 -0.00557545433 0.0643403828 -0.130216479 0.075305514 -0.0704696178 0.0523242615 0.132898629 0.0556151196 0.0135608455 0.106630892 0.00448958855 0.0294760223 -0.130943984 -0.115232065 -0.0185228847 0.0750679225 0.0868396237 0.115859844 0.0128320716 -0.0767292604 -0.0757243782 -0.109670304 -0.022307232 -0.0409514084 -0.0354676992 -0.101452865 -0.0788824335 0.0799969286 0.139659941 -0.080234088 0.0067355819 0.0488539226 0.134902641 -0.044175718 0.0824501589 -0.150373846 -0.134587288 0.0352238007 0.116991237 -0.0220136195 0.114933126 -0.114182681 -0.0119344881 0.0254181288 0.110809639 -0.139513344 0.0670420676 0.100754023 0.0473007746 -0.110368282 -0.0900191069 -0.0120764263 0.0714306533 0.122893341 -0.0579950325 -0.069827266 -0.0631239116 -0.0197088365 0.0283315647 -0.0101505062 -0.012342534 -0.136444777 -0.110550106 -0.0110609038 0.0534135252 0.0715058818 -0.135529175 0.0218331032 -0.0914941207 -0.117715605 0.0637661964 -0.119437411 -0.0767295882 0.13249214 -0.0307924412 -0.0124762207 -0.0491118282 0.115513906 -0.0599435866 0.0437990949 -0.0970950872 -0.126184925 0.00789543986 -0.0699488521 -0.0769708008 0.0143007189 -0.125161707 0.0508386642 -0.0768451542 0.0126496255 0.0037975586 0.0945261717 0.030873267 -0.0632951036 0.121244743 -0.124326058 -0.0519415066 -0.00867667794 -0.0128302025 0.0473873913 0.134872839 0.0456339866 -0.141257316 0.131795123 -0.129008144 0.0524923205 0.111486077 0.00192398916 0.0257397145 0.0104135079 0.0229955614 -0.0101489769 0.0377998948 0.0529350787 -0.0652860105 -0.0964240208 -0.119346842 0.102311134 -0.0513100103 -0.0956246778 0.0625582039 0.10400553 -0.0152444094 0.0996984094 -0.11846026 -0.0332592428 0.0517609864 0.124776825 0.0835027397 -0.0161721092 0.0544919521 0.0960061252 -0.0852253288 -0.115617849 0.132461503 0.0126986802 -0.0718445331 -0.0552118719 0.0423579067 0.00376538932 0.109214559 -0.060116075 0.00753490627 -0.11368005 0.00783166289 -0.0537703261 0.0794192106 -0.0919727385 0.00208424032 0.111719355 -0.0963476151 0.0950013399 -0.108282149 -0.12641567 0.12435104 -0.110969186 0.0644554049 -0.140637219 -0.0633735061 -0.0338808447 -0.123940453 -0.012826249 -0.0421397537 0.123598143 -0.038868092 -0.0173738599 -0.0128029287 -0.0981713384 0.100576788 0.075251177 0.0625472218 0.0764244497 -0.0654502288 -0.0070194602 -0.0901699513 -0.018791154 0.109865949 -0.0268438831 -0.0740915313 0.10593608 0.150855407 -0.0649960041 0.05053664 0.131901428 0.0731908754 -0.109051332 -0.00465088245 -0.0684918538 0.146911919 0.00280831754 0.14333044 0.108945541 0.0746650323 0.112723231 0.0545722842 -0.120427899 0.0842576474 0.0266837925 0.0456104651 0.0698814988 0.047249984 0.190266967 0.163280755 -0.0185097642 0.0825716704 0.031122379 0.064069435 -0.164239749 0.0375948921 0.0769198686 -0.155088678 -0.104684114 0.075281471 -0.00940326042 -0.0429962575 -0.0142965838 0.0120388716 0.0730001554 0.00848747697 -0.107716456 -0.0337854624 -0.133317709 0.128731623 -0.108297765 -0.0142049389 0.133316174 -0.115573399 0.0836417973 -0.0845889002 0.0138655473 -0.0655629039 -0.0104014426 8.64409303e-05 0.1608392 0.0675673187 0.0179192871 -0.0236063488 0.046505671 -0.0392516479 0.0720221549 0.0741254017 0.0510466658 0.149464175 0.0988350585 -0.0124777406 -0.0114185531 -0.143563926 0.116806343 0.0688097924 0.145737663 0.0330820084 0.103331283 0.189101636 -0.0580817536 -0.152595311 0.10088592 -0.012149916 -0.153301135 -0.0369912386 0.0269313119 -0.100157224 -0.0723579377 0.0605176054 -0.16903989 -0.140898824 -0.124555223 0.149031043 -0.112365574 0.0937826708 0.0157145858 -0.100749768 0.0515762866 -0.0710803419 -0.0686863139 -0.0993681699 0.136043593 -0.0661427677 -0.00102904439 -0.0144443447 0.00527101662 0.0664612353 -0.125710681 -0.0143222958 0.0327278823 0.00587114692 0.0143621564 0.0739389807 0.0117307007 0.00317768753 0.131541565 0.0450980216 0.0562246889 -0.112021118 -0.121544585 -0.0703852251 -0.0653774664 -0.049726896 -0.0712407231 0.0384204239 -0.0756127983 -0.108454555 0.0761180222 -0.108996943 0.031490311 -0.110682026 0.0954553038 -0.0480172858 0.026041314 -0.0700769648 -0.0236957669 -0.105962321 -0.0145984888 -0.0221198499 0.080327794 -0.0255625173 0.0917616338 -0.0579084232 -0.115254268 0.0298689604 -0.033957921 -0.00849801302 0.101248682 0.0688192248 -0.033151634 0.0967501849 0.125775561 0.0594263077 -0.0315592438 -0.0751049966 -0.0152497739 0.0194816925 -0.025003599 0.110108972 0.0337932631 0.0466330573 -0.108177759 0.0923204646 -0.127821535 0.132012337 -0.0206957385 0.160091415 -0.0121085728 0.103845544 -0.00710585574 0.106360584 -0.0573629886 -0.116646938 -0.0393697619 0.111357979 -0.00339666428 -0.0565832593 0.187762156 0.0738954321 0.00436372962 -0.0201180782 0.0962937772 -0.0409735553 0.126787856 -0.0257689413 0.142943844 0.0456875302 0.0157842282 -0.125038773 -0.127323419 -0.0811595023 0.127563968 0.0147216143 -0.0188120566 -0.037811175 -0.0973046944 0.155958325 0.107728779 -0.0499815643 -0.132879764 0.0703277811 -0.0151593685 -0.00903364085 -0.12080054 -0.0394929722 -0.05078182 -0.0835544169 -0.0767298788 0.111453474 0.0164176039 0.0738900974 -0.0237710364 0.108378887 0.00188849773 0.0275815967 0.0885725319 0.0609878637 0.0426038019 -0.0367399938 0.00715940725 0.0550841689 0.0243325494 -0.182480216 0.0272398591 0.0657472908 0.0879313201 0.0918957889 0.156981304 -0.0646059811 -0.119247735 -0.047599677 -0.116651364 0.147660568 -0.0381035991 0.0582092069 -0.147984505 0.19525826 0.122297406 0.0312584154 0.0444355682 0.0738523602 -0.00197054748 -0.0539330691 -0.101374164 -0.0567407869 -0.083611846 0.0657852963 0.0735184103 -0.0166746452 0.0735289529 0.0353927538 0.0238035768 0.0864389837 -0.0453045592 0.0350245051 0.0573615246 -0.117907874 -0.0528462976 0.0303292908 0.0802146792 0.0530198105 0.0752285719 0.0932139829 -0.126501501 -0.0627672225 0.0169230103 -0.0626025647 0.0349148065 0.109626763 -0.105911814 0.0788237303 0.0676720217 -0.153280228 0.0910756141 -0.0301282536 -0.000840488705 0.106123164 0.0890205577 -0.0100000529 -0.0720009357 -0.0945366025 0.10201738 0.139737338 -0.00469925907 -0.00698884297 0.108112089 0.0135092204 -0.127867475 0.0966705605 -0.00785964262 -0.138336435 0.0638243333 0.0331486128 0.0260607041 -0.0856452286 0.0471999496 0.0917534381 -0.0968203172 0.137134388 -0.108194001 -0.0242078379 -0.0838816911 0.0852543861 0.101658776 0.0793644413 0.0438847691 0.048443377 0.0175982714 -0.0791520029 -0.126521289 0.120813206 -0.0748142153 0.0292773098 -0.0915241987 -0.0191392377 0.070805788 0.0297760516 0.0540979952 0.0455519557 -0.104483157 -0.127842084 0.00549553335 -0.00101320446 -0.0383768007 -0.0929412916 -0.0527148545 -0.121787742 0.116296932 0.127205387 0.0215808749 -0.00527906418 -0.0950953811 -0.102384314 -0.136119738 -0.135512173 0.0213526934 0.131827787 0.0118881762 -0.0137285888 0.0550298095 -0.101517551 -0.0358831212 -0.0636179894 -0.0305602103 0.000535279512 0.0445587337 -0.14124018 0.108244672 -0.0400930718 -0.081310682 -0.133354321 -0.00902658701 0.0237516761 0.136513993 0.0469265431 -0.12061704 -0.0962344632 -0.100439511 -0.0504943989 0.0647302195 -0.00468132785 0.0335817374 -0.0337964632 0.134859458 -0.0318156444 -0.0735609904 -0.087305516 -0.00295244693 0.143023223 0.0555585437 -0.0434618294 -0.0989003032 0.0798072517 0.0347367227 -0.0889567286 0.0454466157 -0.0994898081 0.0186923463 -0.173386604 0.0882037133 0.137024656 0.0983027816 -0.120416664 0.192227334 0.0853054151 0.0870762393 -0.0379575193 0.161280155 -0.0570422672 0.151813626 -0.106967077 0.166952252 0.101883747 0.054235056 0.189217716 -0.0840895325 0.070606254 0.0933529139 -0.0497438386 -0.0975164622 0.156465441 -0.061189115 -0.0255476627 -0.00462962687 -0.0117694922 -0.026365703 -0.0826264173 0.10701783 -0.0981103182 0.0933794603 0.112419106 -0.00185976818 -0.0279008057 -0.0761715546 0.160778821 -0.00510247052 -0.0529871248 -0.00726129953 -0.0403804705 0.106054351 0.0701956153 0.160074636 -0.126040637 -0.0446393117 -0.0562781654 -0.0765028149 0.118484885 -0.00363161834 0.0893984511 0.0428721681 -0.0862490907 0.161411509 0.110493377 -0.125039488 0.160013914 0.138771698 0.00862341002 0.120823577 0.124806479 0.126176298 0.0108156186 0.103124447 0.0837469697 0.150882855 0.0736863688 0.00851425901 0.0157211907 0.0169511139 0.0302761346 0.0810550079 0.0379871763 0.138916358 -0.0554511286 -0.134623349 -0.0141197927 0.102847748 -0.109416708 0.116542891 0.0628927052 -0.0400933027 0.104376331 -0.119453713 0.0155786276 -0.0215776712 -0.00744922459 0.0798532814 0.0639959276 -0.0196548998 0.0446766913 -0.0535386354 -0.0242897347 -0.0326042622 -0.0356833786 0.091811657 0.119531497 -0.00566391647 -0.108836398 -0.0138843209 -0.0777539462 0.0244711339 0.0302925706 -0.0840600431 -0.135451585 -0.0993905663 0.0911848098 0.0254171342 -0.104574814 -0.0337190852 0.123146519 -0.00662016869 0.112501815 0.115908071 -0.0144283175 -0.0765947551 -0.070567295 -0.0816542134 -0.063042447 0.0821659714 -0.106859922 0.0445039272 0.0489273965 -0.054737024 0.0737254471 -0.0994004011 0.0907932669 -0.0525529906 0.0760397166 -0.0459865108 0.115764856 0.089792937 0.0262587946 0.0727124959 -0.0986016765 0.134460911 0.120390087 -0.106480896 -0.126752883 0.0600169674 0.0778978691 0.18399404 -0.116155103 -0.0270620678 0.0360745415 -0.0706986636 0.0219121743 -0.0485235155 0.0116395457 -0.0190455988 -0.031040974 0.138535559 0.0049945279 -0.0669544563 -0.135656506 0.111046769 0.0884723812 0.0641905293 0.0139932213 -0.0747538805 0.14979732 -0.0598235726 -0.0681837425 0.078919284 -0.0920129493 0.0927841365 0.0622474365 -0.0725972429 -0.113833509 -0.106715776 -0.127413034 -0.0326030068 0.0898154825 0.0540499836 0.120726503 0.000129148364 0.122900732 -0.000325784204 -0.152520627 -0.0064624548 0.073874481 0.0152352303 0.0961310565 0.0109456517 -0.0507575348 0.120654956 0.126830235 0.108187631 -0.163000211 -0.17243591 0.0882807449 -0.00232086889 -0.132580787 -0.132526517 0.101748489 0.118433878 -0.034390375 -0.0734529495 -0.163235143 0.0954085439 -0.164447442 0.0220291484 0.165916741 -0.0268109124 0.00910670217 0.0109529579 0.0204264484 0.0675835386 0.0192554276 -0.019607991 -0.139865518 -0.0490590185 0.116421953 0.133383304 -0.00608086493 0.0666911826 0.010473121 0.0839324743 -0.00665302482 -0.0341572762 0.0905544311 -0.0330039002 -0.121482521 -0.13752155 -0.00984864868 -0.00438012183 -0.0152192581 -0.0799304917 -0.144058108 -0.0689092726 0.0797125772 0.049202282 0.121467397 -0.059979789 -0.0338185877 -0.0875877663 -0.0352213718 -0.0265744999 -0.000970848021 -0.0542924628 -0.0339916125 0.0772038847 0.072251454 0.0386321284 -0.123490326 0.115714893 0.00240401109 0.138920873 -0.0890034363 0.0679262504 -0.0547012426 -0.0577247515 -0.00683979178 0.0310920458 0.0301014595 0.09850014 0.0825214908 0.137729675 -0.101440713 0.0277171135 -0.0100952508 -0.106422052 0.0132092983 0.104450844 -0.0620612726 -0.0807258561 -0.0243801288 -0.0753299445 0.0706419945 0.133234069 -0.119514674 0.126184896 -0.0232823435 -0.107439861 0.0486271791 -0.0467080846 -0.0273494851 -0.0494388938 0.106704935 0.0911619067 -0.0630275458 -0.12553288 -0.00862511992 -0.028438285 0.0349335819 0.140132353 0.0860453546 -0.0957376212 -0.00533922017 -0.0604479536 0.105011344 -0.0739720687 0.0970580429 0.0160997361 -0.0723680109 -0.0773093924 -0.100641474 -0.000383406878 0.0533034801 -0.0397997424 -0.094927974 -0.0186026245 -0.128123358 0.0100442469 0.0157190859 -0.0655204803 -0.0211179629 -0.0938301831 -0.0268794596 -0.0193795785 -0.0539523363 0.0891814232 -0.0723926127 -0.0894492418 -0.103800982 -0.0962850004 -0.0336188897 0.0200818777 -0.0712372959 0.0486632138 -0.0899597034 0.0786281079 0.0739620626 0.000252395868 -0.123112433 0.0357136726 -0.123496763 -0.10789144 0.0340666659 0.0580845289 0.129886597 0.0481477603 -0.0664139464 -0.068032667 0.0435736328 0.120462291 0.119476132 -0.123401277 -0.0684902221 0.0760027915 -0.00998137705 0.0795442387 0.0158706605 0.0744519681 -0.0714682937 0.0881680399 0.0666808859 -0.0570629239 0.164650321 0.12301676 0.0870940611 0.0813848004 -0.026452858 -0.0121077476 0.129477188 -0.142631516 0.0968390107 -0.0411611088 0.0438095704 -0.0174164046 0.143360704 -0.0702998862 -0.00564636895 -0.129426509 0.129657581 0.00313778641 -0.0574785173 0.130360812 0.0483710952 -0.119206332 0.0171343237 0.0760408044 0.0280918181 -0.0732154027 0.110174745 0.0896861851 0.00575533276 0.101032197 -0.0848289058 0.0622318983 -0.0934585631 0.13217181 -0.145480588 -0.151452094 -0.193723321 -0.0797042996 0.0565548502 -0.103730097 -0.0197361708 0.0303085633 -0.077303797 -0.0265100189 0.0391752571 -0.0126148164 0.0999564305 0.0248864293 0.120508894 -0.133522972 -0.0817446709 0.168698058 -0.0242357664 0.125314549 0.138810694 0.0583302379 0.117092818 -0.00391758466 0.132906526 0.00617161999 0.0204244088 0.0222095568 -0.0337266289 -0.120259523 -0.0814958364 0.0759975687 -0.127410248 0.107059687 0.0443736836 -0.111485079 -0.0593480803 0.0116199553 -0.0279748887 -0.114313811 0.138794228 0.0156804174 0.0343504995 -0.0126132518 -0.0769002363 0.0819693729 -0.0879027769 0.129266858 -0.089684993 -0.0613807291 -0.141623229 -0.0882831067 -0.00330040953 -0.0679773539 0.0915297493 -0.137266099 -0.130465984 -0.0945152789 0.00593703426 -0.116638407 -0.0219550729 0.156748876 0.180117995 -0.0130009502 0.0716223866 -0.144065097 -0.0823530853 -0.018947104 0.0158558208 0.0430184379 0.154572129 -0.0570289902 0.0835271254 0.117485747 -0.0653187782 -0.0945697576 0.0832378045 0.128256038 -0.074221611 -0.0555155501 -0.0284128729 -0.0774558261 0.0875912234 -0.000803266244 0.0153772263 0.105136067 -0.156031758 -0.0866245553 0.0865259767 0.013942048 0.0376075655 0.0092022717 0.137353256 0.0442829132 0.0658835545 0.12299668 0.0346786119 0.100729421 0.0517841168 -0.0898612216 0.0358949974 -0.0126487454 -0.150891528 0.118704185 0.165351018 -0.0962797925 0.14023003 0.103115313 -0.0602911599 0.110436209 0.0308854431 0.121604525 0.127166167 -0.0259905457 0.128405809 0.0925664902 0.120217182 -0.110264599 0.0578555204 0.0608986728 -0.0673596784 0.0434045941 0.0300820656 0.00743610319 -0.0426555723 -0.0631315857 -0.102573559 0.159559608 -0.0250314325 0.151789397 0.0174076445 0.0587394796 0.0256275982 0.0982463285 0.113202661 -0.0388134755 -0.175611705 0.00276806951 0.100146458 -0.0734548494 0.0292181689 0.0794109702 0.163403466 -0.0469043329 0.0134403957 0.010670647 -0.0128954323 -0.123306222 -0.105166018 0.0782779232 -0.0463915803 0.0925515667 -0.153734311 0.0399706028 0.0324561 -0.136016384 0.121838123 -0.0339740776 0.0617321283 0.0716827065 0.0507700294 0.163305402 -0.0263362825 -0.0235168263 0.156056419 -0.0918754488 0.0799345896 -0.0889241397 0.132214025 -0.0339573547 -0.0231025834 -0.0893127769 -0.15716745 0.0421844684 0.164029196 -0.0144499643 -0.0524246357 0.0808350593 -0.0515551828 0.12877433 0.123672612 -0.00602019066 0.108041525 0.0685625225 -0.107800402 -0.106587365 -0.0706622899 0.079738766 0.00206816196 0.111752108 0.164152429 -0.140735298 0.0629734248 -0.139009103 0.147802591 -0.0249149017 0.0117471283 -0.0343583301 0.100054584 0.0977551788 0.0404288657 0.126847446 0.172036812 -0.0862832591 0.0989949033 -0.0108139813 -0.109132327 -0.065063715 -0.05474668 -0.0446046479 0.0215636939 0.0535516292 0.0117583256 -0.0911338031 0.08406578 -0.0239859279 -0.0490008146 -0.0228198916 -0.0266752448 -0.0871018842 -0.143308818 -0.000947127643 0.0332983695 0.111087874 -0.112454593 0.166085541 0.138097584 -0.11190179 0.0368833952 0.0595792085 -0.08562731 0.159958228 -0.0567510165 -0.139039397 0.142708972 0.0837232322 0.0328567959 -0.137172282 0.0749840587 0.0762316734 -0.121910147 -0.0499757975 -0.0399817154 0.0123820901 0.0685181022 -0.0794972554 0.130480066 0.0485983491 0.138348088 0.115582928 -0.139361694 -0.124933064 -0.116998814 0.0428138971 -0.119468078 0.0110614747 -0.0100474358 -0.0216016769 0.0101694763 -0.0820546895 -0.0777691826 -0.117070802 0.126368955 -0.134564951 -0.0302673057 0.110644177 -0.117660195 -0.00458803773 -0.134814233 -0.0293745026 -0.0682334229 -0.095479995 0.0648405999 -0.0339591131 -0.0932905525 -0.123183005 -0.0931407288 -0.134305537 -0.00308911502 -0.0195324719 0.131572172 0.0200927258 -0.0235931352 0.0316516161 -0.0269541889 -0.0461172685 0.0184912682 0.0468815118 -0.138456166 -0.0628102869 -0.116835825 -0.0204005614 -0.0558201149 -0.0682484955 -0.0157806873 0.0899439305 -0.100633815 0.0933804959 -0.0987777337 0.0174719673 -0.0371658802 -0.0836207345 -0.145877808 -0.0866134912 0.113749318 -0.0686557889 -0.10567008 -0.0482730195 -0.0070918831 0.114946 -0.134217158 -0.118984662 0.158714101 0.108982846 0.0504443385 -0.141264856 0.0717000142 0.104913875 0.138390988 0.0620018132 0.116844647 -0.0917929709 0.00455025444 -0.0148221394 0.090433605 -0.140797302 -0.127093479 -0.139803812 -0.0659283474 0.0552696697 0.0945735574 -0.097406134 0.0754362494 0.068106018 0.124531001 -0.0909646004 -0.102133028 -0.115647264 0.0507812947 -0.0703440532 0.124720164 0.138866737 -0.100535631 0.0667068362 0.00467189308 0.116699241 0.00265486818 -0.0454894938 -0.0559370294 -0.13688907 -0.0905043259 0.0473077707 0.00356861111 0.119102031 -0.00279730256 -0.0290972441 -0.0549311638 0.0690060258 -0.108683005 0.038172666 -0.0376584455 0.0284418333 -0.0453239642 -0.0188913643 0.10330762 0.0158650316 0.0302619878 0.10599114 -0.0192918926 -0.00512768747 -0.067353636 -0.0088639101 -0.0262610465 -0.00164695387 -0.0329788141 -0.0782948136 0.0144974366 -0.102364071 -0.0231709629 -0.053929802 0.0872491896 -0.0339207873 -0.0452650562 -0.0555231161 -0.106613263 0.0545223877 -0.104694769 0.0733768344 0.117440388 0.0850411355 0.127571523 -0.0451369807 0.127713785 0.0604759753 -0.116016053 0.120935522 -0.0490187742 0.0145258456 0.12456093 0.0472011603 0.0200307388 0.213931099 -0.0325213335 0.100024901 0.150687665 -0.0330869481 -0.0743881986 -0.0423107482 0.0197958369 0.144206196 -0.075991191 0.00704598008 0.169469431 -0.0728433281 0.00913137291 0.00888578407 -0.0482634306 -0.0623912066 -0.0938192755 0.0339379199 0.119007394 -0.0949489996 -0.114306375 0.0466704629 0.01765888 0.12498638 0.0581458732 0.0168336164 0.109576389 -0.0280811246 -0.0985747352 0.0206970666 -0.0964695066 -0.0664414242 0.110378399 0.139610469 -0.12254858 0.005280599 0.00761935115 -0.128316179 -0.0374653786 0.0578353852 0.127879024 0.0134341568 -0.0500954539 -0.00337731675 -0.119695731 -0.11888539 -0.00329445861 0.062248569 0.123474449 -0.0654902309 -0.0125339935 -0.0127297472 0.106315874 -0.117941201 0.101428568 -0.0125857871 -0.0383322537 0.0596774332 -0.13507621 -0.0973153785 -0.126249328 -0.0674846619 -0.0139419707 0.120396413 0.105357513 0.0358138867 0.154476896 0.111231543 -0.171673253 -0.0643624365 -0.0841967613 0.0120489495 -0.110221028 -0.0037632389 -0.0754774585 0.10185004 0.094084166 -0.123280331 -0.0452301428 -0.119065173 0.00748275174 0.0470008291 -0.0596443266 0.110938832 0.078253679 0.0948193073 -0.00295168161 -0.0287760254 0.158084825 0.0492092073 0.0768529922 0.0546108335 -0.0640769675 -0.0777091384 0.102904022 -0.0749624521 0.129996344 -0.050066106 0.140702903 0.135439858 0.0565205291 0.039585311 0.131971121 -0.0433706567 -0.00613029394 -0.0415827632 -0.0896254405 -0.0250180494 0.0855293274 0.0162106231 0.0830451697 0.0412258208 0.109798126 0.0821261331 0.119376883 0.123270549 -0.109921172 -0.069528237 -0.128147811 0.00981930271 -0.0908927023 0.162675932 -0.115118623 0.0430796407 -0.0360173024 -0.142209709 -0.0679464936 0.125360489 0.0772604644 0.159708619 0.136423364 -0.113330379 -0.0995218381 0.0548840612 -0.0670673028 -0.0882866234 0.0123610795 -0.0497058704 -0.0615072772 -0.0382896513 -0.114018604 0.139805213 0.139648527 -0.0281077586 0.0916863829 -0.00357940956 0.119387031 -0.038923528 0.099045448 -0.056032382 -0.132692307 0.0513692684 -0.0116330124 0.0760553926 -0.0958063975 0.133466452 0.0711750537 -0.0614313632 -0.0412404239 0.0541786365 0.063894175 0.0131827295 0.0888437405 0.0340017416 0.00108662061 0.102465764 0.172270909 -0.110579006 0.0245050453 -0.113299966 0.0307509303 -0.00821371656 -0.0823507458 0.12988846 0.0906711072 0.0899582058 -0.0237735175 0.0522135533 -0.0568510592 -0.0833672881 0.0160184987 0.0457749926 -0.157355502 -0.137885511 0.0371456817 0.140265986 -0.0514193922 -0.0789706931 -0.0262926575 0.031272471 0.0584937558 0.088243857 0.0618854538 0.105974808 +tensor_16weight 2500 +-0.00417222502 0.0333769061 -0.068073824 -0.00174406881 0.0395693518 0.163498551 0.085880965 -0.0441546589 0.0277523138 -0.0394380651 -0.108085141 0.0367854051 -0.088741377 0.110496983 0.137496606 -0.0574882254 -0.0685930923 -0.118896537 -0.0459423698 0.128555194 -0.00706961751 -0.109601662 -0.0728417113 0.0407270938 0.116901517 0.037543118 -0.0457625464 0.105285026 -0.0170973707 0.0741245896 0.0285330229 0.0602779202 -0.0999675766 0.0917604342 0.00275715417 0.0221613981 0.0221239924 0.0220769197 0.0309871975 0.141381815 0.102749333 0.0140401116 -0.160013482 0.0494507849 -0.109416723 0.0506694168 -0.0603474639 0.122836456 0.102406837 0.102601565 0.0129987504 0.0918339491 -0.0100188255 0.0104814339 -0.0403004438 0.0710072964 0.139566272 -0.0848197117 -0.065366447 0.0172738302 0.0708059147 0.0956877321 -0.101862162 0.0278298706 0.119774833 0.109509952 -0.0190650206 -0.0558079928 -0.0829644129 -0.0505085252 0.0924009234 -0.0197391063 0.120741382 0.109388441 -0.155889794 0.0402384102 0.0444546603 -0.0963738933 -0.0165153071 0.0370764881 0.152952656 0.173049316 -0.104738578 0.153395116 0.0572723225 -0.0417025536 -0.0367592871 -0.113802627 0.127408341 -0.0777374357 0.0151805067 -0.0934357792 -0.0254224017 -0.0842629671 0.102764659 0.0851573944 0.119558379 -0.0993547663 -0.0726170391 -0.0864863023 0.0344553739 -0.0371179581 0.014597863 -0.101272777 0.0389231592 0.0477042645 -0.135611862 -0.0279283747 -0.0736898407 -0.00949966908 0.0327468514 0.011397168 0.0977702439 0.0774219632 -0.0666735023 -0.120168492 0.0698120147 0.0764941722 0.00528292358 0.111364022 0.0231147856 -0.0114662051 -0.0972312316 -0.046190843 -0.0422428921 0.0343527198 -0.0671815872 -0.0542576611 0.0252622664 -0.133999184 -0.036504671 -0.10330338 0.0781079531 -0.0724790767 -0.0193377137 -0.106740713 0.119743183 -0.131887868 -0.00206248462 -0.0915471017 -0.0368748158 0.00305576622 0.101493865 0.126230076 -0.0354850665 0.0502597541 -0.00101481378 0.066304937 -0.0897568762 -0.0490704626 -0.104472384 -0.0435468704 -0.083782576 0.00601604581 0.113205031 -0.0322454944 0.0402761698 -0.0695916861 -0.139957666 -0.0615144074 -0.0179994181 -0.0612879917 -0.0798030049 0.105225846 -0.0033620894 0.0714047551 0.0813372284 -0.0406201556 0.0420547277 -0.0629295483 0.128933236 -0.0379166752 0.0688687265 -0.0884593129 -0.12569578 -0.0999955758 0.0851953328 -0.105392635 0.0814247131 -0.123030953 0.0994129926 -0.0500591323 -0.109111317 -0.13911283 -0.0404232666 0.0469972044 -0.135043383 0.0236423463 0.00703085959 -0.0505910367 -0.0494552478 -0.135939568 -0.125193208 0.0693392456 0.128731236 -0.0279947668 -0.00343213975 -0.0427332819 0.137931898 -0.120300733 0.11937128 -0.0187009424 -0.0802288204 -0.13997142 -0.0681145191 -0.123841763 0.12888445 0.0320565253 -0.0545828864 0.0154529363 -0.109894589 0.127666876 0.0496545732 -0.0170144811 0.117039517 0.111107305 -0.140118808 -0.00819459558 -0.0746041089 -0.104405858 0.0340925604 -0.0303836614 0.115028903 0.082659781 0.0525604337 -0.0737821385 -0.0581655875 0.0645884275 -0.121426933 0.0243149996 0.104953259 0.11160703 -0.134071976 -0.0780070424 -0.0556476638 0.0689132363 -0.0958281457 0.0529917628 -0.00589548051 -0.0485527515 -0.133214802 -0.0193034932 -0.0625650287 0.0658643395 -0.115375213 0.0658562183 -0.126765266 0.0663554519 0.105942756 0.0113557875 0.00450533349 -0.0318827182 -0.0382149518 -0.0586391389 0.16599071 -0.0995347276 0.0626212955 -0.0848238021 0.0312059093 -0.0671846345 0.114710093 0.111972772 -0.109937578 -0.033448413 -0.00338487327 -0.137714684 0.061901439 0.0263031721 0.0757300183 -0.0868174583 -0.115281321 -0.0186160952 0.022743687 -0.0695182681 -0.0389948115 -0.00921310484 0.146889284 0.121259861 0.123389371 0.0740807503 -0.0913022682 0.0926736519 -0.0748549104 0.0120129362 -0.0260141995 0.160688117 0.103861287 -0.0145337479 -0.0333565325 0.106497906 0.12470568 -0.0427022539 -0.0200853404 0.125489667 0.117662869 -0.00191673823 0.0511538982 -0.0245015733 -0.0231342129 -0.0423432663 -0.067036055 0.0543258078 0.0126409382 -0.13212578 0.0719170049 0.0198016949 0.103088938 0.0337647051 0.118756339 -0.106006429 0.108546667 -0.161683097 -0.0532578528 -0.0416240096 -0.0414204299 0.137367234 0.113782153 -0.00119005144 -0.124352522 -0.0775565729 -0.0454200171 0.110256732 0.138212636 0.139873043 0.0665704682 -0.0985324904 -0.132870167 0.025891345 0.102456145 0.131423756 -0.108975701 -0.0261260234 0.0348203629 -0.0914004669 -0.130643874 0.064868167 -0.073974885 0.102802224 0.176195145 0.0179768056 0.0108742332 -0.0181707554 -0.0102240648 0.00463731587 0.0538709089 0.00627785875 -0.117067128 -0.087891832 0.004855379 -0.016739469 -0.0725544542 0.0821630657 -0.0417728201 0.113972411 -0.0352853052 0.076194793 0.106641911 0.101013884 -0.0971745029 0.0243909303 0.029293431 0.0236056633 -0.0190717317 -0.12938638 -0.101987876 -0.0398474075 -0.0397941768 -0.0599071085 -0.0816488713 0.131114334 -0.072889857 -0.128262654 -0.048984535 0.0297204573 -0.071578376 0.0270867199 -0.0633577183 0.13518168 0.0259044431 0.0611634552 -0.0133587159 -0.00845749862 0.0748591572 -0.0559645891 0.0772884116 -0.0892494842 -0.124270409 0.0640116185 0.0149187753 0.112770371 -0.0819433481 -0.158321261 0.146862835 0.05744645 -0.136843622 -0.0534451306 -0.057807114 0.112765148 -0.121185474 0.0865707248 0.0242433939 0.00829058886 0.00682345033 -0.0391817167 0.118281983 0.0936698243 -0.0126834186 -0.0224663615 0.0807867125 0.0614369959 -0.0208024122 -0.0155477682 0.0223713629 0.133279428 0.0341431238 -0.0801266879 -0.0473638549 -0.0159126073 0.0619758293 0.104336567 0.0777632669 0.0014840191 -0.0106922537 -0.089225471 0.139960572 0.060204789 0.0285251942 0.00940582156 0.124438897 0.0746723562 -0.0786366537 -0.140947089 -0.0317693353 0.0952398479 0.0504078493 -0.101314582 0.00230675936 0.028885033 0.0213491581 -0.138435528 -0.0281476919 0.108943664 0.00568072731 -0.021763064 0.115609825 -0.0683022514 0.0327104814 0.0758552849 0.11111246 -0.0849706307 -0.0847819373 -0.000146973485 0.0806944817 0.0748187006 0.00227210205 0.0243628193 -0.0767735019 -0.0275015086 0.0700375587 0.0457462482 0.0988681763 -0.033178322 -0.0504397713 0.0441949666 0.0874261707 -0.192612246 -0.0348819122 0.0727340132 -0.0881135315 0.0686711743 -0.0988578126 0.0592095368 -0.0447203517 0.0680712909 0.132170856 -0.0182013065 -0.0532437004 -0.0237953365 -0.0845318213 0.0437362194 0.0721768141 -0.0437071882 -0.0137181133 0.117270313 0.0595132113 0.174431637 0.0367056727 0.16878359 -0.0222882591 0.0592969358 0.0360841192 -0.0704026446 -0.0345776901 -0.0942229927 -0.165675908 -0.0193019863 -0.000750561245 -0.0503743216 -0.0973385572 0.0120092537 -0.168930963 0.162473217 -0.114793286 -0.0891378894 -0.0980757028 0.100445837 -0.0628618151 0.046582222 0.0680130422 -0.110214941 0.127800643 -0.0861949921 -0.0599781014 0.0208107978 0.0531936698 -0.00385034014 -0.0491617396 -0.0419875681 -0.00234212819 -0.044506561 -0.0653151795 0.0169184674 0.0848835111 0.144753113 -0.0450268537 0.0625728816 -0.022600282 0.0092583932 -0.0980294049 0.0949492604 -0.108607598 0.0273391213 -0.0572451502 -0.0307708569 -0.0600267388 0.0144662457 -0.122474261 0.0097047314 0.00327231945 -0.0497302189 0.143207729 0.110742435 0.116505228 -0.0606399626 0.130554333 -0.0482001454 0.146989092 0.0189287849 0.074033089 0.0732528344 0.0350786448 0.0295372307 0.131620392 0.0493342653 0.0956929848 0.0703327283 0.0766540915 -0.0670498535 0.105273128 -0.05142162 0.0384206101 0.0971984193 -0.116058186 -0.120040804 -0.0705016181 -0.10306605 0.100996941 0.104779199 -0.0833229199 0.0132066812 -0.131049663 -0.0160818845 -0.118435718 -0.0491212681 0.0483348295 0.0081577599 -0.0334057733 0.0873719454 -0.0852230042 0.1210372 -0.0395233259 0.0784760267 -0.0274248198 0.099436149 -0.081705071 -0.015222121 0.110525087 -0.0580340363 -0.0512353852 0.0699884966 -0.134925202 -0.00844763592 -0.0294794999 0.0049456358 0.0870961398 0.112957731 -0.12726216 -0.0223451219 0.053539414 0.0455328077 -0.0087382691 -0.0701403543 0.0741796196 -0.122097038 0.0159640387 -0.0312917195 0.10192579 0.0948835909 0.0600303523 -0.116103448 0.0967111215 -0.186777875 0.0394800454 0.0645573735 -0.00990641117 0.0682907104 -0.0206053555 -0.0376345441 -0.0776938945 -0.0846702084 -0.0824906975 0.020499425 -0.0137960762 0.180186674 0.0354575664 0.0700841099 0.0222972054 0.0324955657 0.130303159 -0.0262751058 -0.184336275 0.0281189717 -0.138265505 -0.0036136366 -0.0606828108 -0.013456936 0.112634584 0.105258964 0.176689893 -0.169644877 -0.0470958985 0.132185519 -0.175565004 0.0531695932 0.0104851555 0.0826452076 -0.145328104 -0.0922671333 -0.164871365 0.142606691 -0.0212450475 -0.0627536103 0.120814189 -0.0105575472 0.0765462518 -0.0688535571 0.0809823424 0.00905969739 -0.0526664332 0.0274341255 -0.065106079 0.106400639 0.0910326689 0.0318324715 0.00195610523 -0.0523022339 -0.0163692534 0.0768652707 -0.0259682089 -0.0942348465 -0.126349121 -0.122505806 0.0306945741 -0.113110162 0.0605172664 0.0169282854 -0.0441042334 -0.139085665 0.0421898365 0.0137649477 -0.0915340632 0.0716675818 -0.0818142742 0.0774355978 0.132591441 -0.0364979431 -0.112088569 0.025617823 0.0622905344 0.137828872 -0.128434256 -0.0762574747 0.039005056 0.0113735795 0.0853985548 -0.110001206 -0.122525297 -0.118933201 -0.0976378173 -0.00780165195 0.0592657812 0.127340347 -0.116591275 -0.00909214467 0.0285629407 0.137352273 0.0194081701 0.0885308981 0.157478809 0.00182641763 -0.111265883 -0.0360108428 0.0904895365 0.0231916904 -0.134599373 0.123875104 -0.0176251531 0.00123633444 0.118751198 0.0944036767 0.00726477336 -0.00553962262 0.0213748366 0.0605549626 -0.0671228841 -0.11890097 0.0446490161 -0.124758892 0.0125705721 -0.0418758988 0.0201296303 -0.0815314054 -0.0722059831 -0.0872977003 0.17252858 0.0650849119 0.105454117 -0.0630111396 0.0785340741 -0.0949152634 0.120920762 -0.00637194049 -0.122229263 -0.04851266 0.0649544969 -0.0367785469 0.0145572387 0.0296253487 -0.104165144 -0.0307542253 0.0980055779 -0.0429146662 0.0307714939 0.0965365469 0.0424363613 -0.0620633438 0.037256062 0.0731356591 0.0377854705 0.126446977 -0.0181031153 -0.10487172 -0.012096405 0.0115436465 0.00484970212 0.0365195423 -0.0648169369 0.0427874923 -0.0382243469 -0.131412312 0.0129759014 0.014330104 0.104675427 -0.0191158354 0.110161588 -0.0690229386 -0.134754956 0.0209327489 -0.137074128 0.101832643 -0.0448981151 0.0525557846 -0.0926448479 -0.0648545772 -0.118553191 -0.0258715078 0.0647533536 0.0853616297 0.0266875774 0.0280805677 -0.133945882 -0.131097019 0.0535267889 -0.0883116797 0.0746518523 0.0716847479 0.0854772329 -0.0682440624 -0.0383327305 -0.0687909126 0.0661910623 -0.0651709512 0.0676450729 0.042929098 0.0129889995 -0.0627579913 0.026080206 -0.0745798126 -0.110127226 0.0564449728 -0.0521587133 -0.0703866705 0.0996105373 0.0995502174 -0.0150131434 -0.093635723 -0.0350378379 -0.0970593914 0.122739777 -0.0746073425 0.00775636733 -0.0259913579 -0.0691226423 -0.0844953358 0.0796677917 -0.0647141263 -0.126704842 0.0870190561 0.110861555 -0.0944047272 -0.00367192924 0.0355183631 0.118708417 -0.0181086287 0.10595347 -0.00600086153 -0.00448402762 0.0546585321 0.0393356681 0.140889272 0.0358275473 -0.0797418952 0.0966726542 0.102656618 -0.0845542178 0.000538542867 -0.138633057 -0.103164904 -0.0017221421 -0.0267990902 -0.0042184745 0.0618429407 0.0372477919 -0.0600601025 -0.120651938 0.018602442 0.0741632134 -0.0556408912 -0.0401281454 0.0216008872 0.0986668468 -0.0385047868 -0.00614350522 0.0835534334 0.0581347793 0.0706503093 0.0540118366 -0.0558184311 -0.163868651 0.00549533684 -0.0525678769 0.0940028876 -0.0198096931 0.0326584801 0.0405591354 -0.0935613215 -0.0739186257 -0.032372281 0.131545618 0.076330319 0.0784656629 0.096952945 -0.0555465668 0.127968788 0.0291817039 -0.161188528 -0.0909612328 0.0471343175 -0.050026428 0.0184959229 -0.139395848 0.0397638716 -0.18348141 0.112185314 -0.0655662641 -0.0470221415 -0.164569005 -0.121025704 -0.0150856273 -0.00368866767 -0.0741992891 0.059470281 -0.0445875078 0.0394076109 -0.129359409 0.0192312244 -0.0108757932 0.0112697631 -0.146348611 0.159908772 0.0195346791 0.00573998271 0.106009968 0.0103269666 0.0211634543 -0.0780764073 -0.115655147 0.102710932 -0.0541914441 0.0466746576 -0.106848881 0.10031607 -0.134285003 -0.0872690454 -0.135360599 -0.0295483619 -0.105432019 -0.0230201259 0.0187110342 -0.10428597 0.184378833 -0.0646381974 0.0479477942 0.0756504536 0.101597495 0.10669633 0.0338435024 0.0337592065 -0.136492506 -0.0870145112 0.0574491024 0.0298079327 0.0783730522 -0.0157561749 0.0467197858 0.103480637 0.0719107315 -0.10105852 0.0388642065 -0.0440139845 -0.0164328683 -0.0481204912 0.14119412 -0.136799589 -0.0650587231 0.00970490556 0.0506416559 0.0616328567 -0.00483906409 0.00724408031 0.165032029 -0.124474898 -0.149660811 -0.0778745487 -0.127463415 -0.0629397258 0.0866350383 -0.135488585 -0.120086707 -0.0116316313 -0.0172481909 -0.033788152 -0.00575722754 0.0619941019 0.109511442 -0.1259799 -0.0296196118 0.0126680005 -0.00631114235 0.0279259682 -0.000546666677 0.046820391 -0.0707993954 0.0746022463 -0.0110969217 -0.106299125 0.0408700407 0.0607301854 0.0142738195 -0.117608964 0.125323534 -0.0797425956 -0.147771716 -0.0951133072 -0.0761706829 -0.0271511003 -0.0952035114 0.149354368 0.0850981027 0.12360011 -0.0736458525 0.0565205403 0.0375516564 -0.0330046788 0.0698090419 -0.0682012588 -0.108341932 -0.0916303098 0.0491649024 0.128733024 -0.0175282191 -0.0851026475 0.121217623 0.0644281209 -9.09119844e-05 -0.00583170354 0.0807056576 -0.0556110144 -0.030019151 -0.151016012 -0.0976619869 -0.0704679191 -0.140273213 -0.131401047 -0.0380841792 0.105461046 0.115149468 0.0569063798 -0.0500233844 -0.120028786 -0.0609620214 0.0207634512 0.121349677 0.0845320895 -0.00625681877 0.130006418 0.10405767 -0.0260085575 0.00623696856 -0.00945841614 0.100506075 0.0220730081 -0.125443459 0.0532133728 -0.120297104 -0.11440815 -0.0461563803 -0.0888359398 0.013649012 -0.00815679412 -0.070658952 -0.00767463259 -0.120047957 0.120935254 0.0590654165 -0.0607035644 0.0469796248 0.10308367 0.17886214 -0.0048859301 -0.0231490359 -0.127134889 -0.0811686739 -0.0198651105 0.0723841488 -0.00580265373 0.0251238793 0.0345673561 -0.150929868 0.0692640245 -0.0050682174 -0.0870390087 0.0347174555 0.118515827 0.0921180844 0.0585382432 0.102678254 0.0418618806 -0.0638227612 0.0212153941 -0.048158478 0.067370899 0.134428993 -0.0895267203 -0.137082666 0.0383201912 0.0807136148 0.0119578699 -0.00565120764 0.051708404 -0.0704574063 -0.0806446597 -0.0455211401 0.128211096 -0.152886659 -0.126107663 -0.174904436 0.170342699 0.0492694043 -0.016915286 0.0414748713 -0.0318201743 0.103975341 -0.0692306831 -0.0701901168 -0.136825696 -0.104924172 0.0634303093 -0.103916064 -0.106038429 -0.0103737917 -0.102413662 -0.0131952306 0.0928339362 0.0422227457 0.0485036634 0.100036606 -0.0334572345 0.0251479615 -0.0170369614 0.105127081 0.0789836645 -0.125357226 -0.0810341984 -0.119952716 -0.087011233 -0.136233181 -0.145835862 -0.139367864 0.0879707336 0.118222609 0.0654330924 -0.00252592564 0.0332888886 0.0847103074 -0.147119001 0.13717629 -0.0333771855 -0.0392536968 0.140056893 0.161733225 0.170547694 -0.13669911 -0.126283079 0.136217371 0.0222201925 0.100318342 0.0159461573 -0.151088864 -0.0795981139 0.0589573346 -0.00727232778 -0.0690927505 -0.00238380092 0.00583820418 -0.0911569446 -0.131048679 -0.117706373 0.0648147985 0.00487408834 0.117360242 0.170240089 0.118482806 0.0110958666 -0.0510480367 -0.0351133123 0.059696164 0.0868533999 -0.02283502 0.157529563 0.0911459476 -0.130911916 -0.0573937744 -0.116225123 0.0283462927 -0.0848590583 -0.00989816617 -0.0507650711 0.00583241554 -0.00466799736 -0.0543093197 0.123912387 -0.0726056844 -0.0514792576 0.178007007 -0.0574177206 0.0999287218 0.08989916 -0.0277903583 -0.144622609 0.0931683257 0.140252993 0.0123335114 -0.119432166 0.001549048 -0.00325248647 0.096072562 0.162840962 0.0217829365 0.122066244 -0.0385645702 -0.026368469 0.0513184667 -0.0859575272 -0.0242716596 0.00956724584 0.0109226704 0.0818789154 0.0224322379 0.135026619 -0.04657121 -0.104234524 0.0428646505 0.0151414573 -0.0313612148 -0.00296355784 -0.0544822216 0.129524395 0.0263857096 0.0820539892 0.0624815822 -0.0385384262 0.111337319 -0.0279020891 0.0234304219 -0.138835654 0.0737862438 0.0558309704 0.106782034 0.0583992153 -0.0247403383 -0.128191724 0.0889156908 -0.0910850763 0.0517035276 -0.0938775688 -0.0293845311 -0.120674253 -0.0451591834 0.0491400808 0.0255339593 -0.109320991 -0.0378039181 -0.0312750563 0.0803458393 -0.139691994 -0.00930851698 -0.0976287797 -0.114196926 -0.133310482 0.141642928 0.105140746 0.119025633 0.0663658231 0.0742790997 -0.0807274282 -0.049172353 0.0325890929 0.168383315 0.0958275571 0.0830662847 0.13090989 0.11515788 0.0930101275 -0.135337189 -0.01777054 -0.0463960879 0.121989891 -0.0663873479 0.0556551777 -0.0777027011 0.0780093744 0.0564488322 0.0328528732 0.0221739244 0.0690468401 0.165196538 -0.0979238898 0.0833290517 -0.0583337545 0.171756044 0.0485463925 -0.0788506195 -0.133651629 -0.0622514226 0.146948621 0.00967819989 -0.10787762 -0.0629694313 -0.0672841221 0.0454268195 0.153374791 0.0254830644 -0.107513458 -0.0417494588 -0.128024951 0.0972493291 0.0891800448 0.0753429011 0.0632758364 -0.0226834938 0.0985851139 -0.0661747381 -0.0974865481 -0.116021931 -0.0705863535 0.0088367667 0.11096172 0.00588925183 0.0319114141 -0.0778467134 -0.0426055863 0.0243966766 0.00911161304 -0.125340477 0.0958841443 0.0809076428 -0.0837767944 -0.0955800563 0.0739903226 -0.0197054464 0.0984792486 0.0248065591 0.013472463 0.0424549654 -0.115080677 0.0406080261 -0.0103811678 -0.00163237448 0.124697939 -0.0250684526 -0.109149016 -0.0914341062 0.0849946067 0.114163592 -0.114370003 -0.0137929916 0.0836040005 0.113473631 -0.0115816081 -0.00375672383 0.145472392 0.115759097 -0.0200298876 -0.0216319505 0.0506976917 0.146452606 0.00109191891 0.0250434522 0.125239 -0.070178017 0.112422191 0.128775164 0.0972625315 -0.0562150516 0.0652283952 0.0582313985 -0.0210614875 0.0146073569 -0.0898632482 0.00922326744 0.143344715 -0.154285237 0.120073751 -0.0218681768 -0.0313799977 -0.0713483468 -0.11174649 -0.0150706414 -0.0665470064 0.0984330028 -0.0607178248 0.00206361711 0.144247591 0.114495434 -0.0467930511 0.0812650472 0.126196146 0.155254051 -0.117046624 -0.00681339065 0.110044703 -0.130152121 0.0844703615 -0.103106052 0.07677605 0.136585757 -0.00327930111 -0.0542289279 -0.0169889219 -0.00491619762 0.00191336707 0.123525828 -0.0764942095 0.062074706 -0.118892998 -0.152141303 0.128834948 -0.00888511073 -0.128381923 0.166257232 -0.118136637 0.104106829 -0.00407020096 -0.0890831947 0.0344172269 -0.0881365165 0.00957617164 -0.0420764349 -0.11916101 0.0259462018 -0.12878786 -0.11673443 0.0500368178 0.123366237 -0.118328013 -0.0965624005 -0.124413118 -0.0506703407 0.10639628 0.118844062 0.136438951 -0.096905753 0.0549481958 -0.0769049451 -0.0224119276 0.0397996418 0.0951755494 0.113283962 -0.0366398245 0.106758043 -0.153018385 0.116750017 0.0242051464 0.0555575825 -0.144076809 -0.123421922 -0.0913296789 -0.0159076471 0.0744441524 0.129337355 0.0706739873 0.0101508312 0.00735373795 0.127268285 -0.108703181 0.0155255729 -0.0134423403 -0.0967226699 0.0832142085 -0.174966842 0.130565166 0.0653796941 0.0878034979 -0.0906267166 0.0458128788 -0.00101685664 0.0771127343 0.0380266793 0.0676126033 0.119121142 -0.113756225 -0.044930473 -0.0590678304 -0.114111833 0.0717334226 0.121548906 0.141148821 0.145141944 -0.0386701077 0.0908232033 0.0557151176 0.0785376281 0.0665990561 -0.026476074 0.0890998095 -0.0765963942 0.00731649436 -0.0525127202 0.0552615821 0.0508936346 -0.0130886035 -0.136101693 0.0808141679 -0.0865692198 -0.0861371309 0.120797709 0.14255853 -0.0736281499 -0.110956788 -0.0413296558 0.132970884 -0.128850982 -0.0727135316 -0.0477897227 0.0840527937 -0.0905833915 -0.115051724 -0.0579792745 0.111140922 0.10985522 -0.0506750494 -0.0162282735 0.0716301352 0.0544709153 0.12016838 -0.0960830227 0.0585822612 0.0589075349 0.0988149717 0.0618340187 -0.0847306624 -0.0631039515 0.0553748012 -0.111075364 0.0433333553 -0.0188259855 -0.112079769 0.0795213431 -0.0475602113 0.13167055 0.022298038 0.134109125 0.0383149795 -0.0405841693 -0.120256543 -0.104634985 -0.0977489278 -0.020790996 0.105706528 0.133288711 -0.00679467805 0.107927233 0.132649049 0.184538066 0.0214475244 -0.0248829424 0.0847060084 0.059696611 0.0894906074 -0.0552571602 0.110768363 -0.0777745917 0.0256617665 -0.0267863441 -0.011260841 0.167986751 -0.0874950364 0.191600099 0.145262077 -0.0928313509 0.0274879988 -0.0514709651 0.09432742 0.120864995 0.0269548818 0.00873297453 0.0555894822 0.0474308766 0.10841053 0.0247467272 -0.0354869291 0.0167415757 0.130794838 0.130202994 -0.105830118 -0.0619872436 0.118844941 0.0582736246 -0.0529267974 -0.122846067 -0.00995433331 -0.0649164692 0.0907461941 -0.0287418552 -0.0419690758 0.0771300942 -0.0686131641 -0.137708366 -0.0504630506 -0.0929714441 -0.105465375 0.128161833 0.0110401921 -0.102053583 -0.137259245 -0.106930703 0.116477638 0.0865236223 0.0225237608 -0.10337396 -0.141124219 0.0859540254 0.124468133 0.122546621 -0.13006793 0.0637582615 -0.111786462 0.0577507317 -0.104083821 -0.0128176091 0.165550634 -0.0433670357 -0.0197649412 0.0431121029 0.0775365457 0.0529541112 -0.094568193 0.121677496 -0.0553900301 -0.111113667 0.0546647757 -0.102778159 -0.11795371 -0.0317932889 -0.0803981498 0.0593941696 0.0496674031 0.115015298 -0.0528469123 -0.139373824 0.0843660235 0.139188275 -0.00414940715 -0.0868603438 0.13721244 -0.111397579 -0.159685537 -0.082697615 -0.104306176 0.126542285 0.105239764 0.107114151 0.177761674 0.0128696105 0.0826148763 0.058955878 -0.104517892 -0.0758889616 0.09818995 -0.0577240698 -0.0954538211 -0.00908805989 -0.0742976665 0.0133352363 -0.0486188494 0.110651046 0.142552137 -0.115153261 -0.0301514734 0.063916415 0.15691933 0.0455034524 -0.099480927 -0.0727156326 0.0517707281 -0.00254492369 -0.0865125954 -0.0609851852 -0.0927319676 0.0186524875 0.0648534745 -0.106417365 -0.12526378 -0.0807953104 -0.112901188 0.0167454872 0.0380413979 0.0550592877 0.121599279 -0.110051796 0.02049114 -0.113943994 0.0120341163 0.054577291 -0.0282275155 -0.0312832892 -0.14506188 0.0298683271 0.0238241255 -0.109261803 0.156495273 -0.132307068 0.0377000831 -0.194822595 0.0180359278 0.0903957933 0.0792595521 -0.112396851 0.181946352 0.131865382 0.131673768 -0.0266070645 -0.0772341341 -0.152235314 0.00981780421 0.021913426 0.128900573 0.0193908215 -0.0176257994 -0.0472896919 -0.0430161357 0.00791153312 -0.00393258035 -0.0942965969 -0.121386364 0.0414898992 -0.108187407 -0.0423817933 0.0773015916 0.0430543721 -0.140809581 -0.0302241296 -0.0291004777 0.137758568 0.118799791 0.0682975501 -0.0604750663 0.0137153119 -0.132459 -0.127778888 -0.0843767822 -0.0346240923 0.109940037 0.0166125298 -0.0546926185 0.0908764154 0.021722734 -0.098706007 0.0907239914 0.0898113251 -0.0827805996 -0.0991119295 -0.140943363 0.0492345244 -0.0119397789 0.14030765 0.103607342 0.0925647169 0.0516325533 0.0379345268 -0.0236282125 -0.12176773 0.0705950111 -0.0243805572 0.0545013547 -0.0887916982 -0.114718959 -0.0397065133 -0.0364263579 0.0644538626 -0.0560083427 0.00641170144 -0.115581721 0.0916448981 0.165714934 -0.0800587684 -0.139642954 -0.147206143 0.14433305 0.121563576 0.124653377 0.00290819933 -0.00689320266 -0.134743109 -0.0199477952 0.025890775 -0.097605601 0.144976184 0.0240392517 0.106600702 -0.0212116838 0.141039237 0.0561088696 0.0311461743 -0.0972645953 -0.072422564 -0.00339891389 0.0796980709 0.104762718 -0.000878902618 -0.132795557 -0.0110478699 0.0181016214 0.033460509 0.16719529 0.0324967206 -0.0523595363 -0.0596223697 0.00182852231 -0.0620629825 0.0593552999 -0.0177798048 -0.134050861 0.091143012 0.0613186061 -0.0134655545 0.0728626028 0.11032062 -0.110704623 0.113462664 -0.125822559 -0.0217845738 -0.0629318058 -0.110271119 0.0102175586 0.0384269953 0.041528672 0.117570601 0.0818911791 -0.172721431 -0.0826341584 0.143604502 -0.0946950018 -0.0604351908 -0.145674467 -0.0840071887 0.124765471 -0.0483974069 0.0704860687 -0.0513613969 0.102030285 0.0974786878 -0.122555755 0.0484942682 0.120798931 0.0460326076 -0.158095554 -0.174045458 0.0302520841 0.00143720582 0.109413154 -0.089490898 0.0403371304 0.0639648885 0.0195339117 0.0851712674 0.0805694312 0.14608863 0.0028625133 -0.0460382551 0.133578837 0.0474288128 -0.00900612772 0.0885348395 -0.0367066041 -0.0723806694 0.11516311 -0.118180014 0.173740089 -0.146156281 0.0717909113 0.0275212824 -0.00858630054 -0.0898427665 0.0496289805 -0.00977645814 -0.000550225377 0.107040554 0.00615952769 -0.0177549198 0.00348424283 0.0556790046 0.00946377218 0.0532293469 0.065482147 -0.00725790858 -0.0178769901 -0.0931927189 -0.00408215867 -0.0455512553 0.0916866064 -0.0685584769 0.0127655407 0.153180107 -0.111894742 0.0765284225 0.0260704271 0.166822523 0.0185323507 0.0651063919 -0.0187719949 0.124089167 0.060685847 -0.0775963366 -0.0125377784 -0.0656027719 0.0646102652 -0.168868825 0.0715800896 0.0879634544 -0.0288382769 0.106939703 0.0860616416 -0.147091925 0.0701762438 -0.154209405 0.0168217402 -0.0449583791 0.150799006 -0.0704786852 0.0240108818 -0.0355115384 0.0358552337 -0.00473457575 -0.0508537143 -0.141223907 0.0694443733 0.13417317 -0.0782108009 -0.101270229 0.0261989981 -0.0686776191 0.0585096031 -0.0421225578 0.112916127 0.086612314 -0.0666391104 -0.0785085559 0.0723554343 0.0139737725 0.0708736479 0.104420915 0.0537221283 0.0645926893 0.0457919091 -0.0992759913 -0.0648162961 0.0873944908 0.135357454 0.0170427561 -0.111893699 0.118674949 0.0242431909 0.131864205 -0.0160110891 -0.0652568489 -0.136016116 0.109248772 -0.0484858006 0.0772848427 -0.14131242 -0.0729296431 -0.0393306315 -0.136528164 -0.0325899497 -0.138895422 0.0920817852 -0.00506219268 -0.0218565464 0.178741023 0.000478784292 0.131854966 -0.0545493215 0.0215138886 0.00403119158 0.0818745121 -0.0696112365 0.0600213483 -0.0925471112 -0.0903580412 -0.0477291904 0.107568391 0.00575149059 -0.0782493427 -0.139606044 -0.104121134 0.000853225589 -0.0665216073 0.0340940468 0.089630641 0.105736643 0.0541669875 -0.0017753446 -0.0978966951 0.0146369338 0.148612946 0.0921797678 0.136579424 0.0460715145 0.0911454633 -0.128372431 0.0109238625 -0.0441332199 -0.0767628923 0.0842499733 0.0932835937 0.0461514592 -0.0389378071 0.0933182985 -0.0622017495 -0.150734246 0.0955395252 0.0519403517 -0.0935698077 0.066866152 -0.0563694201 0.0773465633 0.130469427 -0.0873394459 -0.0453016274 0.108205438 0.113830492 0.0172546878 0.142020121 -0.154692054 -0.0341549404 -0.0286441967 0.109917641 0.0980234817 -0.0668107346 0.184589744 0.0403014049 0.0617943406 -0.0170874391 0.165723398 -0.129302576 -0.0549281389 -0.0933095962 0.0417284593 -0.0645934939 -0.110736929 0.0152139366 -0.0576914884 0.14489913 0.0873956382 0.122595996 -0.100744717 -0.0429090895 -0.0427098498 0.00572537677 0.0862406343 0.0775141418 -0.114131004 0.0538300015 -0.133984268 -0.0513852537 -0.10520786 0.0823375359 -0.154663965 0.140852332 -0.12760137 0.0852351859 0.0137166381 0.107333735 0.0452813096 -0.0948897004 0.151388273 0.0713874474 -0.128167778 0.0409513377 -0.0138035901 -0.124753937 0.0356289335 0.0602589771 -0.0651701242 -0.0210708473 0.103122458 0.0906714946 -0.00217249128 0.122800052 -0.181580707 0.116333932 -0.0186058432 -0.0243001878 -0.113380618 -0.118654899 -0.0473651513 -0.0539538004 0.0831884071 -0.146917343 0.087518312 -0.0194211677 -0.0846155882 0.113477409 -0.136310667 -0.0191066563 0.143494949 0.112203613 0.114737526 -0.111948609 -0.00443618372 -0.0773533285 0.136251882 0.0225571636 -0.0362712741 -0.0346994996 0.114932373 0.0432204269 -0.0263436884 0.107738495 0.076974012 0.0996974856 0.119173273 -0.000754479493 -0.120190717 -0.194255233 0.0754887313 -0.098057352 -0.0132691478 -0.179493576 0.0841401368 -0.0858590156 0.0584929027 -0.0300332811 0.123049341 -0.0501776785 0.0645771027 0.147005603 0.0247138739 0.0643707812 -0.163929448 0.106429175 0.0111500323 0.00196518004 -0.136184648 0.0189471096 -0.0814004242 -0.045225054 -0.0670835301 -0.0842032209 0.0329777375 0.0835169405 0.0539242215 0.0254002046 0.0783123672 -0.00774729624 0.0359950811 -0.0802180246 -0.107105985 0.00407534558 -0.00570665905 -0.0215339214 -0.0752450898 0.0079908995 -0.051493451 0.0471975654 0.137056708 -0.0891998485 0.0102625685 0.074707374 -0.105349854 -0.013545217 0.0585588515 0.123852111 -0.0915067494 -0.0506767295 0.146273047 -0.0317402482 0.0230212267 0.0921593308 -0.147391126 -0.102736384 -0.100691356 -0.0281435605 0.0503647961 0.0425743312 0.0990781933 0.119182095 0.0459855348 0.0305577312 -0.123558313 -0.0513050854 0.0112928003 0.00887593627 0.126145825 0.00219893456 0.0915814042 -0.109302096 -0.15622583 -0.13981463 -0.00616870821 -0.0105179995 0.104944795 0.0233698469 0.0621656775 -0.134819224 -0.0790077522 -0.0676694512 -0.118123025 0.0649551898 -0.0268536378 -0.128346384 0.0998368114 0.0453064777 0.0186656322 0.082455948 0.0146208499 0.0852921382 -0.034911897 0.0727281496 -0.0483980887 -0.121127099 0.0403252244 0.063668929 0.0448285639 0.119639181 -0.047936447 0.0020842955 -0.093942605 -0.104747362 0.0033461235 0.034542352 0.0912363082 -0.0369631052 -0.0812179595 -0.0106220422 0.00214873254 -0.0435321555 0.0224560201 -0.136056453 0.0154713243 -0.0294029415 -0.030266948 0.0689932555 0.0570425093 0.098155126 -0.0518679842 0.119383372 -0.138322964 -0.00904059596 -0.106911823 0.103631556 -0.103188537 0.00235909224 -0.0282520652 -0.0132444557 0.0874274075 -0.0390356034 -0.0197433233 -0.0990931988 -0.0221592877 -0.0945370346 0.0292591304 0.0865644217 0.0864166915 0.100083038 -0.0889186412 -0.115894549 0.102965675 0.0463272929 -0.134802818 0.0737079829 -0.129007578 0.0247757733 0.0989635438 -0.11343433 0.134549081 -0.0447648764 -0.0838385448 0.0168119203 -0.0835036933 -0.0518646985 0.0785566717 0.0304447822 0.0878861994 -0.0602087826 0.0966850668 0.00694012549 0.150681928 -0.0779627338 -0.0904122442 -0.0970596001 -0.129838735 -0.050377138 -0.120469227 0.0891878232 0.0898670554 0.0890591964 0.0588300936 -0.00137163699 0.122232124 -0.0888908282 0.091505006 0.0884710699 -0.00118646026 -0.0513543747 0.120835468 0.10668838 -0.123527482 0.126270682 -0.0198479965 -0.136215568 -0.120502472 -0.11477147 -0.0835626945 0.112296805 -0.10101068 -0.0431131609 -0.0186134148 -0.0433320589 -0.000781024981 0.104710743 -0.0381170735 0.0791320279 0.0748666972 0.121473044 -0.0812482983 0.12976031 -0.0290680751 -0.00869207084 0.134282753 -0.124510139 0.0872084498 0.0703281984 0.044497788 -0.0438723378 0.0582895428 0.0864354447 0.00991265662 0.0587073565 0.0693353862 0.0946473181 0.132393822 -0.0691181794 0.114411987 0.0981440097 0.0721710473 0.0940810665 0.0939522609 0.0250950288 -0.00488990825 -0.0450708121 0.00825079717 0.0511284098 -0.0437048152 -0.0188587848 -0.0834606662 0.0222276002 0.115497097 0.0385790952 -0.0981816575 -0.0865187049 -0.0519928411 0.0127416467 0.0403454639 -0.102931798 -0.0622832291 -0.176352426 0.0319424681 0.115145199 0.137701288 -0.160753131 0.0574897826 0.029381264 -0.106967442 0.135685384 0.0374634452 -0.0107224155 -0.0305843167 0.0789548606 0.0807328969 0.116793022 0.0109361755 -0.116716683 0.0137034692 0.0524121523 -0.0414179638 -0.0828967467 -0.131809413 -0.08836063 0.0339427441 0.00510858931 0.102733135 -0.0078022778 -0.0649038255 0.113678351 0.0853035152 -0.0652009472 -0.105958655 -0.0926674381 -0.124525182 0.0650760978 0.0134403473 -0.0214727018 0.0792901665 -0.0695419237 0.00138363184 -0.0841140822 0.00448186696 0.011020178 -0.0218410827 0.124228075 0.135860786 -0.0192789454 -0.10124626 -0.0194821134 0.0967154875 0.14184539 0.0926100463 0.0980347469 0.00184068806 -0.095286414 -0.123941429 0.025722323 0.11833895 -0.141884103 0.117436834 -0.0285835806 0.129173145 -0.0291574448 0.0875629336 -0.12026988 0.108618274 -0.0268362314 0.140304998 0.0205702633 -0.0184959918 -0.0276578963 -0.131651908 -0.0468064025 0.100602806 -0.0214187056 0.121467933 0.132986173 0.0596923828 -0.100520998 0.0469026566 -0.0225379243 -0.124888517 -0.0528000593 -0.119780593 0.103240386 0.0283819586 0.00888063014 -0.0159017146 0.0695591718 -0.0311708003 0.0383356661 0.0305758268 -0.0846125856 -0.133274123 0.0758237839 0.0325065106 0.0458696932 -0.101240918 -0.0143970549 -0.0982608497 -0.111203283 0.0112188458 0.0285433233 0.0977355093 0.0379961878 -0.0932414755 -0.137481123 0.0168030262 -0.130169049 -0.125621766 -0.0568111241 0.0422300845 -0.025392022 -0.0273560993 -0.0488719195 0.00497778924 0.0536593273 0.0217000954 -0.0605572015 -0.0740404874 0.117026515 0.142124146 -0.102213182 0.112507164 0.077157028 0.0520981103 0.105322793 0.126890764 0.0796572641 0.029000476 0.0621110015 -0.112644024 -0.111841545 0.114291631 0.119438633 -0.146059752 -0.140381634 0.12208508 -0.0593487211 -0.123670183 -0.0897043496 0.0858117491 -0.00866622385 0.081977047 0.0776127875 0.0893446133 -0.0835789144 -0.0501656011 -0.00162425637 0.113464631 -0.0252724476 -0.103041545 0.113613687 0.101202324 0.100906543 -0.126668274 -0.0983274356 -0.0464979149 0.151394174 0.0900189281 -0.0734032542 -0.0592180528 +tensor_12weight 2500 +-0.0283639673 -0.0221012682 0.133199289 0.0563756078 0.0816197246 0.0497825928 0.0042012888 -0.0795029998 0.0960107818 -0.0432373807 0.121810496 -0.0352233462 -0.00614713971 -0.0204377715 0.0226566363 0.0332049951 0.0991061553 -0.101053268 0.0533379503 0.106147163 0.0264711361 -0.0589281917 0.0324204192 -0.105546504 -0.00826245081 -0.0233381912 -0.0656121224 -0.107647173 -0.0529786088 -0.0139337238 0.0566528141 -0.0541924387 -0.138034001 0.00951558538 0.0105655985 0.127043515 0.0221604444 0.0460083932 -0.053250242 -0.078631945 0.0428880826 0.0788966715 0.0724399239 -0.0252473392 0.105300531 0.00663374923 0.103679053 -0.0542423241 0.00125868618 0.0159070492 0.0878015533 -0.0953141749 -0.00558294635 0.121205859 -0.0582696423 -0.0979145169 0.129265517 -0.135002077 -0.0146291191 -0.122696914 -0.081385985 -0.0635085255 -0.0441100746 -0.0792573318 0.0985748172 -0.0419243202 -0.0581838712 -0.0535519533 -0.142513841 -0.0591027141 -0.137103438 -0.106035508 0.0540995859 -0.00673660636 0.0947093591 0.080907613 0.101540178 -0.0970778465 0.044728864 0.00199462846 -0.138367772 -0.0271480009 -0.141949549 0.133446857 -0.0284297504 0.11598064 -0.139251098 -0.11250446 -0.0144105088 0.0116322637 -0.0658263117 0.00760472333 0.00838519912 0.0230650119 0.107505634 0.0946382433 -0.04299891 0.0719103888 -0.102656573 0.0685082525 -0.0726309568 0.0508401543 -0.164903283 -0.00989604276 0.0877222568 -0.0414222702 0.0155824088 -0.163643703 -0.127258003 -0.036953602 0.144812256 -0.0623725951 -0.0524286106 -0.0514523163 0.149055287 0.151022196 -0.0676416233 -0.00197385135 0.114244364 -0.0560362265 0.0799408183 -0.0222610235 -0.0891805142 -0.00829313789 0.074500069 0.118379205 -0.0137284929 -0.00101503101 0.153622374 -0.0226939153 0.00704781711 -0.0985070691 0.0757866055 -0.0784802958 -0.0075249183 -0.158581018 0.000263459544 -0.0682379603 0.152031437 -0.0480913743 -0.127841696 0.0475812815 -0.0938547626 -0.0747125596 0.108499467 0.138318345 0.149815157 -0.167665958 0.0575088561 0.0208468586 -0.0716508031 0.0452511907 -0.137918025 0.0906910673 0.0187588073 0.024338223 0.0748084113 0.102134176 0.0844082534 0.0736728013 -0.0625494495 0.158075228 0.167300254 0.124639682 0.105899356 0.0630086958 0.131153017 0.112854511 -0.00915049016 0.166930482 0.0416866131 -0.123647667 0.0432391763 0.10396602 0.110268258 0.113732249 -0.0752861872 0.174172521 -0.102077015 -0.0965104327 0.0681533068 -0.00983810425 -0.106261119 0.0500092097 0.0385275073 0.023127025 0.0940388143 -0.0235950071 0.0871616006 0.0998354703 0.0826614797 -0.100802064 -0.0113284597 -0.125418305 -0.0762608498 0.0396648198 -0.0408090092 -0.11062713 0.0179210901 0.030816406 0.0176397078 -0.0463601053 -0.0776805356 -0.0160311256 0.157598197 -0.0891788378 -0.0539689176 -0.095866017 -0.06246797 -0.0310623348 0.0908927321 0.0408000574 0.0765677392 -0.0167786833 -0.0754223838 0.0956952795 0.114675142 -0.108119257 -0.0605223328 -0.024680933 0.0278924722 0.0077753365 -0.0282166582 0.0750903338 -0.0393958427 0.124427982 0.0725398734 0.143909976 0.0277911164 -0.0190528724 0.00227800012 -0.0566477478 0.00833356939 -0.0737289861 0.0828023031 -0.0422451794 0.111898191 -0.134707883 0.19176881 -0.0650343299 0.153366417 -0.132717431 0.0561521985 0.112367474 -0.107373334 0.0557826981 0.139610589 0.0258726813 0.102975905 -0.141169146 -0.0615244322 0.0394206047 -0.0578120835 0.00785397924 0.0674745068 0.0853137672 0.175648615 -0.0636080429 0.155097261 0.111320585 -0.172565565 0.0217328835 -0.0781033784 0.132392913 0.0711862743 0.0581634976 0.186962739 0.0883171558 0.0591275692 0.0622355938 -0.00711058732 -0.0328418538 0.0393970683 0.0599646643 -0.0935084671 0.11538434 0.00435523642 0.0496765226 0.120068103 0.150854304 0.125003979 0.10406445 -0.0222269446 -0.0942721814 -0.05046333 0.0410163514 -0.0161274076 -0.033829648 0.0972495377 -0.143943429 -0.0443067737 -0.0550568588 -0.0995942876 0.055709511 -0.0130466223 0.16307734 -0.0587146431 -0.0876527801 -0.0571654439 -0.0653208718 0.0995535851 0.110708371 -0.117773779 -0.0629195049 -0.0506410673 0.104000725 -0.0097075915 0.039908275 -0.169400364 0.116414152 0.0159012116 -0.115527548 0.0599115118 0.0376613848 0.0347734354 0.149321213 0.0256413259 -0.0118766362 -0.00494059315 0.154646352 -0.0279375352 0.0956456512 0.128097624 -0.0107601006 0.102062821 0.0818155333 -0.00417978317 0.169782162 0.075039342 -0.132441014 0.136600628 -0.074167937 0.182788104 0.0683773085 -0.158134758 -0.153111815 -0.131043464 0.0933924839 0.149934262 0.0977778062 -0.123312928 -0.0546211079 -0.0884571895 -0.123066284 0.03769508 -0.0408919007 -0.0261739939 -0.0946909636 0.0763799548 0.10528186 -0.0239403676 -0.00170964003 0.107443631 0.0624295175 -0.041471377 0.0583784059 0.101691283 0.172578365 0.147721156 -0.00853029452 -0.0682957247 -0.128606334 0.108250916 0.110467754 0.0698471293 -0.10638088 -0.134169206 0.0335389711 0.0807912797 0.10768722 -0.0748209134 -0.0619279668 -0.0597048439 0.0238682833 0.0770351812 0.133448511 -0.116575532 -0.00418985356 -0.014257974 0.186535001 0.0766483098 0.0578167289 -0.1063518 -0.0533722229 -0.0255306661 0.0375191718 -0.0521368645 -0.0643548667 -0.100525327 0.087843962 0.0360278301 0.186081558 0.0779006332 0.124324918 -0.0178796574 0.0964507908 0.000115471157 0.121352643 -0.0145074725 0.136342749 -0.0524556786 -0.0543406755 -0.0231247786 0.0547336638 -0.110653035 0.00989100244 0.0201757103 0.168474302 0.0595019609 0.140253812 -0.0207650233 0.156596705 0.0423379913 0.0540750362 0.0326289497 -0.0725854859 0.120222606 0.0588764995 0.0232573915 0.156475991 -0.107084453 -0.109426022 0.120157138 -0.0353143811 0.109706894 0.0043564043 0.138297886 0.168600574 0.147256792 -0.00588848395 0.0737749934 0.0574953109 0.0112638986 -0.125408784 0.0741967931 -0.0695395544 -0.0118542425 0.0154474955 0.0319254026 -0.0753778815 -0.0449277535 -0.00750545319 0.170274869 -0.131167576 0.135590628 0.122695968 -0.0971804634 0.0734618902 -0.00931429863 0.0365690589 0.0510447621 -0.0520833731 -0.0832352266 -0.101591244 -0.0607022047 0.0549951196 0.127605066 0.0541120619 -0.0588162467 0.0998720974 0.0493049473 -0.0581545606 0.0780114084 0.0665595233 -0.0931976438 0.038903445 -0.0382999554 0.0369430333 -0.085522607 0.0509717613 -0.0687565655 -0.0897999555 -0.0971511528 0.138259694 0.116990075 0.00449900329 -0.0872562379 -0.138075918 0.096727103 -0.0454748869 -0.00835196674 0.0760231018 0.0369116217 0.0443069041 0.0586204678 0.134296641 -0.131746709 0.05054304 0.141017333 -0.0719901621 -0.0659282431 0.109382346 -0.129540086 0.118722782 0.0131379962 -0.10180755 -0.0842798278 -0.0826065615 -0.0181476772 -0.0728539303 0.0718309358 -0.0300796181 -0.120240092 0.133931667 0.0222150292 0.0798185244 -0.107362755 0.105332904 -0.135004058 -0.11729826 0.147456452 -0.0890139267 0.0949609131 0.13500765 0.126173526 0.11857067 -0.000773293898 0.0894765258 0.0475671589 -0.0154326251 0.15092206 -0.037826851 -0.049774427 0.103259467 -0.0269829631 -0.0261214655 0.0183449704 0.0224278755 -0.157012105 -0.0851792991 0.0859342813 0.112071827 -0.0393440984 0.137702033 -0.0219873022 0.115037508 0.0346440673 0.0699992254 -0.0236694999 0.164631814 0.0094958609 -0.153284445 0.040881291 -0.108448327 -0.110614225 0.0260498095 0.0183986761 -0.116771467 -0.00523093343 -0.0689058378 0.0228717029 0.034788698 -0.081005007 0.0377451777 -0.0307365786 -0.0821703076 0.0325528234 -0.14126493 0.0695137903 0.0801631436 0.0886292234 0.183120564 -0.0694635212 0.161076441 0.0139303654 0.108437546 0.0401087664 -0.0091292914 0.00868863612 0.141585931 0.156644121 -0.0755528286 -0.0417527556 0.00487269135 -0.0350182503 -0.126364589 -0.024675725 -0.0234753042 -0.00169406319 0.0142500447 0.0215916336 0.0767152011 0.0652107447 0.159517407 0.103661276 -0.0556443408 -0.0293452106 -0.0731868073 0.002619609 -0.0566306897 0.00929513387 -0.197862059 0.116043128 -0.00975279883 0.105451569 -0.0839810297 0.0331301987 -0.181696653 -0.0319507346 -0.00473482907 0.0933903456 -0.0281781182 0.00192525925 0.0756578669 0.160195202 0.0697703436 0.146909162 0.0564890578 0.00285601406 -0.0287020877 -0.0564336739 -0.0819117948 0.0810552388 0.165045336 0.147315055 -0.0212504752 0.10172905 0.0923355818 -0.0141675817 0.150093243 0.179681987 -0.114264801 -0.0956551284 -0.00638729148 -0.128433809 0.0788437128 -0.119181894 -0.0507362969 -0.115742147 -0.0738439783 0.0803774595 -0.0840121433 0.138585642 -0.0423021615 0.137064368 -0.0672038868 0.117374197 -0.142620116 0.0667178184 0.140385941 0.104921885 -0.107778296 4.28000058e-05 0.124074519 -0.114285842 0.0608676858 0.176241919 -0.0267177299 0.0773821026 -0.010434255 -0.0313907899 -0.0211786404 -0.0771455616 -1.91580984e-05 0.0954339802 -0.0113247158 0.103564784 0.17941153 0.166724324 0.0164197646 0.0291595794 -0.0204665605 -0.0382782891 -0.0668093562 -0.123122104 -0.132946268 0.160618961 0.0747604817 0.0837294608 0.169379547 -0.0834754705 0.0670293868 0.125993848 0.0960132033 0.00812351983 -0.0131131131 0.076757364 0.160401285 0.107536592 0.0723732188 -0.00439980626 0.0320614874 0.147804722 -0.123346433 0.138982892 0.0878069773 0.143579617 0.0674099252 0.0150862103 0.107429564 -0.0661900267 0.169957653 0.0881028622 0.0399612524 -0.0500341803 0.153780296 0.166395634 0.137741536 -0.106945679 0.0822954476 0.0795641989 0.135367706 -0.0886543989 0.00912791491 0.0881962925 0.00199659169 -0.0546213388 -0.0906977132 0.079190582 -0.0895646214 -0.135372177 -0.114347287 -0.0691716 -0.00326307118 0.0402559191 0.104741633 0.139453545 0.0585349798 -0.0482536629 0.0699717999 0.0140683651 -0.0911885202 -0.136688069 -0.0421462804 0.0418486297 -0.119360626 -0.132552981 0.139314875 -0.0619060844 -0.0730924681 0.0170933753 0.0174552351 -0.100706309 -0.0780835152 -0.0697649717 -0.0582113415 -0.0707645416 -0.104700163 0.0463543534 -0.00485464931 -0.0724216402 0.0457475185 -0.0357155427 -0.00419606268 -0.119270869 -0.0787308067 0.0156891048 -0.0961950272 -0.0582991764 0.0977203697 -0.0429823883 -0.0821526423 0.143807203 -0.0634690672 0.00810478069 0.0317232311 0.0819439888 -0.0359285586 0.145277902 0.0952888206 -8.69644427e-05 0.148746893 0.0504151061 0.130737811 -0.0782369152 -0.0521331243 -0.0100645348 -0.0266241431 -0.137800336 0.114405535 0.00846977346 -0.113788694 -0.0912239552 0.120060176 -0.0522767827 0.0220687613 0.069416903 0.0120533248 -0.050613381 -0.0949904099 -0.0282441527 -0.00488385558 -0.099041827 0.137989372 0.0805989653 -0.0629852638 0.0974306986 -0.0589808449 -0.0508747622 0.075892739 -0.112869091 -0.0507352911 0.153549612 -0.071098201 0.101641163 -0.0416162089 0.0734882876 0.0855877772 -0.0345149338 0.127689764 -0.0378296189 -0.0469297916 0.164389119 -0.0796598718 -0.118202388 -0.0146005005 0.168985084 0.160650715 -0.0404448994 0.155802146 0.025944557 0.139599308 0.0971967429 0.0117399581 -0.0171507876 -0.142275631 0.0839506909 0.0660035759 0.125143692 -0.0461359918 -0.116108619 0.098188132 0.119050659 0.0910999924 -0.117082357 0.0748248175 -0.0653233901 -0.0601715185 0.0412754826 0.105733104 0.0728395283 0.0162961781 0.131876916 -0.115422845 0.144570231 0.0887707174 0.097047694 -0.0720524788 0.0252056289 0.0271244925 0.130636543 0.0491141453 0.0332921445 0.113790326 -0.0568653941 -0.0698323101 -0.10968978 -6.08155533e-05 0.0675321668 -0.137371317 0.0749645829 -0.0216469616 0.0328196287 -0.141031533 -0.122319311 0.131579712 0.0271315724 0.12323828 -0.0254372582 -0.0824730098 -0.0256547406 0.1272306 -0.0528855324 0.12240018 -0.0112800747 0.0355034769 -0.0888580233 0.0970521122 0.0183172673 -0.0275907442 -0.122296013 0.0405875295 -0.112721227 0.0605996549 -0.124520272 -0.0803802982 0.0134015977 0.0546580702 -0.13796401 -0.0714464486 0.0842663348 0.124747202 -0.0913373977 -0.0796824321 0.125632361 -0.0140991956 0.139319405 -0.103938892 0.0555772036 -0.100091144 0.0524645001 -0.0437719002 -0.0133467764 -0.129776016 0.133946911 -0.0986202359 0.0850319415 -0.110841952 -0.061344333 0.00772999227 0.0248650014 -0.0392874405 0.133872464 -0.0498681031 -0.0235028919 -0.0904225931 0.0328456573 0.0908727348 0.063584283 0.0274092723 0.116862483 -0.0850711316 -0.00758437719 0.0703060776 0.0831446201 -0.130965278 -0.0187913738 0.0873690993 0.0466382094 0.10954687 0.132400319 -0.0466739088 -0.0960940421 -0.1589448 -0.0499225333 -0.14365828 0.147253916 0.114190049 -0.0489069223 0.00201576366 -0.0865763724 -0.141105622 0.0361168049 0.156169817 0.160132021 -0.0354782976 0.0171889104 0.00112317502 0.112893924 0.078825742 0.100391164 0.0169844106 -0.031357035 0.103728026 -0.0463408977 -0.00959085487 0.131976262 0.0656702071 -0.0224215165 0.0772374868 -0.102550328 0.130415276 0.0524659678 0.0614057928 -0.0569102988 -0.00330987363 -0.123422012 -0.0507996455 -0.111704901 -0.0851848572 0.0836961493 -0.0112077333 0.13896206 -0.0878892019 -0.0824800953 0.0448645279 0.0262479223 0.0815070346 -0.0392298363 0.0658551753 0.0590564199 0.137629583 -0.0485812463 -0.0806304142 0.0890918598 0.0108571658 0.0944422483 -0.0846425742 0.160814211 -0.0429416671 0.0703043491 0.0728700608 0.194119066 0.0854097977 -0.109207563 -0.0898918658 -0.0273189656 -0.116638198 0.0686772019 -0.109993316 0.124169983 -0.197762132 -0.0250630938 -0.0492028296 0.0160446316 0.174763769 -0.0346757248 -0.0763854831 -0.0731105581 -0.0112034082 -0.0417329669 0.0718449354 0.0531028807 0.0267944783 -0.125297725 0.0641857833 -0.0335034095 0.0876890123 -0.0756414309 0.023246035 0.0138799511 -0.00875826553 0.0961356089 0.0716646761 -0.0198832899 0.0437015556 0.0223694257 0.0413542055 -0.0606194325 0.0904049575 0.146203548 0.128786162 0.0143855959 -0.145219877 -0.0717869774 -0.0981839523 -0.0615584739 0.148408964 -0.102737933 -0.0149649279 0.109365121 0.0478291512 -0.10187453 0.00699617295 0.146613404 -0.033152815 -0.0324295312 -0.124276176 0.149597749 0.0922028646 0.157599032 -0.135646835 0.135939449 0.101161912 -0.0470223129 0.135992065 0.049443569 -0.173461407 -0.0964307934 -0.131441608 0.0878081992 -0.0803490132 -0.0302332956 -0.103354827 0.0431390628 -0.102742799 0.0908333436 -0.0526512675 0.0258093104 0.0369244777 0.0257188752 0.133202627 0.0297276527 0.149491176 0.151769906 0.0164624956 0.0616860129 -0.0109535027 -0.118965067 0.0411789566 0.1403061 -0.0148792714 0.0120514603 -0.0927679688 -0.0414527878 0.0476270206 0.0865987465 0.157360256 -0.00801904406 -0.0541693904 -0.0268263221 -0.109232292 0.0128868064 0.0780390874 -0.124661535 -0.0210585389 -0.14193961 0.111578748 0.036265783 0.0652774721 0.0353158712 -0.150081992 -0.0131793777 0.0812528357 -0.119403362 -0.0613792799 0.160303533 -0.191477299 0.0978740901 0.0488265157 0.0381616503 -0.057727918 0.0255572535 0.0918723196 0.0639780238 0.0967227072 0.128719524 -0.0644618943 -0.0716221407 -0.113643073 0.0597443692 0.133907422 0.132179052 -0.0287083741 0.161785662 0.0266503692 -0.0227328409 0.116025813 0.098081924 0.0945273861 0.121004365 -0.128996223 -0.16734314 -0.116783001 -0.125158042 -0.124759458 0.0583286844 0.0625823587 -0.14607048 0.0436232202 -0.0646699741 -0.0939132422 -0.0846700892 0.171912223 -0.00973419473 0.1652738 -0.0948991999 -0.061686486 0.168850407 0.0989598855 -0.111697182 -0.0287689064 -0.0542520583 0.0386746228 0.02370058 0.0738422796 0.0875272676 0.0808269605 0.13247247 0.017513141 -0.0532009676 0.0261921436 0.0148328589 0.12498486 0.0354332887 0.035208758 0.136238024 0.0110940281 -0.0153260306 0.0743869767 0.0171744954 -0.0469010063 0.0871724337 -0.119166553 -0.0623017251 0.0695702434 -0.0897610858 0.0566469952 -0.0424713381 0.0625269711 -0.063430272 -0.138267055 -0.0862576067 0.0633616149 0.0599713437 -0.0952118188 0.120180786 -0.0877415165 -0.0423083492 0.142752334 0.0664141625 0.0153184086 -0.11723125 0.0551554859 0.0941181779 0.0712193325 -0.103319407 0.0191577971 0.122941971 0.10161072 0.0123700919 0.133175269 -0.0438123122 0.00521303201 -0.0965816975 -0.0765956715 0.157058403 0.0214360245 0.0272503048 0.0736062229 -0.127181187 0.0797498822 0.00283462019 -0.0299958158 -0.177072033 -0.10098806 0.0592009835 0.111474186 -0.0195805114 0.0541746095 0.0449817032 0.0997726023 -0.0977167487 0.119134799 -0.0316534825 0.000842235982 0.10191527 0.117535733 -0.0239665229 0.126424983 -0.123411469 -0.075183101 0.0330423154 -0.0681335926 -0.0103695542 -0.0856132656 0.0537110977 -0.102201037 -0.077232644 0.0817721263 0.0940563828 0.0121499747 -0.0706892982 0.0818995014 -0.0357823521 -0.0557577461 -0.0687677711 0.0249025971 0.115824074 0.0820207819 -0.0299545322 0.154968947 0.146601692 0.0333185792 -0.0947523862 0.11622557 -0.00900522526 0.164223433 -0.00459069014 -0.0108394325 -0.102431804 -0.000589489937 -0.149016678 0.094403781 0.00958791561 -0.139076263 -0.00828526635 -0.0485995077 0.0503224507 -0.106602147 -0.0803155676 0.0535347834 0.134317338 0.116844706 -0.0210482683 0.153106958 -0.0599929169 -0.0479789935 -0.00114973786 0.0614023507 0.00115412858 -0.0867474899 -0.0715186149 0.0718893707 0.0815631598 0.0764374584 0.120969631 -0.0238302015 -0.0586408377 -0.172944322 -0.032943733 0.0524964482 -0.116896465 0.0810932145 0.0334346704 -0.146538407 -0.0801583529 -0.101430289 -0.0612037107 -0.113821179 0.00876645837 0.0436918586 0.00689579546 -0.126817837 0.0449476391 0.00970517192 0.120329946 -0.130458891 0.0930926949 -0.0741954371 0.117795736 -0.0513594598 -0.0421553478 -0.0799744502 0.139152512 -0.132767022 -0.148529813 0.0808773115 0.0915669501 -0.137642413 0.179301977 -0.0657531545 0.111276798 0.13818717 0.180973396 0.148477748 0.00805056468 -0.0999653786 -0.0432874598 0.14668797 0.00481138518 0.0158720016 -0.111080863 0.0309790596 0.0831020325 0.0419512913 -0.0200633295 0.0992910117 -0.0361859724 -0.0418536253 -0.0347556248 -0.055278711 -0.129016384 0.156187266 -0.0416366123 -0.0245922692 0.0343578979 0.0859458372 0.101438627 0.00994137488 -0.069522813 -0.0426072292 0.0350656547 -0.103549249 0.0265573412 0.0763911158 -0.102405414 0.0613944381 -0.122291684 -0.030453749 -0.0144146821 0.0219928026 0.128770724 -0.0348007977 0.176855937 0.114892505 0.0656236783 -0.00231774151 0.150578141 -0.0806376413 -0.0191067625 -0.112510107 0.0331190526 0.0922281742 -0.130172268 0.0489657484 0.0407428741 -0.112448193 0.131103024 -0.0824060962 -0.0440546162 -0.135340303 0.167595923 0.0455297045 0.146565259 0.106871687 0.0179066844 0.14257127 0.0931942984 -0.0602279194 0.114411339 0.0802091658 0.183811814 0.0961534381 -0.0782056525 0.0361536555 -0.0492672026 0.0595217533 0.107877143 -0.0258457195 -0.0297816172 0.11304047 -0.0520307608 -0.106525473 0.0179495215 0.050986968 -0.00489270268 0.0162587743 -0.137507111 0.0951974392 0.0486419611 0.117301539 0.0375222899 0.00458820676 0.00400207192 -0.0281724483 0.160230845 -0.0574774742 0.0331582278 0.0722234622 -0.0535472929 0.0631503314 0.0969888195 0.140119597 0.0477451496 -0.175573215 0.0213595796 0.0623874292 0.0219977442 0.123139918 0.0430315174 -0.0433251262 -0.107093088 -0.112723738 -0.000674394774 0.0406252891 0.0568238758 -0.10773351 -0.0489739999 0.0599360615 0.126524165 -0.0252056345 0.103718482 -0.00571722444 0.0458063446 0.163875476 -0.00559463073 -0.129122823 0.0351556465 -0.12400604 0.0866204128 -0.00551600335 -0.0378636234 -0.128071412 -0.0678838044 -0.0604350902 0.135361746 -0.0829056799 0.00692036748 0.00260563195 -0.0532157905 0.124754503 0.0440538712 0.0327834561 0.182058588 0.110708781 -0.0718415529 0.0142884664 -0.0505664833 0.111684635 0.0205106754 -0.115548059 0.0633464828 -0.099902004 -0.144687116 0.102363184 -0.0373336896 0.0391648747 0.108540595 0.171795741 -0.0586603731 0.135946795 0.0958023518 0.0913395435 0.104659595 -0.104977675 -0.136726558 0.101383127 -0.00297953957 -0.0918395743 -0.0204569325 -0.130254254 0.0839903727 0.0175899137 0.113330193 0.0912543014 -0.0047330535 0.123540469 -0.0720442235 0.039481502 0.163159758 -0.122090541 0.161992833 -0.116110057 -0.0345729962 0.180961043 0.13387315 -0.0200760309 0.0601475984 -0.124806568 0.155271858 0.0845254213 0.0596974678 0.126717508 0.0584589094 0.104401402 -0.0844913498 -0.106263012 -0.0435930751 -0.0393355973 -0.043946974 0.0409472808 0.0387777314 -0.0833237693 0.0790580288 0.00874889921 0.122026242 0.106449321 0.00690521859 -0.0539527871 -0.0490253009 -0.116141111 -0.0438183956 0.0866069868 -0.0664470792 0.162248239 0.161943093 0.104601003 -0.0271019768 0.0825799704 0.00950320065 -0.0404817834 0.129946649 0.114401944 -0.0974406078 -0.144606605 0.0822113976 -0.133282393 -0.0866103172 -0.161681429 0.0105247656 0.0808934346 -0.0963236913 -0.130843952 -0.0294511542 0.14981015 0.0511926003 0.119163141 0.0719184428 0.014378503 -0.0375215076 0.100146711 0.00126785447 0.159019947 -0.0868258625 0.0822456554 0.00716631068 0.0106834266 -0.0192997064 0.0986597613 -0.0639843941 0.0297827087 0.0358716287 -0.0587924942 0.0603018999 0.0780824944 -0.123953499 -0.0754288808 -0.00476129353 -0.0413157828 0.0502767749 -0.0340033025 -0.111571126 -0.100440137 0.147668496 0.0673740879 -0.0353276059 0.0640556961 -0.0988995805 0.136604816 0.140064046 0.0980866402 -0.0750294924 -0.137589633 0.043476589 -0.114663213 -0.0333486758 -0.0707642436 -0.0460274704 0.114950918 -0.0877812207 -0.0162663646 0.150960758 -0.126444697 -0.042077966 0.0971751958 0.0925146341 -0.0311614387 0.0668840557 0.114941254 0.0032322011 0.110740408 0.0407327525 0.127984926 -0.0671080053 -0.0279953331 0.0831837729 0.0103864418 -0.0418696329 -0.0798007697 0.00768780895 -0.0546894707 -0.0398143195 0.118723847 0.116604052 0.0362069607 -0.120131537 -0.0460622348 0.0910601467 0.161261335 0.132019728 0.0777907819 0.12057399 -0.0873341486 0.0300714727 -0.0391077697 0.139219776 0.034336701 0.0798435882 0.0177026708 -0.00687919557 -0.0386889726 -0.0851943567 0.120803818 -0.0706116483 0.0470289737 -0.00083668204 -0.0218703523 0.0288076419 -0.0316865966 0.111179002 0.088308543 0.0760286823 0.104716919 0.00950780511 -0.0447407067 0.098795779 -0.116593644 0.103690639 -0.0872254521 -0.0867426321 0.0982015431 0.170969442 0.0893125832 -0.052779343 -0.103205182 -0.0188849494 0.148354068 0.135574684 0.16014275 0.0423907042 -0.123855539 -0.0422023758 -0.0901324898 -0.155949607 -0.0723659694 -0.0916374624 0.0385438688 0.129645094 -0.0276183728 -0.0442992523 0.0822724849 0.12342529 -0.0557483733 -0.0643166676 -0.137550637 -0.0250330232 -0.111662775 0.175575733 0.015148961 0.0261206627 0.134495571 -0.0594918132 0.0758288354 0.060908068 -0.0519245639 0.0473323725 -0.127921104 0.0161799341 0.0626626536 0.0191916153 0.0295425896 0.00972762611 -0.0193439405 0.0721613467 0.0690132603 0.145383656 0.139039531 0.0707440674 0.0853963941 0.121891946 0.0830445886 -0.139111638 -0.046770677 -0.0729267821 -0.0370384678 -0.122914143 0.0188236907 -0.0119272592 0.131827608 0.0751277357 0.0265672356 0.102637075 0.000820508925 0.128561452 -0.113339409 -0.0267119724 -0.0484277457 0.0841864794 -0.0932453424 0.112891175 -0.0558898747 -0.118770018 0.107176155 -0.0351163447 0.0367854685 -0.0528843328 -0.0932976678 0.132041663 -0.0217739437 0.0115754455 0.0903767347 -0.0251083001 -0.0478500426 -0.122446015 -0.110440329 -0.0738081709 0.13881968 0.0556323677 0.113088712 -0.0600917004 0.0295646787 0.0281529948 -0.0955312327 -0.0372477174 -0.0413099937 -0.12244685 -0.0883221477 -0.0363030881 -0.0330142006 0.0267371461 -0.0137264878 -0.110913046 0.0730738714 0.186447442 0.190215573 0.109426335 0.144610614 0.092832081 0.0243124962 0.082650587 -0.0580552481 -0.0164464172 0.132549599 -0.16663675 0.176283285 0.0554265827 -0.0576385558 0.0210192334 -0.0107073868 0.0469353348 0.0421484709 0.20231232 0.112845972 -0.0282869264 0.0477520488 -0.0603832416 0.0570014864 0.14945437 -0.123605035 0.00659620762 -0.0773272812 0.139960304 -0.0898624659 -0.0288678277 -0.073680222 0.0669173375 -0.125848636 -0.0638360605 0.00877119321 0.146627396 0.100049771 -0.174804077 -0.0694195628 0.114080206 -0.035090182 -0.0557439104 0.0807942003 0.0843401998 0.0177999288 0.0371674523 0.035731107 -0.111021757 -0.093117848 -0.0678854063 0.0266885534 -0.00953813922 0.089507781 -0.106526807 -0.0295186415 0.00943453331 0.142027363 0.108211689 0.106748775 0.0579309165 0.0385677479 -0.100708753 0.151549757 0.0247560851 0.0252912976 0.0531664081 -0.123913847 -0.14173685 -0.0242508408 0.159124956 -0.113823667 -0.0568166599 0.0248767957 0.0976887569 -0.0333233364 0.0985455513 0.0487232953 0.0828748867 0.0737239048 -0.00177626917 0.0483965725 0.0487547442 0.0349344462 0.0642146096 0.135932118 0.139095858 -0.161683232 0.147549808 -0.0745932832 -0.0598408207 -0.0111973844 0.052948419 -0.133784652 -0.0361436382 -0.0810343474 0.113400467 0.0375890285 0.0919118151 0.165626541 -0.0969523787 -0.116375238 0.156546161 0.160234615 0.152540177 -0.0588590465 -0.082201235 0.151506081 -0.0245612338 0.108619809 0.0955060497 0.0378499888 0.0223950893 -0.0736716762 -0.0403732657 -0.112797014 0.153787464 -0.00461600721 0.0319394395 0.0467294194 0.0903732851 -0.155461773 -0.0431794003 -0.0157981049 -0.0684597194 -0.0530542284 -0.0374011174 -0.0614936724 -0.00120179285 -0.105804361 0.0348305702 0.146401137 -0.103180595 -0.0832952708 0.0653216466 0.112658091 0.107945248 0.107463151 -0.0334763043 0.137352556 0.0405373275 0.0836709291 0.0841100141 0.000271841243 -0.0795930102 0.115181461 -0.0337604173 -0.128911823 0.123057336 0.0785609409 0.0277274642 0.143650293 0.0181832965 0.126039356 -0.0717210472 -0.0782386661 -0.207162619 -0.0587309264 0.0823272541 0.11590194 -0.15862602 -0.110215195 0.119876325 -0.03551469 0.203044742 0.0244795568 0.10723605 0.0298106819 0.174269021 0.143131137 0.145505443 0.0387773216 0.0399972759 -0.0434709154 0.174700931 0.0863626674 -0.00607159734 -0.00721096992 0.181514055 -0.174008265 -0.106141508 0.0943884626 0.0748455301 -0.08260189 0.128811941 0.09916839 -0.0107264379 0.174222261 -0.116252743 0.108514942 0.0142451078 0.0903487951 0.0105539095 -0.00275873509 -0.0601309314 -0.103395417 0.0936504826 0.00442700088 0.0223080404 -0.0448008254 0.0983777866 0.0909713805 0.0360316932 0.00507651642 -0.0769937262 0.0967232734 -0.0641804114 0.122599706 -0.113460898 -0.107624725 0.117271051 -0.00697429758 0.0493416227 -0.127027497 -0.0720096231 -0.0119818123 0.0336597078 -0.0459525064 -0.0465637259 -0.0834386647 0.0638463795 0.0122022033 -0.0931649208 -0.00553575298 -0.0875594765 0.0826876462 -0.00695849955 0.00258775055 -0.128275678 0.0674737692 -0.0166282598 -0.101550914 -0.000357478857 0.136971578 -0.0781561136 0.0165070295 0.130231693 -0.0846520886 -0.0524368845 0.13351126 0.0329737216 -0.0871247053 -0.0810154378 0.127794713 0.0551171899 0.0123283863 0.0219221041 0.0886786133 -0.0899260417 0.113652535 0.092415683 0.148293018 -0.0763472393 -0.00241494132 0.0385320969 -0.0792235285 0.119529434 0.116140589 -0.00117489253 -0.0458484106 0.022996638 -0.0169208553 0.122886755 -0.123275951 0.00158014265 0.0272646137 0.0736831352 0.0474004149 0.114665695 -0.0364573114 0.021729935 0.107678957 0.0608051606 0.109061301 -0.0183101986 -0.143050343 0.011449188 0.0253396928 0.0116122691 0.169679016 -0.108423777 0.114302441 -0.0180101134 0.0376329049 0.000675914111 -0.0989059806 0.00174778071 0.0729235336 0.0997067988 0.0798274875 0.00286698341 0.0484239347 0.0732191578 -0.0997895151 0.0131101757 0.0049995631 0.0331563503 0.0908577889 0.0351340324 0.0972493589 -0.140762478 0.0144873261 0.0925962031 0.042752862 -0.0921942815 0.123944506 0.103000984 0.134866908 -0.0654911175 -0.131940141 -0.124962308 0.121972367 -0.0777393058 -0.0134515613 -0.0593911484 0.0903083831 -0.0194513649 0.125025287 -0.0718345344 -0.028370142 -0.137873486 -0.138855338 0.102305382 -0.114035651 -0.00128389895 -0.0209189281 -0.0363460779 0.0348549187 0.0462828428 -0.135960251 0.046998933 -0.00560566783 -0.00562353432 0.0511512756 -0.097703442 -0.000405952334 -0.102428362 -0.13385509 0.0778306574 -0.126189426 0.00569581985 -0.0901096463 0.0396448672 -0.0905233249 0.0647575855 -0.0692101866 -0.118010342 0.140668467 0.0754639357 0.0287674516 -0.127176955 0.0508546382 -0.0297820792 -0.034435086 -0.0525137484 0.0414588898 0.125622496 -0.122770369 -0.103983626 0.0979629755 -0.134100616 0.0132061988 0.136052623 -0.101130173 -0.125888899 -0.0912302136 -0.000320419669 0.0951493084 0.0250687301 0.0693171024 0.0681221336 0.0920942575 -0.0242754743 -0.126860991 0.00760373473 0.0472807437 0.00129759312 -0.0417886227 -0.135421842 -0.0420329869 0.109683141 -0.0780478939 0.0869462043 -0.0336673781 0.068866685 -0.0332275853 0.0885529816 -0.0637842491 -0.0287733674 0.0399621874 0.00726474822 -0.0887221545 -0.026667513 -0.10050422 -0.0866817236 -0.0743763596 -0.0557404384 -0.0415508077 -0.0706221908 0.03670571 0.0769552216 -0.136574149 0.12560834 0.0611953586 0.0578964055 0.0321745314 -0.108904168 -0.0603623837 0.106482595 0.0418807454 0.0567226261 -0.0480391793 -0.0818945691 -0.0918800607 0.0692870989 -0.0831394047 -0.140842602 -0.0133125484 0.0388532393 -0.0127856918 -0.0663905591 -0.0192305241 -0.00343641825 0.0277077034 0.0341987535 -0.0332082435 0.0317000449 0.133374199 0.0840312392 -0.00916420575 -0.0913549662 -0.131094456 -0.114135049 0.0324433446 -0.0922504738 0.0968189761 0.0722978711 -0.0336492881 -0.135974228 -0.0138650686 0.0959312618 -0.0392885059 0.0773027018 0.0320036188 -0.136021271 -0.096613884 -0.0733416006 -0.0797348469 0.198495209 0.0582847074 -0.0810181722 -0.0431594327 -0.0618753731 0.0181986317 0.078358449 -0.0357146375 0.0401486307 -0.0250369273 0.0634938627 -0.0496993251 0.020140117 -0.127322808 -0.0666764453 0.138859123 0.0398158766 0.110776573 0.142119944 -0.034179695 -0.0179098602 0.0632333979 -0.00735516846 -0.071974054 -0.118237272 0.097171858 -0.107004285 0.0833490044 -0.0937577263 0.0946442783 0.0788892806 0.0437900051 -0.0310971122 0.0778285116 -0.0993157849 0.0229949653 0.11270339 0.16689226 0.162221476 -0.0412555002 0.173148394 -0.0327887125 -0.121510416 -0.0140555762 0.0903469846 -0.029039843 0.068566218 -0.12506409 0.0382451862 0.0248380303 0.0277039558 -0.14029333 -0.126389086 -0.00850690901 0.0435213149 0.0944143683 0.13669233 0.010785535 0.131652728 0.08942011 -0.0438658521 -0.090351522 0.0258171856 -0.0345944241 0.0610096753 -0.0412340835 -0.0557757616 -0.114867412 -0.0179175213 0.00409558415 -0.0384710208 0.100451782 0.141252503 -0.0818129927 0.0703791678 -0.070404999 -0.110884957 -0.109055385 -0.0898616537 -0.0513886064 0.12470524 -0.13137272 -0.0607609376 -0.0753680915 0.0404375494 -0.0389348865 -0.0476250499 0.139109835 -0.0529488549 -0.123520866 0.11238347 -0.0333348513 -0.0331748128 -0.042967163 0.098916471 0.100189671 0.113453075 0.0968655795 -0.0717557892 0.00738480687 0.0515316948 -0.0553591289 -0.00930176955 -0.00901313685 0.175364062 -0.0196713433 -0.0502335988 0.0383971296 0.121085942 0.103061765 0.15212594 0.116526209 -0.0264002159 -0.094560124 -0.0376541167 0.147969663 -0.0978502855 -0.00200880459 0.0312320646 -0.0133588314 0.0813049003 -0.029433284 -0.0262242351 -0.106563412 0.0297239125 0.0302417222 -0.0301722232 -0.0122521063 -0.0121933669 0.0207778811 0.118457362 0.0412592813 0.133936256 -0.0869062468 -0.0865229592 -0.0799478814 -0.0991789475 -0.0158145819 0.120537013 0.0608306117 0.17652306 -0.0725468472 0.126066044 -0.0148094278 -0.0349133536 0.0138158342 -0.00439210236 0.0949555784 -0.0591577739 -0.0189778134 0.0861846507 0.024233669 -0.0585409105 -0.114641324 -0.0379219502 0.0835855305 0.0618880242 -0.00448146462 -0.0475826338 0.0992924273 -0.0171841681 0.130379245 -0.115067512 -0.0295631811 0.132417724 -0.00352632999 -0.0539170504 -0.0408100188 0.0476579517 0.0435158908 -0.108436733 0.0848499984 -0.0134641975 0.103017956 0.113764629 -0.0229989663 -0.0402461812 0.10478355 0.105660841 -0.103532135 0.0136412531 -0.113981269 -0.130932122 0.020966202 0.103488669 0.0354949832 -0.0729100034 -0.0167546049 -0.0969255865 -0.0415144786 -0.0227924958 0.0969979763 -0.0482411981 -0.0222034156 0.0317669511 -0.0238891691 -0.0465090126 -0.049510397 -0.00603993051 -0.0495383143 0.0391993932 0.0514197163 0.0751582161 -0.0600364245 0.149168238 0.00608866569 0.145530403 -0.0471902825 -0.00813056901 0.0724131763 0.02852365 -0.0375874527 0.0505036674 0.162053078 0.061635565 -0.0772141367 -0.101029664 0.135133624 -0.026366543 0.0173085034 -0.196915612 -0.094210051 -0.0830252171 -0.0152556822 0.0760191381 0.11165338 0.015817089 0.0882711783 0.0494683385 -0.135736912 -0.0937291756 0.155131117 0.0703033358 -0.101866767 -0.136783585 -0.0569891371 0.0305453669 0.0333361402 0.0399238467 0.0434939228 0.0999660268 -0.106167085 -0.0346824601 0.1533719 -0.0761708841 0.0984655321 0.0181172937 -0.0619307309 -0.00992794242 0.070495829 0.154349118 0.0747213513 -0.191178009 -0.087587826 0.0466681495 -0.128632694 0.0855400562 -0.12334384 0.01812369 0.155531704 0.11304646 0.132001072 0.196818605 -0.0396067835 -0.0600948259 -0.0705011338 -0.113130942 -0.11137788 0.0562906638 0.0935193449 0.0702943429 -0.135742277 -0.0983269736 -0.164341018 0.0970739946 0.0331884474 0.0538573004 -0.0459236056 -0.0607440993 -0.057828702 -0.11368376 0.115170449 0.144811422 0.046080593 -0.116181828 0.0888293982 0.149672315 -0.085598737 -0.0972040147 0.101113833 0.145851657 0.0576108471 0.0922829062 0.0284254942 0.0193997808 -0.000928774185 -0.0930889547 -0.0205265954 +tensor_16bias 50 +-0.142527401 0.168874308 -0.0887828916 -0.0631441548 -0.0660232753 0.0544182472 0.0641550943 0.0606994219 -0.0223076269 0.102970138 0.0258652028 -0.0809393153 -0.0276761875 0.0546831638 0.128886178 -0.0795307532 -0.0189131647 -0.120750055 0.17368494 -0.0492844023 -0.0578795224 0.0671775565 0.0123591702 0.132642269 -0.0525798798 0.0173827056 0.0659839064 -0.0958536118 0.0788409114 0.0107072359 0.0492983423 -0.02552481 0.0519438088 0.137576833 -0.115964495 -0.0723876506 0.111166283 -0.119082645 -0.0866416842 0.0962954909 0.110601142 -0.0136935199 -0.138656154 -0.0623757802 0.0855045691 -0.12340495 0.0683390722 -0.0176106151 -0.0413498878 0.0233600959 diff --git a/Linear_16.hxx b/Linear_16.hxx new file mode 100644 index 0000000..8d831c0 --- /dev/null +++ b/Linear_16.hxx @@ -0,0 +1,658 @@ +//Code generated automatically by TMVA for GPU Inference using ALPAKA of Model file [Linear_16.onnx] at [Fri Apr 11 14:16:45 2025] + +#ifndef SOFIE_LINEAR_16 +#define SOFIE_LINEAR_16 + +#include +#include +#include +#include +#include +#include "SOFIE/SOFIE_common.hxx" +#include + +using Dim1D = alpaka::DimInt<1>; +using Acc = alpaka::TagToAcc; +using Queue = alpaka::Queue; + +namespace SOFIE_Linear_16{ +struct Session { + +// initialized tensors +auto deviceBuf_8weight = alpaka::allocBuf(devAcc, 2500); +auto deviceBuf_8bias = alpaka::allocBuf(devAcc, 50); +auto deviceBuf_4bias = alpaka::allocBuf(devAcc, 50); +auto deviceBuf_2weight = alpaka::allocBuf(devAcc, 2500); +auto deviceBuf_0bias = alpaka::allocBuf(devAcc, 50); +auto deviceBuf_12bias = alpaka::allocBuf(devAcc, 50); +auto deviceBuf_18bias = alpaka::allocBuf(devAcc, 10); +auto deviceBuf_14bias = alpaka::allocBuf(devAcc, 50); +auto deviceBuf_4weight = alpaka::allocBuf(devAcc, 2500); +auto deviceBuf_10weight = alpaka::allocBuf(devAcc, 2500); +auto deviceBuf_6bias = alpaka::allocBuf(devAcc, 50); +auto deviceBuf_18weight = alpaka::allocBuf(devAcc, 500); +auto deviceBuf_0weight = alpaka::allocBuf(devAcc, 5000); +auto deviceBuf_10bias = alpaka::allocBuf(devAcc, 50); +auto deviceBuf_2bias = alpaka::allocBuf(devAcc, 50); +auto deviceBuf_6weight = alpaka::allocBuf(devAcc, 2500); +auto deviceBuf_14weight = alpaka::allocBuf(devAcc, 2500); +auto deviceBuf_16weight = alpaka::allocBuf(devAcc, 2500); +auto deviceBuf_12weight = alpaka::allocBuf(devAcc, 2500); +auto deviceBuf_16bias = alpaka::allocBuf(devAcc, 50); + +//--- declare and allocate the intermediate tensors +auto bufDev_18biasbcast = alpaka::allocBuf(devAcc,160); +auto bufDev_38 = alpaka::allocBuf(devAcc,800); +auto bufDev_14biasbcast = alpaka::allocBuf(devAcc,800); +auto bufDev_34 = alpaka::allocBuf(devAcc,800); +auto bufDev_22 = alpaka::allocBuf(devAcc,800); +auto bufDev_2biasbcast = alpaka::allocBuf(devAcc,800); +auto bufDev_24 = alpaka::allocBuf(devAcc,800); +auto bufDev_0biasbcast = alpaka::allocBuf(devAcc,800); +auto bufDev_6biasbcast = alpaka::allocBuf(devAcc,800); +auto bufDev_4biasbcast = alpaka::allocBuf(devAcc,800); +auto bufDev_16biasbcast = alpaka::allocBuf(devAcc,800); +auto bufDev_8biasbcast = alpaka::allocBuf(devAcc,800); +auto bufDev_26 = alpaka::allocBuf(devAcc,800); +auto bufDev_28 = alpaka::allocBuf(devAcc,800); +auto bufDev_10biasbcast = alpaka::allocBuf(devAcc,800); +auto bufDev_30 = alpaka::allocBuf(devAcc,800); +auto bufDev_32 = alpaka::allocBuf(devAcc,800); +auto bufDev_36 = alpaka::allocBuf(devAcc,800); +auto bufDev_12biasbcast = alpaka::allocBuf(devAcc,800); + +Session(std::string filename ="Linear_16.dat") { + +//--- reading weights from file + std::ifstream f; + f.open(filename); + if (!f.is_open()) { + throw std::runtime_error("tmva-sofie failed to open file " + filename + " for input weights"); + } + std::string tensor_name; + size_t length; + f >> tensor_name >> length; + if (tensor_name != "tensor_8weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_8weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 2500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_8weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_8weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_8bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_8bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_8bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_8bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_4bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_4bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_4bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_4bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_2weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_2weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 2500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_2weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_2weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_0bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_0bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_0bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_0bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_12bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_12bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_12bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_12bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_18bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_18bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 10) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 10 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_18bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_18bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_14bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_14bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_14bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_14bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_4weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_4weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 2500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_4weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_4weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_10weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_10weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 2500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_10weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_10weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_6bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_6bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_6bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_6bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_18weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_18weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_18weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_18weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_0weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_0weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 5000) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 5000 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_0weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_0weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_10bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_10bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_10bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_10bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_2bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_2bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_2bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_2bias"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_6weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_6weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 2500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_6weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_6weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_14weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_14weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 2500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_14weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_14weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_16weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_16weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 2500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_16weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_16weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_12weight" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_12weight , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 2500) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_12weight[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_12weight"); + } + f >> tensor_name >> length; + if (tensor_name != "tensor_16bias" ) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_16bias , read " + tensor_name; + throw std::runtime_error(err_msg); + } + if (length != 50) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) + f >> tensor_16bias[i]; + if (f.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_16bias"); + } + f.close(); + + auto hostBuf_8weight = alpaka::allocBuf(hostAcc,2500); + std::memcpy(alpaka::getPtrNative(hostBuf_8weight), tensor_8weight, 2500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_8weight, hostBuf8weight, 2500); + auto hostBuf_8bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_8bias), tensor_8bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_8bias, hostBuf8bias, 50); + auto hostBuf_4bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_4bias), tensor_4bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_4bias, hostBuf4bias, 50); + auto hostBuf_2weight = alpaka::allocBuf(hostAcc,2500); + std::memcpy(alpaka::getPtrNative(hostBuf_2weight), tensor_2weight, 2500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_2weight, hostBuf2weight, 2500); + auto hostBuf_0bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_0bias), tensor_0bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_0bias, hostBuf0bias, 50); + auto hostBuf_12bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_12bias), tensor_12bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_12bias, hostBuf12bias, 50); + auto hostBuf_18bias = alpaka::allocBuf(hostAcc,10); + std::memcpy(alpaka::getPtrNative(hostBuf_18bias), tensor_18bias, 10* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_18bias, hostBuf18bias, 10); + auto hostBuf_14bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_14bias), tensor_14bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_14bias, hostBuf14bias, 50); + auto hostBuf_4weight = alpaka::allocBuf(hostAcc,2500); + std::memcpy(alpaka::getPtrNative(hostBuf_4weight), tensor_4weight, 2500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_4weight, hostBuf4weight, 2500); + auto hostBuf_10weight = alpaka::allocBuf(hostAcc,2500); + std::memcpy(alpaka::getPtrNative(hostBuf_10weight), tensor_10weight, 2500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_10weight, hostBuf10weight, 2500); + auto hostBuf_6bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_6bias), tensor_6bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_6bias, hostBuf6bias, 50); + auto hostBuf_18weight = alpaka::allocBuf(hostAcc,500); + std::memcpy(alpaka::getPtrNative(hostBuf_18weight), tensor_18weight, 500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_18weight, hostBuf18weight, 500); + auto hostBuf_0weight = alpaka::allocBuf(hostAcc,5000); + std::memcpy(alpaka::getPtrNative(hostBuf_0weight), tensor_0weight, 5000* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_0weight, hostBuf0weight, 5000); + auto hostBuf_10bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_10bias), tensor_10bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_10bias, hostBuf10bias, 50); + auto hostBuf_2bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_2bias), tensor_2bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_2bias, hostBuf2bias, 50); + auto hostBuf_6weight = alpaka::allocBuf(hostAcc,2500); + std::memcpy(alpaka::getPtrNative(hostBuf_6weight), tensor_6weight, 2500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_6weight, hostBuf6weight, 2500); + auto hostBuf_14weight = alpaka::allocBuf(hostAcc,2500); + std::memcpy(alpaka::getPtrNative(hostBuf_14weight), tensor_14weight, 2500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_14weight, hostBuf14weight, 2500); + auto hostBuf_16weight = alpaka::allocBuf(hostAcc,2500); + std::memcpy(alpaka::getPtrNative(hostBuf_16weight), tensor_16weight, 2500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_16weight, hostBuf16weight, 2500); + auto hostBuf_12weight = alpaka::allocBuf(hostAcc,2500); + std::memcpy(alpaka::getPtrNative(hostBuf_12weight), tensor_12weight, 2500* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_12weight, hostBuf12weight, 2500); + auto hostBuf_16bias = alpaka::allocBuf(hostAcc,50); + std::memcpy(alpaka::getPtrNative(hostBuf_16bias), tensor_16bias, 50* sizeof(float)); + alpaka::memcpy(queue, deviceBuf_16bias, hostBuf16bias, 50); + +//---- allocate the intermediate dynamic tensors +//--- broadcast bias tensor 0biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_0bias,{ 50 }, { 16 , 50 }); + auto hostBuf_0biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_0biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_0biasbcast, hostBuf_0biasbcast , 800); + } +//--- broadcast bias tensor 2biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_2bias,{ 50 }, { 16 , 50 }); + auto hostBuf_2biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_2biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_2biasbcast, hostBuf_2biasbcast , 800); + } +//--- broadcast bias tensor 4biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_4bias,{ 50 }, { 16 , 50 }); + auto hostBuf_4biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_4biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_4biasbcast, hostBuf_4biasbcast , 800); + } +//--- broadcast bias tensor 6biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_6bias,{ 50 }, { 16 , 50 }); + auto hostBuf_6biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_6biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_6biasbcast, hostBuf_6biasbcast , 800); + } +//--- broadcast bias tensor 8biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_8bias,{ 50 }, { 16 , 50 }); + auto hostBuf_8biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_8biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_8biasbcast, hostBuf_8biasbcast , 800); + } +//--- broadcast bias tensor 10biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_10bias,{ 50 }, { 16 , 50 }); + auto hostBuf_10biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_10biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_10biasbcast, hostBuf_10biasbcast , 800); + } +//--- broadcast bias tensor 12biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_12bias,{ 50 }, { 16 , 50 }); + auto hostBuf_12biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_12biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_12biasbcast, hostBuf_12biasbcast , 800); + } +//--- broadcast bias tensor 14biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_14bias,{ 50 }, { 16 , 50 }); + auto hostBuf_14biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_14biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_14biasbcast, hostBuf_14biasbcast , 800); + } +//--- broadcast bias tensor 16biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_16bias,{ 50 }, { 16 , 50 }); + auto hostBuf_16biasbcast = alpaka::allocBuf(hostAcc,800); + std::memcpy(alpaka::getPtrNative(hostBuf_16biasbcast), data, 800 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_16biasbcast, hostBuf_16biasbcast , 800); + } +//--- broadcast bias tensor 18biasfor Gemm op + { + float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_18bias,{ 10 }, { 16 , 10 }); + auto hostBuf_18biasbcast = alpaka::allocBuf(hostAcc,160); + std::memcpy(alpaka::getPtrNative(hostBuf_18biasbcast), data, 160 * sizeof(float)); + alpaka::memcpy(queue, deviceBuf_18biasbcast, hostBuf_18biasbcast , 160); + } +} + + + +std::vector infer(float* tensor_input1){ + +//--------- Gemm_GPU_ALPAKA + char op_0_transA = 'n'; + char op_0_transB = 't'; + int op_0_m = 16; + int op_0_n = 50; + int op_0_k = 100; + float op_0_alpha = 1; + float op_0_beta = 1; + int op_0_lda = 100; + int op_0_ldb = 100; + std::copy(tensor_0biasbcast, tensor_0biasbcast + 800, tensor_22); + Kokkos::View kokkos_dev_input1((float*)std::data(bufDev_input1), op_0_m, op_0_k); + Kokkos::View kokkos_dev_0weight((float*)std::data(bufDev_0weight), op_0_k, op_0_n); + Kokkos::View kokkos_dev_22((float*)std::data(bufDev_22), op_0_m, op_0_n); + KokkosBlas::gemm(&op_0_transB, &op_0_transA, op_0_alpha, kokkos_dev_input1, kokkos_dev_0weight, op_0_beta, kokkos_dev_22); + +//--------- Gemm_GPU_ALPAKA + char op_1_transA = 'n'; + char op_1_transB = 't'; + int op_1_m = 16; + int op_1_n = 50; + int op_1_k = 50; + float op_1_alpha = 1; + float op_1_beta = 1; + int op_1_lda = 50; + int op_1_ldb = 50; + std::copy(tensor_2biasbcast, tensor_2biasbcast + 800, tensor_24); + Kokkos::View kokkos_dev_22((float*)std::data(bufDev_22), op_1_m, op_1_k); + Kokkos::View kokkos_dev_2weight((float*)std::data(bufDev_2weight), op_1_k, op_1_n); + Kokkos::View kokkos_dev_24((float*)std::data(bufDev_24), op_1_m, op_1_n); + KokkosBlas::gemm(&op_1_transB, &op_1_transA, op_1_alpha, kokkos_dev_22, kokkos_dev_2weight, op_1_beta, kokkos_dev_24); + +//--------- Gemm_GPU_ALPAKA + char op_2_transA = 'n'; + char op_2_transB = 't'; + int op_2_m = 16; + int op_2_n = 50; + int op_2_k = 50; + float op_2_alpha = 1; + float op_2_beta = 1; + int op_2_lda = 50; + int op_2_ldb = 50; + std::copy(tensor_4biasbcast, tensor_4biasbcast + 800, tensor_26); + Kokkos::View kokkos_dev_24((float*)std::data(bufDev_24), op_2_m, op_2_k); + Kokkos::View kokkos_dev_4weight((float*)std::data(bufDev_4weight), op_2_k, op_2_n); + Kokkos::View kokkos_dev_26((float*)std::data(bufDev_26), op_2_m, op_2_n); + KokkosBlas::gemm(&op_2_transB, &op_2_transA, op_2_alpha, kokkos_dev_24, kokkos_dev_4weight, op_2_beta, kokkos_dev_26); + +//--------- Gemm_GPU_ALPAKA + char op_3_transA = 'n'; + char op_3_transB = 't'; + int op_3_m = 16; + int op_3_n = 50; + int op_3_k = 50; + float op_3_alpha = 1; + float op_3_beta = 1; + int op_3_lda = 50; + int op_3_ldb = 50; + std::copy(tensor_6biasbcast, tensor_6biasbcast + 800, tensor_28); + Kokkos::View kokkos_dev_26((float*)std::data(bufDev_26), op_3_m, op_3_k); + Kokkos::View kokkos_dev_6weight((float*)std::data(bufDev_6weight), op_3_k, op_3_n); + Kokkos::View kokkos_dev_28((float*)std::data(bufDev_28), op_3_m, op_3_n); + KokkosBlas::gemm(&op_3_transB, &op_3_transA, op_3_alpha, kokkos_dev_26, kokkos_dev_6weight, op_3_beta, kokkos_dev_28); + +//--------- Gemm_GPU_ALPAKA + char op_4_transA = 'n'; + char op_4_transB = 't'; + int op_4_m = 16; + int op_4_n = 50; + int op_4_k = 50; + float op_4_alpha = 1; + float op_4_beta = 1; + int op_4_lda = 50; + int op_4_ldb = 50; + std::copy(tensor_8biasbcast, tensor_8biasbcast + 800, tensor_30); + Kokkos::View kokkos_dev_28((float*)std::data(bufDev_28), op_4_m, op_4_k); + Kokkos::View kokkos_dev_8weight((float*)std::data(bufDev_8weight), op_4_k, op_4_n); + Kokkos::View kokkos_dev_30((float*)std::data(bufDev_30), op_4_m, op_4_n); + KokkosBlas::gemm(&op_4_transB, &op_4_transA, op_4_alpha, kokkos_dev_28, kokkos_dev_8weight, op_4_beta, kokkos_dev_30); + +//--------- Gemm_GPU_ALPAKA + char op_5_transA = 'n'; + char op_5_transB = 't'; + int op_5_m = 16; + int op_5_n = 50; + int op_5_k = 50; + float op_5_alpha = 1; + float op_5_beta = 1; + int op_5_lda = 50; + int op_5_ldb = 50; + std::copy(tensor_10biasbcast, tensor_10biasbcast + 800, tensor_32); + Kokkos::View kokkos_dev_30((float*)std::data(bufDev_30), op_5_m, op_5_k); + Kokkos::View kokkos_dev_10weight((float*)std::data(bufDev_10weight), op_5_k, op_5_n); + Kokkos::View kokkos_dev_32((float*)std::data(bufDev_32), op_5_m, op_5_n); + KokkosBlas::gemm(&op_5_transB, &op_5_transA, op_5_alpha, kokkos_dev_30, kokkos_dev_10weight, op_5_beta, kokkos_dev_32); + +//--------- Gemm_GPU_ALPAKA + char op_6_transA = 'n'; + char op_6_transB = 't'; + int op_6_m = 16; + int op_6_n = 50; + int op_6_k = 50; + float op_6_alpha = 1; + float op_6_beta = 1; + int op_6_lda = 50; + int op_6_ldb = 50; + std::copy(tensor_12biasbcast, tensor_12biasbcast + 800, tensor_34); + Kokkos::View kokkos_dev_32((float*)std::data(bufDev_32), op_6_m, op_6_k); + Kokkos::View kokkos_dev_12weight((float*)std::data(bufDev_12weight), op_6_k, op_6_n); + Kokkos::View kokkos_dev_34((float*)std::data(bufDev_34), op_6_m, op_6_n); + KokkosBlas::gemm(&op_6_transB, &op_6_transA, op_6_alpha, kokkos_dev_32, kokkos_dev_12weight, op_6_beta, kokkos_dev_34); + +//--------- Gemm_GPU_ALPAKA + char op_7_transA = 'n'; + char op_7_transB = 't'; + int op_7_m = 16; + int op_7_n = 50; + int op_7_k = 50; + float op_7_alpha = 1; + float op_7_beta = 1; + int op_7_lda = 50; + int op_7_ldb = 50; + std::copy(tensor_14biasbcast, tensor_14biasbcast + 800, tensor_36); + Kokkos::View kokkos_dev_34((float*)std::data(bufDev_34), op_7_m, op_7_k); + Kokkos::View kokkos_dev_14weight((float*)std::data(bufDev_14weight), op_7_k, op_7_n); + Kokkos::View kokkos_dev_36((float*)std::data(bufDev_36), op_7_m, op_7_n); + KokkosBlas::gemm(&op_7_transB, &op_7_transA, op_7_alpha, kokkos_dev_34, kokkos_dev_14weight, op_7_beta, kokkos_dev_36); + +//--------- Gemm_GPU_ALPAKA + char op_8_transA = 'n'; + char op_8_transB = 't'; + int op_8_m = 16; + int op_8_n = 50; + int op_8_k = 50; + float op_8_alpha = 1; + float op_8_beta = 1; + int op_8_lda = 50; + int op_8_ldb = 50; + std::copy(tensor_16biasbcast, tensor_16biasbcast + 800, tensor_38); + Kokkos::View kokkos_dev_36((float*)std::data(bufDev_36), op_8_m, op_8_k); + Kokkos::View kokkos_dev_16weight((float*)std::data(bufDev_16weight), op_8_k, op_8_n); + Kokkos::View kokkos_dev_38((float*)std::data(bufDev_38), op_8_m, op_8_n); + KokkosBlas::gemm(&op_8_transB, &op_8_transA, op_8_alpha, kokkos_dev_36, kokkos_dev_16weight, op_8_beta, kokkos_dev_38); + +//--------- Gemm_GPU_ALPAKA + char op_9_transA = 'n'; + char op_9_transB = 't'; + int op_9_m = 16; + int op_9_n = 10; + int op_9_k = 50; + float op_9_alpha = 1; + float op_9_beta = 1; + int op_9_lda = 50; + int op_9_ldb = 50; + std::copy(tensor_18biasbcast, tensor_18biasbcast + 160, tensor_39); + Kokkos::View kokkos_dev_38((float*)std::data(bufDev_38), op_9_m, op_9_k); + Kokkos::View kokkos_dev_18weight((float*)std::data(bufDev_18weight), op_9_k, op_9_n); + Kokkos::View kokkos_dev_39((float*)std::data(bufDev_39), op_9_m, op_9_n); + KokkosBlas::gemm(&op_9_transB, &op_9_transA, op_9_alpha, kokkos_dev_38, kokkos_dev_18weight, op_9_beta, kokkos_dev_39); + return {std::vector(tensor_39, tensor_39 + 160)}; +} +}; // end of Session +} //SOFIE_Linear_16 + +#endif // SOFIE_LINEAR_16 diff --git a/src/SOFIE_core/inc/SOFIE/RModel.hxx b/src/SOFIE_core/inc/SOFIE/RModel.hxx index 79541af..83a47af 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel.hxx @@ -141,6 +141,11 @@ public: { Generate(static_cast>(options), batchSize, pos, verbose); } + void GenerateGPU_ALPAKA(std::underlying_type_t options, int batchSize = -1, bool verbose = false); + void GenerateGPU_ALPAKA(Options options = Options::kDefault, int batchSize = -1, bool verbose = false) + { + GenerateGPU_ALPAKA(static_cast>(options), batchSize, verbose); + } // generate the infer function signature. If isdecl= false generate the calling infer function // used to infer the sub-graphs std::string GenerateInferSignature(bool isdecl = true); @@ -153,18 +158,28 @@ protected: // internal functions // generate code for the initialized tensors void GenerateInitializedTensorInfo(); + + void GenerateInitializedTensorInfo_GPU_ALPAKA(); // generate code for the intermediate tensors void GenerateIntermediateTensorInfo(); // generate code for the dynamic tensors void GenerateDynamicTensorInfo(); + + void GenerateDynamicTensorInfo_GPU_ALPAKA(); // generate code for declarations needed by operators void GenerateOperatorDeclarations(); // generate code for inference void GenerateOutput(); + + void GenerateOutput_GPU_ALPAKA(); + + void MoveInitializedTensorsToBuffers_ALPAKA(); // generate code for initializing memory pool for intermediate tensors void GenerateIntermediateMemoryPool(); // Generate all session code void GenerateSessionCode(); + void GenerateSessionCode_GPU_ALPAKA(); + void GenerateGPU_ALPAKA_Buffers(); public: const std::vector &GetInputTensorNames() const { return fInputTensorNames; } diff --git a/src/SOFIE_core/inc/SOFIE/ROperator.hxx b/src/SOFIE_core/inc/SOFIE/ROperator.hxx index edbec58..f7db548 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator.hxx @@ -24,8 +24,10 @@ public: virtual std::vector TypeInference(std::vector) = 0; virtual void Initialize(RModel&) = 0; virtual std::string Generate(std::string OpName) = 0; //expect unique opName for each operator within the same RModel + virtual std::string Generate_GPU_ALPAKA(std::string OpName){ return "";} //expect unique opName for each operator within the same RModel // generate initialization code for session constructor virtual std::string GenerateInitCode() { return "";} + virtual std::string GenerateInitCode_GPU_ALPAKA() { return "";}; // generate some specific declaration code for Session virtual std::string GenerateDeclCode() { return "";} // generate session data members specific to operator diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx index b6901f0..7410cf3 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx @@ -488,10 +488,11 @@ namespace SOFIE{ if (fType == "float"){ - out << SP << "Kokkos::View kokkos_dev_"< kokkos_dev_"< kokkos_dev_"< kokkos_dev_"< kokkos_dev_"< kokkos_dev_"<{\n" + <<"alpaka::workdiv::getValidWorkDiv(devAcc, {totalElems}, true, alpaka::GridBlockExtent::All)\n" + <<"};\n"; + out<< SP << SP << "alpaka::exec(queue, workDiv,\n" + <<"[] ALPAKA_FN_ACC (auto const& acc, auto buf, Idx size) {\n" + <<"Idx const idx = alpaka::getIdx(acc)[0];\n" + <<" if (idx < size) {\n" + <<" auto& x = alpaka::getPtrNative(buf)[idx];\n" + <<" x = x < 0 ? 0 : x;\n" + <<" }\n" + <<"}, bufDev_"< 0) {\n"; - out << "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," + std::to_string(length) + ");\n"; - - out << SP << SP << "fTensor_" << i.first << ".resize(" << length << ");\n"; - out << SP << SP << "tensor_" << i.first << " = fTensor_" << i.first << ".data();\n"; + out << "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," << length << ");\n"; out << SP << "}\n"; } fGC += out.str(); @@ -892,9 +889,9 @@ void RModel::GenerateSessionCode() // define the Session struct (for GNN this is generated in RModel_GNN) if (fUseSession) { if (!fIsSubGraph) - fGC += "struct Session {\n"; + fGC += "struct Session {\n\n"; else - fGC += "struct Session_" + fName + " {\n"; + fGC += "struct Session_" + fName + " {\n\n"; } // generate code for declaring the initialized tensors @@ -1001,9 +998,9 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() // define the Session struct (for GNN this is generated in RModel_GNN) if (fUseSession) { if (!fIsSubGraph) - fGC += "struct Session {\n"; + fGC += "struct Session {\n\n"; else - fGC += "struct Session_" + fName + " {\n"; + fGC += "struct Session_" + fName + " {\n\n"; } // // generate code for declaring the initialized tensors @@ -1188,16 +1185,16 @@ void RModel::GenerateGPU_ALPAKA(std::underlying_type_t options, int bat } if (static_cast>(Options::kGNN) & options || static_cast>(Options::kGNNComponent) & options) - throw std::runtime_error("SOFIE GPU does not yet supports GNN Inference.") + throw std::runtime_error("SOFIE GPU does not yet supports GNN Inference."); // initialize the model including all operators and sub-graphs Initialize(batchSize, verbose); std::string hgname; - // if (!fIsSubGraph) { - // fGC.clear(); - // GenerateHeaderInfo_GPU_ALPAKA(hgname); - // } + if (!fIsSubGraph) { + fGC.clear(); + GenerateHeaderInfo_GPU_ALPAKA(hgname); + } // generate first code for the subgraphs // for (auto &graph : fSubGraphs) { @@ -1310,20 +1307,20 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { // skip Constant and shape tensors if (!i.second.IsWeightTensor()) continue; std::string tensor_name = "tensor_" + i.first; - length = ConvertShapeToLength(i.second.shape()); + auto length = ConvertShapeToLength(i.second.shape()); std::string slength = std::to_string(length); if (i.second.type() == ETensorType::FLOAT) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(float));\n"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength");\n"; + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(float));\n"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength+");\n"; } else if (i.second.type() == ETensorType::DOUBLE) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(doub;e));"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength");\n"; + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(doub;e));"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength+");\n"; } else if (i.second.type() == ETensorType::INT64) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(int64_t));"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength");\n"; + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(int64_t));"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength+");\n"; } else { std::runtime_error("tmva-sofie tensor " + tensor_name + " with type " + ConvertTypeToString(i.second.type()) + " cannot be read from a ROOT file"); } diff --git a/src/SOFIE_core/src/RModel_Base.cxx b/src/SOFIE_core/src/RModel_Base.cxx index a3392d8..b5524d1 100644 --- a/src/SOFIE_core/src/RModel_Base.cxx +++ b/src/SOFIE_core/src/RModel_Base.cxx @@ -59,7 +59,7 @@ void RModel_Base::GenerateHeaderInfo(std::string& hgname) { } void RModel_Base::GenerateHeaderInfo_GPU_ALPAKA(std::string& hgname) { - fGC += ("//Code generated automatically by TMVA for ALPAKA Inference of Model file [" + fFileName + "] at [" + fParseTime.substr(0, fParseTime.length()-1) +"] \n"); + fGC += ("//Code generated automatically by TMVA for GPU Inference using ALPAKA of Model file [" + fFileName + "] at [" + fParseTime.substr(0, fParseTime.length()-1) +"] \n"); // add header guards hgname = fName; std::transform(hgname.begin(), hgname.end(), hgname.begin(), [](unsigned char c) { @@ -87,6 +87,9 @@ void RModel_Base::GenerateHeaderInfo_GPU_ALPAKA(std::string& hgname) { if (fWeightFile == WeightFileType::RootBinary) fGC += "#include \"TFile.h\"\n"; + fGC += "\nusing Dim1D = alpaka::DimInt<1>;\n"; + fGC += "using Acc = alpaka::TagToAcc;\n"; + fGC += "using Queue = alpaka::Queue;\n"; fGC += "\nnamespace SOFIE_" + fName + "{\n"; } From 6481c052097eb1dd7fd550ddf8f24aab1d4a1d28 Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Mon, 18 Aug 2025 15:43:27 +0200 Subject: [PATCH 04/22] fix: defining intermediate and initialized tensors --- Linear_16.dat | 40 -- Linear_16.hxx | 658 ---------------------- settings-dev.cmake | 7 + src/SOFIE_core/CMakeLists.txt | 1 + src/SOFIE_core/inc/SOFIE/RModel.hxx | 4 + src/SOFIE_core/inc/SOFIE/RModel_Base.hxx | 11 +- src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx | 5 + src/SOFIE_core/src/RModel.cxx | 407 ------------- src/SOFIE_core/src/RModel_ALPAKA.cxx | 366 ++++++++++++ src/SOFIE_core/src/RModel_Base.cxx | 5 +- src/SOFIE_core/src/SOFIE_common.cxx | 35 ++ 11 files changed, 428 insertions(+), 1111 deletions(-) delete mode 100644 Linear_16.dat delete mode 100644 Linear_16.hxx create mode 100644 settings-dev.cmake create mode 100644 src/SOFIE_core/src/RModel_ALPAKA.cxx diff --git a/Linear_16.dat b/Linear_16.dat deleted file mode 100644 index 873ce7d..0000000 --- a/Linear_16.dat +++ /dev/null @@ -1,40 +0,0 @@ -tensor_8weight 2500 -0.0268758684 0.139096066 0.0821818858 -0.127417535 -0.0831027254 0.109001353 -0.0448572189 0.0432091393 -0.100685023 -0.0782502964 -0.0569691472 -0.0834055692 -0.0914414823 -0.00128868222 0.114371844 0.157571077 -0.0249715224 -0.0275524613 -0.106611423 0.160815567 0.0850525424 -0.0246056858 0.0868391246 0.0197147224 0.0387364663 0.0334140956 -0.0329913124 0.110141195 0.105670758 -0.0897664875 -0.0678865984 0.0182914361 0.146356225 0.0747506022 -0.0347048417 0.0646456406 -0.0683225691 -0.0967762694 0.144724965 0.0968451351 -0.049604129 -0.0246048607 0.0982864648 -0.00104637037 -0.0540190488 0.02299482 -0.0587500408 0.162345782 -0.0178857595 -0.114502899 -0.0277074426 0.0523337275 -0.0407291614 -0.125408962 -0.0477996059 -0.144638136 -0.141282856 0.107945614 -0.0642622635 0.106897406 0.141127169 0.00702024298 0.115400836 0.0949773341 -0.0841375515 -0.029037755 -0.12251503 -0.113417722 0.152951673 -0.052355133 0.125115007 0.11263705 -0.0993821546 -0.100654982 0.13138859 -0.121526435 0.0688993633 0.0602294281 0.0230368655 -0.119217426 -0.131345615 -0.0928916186 0.0589227341 -0.0877812058 -0.0575322062 -0.0479355939 0.119958326 0.0839382187 0.0134669729 -0.120720349 -0.0728492588 -0.0201216638 -0.0426205285 0.0580729693 -0.0317371115 -0.0582037121 -0.058949165 -0.0108661382 -0.0596015975 0.0923921913 0.106575489 -0.00681856275 0.0882440805 -0.0621290579 -0.0726372078 -0.00623785472 0.0285876859 0.0697654709 0.0963460952 -0.0578520186 -0.0386559479 0.0133705661 -0.0272551179 0.0195438117 0.0346884355 -0.00187640428 0.0641605407 0.176762238 0.159317046 -0.0952501073 0.0985514522 -0.0871972367 -0.0842028037 -0.0397452265 0.17574358 -0.0538034029 0.136749208 -0.0399385504 0.068287164 0.0217975918 -0.132272243 -0.0182651877 0.105293095 0.00358554721 0.00108983321 -0.153865114 0.0111923162 0.168782786 0.0969837084 0.0112454593 -0.0346569866 -0.0916731507 -0.00954194739 0.154270783 -0.0877914429 0.0242955964 -0.0126784407 0.121400051 -0.0289624184 0.0689913705 0.0434036702 0.0646613985 0.0640042126 -0.0474287085 0.073149845 0.156802103 -0.041823104 0.0810273662 0.179701 -0.0470410772 -0.0788431466 -0.147018611 0.053253185 -0.0240673199 -0.0210381355 -0.0958639532 -0.0170583278 0.0516901463 -0.111291543 0.00283904956 0.142500415 0.141777232 0.126547039 0.129214615 0.0729232654 -0.0321790762 -0.143716827 -0.00954447314 0.172390178 0.0871036574 0.000518912973 0.103541978 0.00573523017 0.100544035 0.167853162 0.157549649 0.117853075 -0.0903918445 -0.00601014355 0.0462133735 -0.119286336 0.111245058 0.00672465004 -0.035454426 0.184626952 -0.0521864779 0.180116341 -0.0577540956 -0.0600765273 0.151180387 -0.0436708629 -0.119430825 0.163085073 0.0715407208 0.0878540799 0.0810162574 -0.0718293861 0.12325272 -0.0860322118 -0.122137249 0.00682034623 0.158898726 -0.109563902 -0.140805796 0.144035459 0.0911271796 -0.0533853769 0.158740863 -0.12681675 0.0929608271 -0.0734888241 -0.0542239025 -0.0844008029 -0.0349548869 0.0194364432 0.00317873154 -0.0542409308 -0.110601485 -0.0040136571 0.126150146 -0.0695957989 0.135092571 0.0206705686 0.0210149139 -0.175603613 -0.0048725917 0.0448048264 0.0753361583 -0.0960626155 -0.00809389353 0.00274648191 0.118843384 0.0924557075 -0.0390414186 0.104679853 -0.02499073 0.115688451 0.183578849 -0.0963831246 -0.0192914438 0.0245501548 0.132269129 0.0528996326 0.126745895 0.0650902316 0.142014235 -0.109039702 0.110978663 -0.168853745 -0.0998671725 -0.0272130556 -0.0755283609 -0.145343035 0.0856398046 -0.0465832911 0.156254068 -0.00759668648 0.0660862029 -0.128243685 0.123390384 -0.0730970129 0.0211296733 0.172928646 -0.0569610596 0.162485525 0.0430793129 0.148347437 0.0230896771 0.0979775786 0.0892469361 0.114915423 0.102117866 0.114598379 -0.0385860801 0.104682192 0.05711741 0.183385804 0.114157908 0.0803671777 0.121530138 0.0750841424 -0.0201519765 0.0892636031 -0.00840737578 -0.0380099379 0.0670845732 0.173552945 -0.0446153358 0.0231650621 -0.0720840693 0.104690835 0.113704339 0.0918174759 -0.025853835 0.0474199951 -0.0122872479 0.0429795273 -0.0552103594 0.00883762538 0.151448011 -0.0409595668 0.0949078426 0.0689887926 0.107225835 0.0818655714 0.0588729456 -0.0576343685 -0.0948910415 -0.0496109203 -0.10446807 0.187580436 0.165386483 -0.0410638899 0.109590538 -0.0093578482 -0.168643042 0.0453341343 0.0740079209 -0.0932418108 -0.136097178 0.0847565904 0.016970491 -0.196696535 0.0213545114 0.146830887 0.188490406 -0.104602233 0.102813676 -0.020301817 0.0581303798 0.154687196 0.0931936353 0.0230271649 -0.0598398224 0.00981738791 0.00165832066 0.130958691 -0.141922146 -0.0931093395 -0.0452646948 0.0832985789 -0.0752738565 0.108650707 -0.0635550246 0.161715269 -0.048298005 -0.0519500524 0.111362822 -0.0297681484 0.0919308066 0.00461465074 -0.123445027 -0.0583725758 0.0877097845 -0.0828031972 -0.0494801551 -0.0178236663 0.110060342 -0.104191855 0.00495020067 0.190953419 0.175235912 0.0748231113 -0.0354038626 -0.0866233632 0.0114633273 -0.0709704086 -0.0408563316 -0.00744438358 -0.12112467 -0.00320398994 -0.109712012 -0.13769187 -0.0328272358 0.091612272 0.14105013 0.146673918 -0.0514545403 0.202921212 0.0834511071 0.160404101 0.0601333193 0.0581494831 0.083636649 0.0984802917 0.0609594397 0.128031611 -0.0596118644 0.112030312 0.175436005 -0.0210987478 0.100863054 0.141590253 -0.014728453 -0.0364963971 0.0035578683 -0.0124314548 0.0569810681 0.0548986979 0.130580813 0.103102274 -0.0677055791 -0.116339654 0.129263833 0.162418574 0.122197464 -0.0109819323 0.0938043669 -0.0469912067 -0.101457044 0.131629422 -0.0877847597 -0.0320621915 0.0457580797 0.0759001374 -0.0854525268 0.0624620654 0.088354066 0.0471264385 0.122950502 -0.0319194868 0.0971357599 0.144149795 0.0968611538 -0.0324465856 -0.13455525 0.0447516218 -0.0679218769 -0.0809827521 0.0494714826 -0.0949900225 0.0311798677 -0.00109984947 0.174830928 -0.0281612556 0.163149565 0.0736394823 -0.0375521332 0.00539422035 -0.0927275494 -0.0925532579 0.0742847919 0.0994291157 0.127749816 0.0300972443 -0.0191503167 -0.0972991213 0.0944213718 -0.0106646148 0.0151962861 0.00275415881 0.0332029015 -0.0985995755 -0.0955503657 0.0529588386 -0.0463228486 -0.139574915 -0.105905958 0.0530111678 -0.153271616 0.00135927019 -0.018976111 0.0405978933 0.0479904711 0.0545446351 -0.114191957 0.141731873 -0.132812411 -0.0630234033 0.0933084786 0.0396189578 -0.0473725162 -0.0290426835 -0.123914912 -0.0582598001 0.152059436 0.0548362397 -0.0321423411 0.115950227 -0.120880082 0.184676751 -0.0445445627 -0.103703029 0.0245305933 -0.00212677591 -0.0626897737 -0.0121289967 0.0785561725 -0.0832984447 0.0998352543 0.117086425 -0.0671990365 -0.0363239795 0.0353550613 0.114468403 0.143954277 0.105897352 0.0256107412 0.156521618 0.0780752227 -0.0554250963 0.0736213177 -0.10541296 0.0503535867 0.00255402969 0.0666635558 0.129061893 -0.0195398014 0.0478001311 0.065228425 -0.0979058444 0.0814248547 0.108272545 -0.00994789507 -0.0218796581 -0.154623747 0.0106207961 -0.12293978 -0.00427472685 -0.126063108 -0.0116878618 0.100741506 -0.0546985939 -0.0451277271 0.0930468291 -0.0851750597 -0.0140047939 -0.123040549 0.132812724 0.0833404511 0.140870854 -0.120734856 -0.0805390403 -0.0502453148 -0.0170761105 0.00345065887 -0.0480272733 -0.0561171696 0.0876882076 0.0613627955 -0.0316582024 0.0100890994 0.0458408594 -0.135604486 -0.0192864686 -0.01957082 0.0726629794 -0.0564594492 -0.0693246424 0.0831580311 -0.123946451 0.0842915326 0.027340591 0.123881891 0.0634962171 0.0854125172 0.0679267496 0.0966168046 -0.029469654 0.0381903499 0.111267343 0.140807226 -0.13526763 0.0522036403 -0.115617715 0.0543578118 0.0461016595 0.103722617 0.0174795687 -0.136987507 -0.0600835234 0.0538240522 0.0903360397 0.0172370523 -0.0140279233 -0.114186123 0.0943125635 -0.0614755452 -0.0279850513 0.083064124 -0.12880753 -0.13455835 -0.0599042326 -0.0389251933 -0.0604324266 -0.0513332263 -0.0554876402 0.0233900547 -0.0640518144 0.111689016 0.0502607375 -0.00419057906 -0.0730830133 -0.0277305022 0.171559766 0.0534306914 0.00674414961 -0.107875511 -0.0510217324 -0.0838860199 -0.15236254 -0.138948157 -0.125519603 0.0523681492 -0.0187952798 0.114655808 0.0474532545 0.0917048305 -0.0550882407 0.0838057324 0.18853642 0.142427206 0.180868432 0.140680373 -0.0942524076 0.123159751 0.0897716284 -0.0308326464 0.0049529071 0.13588357 0.0297236629 -0.0363686383 -0.0588090122 0.12634854 0.0122025581 0.186096713 0.0920768976 0.0481046252 0.0876177624 0.0250588302 -0.0850643218 -0.0529115237 0.029226495 -0.0699693412 0.150488198 -0.0428842455 0.179663286 -0.0198406726 0.0218468606 0.197559595 0.0729278922 0.0885386169 -0.133979425 0.0167944431 -0.0360915139 0.0497089326 -0.0268492103 -0.0587182194 0.0121284872 -0.00810500979 -0.0885604918 -0.0682897642 -0.109051131 0.101431355 -0.10556107 -0.0689118356 0.0273847431 0.123891041 -0.0328962579 -0.0183387175 0.0236377716 -0.126516774 -0.027949512 0.125757441 -0.137005895 0.0159674287 -0.0881164894 0.0896662176 -0.0548697859 -0.0910438597 -0.126777187 0.0143643618 -0.0796068907 -0.0773626193 -0.0353754535 0.0982186347 0.102850467 0.0936983526 0.0350374728 0.0642853081 -0.0353903249 0.0034533143 -0.0836362615 -0.0474314392 0.137183502 0.00499179959 -0.0352529734 -0.12372198 0.0710547566 -0.0847075656 0.108061433 0.0962944925 -0.0228818804 0.0236922354 0.0593082607 -0.0698251426 -0.0753812417 -0.0950560495 -0.0748883784 -0.139509365 -0.0391269475 0.117235079 -0.0770111158 0.0286441594 -0.0478565544 0.0810799748 -0.0450968295 -0.0848289505 -0.0374233201 -0.0248766541 -0.0257886276 -0.00540667772 -0.131286308 -0.125986263 0.0405903906 -0.0291525051 -0.0074609369 -0.0744228065 0.0589668602 -0.0275227204 0.134439722 -0.116755374 -0.0779221952 0.0212557018 0.126257434 0.105313227 0.120735362 -0.0692541525 -0.0584569424 -0.108607799 -0.0476316065 -0.0588775352 0.0463445932 -0.133415371 -0.128379583 0.121760055 -0.0548802391 -0.0722203329 0.0508697033 -0.079833433 0.119531378 -0.0217971876 -0.105434492 -0.0522313938 -0.0453321934 0.107274927 0.0276630223 0.131097168 0.079335019 -0.114221223 0.0391028263 0.128627002 -0.0898075253 0.0599811226 0.072371535 0.0517965741 -0.0948484987 -0.00232080673 -0.120201647 -0.1168992 -0.163116753 0.101379991 -0.0693345442 -0.0656319857 0.0136408824 -0.0277835261 0.0546165146 -0.00200848537 0.105287716 0.021810092 0.101102382 -0.0842717886 -0.020271264 -0.121380635 0.0648328215 0.0722329915 0.0304982048 -0.0108427657 -0.0313236415 0.0242884308 0.0848189518 -0.00415426493 -0.129282877 -0.0663083941 -0.0568652116 -0.0136977984 0.0484237522 -0.136208966 -0.0747673362 -0.00170940161 0.00959950686 -0.0287488401 0.174732566 0.0778143853 -0.0412021503 0.138728648 -0.12335252 0.0248393398 -0.00735486019 -0.0928628147 -0.0812815279 0.125593081 0.0110786557 0.124990925 0.150338039 0.0616421662 -0.0968330279 0.11320933 0.116342612 -0.00344289024 0.141467705 -0.072798416 0.121146008 -0.0969213247 -0.0562434942 -0.0969665498 0.0179323703 -0.0307174679 0.0410963222 0.0908566862 -0.0271566976 0.181122735 0.176294565 -0.0137444139 -0.152425051 0.0303653441 -0.0654244274 0.103337444 0.161812425 -0.114469662 0.0337155983 0.0851140097 0.0473025665 -0.0455731675 0.0910733119 0.0064521106 0.187958792 0.167304024 -0.127127901 0.00861696992 -0.0400827006 -0.140295923 0.0512709506 0.0780323595 -0.0932431147 0.0957963392 -0.125637099 -0.0162038952 0.00915290881 0.0997759104 0.0987372771 0.16382876 -0.056870617 -0.139243662 -0.071242094 -0.0608208477 0.107261404 0.0251677446 -0.0958002061 -0.0900856256 0.0601827726 0.107842483 -0.0984033346 -0.00783828646 0.0255061835 0.00474396348 -0.0694380254 0.0950763747 -0.0441939719 0.0136436457 0.108352683 0.137562498 0.0213271081 0.0454172641 -0.0874122232 -0.089138791 0.0275239777 -0.0769107938 -0.0700656921 0.131777659 0.175489351 -0.0777074322 -0.00239577657 -0.00230550254 0.167611465 0.0103928242 -0.0727633685 -0.0352996625 0.00823523104 -0.0106461262 0.0824658424 -0.0121006668 -0.0598732941 -0.0662225783 0.0269689541 0.0804088712 0.138990924 0.149531111 -0.0406282917 -0.126480639 -0.025079472 0.0510983169 0.035402365 0.08281295 0.156712428 -0.026096575 -0.0651845187 -0.0323777311 -0.105685644 0.0783127025 0.0188494585 0.0856304839 -0.046786584 -0.0739144981 0.0625574216 0.127959684 -0.0416722037 0.114131734 -0.04018737 0.0335959457 -0.0786943138 -0.0593536906 0.000781891402 0.0818767101 0.07887806 0.0942715183 0.178498864 0.14772743 0.00345369685 -0.0423939079 -0.0205054302 0.123664357 0.0551863275 -0.177257061 0.114078067 0.0455558784 -0.0323475748 -0.112341911 0.0721865445 -0.0341178104 -0.0914598405 0.0694510341 -0.0585612506 -0.0373541526 -0.205118358 -0.0179533362 0.0257616416 0.18985191 0.101283662 0.0620856099 0.163825974 0.150054261 0.0351246744 -0.0134136677 0.0362584144 0.0490719676 -0.0219044462 0.100722261 -0.0236032922 -0.0624775924 -0.18531242 -0.0643399507 0.0405745842 0.0175180174 0.123290591 0.074898921 0.0684316009 -0.0228197258 0.138146341 -0.0247859173 -0.136237904 0.0807761028 0.190366209 -0.0662142709 0.0290480666 -0.0762866884 0.106889285 0.00406613294 0.00212845136 -0.0551334918 0.176173732 -0.000862196088 0.0479077958 0.12893793 0.0908120275 0.0428063385 -0.105808966 0.0208340362 -0.0391079783 -0.17646575 -0.0161272287 -0.0779476464 -0.139349103 0.132013753 0.0993892252 0.064087227 0.131257027 -0.0114984009 -0.120081656 0.0867618024 0.0161269289 0.0568408556 -0.0086016655 -0.00697259605 -0.127590686 0.00164337456 -0.106980473 -0.0617386699 -0.093155548 -0.0321060345 0.0353029482 -0.144390106 -0.041361127 -0.102071285 -0.0588951148 0.0818923414 -0.127334356 0.0141031453 -0.111001149 -0.123913996 -0.0247361958 -0.0820739791 -0.030570088 0.127384081 0.0231190175 -0.10356193 -0.139310062 -0.0380821303 -0.0285825692 0.126087889 -0.066886954 0.0766842216 -0.135645509 -0.0953988656 0.131382018 -0.12620239 -0.0145515203 0.134063303 0.0396169424 0.0967397094 0.119111016 -0.0184818357 0.176523507 -0.0199789405 0.0826793611 -0.110192202 -0.0409205034 0.00472770026 -0.14348729 0.147804692 0.0450261496 0.0670832992 -0.0345766172 0.126415744 0.00601782696 0.104479343 0.0414096117 -0.0710287988 0.112614326 -0.147158608 -0.0370420963 -0.119263552 0.126887798 -0.105801471 -0.00610316033 -0.105143495 0.196164653 -0.0159037225 -0.0744655356 0.132361174 0.0196442343 -0.0159279685 0.126357719 -0.029065378 0.0336539075 -0.168783128 0.0148825208 0.0555515438 0.0461699739 0.102379352 0.155959725 -0.137117967 0.0191216022 0.222972959 0.125019222 -0.0988391787 -0.0180195421 -0.00158139609 0.029359296 -0.142667904 0.103885561 -0.105973668 0.0307869632 0.00780402496 -0.0674499497 0.114393353 -0.127377525 0.00557687134 -0.061907284 -0.124339581 0.0993482098 -0.0195321329 -0.0585047677 0.0953318775 0.0480449647 -0.0227444768 0.0408569276 0.0562379509 0.0313135199 -0.132302389 0.125221208 -0.0873878524 -0.111026652 0.115592606 -0.0906311348 -0.00652401475 -0.127266601 -0.144731417 0.0330261067 -0.135340631 -0.1036596 0.0953472406 0.0560712516 -0.135941952 -0.040515393 -0.0903434008 -0.0961870179 0.119984761 -0.023610061 0.136370555 -0.0166805629 0.127074108 -0.0724001899 -0.0508536957 0.115113258 -0.0258387104 0.0489959568 -0.142882243 0.104940325 -0.109118342 -0.0262665749 -0.139981106 0.15954946 -0.00940728188 0.0964377075 0.0164540596 0.0535212867 0.189132586 -0.035037268 -0.0510806404 -0.0423220247 -0.0387359485 0.0559357852 -0.0354634076 0.174487337 -0.0849912167 -0.124549776 0.0769607276 -0.0557537489 -0.0945133492 0.152356565 -0.0749799982 0.124006495 -0.11373242 0.0692153648 -0.0678370595 -0.0376192741 -0.0456925295 0.0221248977 0.0522562191 -0.098749496 0.0200695693 0.122223869 -0.0449365303 0.0366582051 -0.135746583 -0.0222668201 -0.00500035612 0.0176082794 0.128107294 -0.0137729133 0.0214566886 0.110726222 0.150741264 -0.0353633799 0.159352034 0.0344046839 0.105027668 -0.0436317027 -0.140568197 -0.101473704 -0.032741949 -0.0106838001 -0.0316685364 -0.129210651 -0.0679190904 0.133524075 0.123635188 0.10515888 -0.0406672806 -0.0610394813 0.091179423 -0.110931419 0.079060778 0.116783403 -0.0516342819 -0.13402909 0.0207334459 0.0812019557 -0.0820832253 0.0703516304 -0.084283106 -0.143779725 0.0990532935 -0.0511374213 -0.0828005821 -0.0313504227 0.108964168 -0.0947234705 -0.129062966 -0.0215799771 0.0714171082 -0.013629063 0.100170761 0.121061251 -0.122967482 0.0534396805 -0.104779765 -0.0699278712 0.0862568319 0.0739753321 0.0636002868 0.113115802 -0.0251864307 0.0644432828 0.00374182384 0.00278152619 0.0277899243 -0.0400727838 -0.0934138894 0.0662064999 0.0740315318 0.156935647 -0.125642329 -0.0181016717 0.0717086047 -0.0788133815 -0.127949879 -0.0690763518 0.0889543295 -0.172350034 -0.100477748 0.00906703342 -0.0588162839 0.0321615078 0.143424392 0.0124900788 -0.0917625949 -0.0731047541 -0.0883597806 0.193930492 -0.0296085142 0.153995425 0.056007009 0.0701433122 0.0177569669 -0.0888565779 -0.0714818016 -0.00891659409 0.14154695 0.186709836 -0.0978443697 -0.0666612759 0.00330674648 0.15638712 -0.0387458205 -0.0156392194 0.029518418 0.177299723 -0.00526926899 -0.03754526 0.142723694 0.0702423528 -0.0628082901 -0.0962519944 -0.0736426339 0.00633251155 -0.0707057118 -0.0702924654 0.0923877209 0.00983795524 0.0308573246 0.117168695 -0.112862423 0.142043695 0.0422373824 -0.0367415026 -0.106207736 -0.0756792426 0.117217235 -0.0436312594 0.0131786875 -0.0440221652 0.097473219 -0.0796951875 -0.125066265 0.124969348 0.0678982735 0.139240772 -0.076055415 -0.025149785 0.022765873 0.00105297181 -0.0667744279 -0.142704338 0.0969702899 -0.131543919 -0.0815857351 0.107110865 0.00967122614 -0.0012585416 0.122922324 -0.0521417297 0.0298166722 0.069311209 0.0601125322 0.0429519527 0.0771004632 0.138341069 0.0400493145 0.0989085436 -0.115988038 0.0790331438 0.0363655277 0.0255561695 -0.127918124 0.0311952345 -0.0788384601 0.0947113633 0.144414648 -0.0196468234 0.0516601503 0.0307283401 0.0900717825 -0.0476232618 0.0064008832 0.0990933776 -0.105707288 0.0693743229 -0.124773592 0.0183412433 0.0536187291 -0.0942984521 -0.108814135 0.0376636833 0.0153515637 0.00406998396 0.0265448689 -0.135168463 -0.120080709 -0.121890008 -0.0570892245 -0.124603435 -0.0809690952 -0.0672037601 0.13564612 0.0371975005 0.112579718 0.014319554 -0.108871549 -0.0871257633 0.0682478845 0.0747066289 -0.0636163577 0.0898959637 -0.090092048 0.129719719 -0.0293056145 0.026599288 0.0581899136 0.0231975913 -0.0209574401 -0.00654032826 0.0904182643 -0.114409715 -0.0957838446 -0.044936955 0.0165019929 0.135205165 -0.117184259 0.0401535928 0.134790704 -0.0130638136 -0.0190193728 -0.0656322092 -0.113029599 0.00841842592 0.0140976086 -0.0268416889 0.0387401059 -0.00842970423 0.153014824 0.0431912951 0.0146862119 0.0648952872 0.159487918 -0.107046999 -0.0957565159 0.0974680409 0.0875119492 -0.106558517 -0.00956180599 -0.10792207 0.0139202345 0.0237691645 0.170654655 0.182866856 0.0337778889 -0.122317858 0.104945458 -0.00241611805 -0.0189588871 0.154625118 0.0883154273 0.0683931634 0.0424042568 -0.0296660978 0.0563843139 0.0966898203 -0.00994552113 0.18443881 -0.146624371 -0.0824042782 0.116611265 -0.108186543 0.0102908229 0.103478007 0.0179831069 0.131399289 -0.0788777545 -0.0424850732 -0.00943686068 0.083427988 -0.137433812 -0.0903602764 -0.105054028 0.124498554 0.088755466 -0.0493076742 -0.027949756 0.0751230642 0.115139447 -0.127784625 -0.0843564868 0.1399187 0.077144593 0.0511633307 -0.031733308 0.00182465685 0.149220034 -0.108540453 0.0384725034 0.0657235011 -0.150511175 0.0859548301 -0.031602826 0.153693542 0.100388199 0.153390184 -0.000678598415 -0.0295467041 0.107720926 0.0790676847 0.075719431 -0.0342444293 -0.105272986 -0.101255842 0.17369619 -0.0664026737 0.0892078429 -0.113553904 -0.0292268191 0.180186689 0.109818101 0.0164926779 0.0922102034 0.0830212384 -0.00248041586 -0.0470679849 -0.165345639 0.12901403 -0.0142368376 0.0695406124 -0.0135625293 -0.106628664 0.00303458911 -0.0594627149 0.0922242925 -0.0427582636 -0.0400496349 0.178693265 0.183653072 0.0575503781 -0.0235571191 0.048922874 -0.00795071851 0.0807336569 -0.0162454005 0.0234919712 -0.102429815 0.0907384083 0.126808628 0.0676393136 -0.167194471 0.0440359078 0.137444958 0.196354747 -0.0737531483 0.0279132333 0.123419479 0.058315713 0.0996660143 0.122060843 -0.0147102922 -0.121966586 -0.0944622681 0.142329201 -0.0832371339 -0.0505962893 0.133063897 0.134808093 -0.0476008505 0.0899724364 -0.00881881081 0.129822224 -0.0857772455 0.0220859721 0.128572404 0.0105826855 0.102347367 -0.0834473595 0.0246756226 0.0283253919 0.0684853047 0.101052776 -0.12661618 -0.086164698 -0.0609710813 -0.0304403771 -0.0863657966 -0.155161664 -0.00632416084 -0.00467219949 -0.137039587 0.111212932 0.166964158 0.0581296235 -0.103098728 0.0281474199 0.0514760315 -0.0765168592 0.0823773816 0.112306684 -0.119962633 0.108718097 -0.0627609268 0.0295355972 0.061364796 0.0683022588 0.164325893 -0.112472534 0.164340407 0.0510179065 0.00963465869 -0.0768766776 -0.04377589 0.096517235 0.148181275 -0.0579664111 0.0587554127 -0.0445416085 -0.03864979 0.00753601873 0.100015543 -0.0362141766 0.133224964 -0.0191601235 0.152707025 -0.0183888227 -0.059442617 0.104400992 -0.0854767412 -0.00690022996 0.0886835605 -0.0273776986 0.130314052 0.105323426 0.161530361 -0.108458608 0.0788582712 -0.0838668495 -0.08755178 -0.102095522 -0.096060887 0.169443905 0.0577232093 0.120626166 -0.0495226867 0.096482262 0.0348549932 0.199682817 -0.0202110633 0.0846792087 -0.0826675221 0.168889627 0.0768956468 0.0698982626 0.0966169164 0.14701435 0.00816824846 0.0694516674 -0.0396548584 0.109372504 -0.0630989447 0.117110327 0.0741739869 0.0132170692 0.100324839 0.00397197716 0.00673523871 0.0452416129 0.0100933397 0.181526616 -0.131816193 -0.0729396716 0.0076587908 0.0789732337 -0.0381261818 -0.082727015 -0.112304315 0.0861935169 0.0106273741 -0.114372075 0.0639646724 -0.0495705158 -0.0882112607 -0.0384016633 0.110038161 -0.0208555609 0.0701313913 -0.0773748457 0.078994669 -0.0506972298 -0.126550719 0.0909916982 0.00305084884 -0.128831208 -0.0751241222 0.134015068 -0.0960550979 -0.0882394835 -0.0782357231 -0.0271630995 -0.0772069469 -0.118653722 -0.0367000699 0.121580288 -0.0561355688 -0.138331473 0.125544876 -0.0296058543 0.0746538565 -0.0162153672 0.062437132 -0.0231160969 0.0841860995 0.0602102727 0.124720961 -0.0469560064 -0.140982583 -0.136888638 -0.0804962814 -0.0440254994 -0.107610121 -0.0446921699 -0.0253842529 -0.0920281038 -0.102073133 0.0864460468 -0.0521458536 -0.0281716138 -0.12248721 0.108246624 0.0153880091 -0.00498063862 -0.0892293677 -0.10190247 0.144978091 -0.0280745663 0.0683950707 -0.0395756029 0.0730759278 -0.00125575683 -0.0320034325 0.0139094684 0.127148211 0.00908912718 0.074735418 0.0604887865 -0.0297355298 0.06545984 -0.0896448418 0.131814942 0.0968025178 0.0828945488 0.0279728677 0.0131593319 -0.117338456 0.0965867341 -0.00240561157 0.10527093 0.0450791791 0.020316802 -0.00228017569 0.00896368176 0.0269619301 0.150818229 0.155888513 0.116388619 -0.137444928 -0.0795316845 0.119856015 0.0243596714 -0.113116957 0.0804922581 -0.0587314703 0.169540256 0.139007777 0.139136776 -0.0298109893 0.0328682661 0.0793280452 -0.132538036 0.10372372 -0.0968293995 -0.048270233 0.0598262101 -0.0930386782 0.00116990507 0.0488549769 0.034680441 0.0733197182 -0.0573430657 0.100507811 0.07086052 -0.101341262 -0.0982980579 0.124785289 0.075397715 -0.106355786 0.00812490284 0.118339553 -0.056385763 -0.073536776 -0.091773212 0.137454107 0.0820370913 -0.0450648963 -0.104219824 -0.0738624409 -0.0940235406 -0.0482450277 0.101593941 0.0674851388 -0.088429369 -0.0648159012 -0.0145470053 -0.138597056 0.086754784 -0.0615454912 0.0661340803 -0.0228997692 0.117914066 -0.0457687825 0.0386412889 -0.139924914 0.0699922591 -0.0633266196 -0.0395022333 -0.0829551816 0.123344138 -0.0269175917 -0.112848774 -0.127718255 0.0982895121 0.00638221437 -0.0981410667 0.0559622087 -0.0958316401 0.0377015024 -0.0980365872 -0.0954344869 -0.0140493829 0.0955453813 0.0694573075 0.132618234 -0.00649338961 0.162486911 -0.0644210577 0.0785710365 -0.0324906185 0.0616738945 0.131774053 -0.065818958 0.171095803 0.17064096 -0.0761323273 0.0750825778 0.0011169787 -0.0321993567 0.0106129069 0.0721838102 0.0231869202 0.0562860221 -0.00459621055 0.0595190637 -0.0840448812 -0.081757158 -0.0908710882 0.0917035788 -0.157435328 -0.0259377975 0.137452871 0.0223723184 0.0868041068 0.120598882 0.00937895011 0.155116081 -0.100786671 0.0592786036 -0.133716181 -0.080596447 0.0356213599 0.00931102037 -0.0950432196 -0.0769308135 0.0528798848 -0.120534495 0.00211757421 0.0449208915 0.0501613319 -0.0820226222 -0.0251336843 -0.0144774914 0.0143801719 -0.0881446749 -0.123702742 -0.0779745206 -0.139141038 0.0531492084 -0.12054643 0.0143575966 0.10409309 0.0117436498 -0.0403948873 -0.110459745 -0.0662581548 -0.1114856 0.00284221768 -0.052574873 0.0127746612 0.0721953511 -0.0164361224 0.0638382882 -0.0129706711 -0.0614550114 -0.060835205 -0.0864404589 -0.132438704 -0.108277529 -0.105507694 -0.0632813722 0.0298161656 -0.0744545162 0.0407374054 0.0746406019 -0.106669813 -0.10373731 0.12445356 0.0397888571 0.0220787525 0.0202515423 0.12489415 0.132801518 0.0626152232 0.0729931891 0.0667951256 -0.0493182763 -0.0674306005 0.0432554148 -0.0183124356 -0.104636811 0.063214466 -0.128902912 0.105252452 0.082741566 0.0974095464 -0.0517911017 -0.022460917 0.0845701918 0.00974364486 0.0755192861 0.122211002 -0.0929605439 -0.0323449485 -0.00275745941 0.13703306 -0.126241356 -0.0444845371 -0.0595683604 0.0420802012 -0.121947996 -0.0962189585 0.034678936 0.133177251 0.0845321864 0.0163415857 0.0860773325 -0.0293091722 -0.0457063392 0.116976917 -0.112045035 -0.114811443 -0.0518591814 0.0787069798 0.00974517968 0.11453367 -0.115365967 -0.0442552418 0.0140983164 -0.0719776675 -0.0414564312 -0.00496765925 -0.0418873541 0.0135209961 0.150040343 0.018279193 0.0670056716 -0.0128589002 0.00129946775 -0.0175037291 -0.10695336 -0.0812502131 -0.00151915848 0.105532125 0.156881258 0.107406408 0.0299301185 0.0623822287 0.105002947 -0.00692248205 -0.0561903454 0.0528496578 0.0645767525 -0.0349360071 0.0887037516 0.0392689034 0.160266221 0.0597992055 0.0948610157 0.0702525973 0.0522093065 0.116412245 0.0978843421 -0.148455024 -0.0602231361 0.0339148492 0.0785273239 0.143222392 -0.1303702 -0.0237084106 0.00480483705 -0.00544637674 0.123185195 0.066207394 0.138298839 -0.0356794894 0.176579177 0.0118068606 -0.0892722458 0.110782906 -0.105181009 0.0947949737 0.0951533318 -0.0448544845 -0.0301951393 -0.0468887351 -0.00123819872 0.101715624 0.0727012604 0.0642970651 -0.0423549041 0.0688230544 0.104760513 0.101078875 -0.0486233123 -0.0383191928 0.00411880249 0.110782482 -0.0667161196 -0.115577795 -0.107555278 -0.0455098785 0.0147148855 -0.0387868471 0.119055025 0.113614053 -0.0650238395 -0.116119511 0.0904611796 0.0928509012 0.036130257 0.063363038 0.088962093 -0.0774177462 0.00342554948 -0.0879331529 0.0105287833 -0.150841638 0.0330097973 -0.101222105 0.000547326345 0.0222531687 0.00177719572 -0.168144733 -0.102801181 0.127630353 -0.0044782632 -0.0718901828 -0.0688694715 -0.107435137 -0.00648547709 -0.137246579 0.116464987 0.0476300418 0.0752717406 0.0731578618 0.100494623 -0.0570759401 -0.0219588652 0.060869351 -0.0204062089 0.119302526 0.110057697 -0.0576427504 0.0296158791 -0.0681548789 -0.0114316642 -0.132044569 -0.0581466183 0.00203379989 -0.113137707 -0.0557623059 -0.0389640704 -0.129291847 0.121296927 -0.00863479078 0.109711155 -0.0720649809 -0.114019588 -0.0326023102 0.047520563 0.13592954 -0.136486098 -0.138339326 -0.130544424 -0.0851323009 0.0625912994 -0.0132746696 -0.0394350886 0.106071725 0.0412790775 -0.0212111101 -0.110249251 0.128628239 -0.00126694143 -0.0719275251 -0.0233325437 -0.0285679474 -0.119621359 0.0375062078 -0.0438100025 0.127980903 0.0915731937 0.0225412827 -0.0376331583 0.0345656835 0.135839269 -0.152139679 -0.00529241795 0.0757251382 -0.0507489964 0.0434143096 -0.109213173 -0.0232270882 0.110102899 -0.11542847 0.178933024 -0.146328598 0.080565691 -0.0281426851 -0.0798788965 -0.0825010538 0.102853604 0.176710308 0.105933264 0.142999679 0.0393511392 0.0469196737 0.155381039 -0.0202247016 0.170517668 0.00554223079 -0.067655623 0.128527895 0.00835985132 0.192383677 0.133679509 0.109696992 0.124087319 -0.0682987794 -0.0266768672 -0.0692853928 -0.15578717 0.111135691 0.152784228 0.182785735 0.115072496 -0.0234794691 -0.14098835 -0.0995724574 -0.0710255876 -0.0245003197 -0.121010661 0.214906275 0.126054928 0.0240032822 -0.0867983475 0.0794893727 -0.0287744384 -0.0114687914 -0.0225537177 0.00640312536 0.0122232735 0.148882598 -0.0123748779 -0.0145422816 -0.0797223598 -0.0824621096 0.050172396 0.197323322 0.0408616215 0.165366396 0.141404614 0.135947406 -0.0240413714 -0.115962207 0.193585813 0.0844455436 -0.0809815899 0.17395325 0.0107643139 -0.0946478769 -0.0715151504 -0.0346882716 0.0626753345 0.181155458 0.146124348 0.050194148 -0.016578801 -0.0884145498 -0.119957604 -0.0384309553 0.0239939895 0.071738176 -0.0269928221 -0.0424483791 0.0305357967 0.129883602 0.143514901 0.133759692 0.0695038289 -0.000178681847 -0.0580186956 -0.0775882527 0.136189267 -0.0727865323 0.0678651482 -0.049817346 -0.0649325028 -0.0088552665 0.156783015 -0.048809994 -0.0406761616 0.158696339 0.0890753791 0.136065736 0.160149634 -0.0645535365 -0.111809649 -0.0370648354 0.194843888 0.0213514157 0.102395862 -0.0400028452 0.0761639178 0.0394547395 0.0327902511 0.162416309 -0.00130897725 0.020087108 -0.0961144641 0.0304949749 -0.045121409 -0.0313251726 0.085803628 0.0291031022 -0.0910456851 0.0660064593 -0.068344146 -0.0507363826 0.0779818743 0.14303115 -0.0030358301 0.0674030483 0.147154242 0.0136561031 0.054678835 0.0916109383 -0.108775541 -0.0925002992 0.0767795593 0.0127289426 -0.0589483082 -0.119000398 -0.121735357 -0.0326918289 0.137503535 0.119247034 0.0430034138 0.0618253574 -0.0975219831 0.0736228079 -0.0372737274 0.153161958 -0.0518422537 -0.0204763189 -0.0608311482 -0.0457191877 0.147904009 0.0655161589 0.000597919687 -0.0326539725 -0.155328959 0.167974561 -0.0343649164 0.130426079 0.00636828598 0.141477138 0.057434544 -0.0446176901 -0.0856851637 -0.0112518054 -0.0844905823 -0.0406574272 -0.153341204 -0.0638041422 0.0856886953 0.0646770895 0.130005434 -0.0040921187 -0.0604991764 0.0163501818 -0.0783527344 0.120934926 0.161637381 0.115128227 -0.014278437 0.0813159347 -0.0724511221 0.0282054543 -0.00229437649 0.0406513065 -0.0661629364 0.0403244048 0.0202239044 -0.0395012945 -0.0349203013 -0.054835394 0.0276283957 0.0147262886 0.171100989 0.0777800605 0.113793746 0.029462589 -0.0530196279 -0.115720108 0.167530239 -0.0646177605 0.163341776 0.0519124195 -0.0436448865 -0.00622291025 -0.118472219 -0.11200767 -0.0293926019 -0.0851374194 -0.0335079357 -0.00235709315 -0.114089273 -0.125252411 0.0811661184 0.143217117 0.0657678992 -0.144908518 0.074894011 0.0680066049 0.0251119025 0.0237030108 0.130361617 -0.117028616 0.0458782166 -0.0999599174 0.0947599187 0.125303924 0.124342829 0.0346639715 0.0599663034 0.00829797983 -0.12561053 0.0641372502 0.076463908 0.0719346106 -0.0685527846 0.0804899856 0.133784577 0.0426442474 0.108722381 -0.120919384 0.0425808728 0.0937603563 -0.0493984073 -0.101851352 -0.0743994713 -0.0168575719 -0.0864764303 0.134012744 -0.0345991999 0.0765475258 -0.0502674989 0.148546934 0.154048041 0.112422884 -0.0310489275 0.0740677267 0.124277003 0.11085771 0.0675311983 0.0243612733 0.0620236471 0.0995759219 0.168544546 0.00101517653 -0.0610326529 0.0783934444 -0.0371061936 0.0923965722 -0.0212610923 0.133722678 0.0999392346 -0.089509137 0.0711376369 0.112333678 0.0204899628 0.17973493 0.0402919464 0.126110092 -0.00392504036 -0.000692084432 -0.0994881168 0.053651616 0.00273740292 -0.0712720156 -0.0218583867 0.00874059927 0.0359456241 0.0621751361 0.000342633168 0.0569748171 -0.0946905017 0.00123212801 0.142329782 -0.0667219386 -0.0533551276 -0.0563121587 0.071681805 0.101482138 0.161976591 -0.106341578 0.186979875 0.0426207557 0.140588462 0.0434038043 -0.0569239818 0.164891273 0.140453205 0.0955060944 0.0860626772 0.151230052 0.187594429 -0.18127653 0.131841645 0.15597482 -0.131366268 -0.165060341 0.128387749 -0.0202594791 0.0415013544 -0.0959378406 -0.0706115887 -0.121529371 0.0975343287 0.0213517249 0.183627069 0.0060459557 0.00643412722 0.113227792 0.169127882 -0.109208152 -0.151426241 -0.00370581448 0.0630536079 0.108941384 0.0786992684 0.0706410259 0.0392542407 -0.127687827 0.0440069884 0.0562533028 0.0949133858 -0.0812414587 0.092598483 0.0175310317 0.0891861469 -0.045317024 0.136054009 0.0259930789 0.00634265412 0.0573615059 -0.114974082 0.0966024846 -0.112975411 -0.0831556097 -0.0627890527 -0.00156623824 -0.0914661735 0.0832677707 -0.0836677551 0.0405839272 -0.072140947 0.0206351802 0.0579435751 0.175754473 0.0569373667 -0.0288151708 -0.133179188 0.0659383461 -0.0622974038 0.111952148 -0.0266913269 0.0549159199 0.126251087 -0.0655740872 0.103494681 0.000715725822 0.092549786 0.0262457915 -0.0120499283 -0.134702772 0.0988872126 -tensor_8bias 50 -0.0448136181 -0.0294532757 0.00591958454 -0.0112828789 0.0547700003 0.102279283 0.00554918963 0.0933698788 0.138683245 0.153071642 -0.0246890131 -0.066205956 0.0102847284 -0.0217106864 -0.11153923 -0.0833024532 0.0690509453 0.0574259795 0.0326761454 0.048058711 0.0932174474 0.173286349 0.0437983349 0.0692929476 -0.1425194 0.0164392311 -0.0525733009 -0.0926198289 0.01558726 0.124148585 0.159763724 -0.112289928 0.122134581 -0.0329846852 0.123975173 0.00884330273 -0.125247195 -0.108203024 -0.0963885933 0.12722528 0.105277926 -0.0898397416 0.108396716 0.133004621 0.111592449 -0.0548007637 0.112471558 0.0952548459 -0.0418147035 0.0495906435 -tensor_4bias 50 -0.0420062914 -0.0531011894 -0.0405919701 0.147642136 -0.0448930375 -0.0946018249 0.0368757285 0.0895275325 -0.00135793048 -0.0465053245 0.104558863 0.0464918055 -0.0928135291 0.145776987 -0.0437397324 0.0744188651 -0.0975865945 0.0791935027 -0.0783651695 0.0380954593 -0.0641139522 0.0319918618 0.0519438572 0.00847010501 0.124498516 0.182475775 -0.0537090674 0.0583103821 -0.0401648097 0.0082509499 -0.0618926026 -0.122952975 0.0772916004 0.014789585 0.101875983 0.0958903432 0.064464353 0.0122809373 0.149964184 -0.141134128 -0.0849211961 -0.0111745978 -0.0645377114 -0.0344211683 0.0628582314 0.0434207059 -0.0433468781 -0.0299602263 0.15525946 -0.0448016711 -tensor_2weight 2500 --0.0597149245 -0.0791020989 -0.00306093879 0.113323435 0.118636928 -0.0843338519 -0.109422937 0.0164578613 0.168519169 -0.0703572854 0.0312314406 0.0899977908 0.0896739215 -0.0900451988 -0.057600379 0.0125688771 0.0722137764 -0.0290169287 -0.0694356412 -0.111381322 0.0917039365 0.00489026168 -0.0580901131 0.183314383 0.195475265 -0.12944217 -0.0534728765 0.074898228 0.104391731 0.123983808 -0.013343907 -0.112780578 0.012140803 -0.086059548 -0.0357166752 -0.0239756703 0.114319615 0.0447655618 -0.0479144566 0.0672920421 -0.039890483 -0.0342019647 0.170793653 -0.0611885674 0.128305733 0.0986138955 -0.0286394898 -0.0084637003 -0.141880184 0.0852712765 -0.0972362906 -0.00365298078 -0.108331524 -0.0803529769 0.179286033 0.0825248212 -0.0778654292 -0.0261579026 0.0222861301 0.199497893 -0.0576646812 0.142493397 0.018432891 -0.0569059029 0.0996442288 -0.0431534536 -0.0794040635 0.136226013 -0.0141376657 -0.0539442487 -0.133499324 -0.0887252018 -0.0284489784 -0.0330936722 -0.03493331 0.0510139801 0.192286044 -0.00151121407 -0.0730649382 0.136111543 0.162208974 -0.115568712 0.176949784 0.0509604737 -0.140759781 0.0942156538 0.15726684 0.0260999966 -0.0726049989 -0.0243513957 0.156701684 0.138213098 0.112526298 0.0941351131 0.104868479 0.105548747 -0.0304395221 0.0303013697 0.162006006 0.100969627 0.145671651 -0.0650625825 0.0855033845 0.0336373001 0.141778961 -0.0337854326 -0.00864057243 -0.0735450611 0.0464367941 -0.0596558116 0.0623771138 0.14349483 0.0591385625 -0.00258940901 -0.0122495294 0.14376843 -0.0750882924 -0.0664319023 0.0305001531 0.0184416007 0.02046955 0.0551448241 -0.0694528297 -0.0207397975 0.154329836 0.0494214594 0.0845211819 0.16324687 0.0757716969 0.0634511784 0.120605588 -0.113957532 -0.0832520127 -0.0171713699 -0.0601701811 0.148658082 0.0899651572 0.118677244 0.0283228904 -0.0590552986 0.0797857642 0.0911054611 -0.022215249 0.176669434 0.000942089071 0.112969555 0.105361097 -0.0645927563 0.103734575 -0.0436463058 -0.0349569395 0.115449831 0.0422306731 -0.0804883987 0.0807694271 -0.0505034067 0.00729625719 0.137707859 -0.0488397889 0.162600547 0.15114215 0.0636213571 0.00903507788 0.128289327 0.163847417 0.000159272255 0.0834238008 -0.104029171 -0.0793354735 0.0541718863 0.00707805855 0.077409409 -0.00238326658 0.125607908 0.0396535546 -0.0790733248 0.0564618595 0.100612111 -0.0357064828 0.117824152 0.132536173 -0.0289113428 -0.014852941 -0.0426625349 0.0135453995 0.103636682 -0.0972069129 0.0516828299 -0.00995481107 0.0232977849 0.0937414765 -0.023261575 -0.0417088531 0.0130363097 -0.14154911 0.0702126473 0.00403433712 -0.0650982484 -0.0789552182 0.216502696 0.122806698 0.027723331 0.063748695 -0.0578081496 -0.0157720149 0.0400142148 0.133040145 0.0334649682 0.0875510424 0.110794596 0.0254984461 -0.0512416363 0.0211649723 -0.143576398 -0.0205686055 -0.111181781 0.0162975509 0.121590719 0.0656936541 0.155964255 0.0245984644 0.0352118239 0.133722454 -0.0262214299 -0.0336278044 0.156469122 -0.13011755 -0.027528204 -0.0602145456 -0.0930233747 0.0099506909 -0.0182043407 -0.118824221 -0.00373798492 0.178733543 0.00827211235 -0.0456761308 -0.0721783042 0.00670965109 -0.0409170277 0.00431948341 0.124081343 -0.0710947514 -0.104117736 0.093746461 0.171907842 0.110169716 -0.070081532 -0.0667723492 0.125274718 -0.0586081445 0.139502883 0.177527696 0.0687526166 -0.0820335746 -0.0490859933 -0.12959671 0.124665432 -0.0872184113 0.0991814062 0.0363627896 0.190564334 -0.0296370834 0.0762037039 0.0642659366 -0.0918578207 -0.054685194 -0.0458993316 0.146039933 0.0528010353 -0.0662797019 0.00561331725 -0.01142208 0.0815358981 0.0418767408 0.110681847 -0.00722674327 0.130719125 0.139407441 0.0292424969 -0.0270317923 0.0958031863 -0.0573824011 0.12932986 -0.043775145 0.059319146 -0.0913528278 0.115791552 0.078004472 0.115792975 0.107448012 -0.0748391598 0.0529222861 0.13462083 -0.141233921 0.166953042 0.168474525 -0.0700130537 -0.117624134 -0.00714296196 0.0268919822 0.163626537 0.0181761291 -0.0640345961 -0.0449223928 -0.141952619 -0.0284713078 0.147408575 0.139610574 -0.0779195204 0.106946297 0.117024481 -0.0941873938 0.09258876 -0.00288540404 -0.0543360636 0.0990853012 -0.0131437555 -0.0769185126 0.0146610877 0.0856351554 -0.090552628 0.124525517 0.072334148 0.00881079119 0.0441620275 -0.0116904415 -0.108310528 -0.0406595394 0.0195690114 0.0474229716 0.08090958 0.0409525596 0.077940464 -0.121437781 -0.0896261111 -0.134390622 0.099559769 0.107502699 0.0738855898 -0.0311849546 0.12491411 0.0958716646 0.048406072 0.0154622868 -0.130314365 0.148058236 0.00762006547 -0.0898886994 0.144507095 -0.0986621678 0.0791233629 0.0717348233 0.137725651 0.0972002074 0.0856728703 0.0490715429 -0.0558436215 0.177653775 -0.0812159926 0.174190253 -0.0374299698 -0.0888636857 0.0568164624 0.0539831966 -0.046500802 -0.088104479 -0.0324098729 0.123006575 0.174390927 -0.0655597001 0.118238717 0.165678978 0.115315504 0.149962306 -0.0967894346 0.0218543001 -0.0471816473 0.136843204 0.0418579951 0.130341902 -0.10788656 -0.0118869822 0.0904047042 0.10771846 -0.0203160401 0.0716004148 0.121576704 0.114085183 0.0813911036 -0.0706418529 0.0724584237 0.0249532741 0.156553373 -0.00865705032 0.134671107 0.0270873979 0.0121872211 -0.000827496988 -0.103484429 0.12091063 0.0684384331 -0.112646192 -0.0716026947 0.0865510404 -0.0961387679 -0.0992462859 -0.014073588 0.0901760384 -0.0329191796 -0.00509604625 0.0300773419 -0.113896236 0.0637915656 0.176874548 -0.0267044064 0.12591213 0.0827189684 0.00802489929 -0.0155225964 0.139007181 -0.0314813517 -0.0244915821 0.0454487316 0.113499463 0.147255525 0.0290668719 0.0196187459 -0.0756559074 -0.0474474952 0.000423966238 -0.125565693 -0.142974168 0.0265704822 0.100150622 0.124454536 0.128189385 -0.125751778 -0.0660192817 -0.0496372506 -0.025079174 -0.0945867226 0.00687600998 -0.108164005 -0.0449875742 -0.0757939294 0.0345570296 -0.0277413465 -0.0288163945 -0.0649622455 0.00885617267 0.0745153949 -0.0630018637 -0.00193145883 0.0763816684 0.156405032 -0.0854697376 -0.0829446241 0.0749762207 -0.0894886181 0.00361103215 0.0892253667 -0.00260828738 -0.0638676211 0.00824388769 -0.0162695311 0.0992859229 0.0285193995 -0.0495389216 0.0868888199 0.0549531169 -0.0304261018 -0.0182636939 -0.0249298904 0.159364238 -0.0837972984 0.11065764 0.0529022627 0.010110856 0.110683426 0.0919133052 0.0737009645 0.0965587646 0.0305129029 -0.0127110174 0.0697814003 0.103699945 -0.0261213128 0.170093238 -0.0687487945 -0.12052843 -0.104825832 -0.126111925 0.142499581 -0.128851101 0.0239339732 -0.0617658421 0.0295549762 0.119156219 -0.0673037395 -0.0500704497 -0.0940866619 0.0919373184 0.146928117 0.0300044753 0.0634653345 0.0144530665 0.0691985935 0.0211127512 -0.0590388924 0.0216479953 -0.0947615728 0.00890090037 -0.143075675 -0.00150912558 0.101439707 0.0146557204 -0.0631864071 0.0695210993 0.159808844 0.0115857897 -0.00928535312 -0.0489135161 -0.0782282799 0.125244364 -0.0499396287 0.140853539 -0.0960367844 0.0661479533 -0.0767967701 0.0877454206 -0.0602071472 -0.00595363509 0.115926109 0.178855419 -0.000521433423 0.0932693109 0.0502367616 0.152228653 0.104619421 0.0170960594 -0.103684276 0.0711491629 0.0488289595 -0.0617828257 0.0788236633 0.163875833 -0.0177440327 0.0156344157 0.109268099 -0.0375487134 0.0692994222 0.0731202066 0.0198084135 -0.0638355985 -0.0859975517 -0.0729697719 0.0573660471 -0.0556606203 -0.0930642337 0.145462662 -0.00594186038 -0.0928620845 0.139376998 -0.0553284064 0.0321234614 0.122701474 -0.123724081 -0.118198179 -0.0378811546 0.0980066508 -0.110459164 -0.0362307765 -0.0317853428 0.0111791994 0.0406676829 0.102611743 0.181697577 0.0510763824 0.123425812 -2.60259403e-05 -0.00361568225 0.153392524 -0.0397593305 0.0637998879 -0.145311564 -0.0472530723 0.0862638727 -0.0162773281 0.14536725 -0.0755254775 0.111187324 -0.0551111922 -0.000961930782 0.0612597242 0.163095102 0.0857004449 0.134386837 -0.0350845531 0.104531094 -0.0771434605 -0.067063503 0.171728879 0.167630181 -0.055467926 0.0409745835 0.177932739 -0.0550457239 0.107211053 0.0066946256 -0.00466190139 0.00958849117 0.156683907 0.111835107 0.0854923576 0.0730453655 0.121291943 0.0563716777 -0.111218229 0.0502161607 0.00677639991 0.18682304 -0.0360405892 -0.0132346814 -0.0414354391 0.0244455282 0.0727593377 -0.0868931487 -0.102708675 0.0923786163 0.154591203 -0.0693407878 0.106593266 0.162805468 0.0318478011 -0.031251967 -0.126320124 0.0780377984 -0.0280229542 -0.0295661092 0.0982864872 0.101671919 0.120140024 0.0414738134 0.105208568 0.0855850428 -0.0743453577 -0.000411789661 -0.0912177339 0.0883763209 -0.0493486412 0.123505704 0.166449651 0.105023161 -0.0776017308 0.162414178 -0.117349826 0.168772966 -0.101528428 0.13711141 -0.0164699852 0.0836634934 0.0919587389 -0.0328455754 -0.0752447918 -0.009731967 0.0349985808 -0.0194250569 -0.0934877768 0.185227469 0.0435012877 0.154062793 -0.0773278996 0.0929438472 -0.00670079701 0.0923984647 0.102851599 0.134395629 -0.120910235 0.170304388 0.0816775039 -0.0626546219 -0.0595025942 0.0244693402 0.0510447063 -0.115126796 0.115374513 -0.0176392663 -0.0923264623 0.122397989 0.0872549042 0.125522628 -0.100656673 -0.00508889835 -0.123501971 0.0617450103 0.139201492 0.051387202 0.00884217676 0.0175981224 -0.0483928584 -0.0360136032 0.05417905 0.022909319 -0.0881463438 -0.0459814519 -0.0131944772 0.0480347835 0.1673228 0.137037218 0.14526543 0.0446226932 0.0413857326 0.0612014905 0.132490978 0.0794302076 -0.0342803597 0.0863904208 0.15610376 0.121424645 0.0110774338 -0.0368165858 0.104494691 -0.0254124962 0.154777497 -0.0138444677 0.118794315 0.0259997863 -0.00128288078 0.142353535 0.155503765 0.0894722044 0.0424166657 -0.0683410317 0.0425889567 -0.10710226 -0.0400536358 -0.000696110365 -0.0677292421 -0.0385467038 0.0813434571 -0.0811068788 0.0311896447 0.0156664345 -0.147901028 -0.000463384727 0.0149115929 -0.112064414 0.0082620522 -0.024438085 -0.0304117016 0.162811249 0.128227949 0.0702825859 0.0863868073 0.0475940667 -0.100322515 0.118678033 0.153219327 0.103472307 0.106240071 -0.00983386766 0.0908779651 0.0990438908 0.0359329022 -0.0689288601 -0.0298974775 -0.115996465 0.0365048237 0.0202663038 -0.133836135 0.0477452688 0.0554565825 0.0893209428 -0.0239705388 -0.0640460923 0.13875863 0.105679706 0.0737722218 -0.0183230489 -0.0404619724 -0.0105633233 -0.0761946291 0.164347902 -0.0172834061 -0.094510898 0.0345971286 0.0106645143 0.194848433 -0.0547695532 -0.106579714 -0.0123255178 0.0403105766 -0.0313294157 -0.00499826716 0.100480273 -0.00637257611 -0.0778858364 -0.0411514193 -0.00478123594 0.0215911381 -0.0732492954 0.194053754 0.0539965741 0.113155119 -0.0752726197 -0.0769620165 0.193490297 0.0789649859 -0.0801189467 -0.0407260284 0.0242670309 0.10401839 -0.0375796929 0.0314083621 0.0724864528 -0.0512620732 -0.137483209 -0.0787761062 0.0968051478 -0.087627165 0.190840423 0.158406734 0.0864097029 0.133482426 -0.0359799229 -0.0242824815 0.0159911942 -0.0485018119 0.144926906 -0.058078561 0.111498684 0.065325208 -0.0478983261 0.0192427151 -0.0443237759 0.0666328892 0.0329897963 0.134647146 0.0964290947 -0.109050713 -0.000148722494 -0.00192280754 0.07619223 -0.203230783 0.0140038347 -0.0237551313 0.113345623 -0.0610194132 -0.123688005 0.00247963867 -0.0892862976 0.0488414988 -0.0904518217 0.174596399 -0.131306589 0.0763920173 0.150487289 -0.153154299 0.0224560183 0.0973761827 -0.0426088274 -0.0505751371 0.104424372 -0.133484393 0.0833508074 0.0194486398 -0.142139688 -0.0637170449 -0.101654164 0.0209246967 -0.140727118 0.00838450529 0.0946883485 0.148535386 0.103271469 -0.114976875 -0.0612382665 0.0309834275 -0.080561161 -0.0438423492 -0.0763120055 -0.0871841311 0.0487271659 0.192025125 0.0274662226 0.0726716295 0.126637235 0.11231558 0.00397039996 0.192436248 -0.0129653281 0.087810427 0.101742446 -0.0811328292 -0.0573779941 -0.00594198145 0.157350421 0.0783605501 0.201680467 0.0806498379 -0.0635789633 0.173802316 0.046798829 -0.111674123 -0.105566561 -0.110239312 0.137469321 0.0206604954 0.190349817 0.169501752 0.126503631 0.167581499 -0.0180790145 -0.0187429003 -0.0419336259 0.0993470997 -0.0918064341 0.110130824 -0.0955291986 -0.0254780296 -0.0506573617 0.0148899863 0.104261681 -0.0428472869 -0.0548303574 -0.0506917909 -0.0156032071 0.0699625984 -0.15484792 -0.0459163263 -0.110004574 -0.0441328883 0.197784573 0.0838625804 -0.0522456057 0.0942399129 0.0829744935 0.057554815 0.109780334 0.10274224 0.154105842 0.10721004 -0.0166563932 -0.0466450788 0.0287705809 0.10081622 -0.0953564495 -0.0839984119 0.101254053 -0.0738965794 0.0948913991 0.167173281 0.0429453701 0.0383497626 0.091592297 0.106202237 0.156739905 0.0656498298 0.13198331 0.147935465 -0.0810967013 -0.018950887 0.00158079178 0.164120257 0.0798614174 0.0186908729 -0.132187113 0.124651186 0.112890542 -0.138516054 -0.0781108215 0.106892236 -0.0400021151 0.121923052 -0.00202938612 0.0550861284 0.115557112 -0.0589016899 0.103443392 0.138156414 -0.156100512 0.141828462 0.163233846 0.185174793 -0.134581283 -0.0946970135 0.0305168517 0.0545697697 0.122422308 0.0272117686 0.092828013 -0.0790654421 0.0789960772 -0.113887571 0.139471903 0.177440643 -0.0422447994 -0.0695037767 0.133454293 0.00472546089 0.0994608104 0.0261894893 -0.0549818948 -0.0645656288 0.0830694512 -0.122029178 0.110560618 0.021865055 0.0957053602 0.153808683 0.153240129 0.00273627671 0.107639149 0.0361639187 -0.0830527321 -0.0444489233 -0.00363346422 0.0640293211 -0.0754873753 0.0189995058 -0.1402542 0.162265539 0.138485089 -0.0446577705 -0.000309297611 0.169430003 -0.0834633186 0.00541008823 0.034290649 0.0778444111 -0.0421900116 -0.0198174808 0.0522686094 -0.0672751144 -0.0208641775 0.151453003 -0.0738410801 0.043029502 0.0127242813 0.0245345235 -0.0372197554 0.090350613 -0.0694454312 -0.045643907 0.0334979966 0.0695154294 -0.0134842489 0.141971424 -0.074017182 0.0237953663 0.106125079 -0.0695564449 -0.145977944 0.166921124 -0.0877014548 0.0716962293 0.0305217579 0.117084034 -0.0790342316 0.0964029655 0.13598761 -0.134147704 0.189042479 0.182969391 -0.107122943 0.0923936591 -0.0212771464 -0.0149747208 -0.0244534928 -0.0772951767 -0.0497068875 -0.0293945558 -0.00921653118 0.105580427 -0.0721728429 -0.0770729706 -0.00264244643 0.00397060299 -0.139186502 0.0292970631 -0.0475326255 0.08476118 -0.0867509693 0.126799867 -0.0671816245 0.160967201 -0.0940391421 0.036259234 -0.0219887402 0.0285151005 -0.0580190904 0.13070558 -0.171060801 0.135117233 -0.0228546057 -0.107383102 0.0618890449 -0.0694213063 -0.0618949234 0.133807048 0.17643562 0.128621712 0.0101680793 0.176956698 0.0840079859 0.097374849 -0.100451432 0.0399295464 0.172356963 0.00101820775 0.145156473 0.104961276 0.0815578476 0.146059379 0.107903466 -0.121531352 0.0570647754 0.047216557 0.170416638 -0.0707143247 0.0478855185 0.0394140966 0.0102794804 0.125966758 0.135744303 0.0133625893 -0.0925729126 0.136732638 -0.0822476298 0.154679909 -0.13795127 -0.0215002652 -0.0249491148 0.0930954218 -0.106826156 0.10373725 -0.0187940467 -0.0534816161 0.134281337 -0.0336386599 0.114718519 0.0787281469 0.0239171404 0.0408289284 0.117535852 0.0759770721 -0.0240571704 0.0102049625 0.0229755491 -0.0571867488 -0.0825752616 -0.0630160421 0.0233204234 -0.0362254977 -0.0341095217 0.110644877 -0.0943035707 0.0922036394 -0.052436009 -0.0474082902 0.0808229521 -0.0361060351 0.0341569446 0.127944812 -0.0520493798 0.00435285084 0.0624745227 0.0890819654 0.120440952 -0.125715539 -0.0429935902 -0.100851558 0.115291968 0.103215486 -0.0138821993 0.114144072 0.144928649 -0.0672504827 0.0337884873 0.178193495 0.0654718578 -0.032493107 -0.0594031401 -0.0149731291 -0.108951643 0.148577735 -0.0590856262 0.123775907 0.129149333 0.120561078 0.0938586891 -0.0787900835 0.157910496 -0.0426145568 0.124586366 0.153994665 -0.0279286914 0.0712008774 0.132654876 -0.058968544 0.152131483 0.0144725023 -0.0846911147 -0.0830136165 -0.0503571592 -0.129175395 0.107455552 0.0278498847 -0.0376918465 0.125603542 0.0866251886 0.0744670108 -0.0176635683 0.081767872 -0.116470791 -0.076551564 0.107822165 0.0519237667 0.169635236 0.000728378771 0.195453733 0.0923824608 -0.00255433074 0.130946428 0.033110749 0.0234523341 0.159311384 0.0584074371 -0.0724054351 -0.00702239107 0.0528859086 0.0255747363 0.123749338 -0.0502231903 -0.127378836 0.000618861057 0.168384925 0.0564158484 -0.0874255598 0.0325605795 0.110717267 -0.0185731165 0.0527723245 -0.0973552689 -0.0553385355 0.099351272 0.126928583 0.037081793 0.159003794 -0.0413037315 -0.0480074212 -0.0216640383 -0.109965399 -0.0768443644 -0.0550187156 -0.02983227 -0.041209314 -0.0762111172 0.00517032761 -0.021049602 -0.082603015 0.128907517 -0.0880745947 -0.101325043 0.0862258524 0.0882336497 -0.0198778603 -0.0331840217 -0.109220311 -0.107734382 0.0400603004 -0.132840812 -0.0447417721 0.00368672935 0.0691269711 -0.0319370776 -0.0310771763 -0.152229711 -0.126779377 -0.0754033923 -0.106641732 0.0897259042 0.0624115281 -0.084738642 -0.0616546944 -0.00815979205 0.0202450287 0.071183376 0.0515766665 -0.0504490845 0.0691114515 -0.121255346 -0.0616305098 -0.121116355 -0.0412869304 0.0541755706 -0.00845611095 -0.019792689 -0.0873068273 -0.1018041 0.00566182006 0.155241832 0.0709863603 -0.0654985607 0.0548714921 0.1288618 -0.0813171715 -0.0274352692 0.050162863 0.0174831059 0.115988865 -0.0983620062 0.00916780252 -0.111271255 -0.0194736812 0.122007161 -0.05491817 -0.155907109 0.0968826488 0.0766369477 0.120036878 0.118291102 0.144479945 -0.109428965 0.0191301908 -0.131886169 0.0119547276 -0.178280339 -0.074061133 0.0725457594 0.047452867 -0.0980938748 0.00940938015 0.17627655 0.0703046694 -0.0134887863 -0.0899318606 0.140372112 0.076489009 0.0844909772 -0.0435512364 -0.0578976758 0.0688769594 0.10411185 -0.114739448 0.11660511 -0.0925834179 0.0873279944 0.175916493 -0.0425273553 0.143908709 0.0721898228 -0.0761375278 -0.11759565 -0.020234637 0.0312824845 0.0598440468 0.110991903 -0.0383540764 0.105179779 0.0467798598 0.167937577 0.0386657864 0.0542986952 0.0948523358 0.0433264002 -0.148534298 -0.0129731102 -0.0280822664 0.0361635387 -0.00415288471 0.138104618 0.10342367 -0.0275076535 0.116774455 0.101908013 0.0884111896 0.0440905578 0.117984377 -0.013649038 -0.126955181 -0.0553081445 0.00625609886 0.133344293 0.00572153553 -0.0223944504 0.177496225 0.0815475732 -0.00271677272 -0.0417993777 0.106240101 -0.0412545837 0.18434307 -0.0274373218 0.178807244 0.0294444654 0.0646818206 0.0889737979 -0.0807103813 -0.0720598325 0.151093379 -0.033041738 -0.117520221 0.126272097 -0.106381506 0.037757419 0.00232348521 -0.0547570363 0.076936692 0.107204638 -0.0481706001 -0.00307636359 0.132705554 0.0537186749 0.0473928303 0.0915753692 0.119206257 0.176839486 -0.155492246 -0.151921168 -0.101832837 0.0815496519 -0.0724380389 0.0371969007 0.0601178631 -0.058801692 -0.0965428352 -0.0115199285 -0.0381022878 0.105424263 0.0742912889 -0.0960293785 -0.0112020867 0.0849209279 0.0520411208 0.122885831 0.151498944 0.122882292 0.153430328 -0.0156365 -0.0119283618 -0.0820145831 0.0155551042 -0.149645686 -0.0800471455 0.094618395 -0.0650205612 -0.0104006175 0.131104678 0.0416966155 -0.0876214504 0.0637880862 0.0821948424 -0.0084727779 -0.0978877619 0.168948174 0.149989381 -0.0299459342 0.0972742289 0.000181726937 -0.0922966674 0.176449746 -0.0968618467 0.0333946943 -0.0902563259 0.16238676 -0.0905583873 -0.03544081 0.152139008 -0.119497493 0.0413080677 0.0570814125 -0.000148650375 0.112468541 0.058299277 0.0766911507 0.0998951718 -0.0232174434 0.167854264 0.168274015 -0.0583992265 0.154771283 0.13650085 0.117386065 -0.0722455084 0.0544443242 0.0949110314 0.143746346 0.108085796 -0.0419899784 0.143864065 -0.025696218 0.133225232 -0.111586809 0.0990424305 -0.0993287787 0.078866601 -0.0784433931 0.132836834 -0.106675968 -0.1069621 0.077828303 0.187477276 0.0277358871 0.0359606817 -0.0598008744 -0.00338539528 0.00320412288 -0.0859301984 -0.0230135676 0.163934514 0.0130302329 -0.0939015523 0.0754140466 0.107570499 0.00613959366 0.135453999 -0.0996567607 0.109139353 -0.100983992 0.0580916367 0.0119609917 0.0797068924 0.151979074 0.16187796 0.075011678 0.0931628644 0.0360457934 0.00347460015 0.0319518261 -0.0867329165 0.0962795615 0.0821009502 -0.0356594585 -0.02422712 0.0152814919 0.0527246408 0.132090867 0.144757852 -0.0431858338 0.139542729 -0.0139607172 0.171679854 -0.0980732143 0.180641383 0.186638147 0.0636721104 0.0896847546 -0.0584419966 0.143411934 -0.0701248869 0.0328124799 -0.0829237774 0.145310253 -0.00750299264 0.118470781 -0.0582295991 -0.0695642605 0.0633131266 -0.0640299097 0.159781903 0.0272349548 0.109332368 0.168833092 0.0473833978 -0.0882677585 0.0282821339 -0.0536800846 0.0558247045 -0.13541919 -0.107939526 -0.0673646927 0.0988901109 0.144200847 -0.0628962666 0.0835133493 0.0278197322 0.112931602 -0.0297801625 -0.00817243289 -0.119054325 0.00599690992 0.16512607 -0.0284597538 0.133023679 0.108261056 0.175182506 0.148225054 0.0877180696 0.0722195581 -0.0575301908 0.0970565677 -0.0798201114 -0.0708039552 0.0288234167 0.155979618 -0.0531227216 -0.0605055392 -0.0713208541 -0.0868417323 -0.0402501673 0.0165239926 0.181012854 -0.160325661 0.0927709043 -0.0364443325 -0.0312899835 -0.109137982 0.111598797 0.11623574 -0.068648465 -0.0206921138 -0.13928856 0.0243328102 0.0555803142 0.132689178 -0.0608246513 -0.0354508683 0.172304466 0.0327581689 0.170413792 0.163448825 -0.0454982035 -0.0583826788 0.0481920801 -0.0540810302 0.182651609 -0.174388662 0.155782059 -0.0293228272 0.015085889 -0.108551085 -0.121923782 0.0719362497 -0.168311208 -0.120032616 0.0659890622 0.115256436 0.131331578 0.119614907 0.178487614 0.09089607 0.00386154489 -0.0554215722 -0.0119620096 -0.0467320494 0.0933647081 -0.0323850662 0.141180277 0.107638344 -0.0253946837 0.173948079 0.137527362 -0.0205912776 0.010461146 0.142752916 -0.0192131344 0.107507631 0.14598392 -0.0370280705 0.0341507122 0.138847277 0.102246776 0.067923449 -0.00280428468 -0.0519020297 0.0715199634 0.151722491 0.00090766669 -0.0237915833 -0.00762919895 0.0348006599 0.0952979177 0.11985556 0.143176049 -0.00832088478 0.0575121641 0.0766030177 0.0191355087 -0.0326572359 -0.104509436 0.0300794542 0.062483415 0.132271856 0.100359082 0.0538696684 0.0351427197 0.071405977 -0.040407598 0.157145143 0.0427171327 0.031071905 0.085857898 -0.140569568 0.0797872916 0.160493046 0.0627081841 0.0700800642 0.0625736564 -0.0243169125 -0.0110870786 0.150555253 0.057589937 0.166157231 0.120003015 -0.0579976961 0.0612958968 -0.0452432111 0.054822579 0.0524013229 0.149773791 -0.0714917257 -0.023807399 0.0454889461 0.0992185473 0.0658304542 0.02966832 0.101745747 0.0872673169 -0.125905886 0.0570483804 0.135766774 0.113360628 0.0364370681 0.0223136339 0.0019436914 0.0164411664 -0.0682152584 0.0921245515 0.0128188692 -0.0173182599 -0.00139826769 0.0518905111 0.0928640962 -0.0397072323 0.103173278 0.00413324265 -0.0670715123 -0.0588557124 -0.0857635513 0.0062935818 -0.0887346044 0.138430178 -0.025497932 -0.0639876872 0.0670730025 -0.0446693785 0.0594656765 -0.0820678324 0.157999322 -0.182115525 0.00614317786 0.0362051241 0.0657482669 0.106698424 0.191083074 0.135481074 0.0106980857 0.00957398489 0.0367676988 -0.0136649683 0.0735901445 0.0689036474 -0.134390131 -0.0718721896 -0.00467563979 -0.00951108709 -0.0722589716 -0.00359070604 0.0947275981 0.126477376 -0.0131597025 -0.12138617 -0.0347730219 -0.00653850706 0.135745063 -0.0925999731 -0.0944521725 -0.0723555461 -0.153894082 0.188464135 0.0043896623 -0.00922763348 0.213275999 0.00525289867 0.0494968928 0.0589311495 -0.07227844 -0.0655579418 0.173283234 0.0376433432 0.156717747 -0.078233324 -0.0844314247 0.0813227743 -0.0925660729 0.124083593 0.153772607 0.068052493 0.0846082121 0.127492517 0.147963956 0.0722059608 0.154211655 0.0819868073 0.111736804 -0.0420656316 -0.156026006 -0.135763094 0.132754937 0.110232912 -0.119001575 0.119631797 0.100629732 -0.0977817997 -0.0254859347 0.0714960396 0.0816458389 0.0694845393 0.107111402 0.0238910895 -0.11218477 -0.117907912 -0.0337541923 -0.114325784 -0.114220396 -0.153953075 -0.0372015573 -0.0811879858 -0.0323405489 0.128496513 -0.0856468379 0.0182948634 0.0260079242 0.0171604026 0.0346086845 0.114011452 -0.0935687795 -0.011811249 0.131105796 0.0234864186 0.0406528525 -0.114756532 0.122138247 0.0470963418 0.0791598186 -0.0303514749 -0.0687026605 0.194362417 -0.022815939 0.0876949206 0.0480690859 -0.0250811335 0.148164272 0.0488567054 -0.0881642401 0.198841885 -0.0379917733 -0.00708210841 0.041188851 0.0292753335 0.0645876899 0.00623326236 0.0477512181 -0.000584310852 -0.106165297 -0.015090609 0.0250555836 0.0323013403 0.041745469 -0.133533582 -0.0977768302 0.0384080522 -0.014046954 0.0909342691 -0.0820525289 0.132567018 -0.0924441591 0.155193165 0.110916458 -0.0571177192 -0.141427621 0.0474443957 0.0907076299 -0.064002499 -0.0244310405 0.0177996214 0.0721451417 -0.00413550809 -0.0516352393 0.0421805531 0.131461561 -0.0123250391 -0.0480676852 0.0910230353 -0.0799057558 0.0509942733 0.112865351 0.104875125 -0.085275501 0.0623678714 0.0686701387 -0.080322735 0.0964362845 -0.0460433923 -0.0657152012 -0.074650757 -0.0327339992 0.16137737 0.0697549805 -0.108303167 0.00211762171 -0.0693195313 0.00135752186 0.013155547 -0.0307769664 0.0750898421 0.0616175942 -0.0536347926 0.0857256502 0.0237529613 -0.021395212 0.00901291613 0.00728149712 -0.113782011 -0.0464270264 0.167675585 0.0525661036 -0.0210970417 0.156918585 0.061230965 0.0992827117 -0.0678927675 -0.151397571 0.075506404 -0.0497730784 -0.0540236272 -0.0624297559 0.0426682606 -0.0725995973 0.076271072 0.116657615 -0.0210639741 -0.0213112682 -0.0862966105 0.0802445114 -0.0198064968 0.176064715 -0.0988808721 0.101349302 0.119555868 0.128017023 0.0522831939 -0.0366001837 0.145147249 0.0257630255 0.0434764959 0.111463912 0.0327721126 0.123577833 0.0872525647 0.162450716 0.199674487 0.164900869 0.0993724763 -0.144233376 0.0194361061 -0.0317708701 -0.0597182736 0.0684302226 -0.0456766337 0.0549333505 0.101966113 0.0526848994 -0.118291982 0.0568728857 0.125453085 0.107232653 -0.0476998277 0.135429114 -0.130028173 0.0840768516 0.158564597 0.0256799646 -0.0523730144 0.031615708 0.0914076194 0.188867077 0.143099532 -0.0071679526 -0.0894442722 0.0995981768 -0.0183322281 0.0598267131 -0.0731880441 0.0917812809 -0.140130281 0.00585151743 0.00310393353 0.152784497 0.158248886 -0.137339488 0.0995804071 -0.0764262974 0.171144649 -0.0672199726 -0.0027869083 -0.102201961 0.107543819 -0.0715040565 0.214907989 -0.0438209847 -0.119756781 -0.00894473586 0.137930363 0.126813769 0.0999665186 0.0293341558 -0.0930798054 0.0649531037 -0.101108015 -0.0516813099 -0.0952921212 -0.0980607495 0.0295815989 0.0134664373 0.0469121002 0.0314295888 0.116126269 0.144171268 -0.109329514 0.102265686 0.0232829526 -0.0721712261 0.0460076891 0.00450235466 -0.078920044 0.120493911 -0.0557029285 -0.0781340078 -0.113349713 -0.146189392 -0.0182035994 0.10833291 -0.0549698845 0.111201309 0.0124426633 0.0892337486 -0.10926117 -0.00912767928 -0.0338271856 -0.105445758 0.211141855 -0.119850591 -0.180531412 -0.00868519023 0.218875796 -0.17652452 0.0990117118 0.0245204382 0.149801061 -0.00586622301 0.0881711915 -0.0257251803 -0.0817466527 0.0768139511 -0.0895878077 -0.107276358 0.0430153459 -0.162487656 0.0267249998 0.130476043 0.0166731309 -0.127173543 0.0398012064 0.0680280626 0.0879124179 -0.0295924786 0.0133784497 0.000692039728 -0.0751812905 -0.0830598176 -0.131929606 0.135406211 -0.112499252 0.0126099214 0.00665883068 -0.00475356216 0.0249228943 0.102894537 -0.0225983374 0.061107967 -0.0330257192 -0.0597277209 0.179125711 0.050645031 -0.0669407696 0.158360988 0.205021739 0.00652983878 0.121627569 -0.0640201867 -0.0522308983 0.0900547057 -0.122924723 -0.114422083 0.0658481047 0.0173800383 -0.0786071345 -0.0717952251 0.0280309897 0.0219677705 0.0759255365 0.084643513 -0.0923913196 0.166452676 -0.0389173701 0.0821120963 -0.113245606 -0.0364514329 -0.0393794179 -0.0335422643 -0.0305757262 0.0866778567 0.127289161 0.0190164503 0.0835306719 0.152247652 0.138753071 0.130225837 0.00651189126 -0.148018956 0.0714074373 -0.10346128 0.143939614 -0.0185987595 0.0229391046 0.101105355 0.0875614211 0.168692231 0.0672137067 -0.116006561 -0.069646351 -0.0440914668 -0.0887304097 0.0605254248 -0.0931111053 -0.0291967671 0.0453826375 -0.0647103488 -0.08282765 -0.0912294909 -0.0972726122 -0.000134341666 0.117167793 0.14967677 -0.103549697 0.070657745 -0.0581128635 -0.1150591 -0.0362361856 -0.00619550841 -0.0881526992 -0.0659523532 0.0312618017 -0.0897310749 -0.0618666895 -0.0287295692 0.176449448 0.159317598 0.0456931591 -0.0967225209 -0.0319313519 -0.0248310566 -0.021381963 -0.0773523748 0.0478850566 -0.0207856018 -0.0701669753 0.147760212 0.0727391243 0.0043316409 0.159602404 0.0948895067 0.0890363902 0.00142308639 -0.0310148094 0.0753311291 0.0981000364 0.0223886538 -0.0931696519 -0.0682313293 -0.0667512491 0.0491678901 0.0186522752 0.137692913 -0.0236726794 -0.00927374046 0.114333265 0.104560494 -0.0608054437 0.204097703 0.205062792 0.094779104 -0.0898220837 0.0206578374 0.142832264 -0.049721241 0.0423907191 -0.0763793141 -0.093291074 -0.0389654711 -0.15401794 -0.00734718097 -0.0189076923 0.170103803 -0.0205226652 0.0636992827 -0.0644718111 0.108284459 0.138413697 -0.0257913806 0.00794647168 -0.0307983607 -0.0936900675 -0.0606061853 -0.154241458 -0.00459569367 0.141765147 -0.117230393 -0.0629995316 0.125530377 -0.0406536944 0.200848967 -0.104435295 -0.123753838 0.0367464684 -0.141358763 -0.0206649359 0.202375904 0.022151649 0.0812491998 -0.131115228 -0.0367442705 0.172151536 -0.0442129597 0.0605035909 -0.082755737 0.0954060331 -0.0230737943 0.0334409699 0.13487561 -0.110517241 -0.0225322787 0.062106967 0.119216867 -0.0953765139 0.14884612 -0.0287193451 -0.0106208287 0.153283879 0.0467924252 0.177715346 0.068970874 -0.0450268574 -0.0325655416 0.0547090545 0.0860708728 0.0107891522 0.0944289416 -0.0758065358 -0.0732419118 0.183351845 0.0771918073 -0.140022755 0.0537439361 0.149029925 0.131033957 0.0027445287 0.107114777 0.0976283476 0.156666949 0.192800567 -0.0668931082 -0.048805628 -0.0531376973 0.00703070173 0.135483757 -0.0116650816 0.0274772495 0.0685842708 0.0897314772 0.168943107 -0.0672629923 -0.132059872 0.120118931 -0.0828481987 -0.0856146142 0.0526181571 -0.0933162645 -0.00942354929 0.169790015 0.0899278149 -0.112179808 -0.0518537723 -0.0197913386 0.107157297 -0.0402358808 -0.0525892675 -0.0279793683 -0.0564811975 0.111123219 0.0285395123 0.103563443 0.0289735198 -0.119780019 -0.138365477 0.0683289915 0.0212225579 -0.136515081 0.178696275 0.0141859809 0.0798230842 -0.0112462975 0.00572248595 0.165446609 0.1772625 0.0877416283 0.0257206522 0.0255760346 0.109550558 0.0173372477 0.0941226184 0.156007677 -0.0609857477 -0.168111339 0.112304233 0.141951621 0.0942777917 0.0506730452 0.0117849326 0.110127218 0.11164064 -0.0365206338 0.17797333 0.0919450074 -0.0340361036 0.0371871293 0.137831658 -0.0145821422 0.171712354 0.0708972588 0.00538893556 0.138060763 0.0720485598 -0.102158196 -0.00157117634 0.161901429 0.164550751 0.0551112816 0.00221332046 0.163098738 0.0364854559 0.137255967 -0.0139900306 0.116951399 0.0309598278 -0.0843858272 0.149897203 -0.0105481371 0.129127055 0.0816876367 0.0559929311 0.109518707 -0.0339205861 0.102632456 0.00931944605 -0.00911251083 -0.0575628281 -0.0238492247 -0.13892445 -0.0204126779 0.140317202 -0.0053701899 0.0747784898 0.0447182879 0.0408651866 0.00653599948 0.0279005133 -0.0747578964 0.0836786777 0.115476467 0.0596954301 0.0199178606 -0.0829307586 -0.0326780863 -0.113550745 -0.0547465198 -0.0249119569 -0.113830656 -0.0401589163 0.093042478 0.0902988166 0.0128500452 0.0456254408 0.0772466585 0.159909874 -0.0500820577 -0.0781232342 -0.051164262 0.0975523815 -0.0487125441 -0.0570325479 0.0562490486 -0.0282622576 0.0981458947 -0.131002113 -0.157566205 0.0727057457 -0.146405846 0.146773353 0.101571307 0.130754113 -0.0140098277 0.119490281 -0.134308934 0.0664061159 0.100537203 0.152347729 -0.066916585 -0.0918895528 0.0884105414 0.150467262 0.190053374 0.0404316857 0.0131604439 0.0616949201 0.0204220749 -0.00819397438 -0.0989773422 0.135392219 0.034526363 -0.0443142466 0.097198464 -0.0308621768 -0.116274439 -0.0293579884 -0.0962141603 -0.0434402823 0.199946746 0.112885021 -0.00315347826 -0.0176970661 0.0463292599 -0.0291460063 0.0120348148 -0.00303878007 -0.0146520687 0.155439973 0.0927805379 0.112616234 -0.10787008 0.199581042 -0.0556919798 0.0174912345 -0.147659183 0.00716985716 0.0517262444 -0.132578805 0.0649443641 0.0491473936 0.0128262425 0.0570766106 0.14404805 0.0125820432 -0.0378993787 -0.0890177265 0.0629585683 -0.0781803727 -0.0468946658 -0.0970080867 0.107453912 -0.0726181269 -0.0156788174 0.109249413 0.179553613 0.0696004778 0.0497379862 0.1262182 0.070380047 0.036693722 0.153491363 -0.0165516399 0.11358387 -0.0461250581 -0.0716388002 -0.063194491 0.182627037 -0.0979600772 -tensor_0bias 50 -0.0687436238 0.121526092 -0.0379632339 -0.0472203717 0.085131444 0.0979627594 0.12071842 0.0766481757 0.111980788 0.0231025815 0.0757955536 0.0592933744 -0.0364503562 0.118033081 -0.0119619071 -0.0852706879 -0.057033807 0.104488559 -0.0188827403 -0.00805457216 0.106946483 -0.0220598206 0.0901777968 0.154047132 0.127174735 0.0643620193 0.119487718 -0.0108337859 0.10897246 -0.0441434234 0.0685871169 -0.0381012894 0.0594230555 0.0113021079 0.163605392 -0.0388617851 0.0634208769 0.104776219 0.0779020116 0.0259756818 0.152426898 -0.0797943622 -0.0156979878 0.161263436 0.0584381446 -0.00747399358 0.0999026075 0.0664042234 -0.0277017541 0.0495129935 -tensor_12bias 50 --0.0650987327 0.0561347082 -0.0524960756 -0.0606846772 0.0553311668 0.084040381 0.0655064732 0.132255286 0.0351522863 -0.0857200176 0.0463379882 -0.138509437 -0.0309930108 0.0726053342 -0.0611225963 0.040046718 0.0333271585 -0.139368355 -0.115387805 0.035529051 -0.0753710642 -0.108340122 -0.165888399 0.0588014238 0.0744016021 0.0737710446 -0.166636527 0.139449701 -0.107233316 0.166755453 0.111904733 0.142458484 -0.10559769 0.17358239 0.0248687863 -0.00832488388 -0.00902078301 0.0966997072 0.16634649 0.0510999039 -0.11830131 -0.137912169 -0.0549811572 -0.140467361 0.0248682722 -0.0492456779 0.132404506 -0.136645094 -0.00630686618 -0.0665986538 -tensor_18bias 10 --0.0286833197 0.0315113701 -0.0158580411 0.0455938913 0.0954583585 -0.105117157 -0.0738683939 -0.119185224 -0.0686925054 0.0992293954 -tensor_14bias 50 -0.0160144269 0.0659304708 -0.134516135 -0.125120386 -0.126968578 0.011852067 0.111996368 -0.0366256982 -0.0781780854 -0.00189105608 -0.0748870224 0.11818061 -0.0440538898 -0.0143895745 0.0724157915 -0.0405166261 -0.0633764267 -0.0380873531 0.0670853108 0.0602433793 0.0957999155 -0.0834713951 -0.0430925563 -0.0397071578 -0.0210147407 -0.00462661684 0.0973884314 -0.153825372 -0.147843331 0.0121727288 0.180789441 0.0183312204 -0.130684286 -0.10787309 -0.049283646 -0.0444232263 -0.059754774 -0.0348485857 -0.159336865 0.0452591404 -0.0289487373 0.0982468277 -0.0173284747 -0.102011278 0.0217117742 0.0264923107 0.137902901 0.00199478748 -0.0913077295 0.0711041912 -tensor_4weight 2500 -0.0806722939 -0.0843013674 0.0457266495 0.0729286149 0.077442795 0.0629948378 -0.0479649454 0.18801184 -0.13502112 0.139153555 -0.0434509926 0.146485865 0.113608092 0.0014678 -0.00269440887 -0.0458229147 0.10888987 0.0207153317 0.182204321 -0.0768271685 -0.0549781434 -0.142413139 -0.0730496719 0.179332584 0.0324325487 -0.133773685 -0.0963223055 0.0017872625 0.0347659923 0.125833228 0.0962186158 0.0585900992 0.135499209 0.1581707 -0.0280255843 0.0394914299 -0.136502922 -0.0513676554 0.0243021026 0.13252829 0.0698634982 0.016627552 -0.0407816991 0.0601785071 0.0856543556 -0.0628332347 0.117408261 0.100019909 -0.0541871078 -0.0604645647 -0.10079059 0.129933119 0.0221414883 0.0313292705 0.0785076022 0.0515565313 0.103933319 0.130206779 0.0966112837 0.0613389611 -0.0399818122 0.185782522 0.0632089376 0.203524143 -0.00186416006 0.0119111966 0.175457805 0.0410548635 0.132948205 0.145901531 0.0722996518 -0.0906703025 -0.0737293884 0.114364773 -0.0281930994 0.0720122755 -0.008361076 0.090595901 -0.0460113436 0.0419355966 -0.0804221854 0.0608950555 0.170809716 -0.0476390161 0.169650152 0.0800267011 -0.148617357 0.0836865678 -0.0557933412 -0.0582912862 -0.0435073562 -0.0371096209 0.083936885 0.141528875 -0.0368124209 0.162937 0.182462409 0.0500146635 -0.0173346996 0.00344588282 0.077063866 0.0220477413 0.0806412846 -0.00341189606 -0.101663046 0.0451156981 0.0767283887 0.0141340708 0.039356716 -0.0705280155 0.0963134021 0.0619241111 0.0269989092 -0.021566296 0.145353943 0.0327979699 -0.0733648017 -0.0134483287 -0.0573412068 0.0651545897 0.0168404263 0.0243993159 0.0994131193 -0.0538567007 0.00572972698 0.0229900386 -0.0910499841 -0.105468161 0.0651908889 -0.0938435644 0.0910287648 0.0970317647 -0.0301273968 0.131670371 -0.035016574 0.0178018566 -0.117108293 -0.104218014 -0.0651276186 -0.0432341956 -0.0111995684 -0.113138527 -0.0553993024 -0.0827366859 0.111415848 0.0760915801 -0.036721129 0.0393031836 -0.107385024 0.0509901345 0.137131959 0.101355053 -0.0619110428 -0.0586683974 -0.125746161 0.146015525 0.0182545464 0.101042837 -0.103411034 0.141968071 -0.110162877 0.0819647014 0.151268393 0.181896359 -0.0776448846 0.158234477 -0.0236076955 0.162451014 -0.0745234191 -0.0891344696 -0.0165763423 0.0465359874 0.164738223 -0.0147009594 0.136529386 0.119493932 0.123288825 -0.0328544565 -0.0433127023 0.142125174 0.104210556 0.165809229 -0.0778093338 -0.0373858176 -0.0823435411 0.0155735873 0.103326514 -0.05250616 0.0069106333 -0.0459599681 0.0475454628 -0.0535901822 -0.0878656879 0.188019454 0.0936229303 0.00049323542 0.111949839 0.101932622 0.111278057 0.00650064787 -0.132303327 0.154887334 -0.0368106291 0.185657039 0.078358531 0.0830566436 0.000230199876 0.1663609 -0.110823177 0.0969691128 -0.0866530016 -0.0828108639 0.063297838 0.0365174599 -0.0799051747 -0.00249398779 0.0037801282 0.175893486 -0.0303821024 0.17547828 0.156276211 -0.0808659643 0.0175747536 0.0641925558 0.132568434 -0.107452586 0.0114268949 -0.0109794568 0.105085135 -0.00249309023 -0.105099067 0.11644727 0.00391933694 0.0913905948 0.195951268 0.0354405977 -0.00441903574 0.0888798311 0.0769788325 0.0180195644 0.0349424444 0.00618674606 0.0450688228 0.0371989682 -0.0904219598 0.0357578248 -0.0248050801 -0.019140562 -0.0610508397 0.0400745049 -0.0465866067 -0.0127383219 0.136099428 -0.00988076627 -0.0224271286 0.138736099 -0.0945356563 -0.0887529403 0.00517961383 0.000571548939 0.0760833472 -0.0421531834 0.107855894 -0.12219803 -0.120920695 0.010690853 0.00134639442 -0.0446796417 -0.0908018351 0.0188111514 -0.117398165 -0.00559857022 -0.112447143 0.0241752416 0.051861912 -0.0751599744 0.0727101266 -0.0994263515 -0.116854861 -0.109606072 0.0153764635 0.0296985656 -0.094453536 -0.123156615 -0.0982722938 0.102426931 -0.038060952 -0.127354875 -0.0424764156 -0.141495243 -0.136656374 -0.0849142522 0.100465015 0.0261948798 -0.0149132377 -0.095423542 0.0603073835 0.0239272416 0.0944917873 0.0912092179 0.0132168755 -0.0580653921 0.0647564083 0.0321055204 0.0641190782 0.107408777 -0.0238600653 0.0974822938 -0.151330233 0.0900393799 0.185146719 -0.0562634654 0.19377704 0.0260389671 0.149799034 0.0271496754 -0.0140250009 0.0241740346 0.0609554648 0.0848416314 0.0207572728 -0.107217379 -0.0550570227 0.0541072674 -0.0474922284 -0.0888904482 0.0580282025 0.0328076519 -0.00170895853 0.0491873212 0.090218015 -0.0734803379 -0.0112014636 0.150287092 0.16178152 -0.0178813841 -0.030176945 0.175077632 -0.0382624194 -0.0120699406 0.0354120433 0.0163750257 0.116489731 0.138228595 -0.00104773929 0.02116061 0.195760205 -0.113974452 0.204999462 -0.00420999806 0.0197493862 0.140327349 0.145642623 0.113952592 0.0668037087 0.144647643 0.138338432 0.0301354099 -0.092116423 0.141155869 -0.0757502168 -0.0111120678 -0.019430887 -0.132206604 0.0304258037 -0.00902231503 0.023573963 -0.11457108 -0.0038465804 0.0600269213 0.185435995 0.103536278 0.108110771 0.0262743887 0.092287004 0.0180775113 0.0580065064 0.109430514 0.167516813 -0.0948597863 0.147749871 -0.0377445519 -0.16559723 0.103454545 -0.0619672574 -0.0660705566 -0.0222212803 -0.00183966081 -0.0677803308 0.0315424129 -0.00417117588 -0.144289106 -0.0828239396 -0.146710843 0.0357453451 0.00331253489 0.0235776883 0.0018393771 -0.0240897052 0.112991959 -0.097301051 -0.0531368554 0.102575697 0.224759638 -0.100583948 0.027663447 0.0663552508 -0.0544704907 0.0913643613 0.10431046 0.14408429 0.160526797 -0.0272651091 0.128108725 0.137256622 0.0514451601 0.0290343836 0.0522942841 0.169628382 0.0517538302 0.039717 -0.112903044 -0.0319129899 0.142312348 0.16764465 0.00277794432 0.155595258 -0.016347399 0.0998492464 -0.0829867125 0.0122846849 0.10995502 0.176780567 0.166116044 -0.0651847348 0.0968866721 0.0796400309 0.156421289 0.00979311764 -0.111015052 0.100035012 0.198834509 0.104847461 0.0955422893 0.0701622218 -0.00574288098 0.0388571136 0.0862576142 0.0804817602 0.0700528994 0.0890722573 -0.00526280887 0.0880217403 0.108766705 0.0439562909 -0.136678606 -0.017526824 -0.101755708 0.150479943 -0.0441651195 0.0611818954 -0.010830123 -0.0615075193 -0.0789036453 -0.0960501432 -0.0448041894 -0.114129215 0.157983571 0.0660151616 0.00131378241 -0.0953527689 0.0812098756 0.0714970827 -0.0841728672 0.0815933347 -0.0241262466 -0.0606837049 0.117682979 0.170510948 0.0609742589 -0.0866294503 0.0337947756 0.0836874992 0.128505945 0.0142021542 0.00716301799 0.073032476 0.096828863 0.0873111039 -0.0305738319 -0.017774554 -0.0532108061 0.180189192 0.0185202211 -0.0507842451 0.100284688 0.0385177433 0.0343939774 -0.151525408 0.161509618 0.0328486934 0.0315718576 -0.0216901544 0.011822544 0.0440483205 0.123011395 -0.0785683393 -0.0336200632 -0.0159502272 0.148681283 -0.0669046566 0.185322538 -0.0733356997 0.0739779621 0.0648668483 -0.0301269554 0.156339601 0.0585463084 0.155070648 0.18907924 0.0647668913 0.013078318 0.167015359 -0.0203347579 0.134109989 0.110318691 0.0168762747 -0.00621265173 -0.0438371375 0.0293022711 0.0136860888 -0.0961167067 0.131283402 0.0199183244 0.0874097347 0.109514065 -0.0711590275 0.13801989 -0.100633904 -0.0766485333 -0.0797629207 0.120990887 0.117599219 -0.0800174996 -0.0554481633 -0.0130264247 -0.0096846018 0.0329468772 0.200460345 -0.0798182935 -0.127444193 0.137921482 0.056331329 0.0758561566 -0.0259927046 -0.00583240716 -0.151809007 -0.0616948605 0.0165051967 -0.106223613 -0.107458085 0.0237796139 -0.133243531 -0.0831126943 -0.012560742 0.0367795378 0.00879683718 -0.121515289 0.0290033501 -0.0651801005 -0.0471335575 -0.00044152551 0.112791196 -0.156063318 0.0274669975 -0.00387126207 -0.142437324 -0.133588076 -0.0751931593 -0.0703300163 -0.0889332145 0.0192210358 -0.0770214796 -0.142161205 0.118518829 0.015532054 0.00297008874 0.00619109394 -0.0286132246 0.067848444 0.0446564294 -0.12528789 0.0658862889 0.142027885 -0.0245133974 -0.0243382379 0.0615522414 -0.131100833 0.0117956251 -0.144661099 0.153534442 -0.00748422509 -0.101551078 0.140787482 0.120413505 0.133537158 0.109931737 -0.076232776 -0.0067446162 -0.105740324 -0.0634061843 0.0939473137 0.119690232 -0.0357088707 0.0102475164 0.150871128 -0.000345803623 0.111536011 0.0299190637 0.191872507 -0.00425557932 0.0131858671 0.0683450401 0.011605869 0.0222013909 0.0556304455 -0.0517201163 0.161248505 0.0784498453 0.171983451 0.119539365 -0.0555509515 0.0169317685 0.00468148896 0.0350351445 -0.143832296 -0.123316839 -0.060894113 -0.00792651903 0.165182695 0.13920185 -0.0278753694 -0.0646031126 0.0390878469 0.103839591 -0.0264649615 0.159046769 0.161050528 0.056850709 0.0216505565 -0.019877946 0.0416690223 0.0680721179 -0.00811236072 0.0474281274 0.139705688 0.128775299 -0.134791732 -0.0200266857 -0.0365998596 -0.0124936523 -0.0767151639 -0.042266313 -0.0712475628 -0.0536471978 0.133768514 -0.0192902926 0.105865858 0.113762073 0.104997188 0.0901620463 0.066951476 -0.0920727104 0.150316 -0.115942262 -0.0646094009 0.051550284 0.106786288 0.0553277843 -0.135059014 -0.0844271183 -0.093783997 0.14749904 -0.0715771541 0.104918532 0.169773012 0.030166015 0.0254033525 0.0341539346 -0.0932782665 0.0505385213 -0.13566044 0.172710717 0.181072846 0.0247942675 -0.0522602275 -0.0928869545 -0.0629897714 -0.00787132327 0.160656855 0.119059108 -0.0577676259 0.130579263 0.103787817 -0.0639968142 0.0237170234 -0.0796101764 0.0789383575 -0.11092788 0.0240584183 0.0880425051 0.0497003458 0.0207255501 0.0609250851 0.121555626 0.0423985943 0.169498548 -0.168339416 -0.0737465993 0.00344401528 -0.0481818803 -0.0240785405 0.138308004 -0.0498832725 -0.0870527998 0.135833338 0.0367706791 0.164695784 -0.0926531628 -0.0138947945 0.0515966341 -0.124790356 0.160091609 -0.08937978 0.0392833501 -0.0552154407 -0.0162713174 -0.0258723479 -0.065008454 0.0126740728 0.136108771 0.112826265 -0.0117993969 -0.0383974053 0.13958928 -0.111128941 0.0530200005 0.0264452137 -0.00290334155 -0.0446272232 0.061192058 0.175629675 0.0535970144 0.0791243389 -0.144284248 0.161759198 -0.0264586899 0.170231506 0.0360257179 0.0236983728 -0.0918620229 -0.124832675 -0.129897267 0.0827946812 0.167229131 -0.0483314805 0.0731398612 0.0330644958 -0.0419231206 0.147590339 0.120546743 0.0866150856 -0.0558574684 0.14509137 0.112841494 0.010361298 -0.0738257468 0.062864013 -0.000246174692 -0.0360012166 0.172877163 0.117604062 -0.143552348 0.169368088 0.0966829956 -0.0905596018 0.0989860147 0.143281475 0.0763563141 0.137610212 0.122151025 -0.139252588 0.0253664367 -0.0899616033 -0.0669621378 0.173054621 -0.0460386537 0.0831045434 0.136806592 0.134621754 -0.0229169969 -0.00980438758 0.0452408046 0.0591817014 0.186792865 0.00453559728 -0.0630519763 0.129970819 -0.0708865598 0.0169868432 -0.0856622308 -0.0697054416 -0.00249436265 -0.00108185853 -0.0849266797 -0.0896446258 0.205029503 0.0984538794 0.0920003206 0.0979186818 -0.0760004744 0.0389556028 0.154188663 0.0658197105 -0.0182720162 0.134750709 0.0715288147 0.17903395 -0.133035272 0.200460136 -0.0744331852 -0.0414500348 0.0895937532 0.0214252006 -0.022997003 -0.0672739893 -0.0548784323 0.166354895 -0.0431604087 0.165455922 -0.124262832 0.113898836 -0.166768521 0.00970084779 -0.0567515977 0.0607765876 0.183420077 -0.0727137551 -0.0270099547 0.0259942077 0.00637345994 0.0362093039 0.0634940416 0.22586067 0.0804543719 0.177989498 0.168853745 -0.0725347623 -0.130849689 0.14897649 0.0990756676 0.105376959 0.0459880121 0.037299931 0.0371170193 0.124092944 0.026399713 0.214453608 0.165884897 -0.0445454419 -0.02944877 -0.0441051386 0.0706486255 -0.0643619671 0.107751079 0.0106587159 -0.0160649233 0.0853765532 0.0439129174 -0.0179767329 -0.0966302827 0.153880417 0.158972874 -0.0232971646 0.109733656 0.118792728 0.111737549 -0.0411141589 0.128475308 0.177355379 0.088045463 0.0918510482 -0.0182551499 0.0349350236 0.0172623489 0.0358161516 -0.0553316772 0.00500165345 0.0473173968 0.0152386809 -0.102430955 -0.0801292434 0.142120838 0.182653144 0.0498294421 -0.0422114469 0.0125372913 0.165790632 -0.0877576023 0.0534564219 -0.0601202659 0.04567682 0.0359176025 -0.0493109711 -0.0345178694 0.0486616641 0.179762542 -0.0616127439 0.142689958 0.173905298 0.13620089 0.0958447605 0.0267633125 0.0164805949 0.0387719236 -0.0923323482 -0.025394721 -0.0696693659 -0.0885034949 0.155368611 0.175244749 0.108128108 0.0818990022 0.146583825 0.0607022159 -0.0263073556 0.0531130992 0.0492566414 0.0426749587 0.128532976 0.165642813 -0.0271078423 -0.0249717701 -0.00465310086 0.0946793407 -0.0277413856 -0.0293258466 0.153490797 0.0542890653 -0.121490426 0.0403553173 -0.0457720421 0.0372848473 0.1336312 -0.0569381975 0.0157627482 0.183314934 -0.098882556 0.0972879678 0.133379266 -0.13416934 0.127192289 0.114743538 -0.145406723 -0.141376868 -0.00748612825 -0.0768275931 -0.00242518331 0.0610179976 0.11269661 0.0289160293 0.134316772 0.0604642555 -0.0706829354 0.0602234714 -0.00745525956 0.128726706 0.02637784 0.0765895173 -0.100985415 -0.103230231 0.0484438539 -0.0102437539 0.117040537 0.130066067 0.0934641883 0.035608504 0.080431819 -0.0183086582 -0.0162367485 -0.000762896263 0.091369085 0.0433430262 -0.0226776432 0.0331841335 -0.0515708551 -0.138115823 0.111762553 -0.00960157253 0.0194415804 0.0162233952 0.0687385723 0.00495963311 0.124809526 -0.0617128015 0.128910005 0.124350287 0.123989262 -0.0698941946 -0.0828819647 -0.051271636 0.108241625 0.090747878 -0.0240470748 -0.0150890118 0.159876198 0.111201644 -0.107370481 0.041435346 0.0879196003 -0.073871471 -0.00736038294 -0.0421624519 -0.00446702167 0.0206496771 -0.0786093399 0.00770913251 0.0940739587 -0.143417105 0.161424622 -0.00790184364 -0.0727001727 0.042316515 0.141635984 0.168245554 -0.0294214915 -0.159648478 0.112373084 -0.0489424169 -0.0889650211 -0.136385739 0.0504631549 -0.0346960463 0.0639858767 0.0742279962 -0.0959718451 0.125432774 0.0435161628 0.0543604121 0.12924619 -0.067039676 0.0832744464 -0.119794376 -0.0892888829 0.144032732 -0.13564758 0.0327132791 0.0262426939 0.0729919598 -0.0233827997 0.0430958606 0.103070885 -0.00284027657 -0.131751791 0.0218737386 -0.0669141933 0.0876880065 0.108192131 -0.00722055649 0.0314042829 0.00201363396 0.0693058148 -0.127397463 -0.11308068 0.095366247 0.0613252074 0.0767963976 0.109912105 -0.023631271 9.64457431e-05 0.0741448328 0.0674567968 0.0592180379 0.120218024 -0.138540611 0.186128601 0.0634339973 -0.066954501 0.123150513 0.00262892642 -0.156880677 0.0271030273 0.0389812775 0.163095251 0.0937159061 -0.119892217 -0.10029912 -0.0113538243 0.117122836 0.0958641991 0.0554464087 -0.0611852631 0.0309960768 -0.148533225 0.0168368462 -0.0148341283 0.0199822951 0.0840069354 -0.0250799228 0.000891973905 -0.110700309 -0.0532766916 0.0793971419 0.0984170437 -0.128924787 0.0291142873 -0.0578225479 0.0482807197 0.0368235111 -0.0756311119 0.056336727 0.168977603 0.00820702594 -0.0148615483 0.128323391 -0.0488858111 0.0030394888 0.0392629169 0.0217599515 -0.109780788 -0.124327026 -0.110027082 0.079080537 -0.0519953929 -0.0504782349 -0.00701974798 0.17692171 0.143427163 0.152141586 0.0693683028 -0.115196042 -0.0221416522 0.169055194 -0.0540919825 0.182789385 0.139691234 -0.074015893 0.108184151 -0.0935382247 -0.0598740615 0.0335229784 0.150850862 0.111152209 0.163470238 -0.111606114 -0.0112746516 0.00895981397 0.146160573 0.137397975 0.165998906 -0.0262579694 0.0310111959 0.0895001888 0.0290670171 0.148392752 0.10005831 0.0260470044 0.0666432157 0.14119634 0.145734191 0.140673295 0.0609008037 0.0760866255 -0.0274666939 -0.0857639909 -0.093561694 -0.0673863441 0.06305594 0.183098152 0.0362807289 0.179879576 0.187368259 0.0867011249 -0.00191641552 0.179400802 -0.178453162 0.0443644077 0.14893277 0.130691677 0.190664202 -0.00733991154 0.0375372507 0.139617547 0.0207137242 -0.0660620481 0.121428333 -0.027175935 0.0812105387 -0.0167020112 0.0997308716 0.0814295784 0.00100216595 0.0675137788 -0.0446306355 0.11655578 0.0790163651 -0.0364569351 0.043299146 0.0583344959 0.0861434639 0.0999846533 -0.101646118 0.0544962138 0.111912884 0.13398391 0.192645401 -0.0995487198 0.172990069 0.0454724953 -0.0182189811 -0.0639074966 -0.117950983 0.158961445 0.0991295949 0.00396099035 -0.120374672 0.168334991 -0.0206933524 0.0865943655 0.161322176 0.0555427149 -0.0746511817 -0.0254731867 0.148725659 0.114840917 -0.0996649787 0.0825758129 0.00449527614 0.162873149 0.14936614 0.156525835 0.0766895339 0.118420944 -0.0548048988 -0.167635486 0.0756825805 0.059926942 0.0492656752 0.0400654711 0.0896347836 0.0765077025 -0.0438671187 -0.146087736 0.117657624 -0.0255973134 -0.11745102 0.0930163413 -0.0821457729 -0.115750015 0.0327894762 0.120342232 -0.0219539329 0.190586492 0.0217166767 0.0146391429 0.0616531707 0.103957534 -0.0640848204 -0.0858041495 0.0310945753 -0.102986038 -0.0559266806 -0.000226511125 -0.140096694 -0.119943008 -0.111525618 -0.022964308 0.0660581961 -0.140670016 -0.0539666936 0.0656664073 -0.146068677 -0.0434579179 0.0442539938 0.049366042 0.138595164 0.215925127 0.127935782 0.00820590742 -0.0747593045 -0.0842378289 -0.0417899489 0.0216342304 0.0241500065 0.10414844 0.0635119677 0.109194174 -0.0664032325 -0.0841010138 0.0333074108 0.144626364 0.0835791081 0.0405562595 0.144938678 0.113412 -0.0413297117 -0.0116025591 0.116451755 -0.122508198 0.115656048 0.118746422 0.149316311 -0.0756765008 0.162036806 0.136063144 0.0960770398 0.0914931074 0.00234524277 0.141637772 0.0776848495 -0.103856735 0.0223964415 0.0540647469 0.154280543 0.0363733396 0.020892188 0.0519513749 0.00717404438 -0.0717171952 0.0605637506 0.123303227 -0.0711054057 -0.0547375344 0.0238987729 0.122411825 0.0981374756 -0.0796336755 0.181836978 -0.0139241079 0.0208457373 0.0578660555 0.115007117 -0.152290791 -0.116023742 0.125889778 -0.0744427964 0.173140392 -0.0230522808 0.0991717577 0.0317968801 0.107756197 0.164815009 -0.0174482651 0.0639693215 0.0523474552 0.0462639593 0.140508741 0.0506025292 -0.0438129827 0.0276643373 0.0874049738 0.138593227 -0.0969195291 -0.0404044203 -0.0224188063 0.108781926 -0.100896388 -0.0309308395 0.125772789 0.028097406 0.0634060204 0.0683069155 0.145577833 0.183691531 -0.0496028848 -0.0776938125 -0.060156174 0.0218149051 0.197596177 -0.0154853165 -0.0412122235 0.0939057693 -0.118166968 0.1283319 0.111681804 -0.143919662 0.00493258471 0.126385331 0.151842475 0.186866894 0.0665669069 -0.0903968439 -0.0313272439 -0.0247976556 -0.169458717 0.054224968 0.0968870521 0.139724314 0.0172182582 0.103909202 0.155110002 -0.0126233418 0.190851286 -0.0473378785 -0.106794864 0.0950474441 0.108153269 -0.00215253839 0.0927259997 0.0746136159 0.000158840179 0.151475027 0.112737246 0.0532431304 0.054269813 0.129630253 0.164428711 0.0523424074 0.0814517438 0.016212143 -0.0117008882 0.0680367425 -0.0788285807 -0.0789092779 0.172827527 0.119092286 -0.0780554339 0.148280904 -0.0357619487 0.0404880531 0.139303714 0.152678803 -0.0313055441 -0.0433851704 0.145161822 -0.157154232 0.0209975056 -0.0296302848 0.020536093 0.0674732029 -0.0705216452 -0.0414924286 0.057908535 -0.169467628 -0.0330408588 0.182830229 0.0190448835 0.13370271 -0.0713856667 -0.0501033887 0.0232970063 -0.0963442922 0.0447021201 -0.118378267 0.189147756 -0.0498844683 0.0468240269 0.00958443806 -0.109032102 0.108971842 0.00711469864 0.00700109964 0.112477995 -0.138462275 -0.114364117 -0.0077861608 -0.143087372 -0.0425114706 -0.137003303 0.00309556606 -0.056427639 -0.084226869 -0.0175813958 -0.120090812 -0.131012186 -0.136584803 -0.0965648219 -0.0900525004 0.0984451473 -0.0295922905 -0.0215709601 -0.157052785 -0.0698363632 0.128503025 0.0812449306 0.044423122 -0.0622849166 0.126199692 -0.0240686592 0.110058717 0.0588081293 -0.0629438162 0.0803009868 0.0551974401 0.00367958308 0.0953964517 0.096288465 0.0765576512 0.134714395 0.128183305 -0.0794131979 0.158218175 0.0334427804 0.103117377 0.118011713 -0.0353304408 0.0812821984 -0.0924249962 -0.0943661332 0.0499824919 0.172689974 -0.015012878 -0.00591862109 0.000791038619 0.0393676013 -0.087351352 -0.00550199067 -0.131391063 0.110083923 -0.0942325443 0.151406854 -0.0441909246 0.125668615 0.113547325 0.104156129 0.0539508313 0.04261766 0.0514821857 0.120158657 0.157996073 0.00543851778 0.145554841 -0.141709834 -0.0490901694 -0.0447609834 -0.0976633877 -0.0905513093 0.142786831 0.134974882 -0.0384936519 0.124396443 0.0711318851 0.0364370346 0.0655808076 -0.111105889 0.177323133 0.0175751373 -0.00530883716 0.0142328804 0.0944742784 -0.104841068 0.0137548354 0.072433494 0.0684480369 -0.0881074294 -0.0346771851 0.153584346 0.127561867 -0.036190562 0.0799537897 -0.0859517306 -0.0208766013 0.0334149264 0.0264191292 0.0374499112 0.142725974 0.0713385791 -0.0855041817 -0.0318115205 -0.070518069 0.0307250991 0.13150534 -0.072534509 -0.104207613 0.189150855 0.0493283272 -0.0275112167 0.166747719 -0.0543703809 0.0186180789 0.165644959 -0.0484347753 -0.101127744 0.1094006 0.185057849 -0.0443641394 -0.144589871 -0.0198352784 0.0630682558 0.152583927 0.0218491945 -0.122168265 0.0245404653 0.0799318552 0.0951262489 0.122852422 0.0709591582 0.147164628 0.0161824599 0.137200028 0.0640827939 -0.0755483732 0.0245481338 -0.0100152371 0.14329806 0.0801420659 0.0691645741 -0.0589840487 0.130164921 0.00311033521 0.0268984325 -0.0349549092 -0.00415431196 0.0867199227 -0.0820708722 0.164873272 -0.169460997 -0.0531368032 0.152747095 -0.165769219 -0.0328237601 0.208387405 0.047868643 -0.137711033 0.091696687 0.0407248922 -0.0470836647 0.145012021 0.0215788931 -0.0970180035 -0.0877238438 0.172740042 -0.0380049273 0.0389408693 -0.0466512367 -0.0354783237 0.0639048144 -0.0723189265 0.104144089 0.0859282464 0.13152495 -0.128844962 -0.0214285571 0.115861677 -0.0982220173 -0.0596799552 -0.146179408 0.0965073407 -0.0615618378 0.0918795392 -0.0798900872 -0.160657704 -0.126372367 -0.0579259321 0.0660004467 0.161658168 0.104445606 -0.0138723087 -0.0934201628 -0.0294323321 0.00640385225 -0.0297847576 0.00359103805 0.0525301509 0.043393068 0.157615259 0.180452317 0.194085628 0.0691384748 0.134813935 -0.0515557639 -0.0708269849 0.088720344 0.0964737907 0.155151665 -0.0886232555 0.0660808086 -0.0829776451 0.101593263 -0.0553871915 -0.0182833746 -0.0800305083 0.178971991 -0.118555464 0.000954513147 0.00644796155 -0.0530595593 0.0575908013 -0.118949205 0.0164483711 -0.0739891082 0.157318026 0.0651573166 -0.0650493428 -0.0733206868 -0.0576313995 0.164077073 0.145226628 0.162652925 0.0094029773 -0.0282768738 0.00458042137 -0.0341153368 0.0680479035 0.00607198244 -0.00773193361 0.0329170078 0.0389629118 -0.162258938 0.0433447473 0.04725869 0.0874198675 0.0951827168 0.171968609 -0.0408742838 0.0696654394 0.0726477876 -0.0282292832 0.13275665 -0.0873321742 0.118847266 -0.094043538 -0.00783028547 -0.110805973 0.0371340886 0.011893562 0.0594444014 -0.0330062397 0.103927121 0.0566597134 0.070883058 -0.0379756056 -0.0413067117 -0.0425783545 0.111014336 0.0684353039 0.0692486465 0.0570905209 -0.0403401703 -0.0371561572 0.0209832303 0.115475081 -0.0771651715 0.1641756 0.132279456 0.11953865 0.152888119 0.161066189 0.0236473735 0.00623266771 0.0192979313 -0.0633362159 -0.0164176226 0.112811953 -0.0552775189 0.104069315 -0.00800814759 0.142864808 -0.119347326 0.0983854905 0.0419207662 0.172624946 0.13796024 0.14337796 -0.0860052034 0.0162426792 0.189442113 0.152368501 -0.0791355148 0.191711664 -0.045804102 0.0885845646 -0.0440708026 0.0840106755 0.142962575 -0.109646082 0.00183130568 -0.0525661372 -0.063833341 0.135730505 -0.0724909231 -0.0305184722 -0.0249376651 0.121382438 -0.053534802 0.0984386578 -0.0753171369 0.0939152837 0.00636771461 -0.049575828 -0.0524887219 0.168064952 -0.160115361 0.164482102 0.0465373471 -0.0193462316 0.189078987 -0.0162147954 0.0448620357 -0.0965996385 0.029319942 -0.00714721577 0.113784157 0.0306320339 -0.162794113 0.036773555 0.132374078 0.127610669 0.050170999 -0.0961276665 0.119895853 0.139223352 0.0842405856 -0.0275717005 -0.048406817 0.0329983979 0.0270373188 0.0343525745 0.143185422 0.126393601 0.117207043 -0.111426808 0.00335491286 -0.0176657494 0.00746619329 0.0158684719 0.0560003184 0.162911817 0.169606015 -0.00238073198 0.110828638 0.170307085 -0.00346783875 0.0999374315 0.120749101 -0.0795692578 0.00236885715 -0.0515253469 0.150424793 0.105336741 0.0109604793 0.173142359 -0.0780003294 0.0945810005 -0.00646437472 0.180615485 0.165021613 0.0602739379 0.187165871 0.0765264705 0.127765179 -0.176980063 0.0931098312 -0.0693295747 0.00362776732 0.0865088329 -0.122369155 -0.0243129283 0.0619697198 -0.104171418 -0.0679299384 -0.0262254607 0.0717521831 0.177063763 -0.0804491788 0.0471047014 0.130668938 0.0129235433 -0.044484172 -0.110434927 -0.0484529473 0.056467887 0.161255106 0.0596067756 0.00202068407 -0.0361751877 -0.0691773742 0.13520807 -0.076868318 -0.00055724883 0.136181444 -0.144539505 0.0790223703 0.104204692 0.113801822 0.079020001 -0.00296835252 -0.0561508648 -0.0639912412 -0.096902214 0.063912116 0.0158059336 0.0301315952 -0.0454157777 0.0474643707 -0.0998102129 0.0738191977 0.104636416 -0.0670538545 -0.0157381035 0.0721107796 0.145137876 -0.0157069545 0.0764504448 -0.102792904 -0.0242816862 -0.0148483599 0.164760023 0.0949771851 0.0556184649 0.159742668 0.11568401 0.133465067 0.0253654663 0.0464388952 0.1051047 0.168289691 0.0721212029 0.0222112965 0.177081063 0.0332994349 -0.0857983083 0.168268591 0.0322159566 0.109546766 -0.0359725878 0.173063204 0.114196211 -0.0472102724 -0.0865413472 0.0990323052 0.106711067 0.124012247 -0.0643537641 -0.0489022098 0.0309291538 -0.139787465 0.154954955 0.158387884 0.175662607 0.0474990308 -0.0359640867 0.0122991987 -0.0742847919 -0.116437078 -0.022644069 0.0906722546 0.109451734 -0.0519334488 0.0178270023 0.166177243 -0.0722740144 -0.00594325503 -0.054272633 -0.142582893 0.0954159126 -0.00533125736 0.0493725352 -0.146273687 -0.00553551223 0.126585066 0.078031756 0.0408783071 0.0403124169 0.0905888006 -0.0999932885 0.0324288867 -0.048167184 0.048798237 0.0910838693 0.010453077 0.0368673541 0.126243964 -0.10025917 0.119546175 0.0917273164 0.00480829086 0.150867537 0.0249657575 0.0332049243 0.0880667567 -0.0586667955 0.072079584 0.175612509 0.0517335869 0.0897404104 0.0503287949 -0.133238509 0.0293708127 -0.0389682427 -0.14583306 -0.0541342646 -0.145008922 -0.00405186322 -0.0991181433 0.149902388 -0.079027079 0.13217856 0.152729511 -0.00680424459 0.0569330305 0.0193487108 -0.0548162982 0.0513189137 -0.0619835034 0.0174638182 -0.102416456 -0.0279327556 -0.129593804 -0.0595460832 -0.0616615489 -0.0346394479 0.108837441 0.0645946115 -0.11461664 -0.00987040997 -0.0194515288 -0.0440613478 -0.0762307048 0.113881603 0.0807152838 -0.0215207562 0.053498432 0.00671930611 -0.0759949684 0.078516528 -0.129376277 0.00994156301 -0.0961488858 -0.0222117975 -0.0067448318 0.109046414 -0.118271597 -0.0475344136 -0.00401996076 -0.0324587896 0.111761943 0.0669000298 -0.140469015 0.0275926143 -0.0115185082 -0.111927435 -0.131411597 0.0218255222 0.0812726617 -0.0837273151 0.0129608214 0.133702025 0.0800562501 -0.0325852484 0.0149642564 -0.0189071596 -0.173735201 0.0099428352 -0.00971476547 0.0206104293 -0.113343984 -0.107871518 -0.120784573 -0.0340123661 0.12200997 0.0924243927 0.134061486 0.00931480248 0.00610988587 -0.143206105 -0.013435632 0.157742649 0.0986237824 0.031523902 -0.109645322 0.101808242 -0.0647404119 -0.0524963662 0.0176734496 0.0557880327 0.108553298 -0.0902532712 -0.130619377 0.0632717982 0.104041591 0.0543672703 0.0434634201 0.147163749 -0.114743508 0.158163086 0.147472963 -0.0521723554 -0.100233488 -0.0301290527 0.022240812 -0.0719027072 -0.0280963797 0.0667710602 0.00854949374 -0.0772623569 0.0685823038 0.00593935698 -0.0318717696 0.0944193527 0.0372171178 0.0526549183 -0.101998597 -0.0764912069 -0.118265085 -0.0155763114 0.0363124497 0.0786181912 0.0214089006 0.00902846642 -0.0222423617 -0.0114359492 0.016868338 0.0275472291 -0.0955874622 0.068063274 0.0991849825 -0.129994661 -0.14134939 -0.0891924128 0.0620854646 -0.147031859 -0.0616799332 0.125663459 0.0466651432 0.049083516 0.0202748105 -0.0635501817 -0.0811214596 0.0342678167 -0.144643277 -0.0279157292 -0.0520310104 -0.0900295675 -0.0991411358 -0.00983341318 0.0775161907 -0.108855121 0.0795320719 0.0555958673 0.047831919 0.116747767 -0.0178907923 0.00232720398 0.0800028816 0.0948506668 0.109556422 -0.139458165 -0.0708387718 -0.0218770187 0.0945127904 -0.15837191 -0.0499448627 -0.148272514 -0.0720821992 0.0286393929 0.166063771 -0.0910914093 0.0242926553 -0.12178494 0.0714246258 0.0331623964 0.141669735 -0.0356313661 -0.113949567 -0.107093729 0.0499410294 0.178825215 0.0778585151 -0.0158302784 -0.104186572 0.0341806933 0.00881133415 -0.0617542751 -0.136974439 0.141358063 0.00945444964 -0.0606168583 -0.0930097848 0.191351295 -0.0328337252 0.0643470958 -0.0714427084 -0.0224459022 -0.0216223132 0.0666145608 0.158240885 0.197071999 -0.00105335366 -0.130007252 -0.105515987 0.138388366 -0.137579709 0.114597313 0.189100042 0.142671525 0.17301853 -0.134293392 -0.0512899421 0.0793258399 -0.0749241337 -0.0476800092 -0.0704626963 0.188314125 -0.0592299625 0.13017118 -0.101025827 -0.0467180312 0.0082515683 -0.112287328 -0.062346559 0.013593995 0.0616131909 -0.078616567 0.0776763111 0.0905192047 0.0462051481 0.161554873 0.105334468 0.13517189 -0.115146726 -0.143860593 0.0162272323 -0.0732620955 -0.0247567333 0.0928674936 0.115267269 0.0816683248 0.00596335484 0.0484995171 0.0975567997 0.055007495 0.194354102 -0.00516810175 -0.00678860582 0.0775851458 -0.0466337353 0.106064767 0.143327415 0.0536226183 0.0567489788 0.205744937 0.0850102827 0.066885747 0.0559313521 -0.0664127171 0.157319784 0.0911302492 0.168385208 -0.0391344093 -0.0564815253 0.098924838 0.0328915305 -0.0534631759 0.0365355276 -0.0136043811 0.0482157357 -0.00801647455 -0.0435665064 -0.0428646132 -0.0644146577 -0.049035199 -0.0692589357 -0.113157302 0.0250543877 -0.0696002543 -0.0740747377 -0.0703184903 -0.00694498792 -0.0684268475 0.149164468 0.153054193 0.0761677772 -0.0204661116 0.12485972 -0.126783043 -0.00187381369 0.0541003644 0.107983328 -0.0837595835 -0.0870729461 0.0846110657 -0.0927637219 0.0266203284 -0.00495085446 0.0371818319 0.148703456 0.0929978117 0.00649248715 -0.052705083 -0.00607873127 -0.114258632 0.0115164211 -0.0972977728 0.084398061 -0.103781044 0.042367924 -0.0428892151 0.0323127471 -0.00776143046 -0.0386278778 -0.0703245178 -0.0709099472 0.16123727 -0.0346258432 0.0354511216 0.0405629389 -0.0782804564 -0.038936574 0.0836522579 0.176592737 -0.00653237011 0.0523184314 0.0602646545 0.0936931893 0.161708683 -0.0552729927 0.113780089 0.150944054 -0.0067132581 0.033431489 0.134626687 -0.0877803564 0.0627585277 0.159808293 0.0874273032 -0.0712592527 -0.0547058992 -0.0712833256 -0.0840452388 0.164110661 0.0463254526 0.11971055 -0.0787557662 0.0414926298 -0.0132505866 0.143975362 -0.0445205495 0.0115820579 0.0280404091 -0.0880124941 0.0269049294 -0.151099011 -0.0708077624 0.1746151 -0.0408792794 0.170332685 0.147516906 0.16913189 -0.0805162191 -0.0267642699 0.00414879527 -0.138525307 0.00516474945 -0.0246851854 -0.0470002219 -0.0861195773 -0.100950107 0.0405560918 -0.0432230942 0.104467802 -0.0955900922 -0.0287470184 0.13435027 -0.0651793703 -0.0681400895 -0.105308339 0.105755769 -0.130883738 0.0873673931 -0.0740140676 0.0563389994 0.0325795859 -0.0656638816 0.0513560064 -0.0224221945 -0.117003471 0.0642713904 0.0981090814 -0.0234821606 0.00352106593 0.0429167375 0.0492503606 -0.0412014462 0.00425704801 -0.0755375251 0.0191665534 0.001770781 -0.110787489 -0.1348757 0.106690206 0.00657321559 -0.0908673182 -0.108731627 0.0566626191 0.0443638451 -0.0703660399 0.0542139225 -0.00493319333 0.12348906 0.00126835168 -0.00761680584 -0.0449741632 0.128203496 0.0697475076 0.156427085 0.111904904 0.0363090038 -0.069633007 -0.124383852 0.0528828725 -0.0260626376 0.146264195 0.202404305 0.123526029 0.113593549 0.063587226 0.115930393 -0.0480900295 0.0870323107 -0.072083123 -0.0411015637 0.1239696 -0.0247502401 0.101827934 0.0338473544 0.179488152 0.0185336322 -0.0024687883 0.193507329 0.0594470054 -0.122646861 0.0367143154 0.0131789902 -0.00824107043 -0.0438461341 0.0322048962 -0.0372958966 -0.0309159439 0.112236492 0.104765609 0.0508270562 -0.134375989 0.0461360626 0.161751613 -0.0832926556 0.0294436906 -0.0715111569 0.151263878 -0.165225923 0.0409720019 -0.0970856622 0.118698254 -0.0376353674 0.0590884909 0.172025725 -0.0388665274 0.0342746116 0.0503573269 -0.0705714077 0.143638507 0.0997425094 -0.0244571362 0.126339301 0.00611212337 -0.0767538771 0.135550186 -0.0817491114 0.0428956598 0.053690739 0.0867463723 -0.120110415 0.092980817 0.0624419227 0.171113074 0.071235843 0.0251063108 0.147427261 0.0600847751 0.147185415 0.0782428235 -0.0334974229 0.140428677 -0.0508178994 -0.0417781211 0.096801661 -0.0446150079 -0.0366388001 -0.0328694917 0.111436456 0.0238367319 0.0866102576 -0.151921302 -0.0687064454 -0.104527332 -0.0853670314 -0.000437619659 0.126783535 0.0569791384 -0.0517665707 -tensor_10weight 2500 --0.0212358683 0.122197703 -0.0510823093 -0.0501324013 -0.0548678078 0.0356177911 -0.00784289744 0.133274257 -0.113695405 -0.0432012156 0.0939747244 0.0988038033 -0.0639443696 0.0290857553 0.0895108432 -0.0702648386 -0.103292271 -0.0736945942 -0.00376112177 0.0998725593 0.104668014 -0.105342008 0.0656864345 -0.114149243 -0.00571362674 0.0216598436 -0.0114243887 -0.0504532829 0.0596787184 0.00372021808 -0.15211682 -0.0136099635 -0.0886892602 0.0599286295 0.111488119 -0.0207552537 0.0917330608 -0.0306493416 -0.0526035354 -0.0849622265 0.101938419 0.0732950419 -0.117307015 0.0721970722 -0.0825721473 0.0530262738 -0.0570005514 -0.14794296 -0.125646636 0.0978401229 -0.0572635084 -0.0405422673 -0.0372838974 -0.0117440075 -0.0372347534 0.05405204 -0.125367016 0.00642648339 0.0431452096 -0.0818922222 0.0333031267 0.0492147207 -0.108358391 0.0211769491 0.0296456665 -0.115399122 0.0998793691 0.0635934472 -0.0597816631 0.135748908 -0.0291152298 -0.0674216747 -0.0520641059 0.13197051 0.112524465 0.0249439776 0.0945808142 -0.104327582 0.131835803 -0.0558281392 -0.104002684 -0.0600294694 -0.0934771448 0.035828352 -0.00607830286 -0.0175107867 -0.126915321 0.10744977 0.0146200657 0.080092743 0.0436066091 -0.0399526656 0.00826710463 0.0102796853 -0.138014555 0.088743791 -0.0517612249 -0.103482887 -0.0803165734 -0.126166776 0.0944030806 0.117681094 0.109592296 -0.0782303661 -0.012482455 0.0995225459 -0.139412105 -0.085186258 0.148377135 0.150794506 0.110053003 0.0709926337 -0.000629723771 0.013662681 0.0823172182 -0.0580728464 0.0406894386 0.0878868401 -0.0696239024 0.180716202 -0.0156155387 0.102927946 -0.0455422476 -0.0267394036 -0.105474576 -0.0847397819 0.0854922086 0.0470506549 0.0965595171 0.127182499 0.137551412 -0.0368003063 -0.0720289052 0.0134783518 0.0268878676 0.0088609159 -0.0511660501 -0.0823307037 0.0519733205 -0.0623468719 0.0798326582 0.0710632354 -0.00685403682 -0.137981296 0.011066772 -0.0105396928 -0.0804577619 0.111978434 0.125334308 0.00390364812 0.0692017078 -0.0330482721 0.163387418 0.0201984197 0.151748836 -0.0597846881 0.0121450806 0.0635938272 0.143580437 0.120768994 -0.0914445743 -0.0753249824 -0.0758429915 -0.0557716116 -0.0532640293 0.0682220832 -0.0222889148 -0.0228032283 -0.0421909615 0.0669203699 0.0878867134 -0.10618075 0.0309686121 0.0602300242 -0.115028903 -0.104819998 0.0828765184 -0.0887905657 -0.128947496 0.0665918365 -0.0184224322 -0.0902371928 0.0826666802 -0.118614137 -0.0974627435 -0.126331478 0.121555597 0.0860794485 0.087571308 0.10830126 0.000543156988 0.0120290369 0.119321413 -0.0516519211 0.0992750004 -0.0404139012 -6.53001553e-05 0.0228800084 0.142584652 -0.112774611 -0.0440897308 -0.101826452 -0.0968946069 0.110168286 0.0165652726 -0.0190366097 -0.00230144663 0.165133551 0.000178731847 0.163377017 0.0187537577 -0.111495733 0.00262274873 0.0370500833 0.111238286 0.0467720181 -0.112312317 0.0970760286 -0.00430823164 -0.18515943 0.0749811605 -0.108619854 -0.115121402 -0.127359807 -0.139737591 0.144899085 0.125063911 -0.0695543438 -0.0871604905 0.0424468778 -0.0412323475 -0.023522187 0.0755847916 -0.00548974751 0.12203481 -0.135008246 0.133453161 -0.0179671869 -0.0897851288 0.0138236731 0.0413174592 -0.130779296 0.0947689861 0.145704255 0.0761682168 0.0180642232 -0.0403898954 0.0231717415 0.0988920107 -0.116821639 -0.000677700795 -0.0828759521 -0.0409976803 -0.0652928352 -0.00307619479 -0.0512737185 0.0509406962 0.164199054 0.0935533643 0.0614940412 0.0182006471 -0.136083275 -0.036729455 -0.0655212551 -0.000610545278 0.110906526 -0.0215685032 0.0942183807 0.091754362 0.0299259573 -0.0927302539 -0.0870193392 0.041432485 -0.0959858447 -0.0521472655 0.133616418 0.00140008167 0.0231243372 -0.0265129507 -0.0465798788 -0.0466384888 -0.115962021 -0.0177416876 0.0583319366 0.0504825823 0.0262723565 -0.00666236039 0.0547430068 0.176730543 0.0766595826 0.0228095483 -0.0677164495 0.142202839 0.0357140489 0.152742878 -0.138932645 -0.0411144495 0.046292562 -0.0618947372 0.129624233 0.0132857962 0.13160333 0.0381516591 -0.00642365264 0.146867096 0.172721684 0.0822038203 -0.170568198 0.104037531 0.0469250493 0.149638385 0.0287801176 -0.105733179 -0.060603328 0.104576632 -0.109430753 -0.0709009469 0.142124668 -0.0206337441 -0.0685040206 -0.00922098476 0.154050708 0.140051425 0.0351035632 -0.049322959 -0.133136809 -0.00690346071 -0.126874417 0.0869078487 0.163344264 0.177283853 0.0478345305 0.0679927543 -0.122830227 -0.0329710469 -0.138243169 -0.12043523 0.018810302 0.0234610289 0.085149698 0.122265451 0.0947110653 0.0231434219 -0.135353088 0.102692701 0.0495786704 0.0994817838 -0.00882655289 0.0960466415 0.139429167 -0.00701248366 -0.0530242138 0.13024801 -0.0270533189 0.0678792298 -0.0942333192 -0.0897237882 -0.116916768 0.128777713 -0.0864267498 0.000630012888 0.0271891207 0.0435388982 0.0202370584 -0.101047307 0.0206810199 0.17194964 0.148503706 0.0857690498 0.161830828 -0.135013863 0.118901089 -0.0623394176 0.144353598 -0.0805446953 -0.166448697 0.0953875184 -0.0034776032 -0.108664006 0.0685736537 -0.0221380815 -0.0657745823 0.00281999423 -0.107203327 -0.0475207977 -0.055595018 -0.110597998 -0.000231489539 -0.0365380459 -0.0790299848 -0.0336767174 0.00941203535 0.0301933419 -0.0706446469 0.0102126878 -0.0478211716 0.0370407067 0.106186956 -0.0224919319 -0.0260020383 0.0235891566 0.0859296694 0.108447783 0.00656368863 -0.0794644728 -0.075159736 -0.110706359 -0.0758301392 0.0576422177 -0.0484935977 0.0564662404 0.137293592 -0.1790566 -0.00773862004 -0.0347555578 0.0215124693 0.0171802938 0.176061988 0.0460711457 0.147571266 -0.102327831 0.195341617 0.058222834 0.13799569 0.118976817 0.0509692021 0.050737951 -0.0811304599 -0.0793619528 0.0404427722 0.166059211 0.0436591581 0.0677934214 -0.103122085 -0.0477355011 0.0969016552 0.0731616244 -0.0612649173 0.0483927317 0.101544008 -0.0105341347 0.0251087993 -0.0852457061 0.0189878102 -0.00559592852 -0.0792983919 0.0710483044 0.0725165606 0.0998317599 -0.00865345914 0.0574824326 0.00925513823 -0.124299236 -0.152690053 -0.0688084438 0.0785533562 0.0506814644 0.147026345 -0.020497581 0.0864086375 0.0827063099 0.0358608812 -0.0127497688 -0.051433742 -0.03434515 -0.128280848 0.0762891397 0.0711952001 0.0842626169 0.116974868 -0.13133204 -0.0335378908 0.0899138004 -0.133157939 -0.120754138 0.122247837 0.144659519 0.0242889076 0.0994777754 0.06880299 -0.0276972707 -0.113097489 -0.0623187982 0.0822641999 -0.119825244 -0.103717044 -0.0319737606 -0.135691062 0.152195513 -0.0339170918 0.14714168 0.0540374666 0.133274242 -0.0455024727 0.113066867 -0.0257611051 0.0637556389 0.0359611772 -0.0776446313 0.0980009288 0.00792387128 -0.113957405 -0.0919724554 0.0144144921 0.138423935 0.073610343 -0.143383607 0.0898004025 -0.0354972184 0.0135471914 -0.157295063 0.0106644779 0.0293142907 -0.00285607576 -0.0387508944 0.0805381238 0.134696633 -0.0486889333 0.115237691 -0.114024878 0.0478819907 -0.204706341 0.0640740022 0.0566777252 0.127831176 -0.0553595684 0.111671567 -0.09231098 0.0911259577 0.000540402718 0.048529759 0.0324980132 0.131618008 -0.0291069895 -0.117312111 0.119812474 0.0617211722 -0.0828384385 0.103557624 -0.102094062 -0.165967241 -0.0316339955 0.114424214 0.0979985967 0.106774256 0.10326688 -0.0260687321 0.0577113181 0.107890628 -0.207330927 0.13924247 -0.108645178 -0.006839226 -0.0576377921 0.086189121 -0.0478715226 -0.0539538078 0.0736430809 -0.113258503 -0.144859955 0.00832846761 -0.139867589 -0.0278512705 0.111798391 -0.128404155 0.0160521064 0.0621018 -0.0222936384 0.067758739 0.151578188 0.0899427235 -0.0596541949 -0.107079484 0.0257597771 0.10411863 -0.0212498736 -0.106785044 0.102427348 -0.037666291 -0.0371498428 0.12570031 0.0917552412 -0.0337271765 -0.00665520132 0.0846108422 -0.137335181 -0.0166137852 0.0288310051 -0.0332151465 0.129317015 0.00994049478 0.0253879577 0.0699467286 0.0467383862 0.00212879106 0.0807025656 0.104752108 -0.0590361021 0.0355920568 0.144212484 -0.0798209384 -0.119622223 0.129336998 0.144326404 0.140256554 0.130315661 -0.0842513517 0.0202817731 0.0428673401 0.0224014688 0.0633894131 -0.121904492 0.0456793755 -0.119502507 0.0149252117 0.0991675705 -0.00801187102 0.0127708912 0.146813497 0.0297204014 0.0190939084 -0.0910222307 0.0188052319 0.158833399 0.0565757714 0.106845409 0.126346767 0.0609186888 -0.0289904978 0.176135257 -0.0152246384 0.00210902141 0.170274019 0.147236124 0.100339673 -0.0750769973 0.126910731 0.0639681518 -0.141766325 0.00575648621 0.00252318289 -0.132266387 0.0917782336 0.165258139 0.159399614 0.110745199 -0.0729087219 0.00930848531 0.129214033 -0.0562379323 0.114632159 0.0686002523 -0.0798907951 -0.0475209691 0.135875911 0.0170198567 -0.116139926 0.00134182745 0.119676389 0.00874867849 0.044542592 0.0469349325 -0.171083689 0.137197331 0.0741593838 -0.0608005896 0.142061442 -0.012179587 0.0524498336 -0.0355517454 -0.093820259 -0.116283298 0.0172735397 0.112878129 -0.133789182 -0.0446203165 -0.056491144 0.0793790519 -0.0914917514 0.102299117 -0.0138002355 0.162907958 -0.0748615712 0.138051897 0.106378302 0.00609137118 0.122283094 0.139686123 0.0373347066 0.0692594871 -0.09532848 -0.0478848442 0.125734076 0.0966270939 0.079935506 -0.010005963 -0.062777698 0.0338496156 0.0195008758 0.0981275588 0.161850542 -0.0924032331 0.132830709 -0.061219655 -0.112690888 0.121970147 -0.0678780898 -0.0108335214 -0.026362868 -0.0930077806 0.0707007274 0.0775098354 0.0764014944 -0.0804891065 -0.113752075 0.00710404944 0.0588292368 -0.0711446628 0.0660018176 -0.0639827028 -0.0958132148 -0.123145066 -0.0116624041 -0.0329767279 0.0308814552 -0.138171315 -0.126469448 -0.0836871266 -0.0362357274 0.0118497657 0.0553193018 0.162871584 0.00862812717 -0.0775254369 -0.0664473996 -0.0720814988 0.0207482781 -0.000734820962 -0.0429652929 0.156263977 0.0950327292 -0.131239817 0.0131368376 0.0467994325 0.152589336 0.0339371823 0.0687561333 0.0528964065 0.165966034 -0.124363385 -0.0527783372 -0.0752571672 0.0272485688 0.19544439 -0.0526332743 0.11452125 0.06950555 0.0388930514 -0.105637603 -0.031256184 -0.143096924 -0.131879777 0.0679151788 0.0898881108 0.138390079 0.0468003303 0.00110050617 -0.166997537 0.148328051 -0.135715783 -0.0184072368 -0.0204313342 0.0777179599 0.0854007453 0.0669743344 0.0512876213 0.0736838058 -0.0702814385 -0.153760359 -0.0127334371 0.0702296048 0.0945134461 0.113579147 -0.045809608 -0.0650008023 0.0018505156 0.0389033966 -0.0164941698 0.0427322201 0.0889017582 0.0659029856 -0.0811767578 0.0873623267 0.18029575 0.16048792 -0.112073712 -0.134867206 0.0882760212 -0.102488875 -0.175802514 -0.0120754875 0.0511610173 -0.0656074211 0.0839222074 0.132837757 0.19432193 0.0962905958 0.11927399 -0.0800096765 0.0369717441 0.172430858 -0.124649733 -0.0634947792 -0.0230292752 0.0867050886 -0.0332086422 0.0130523248 0.1857972 -0.0377311036 -0.0985669419 -0.0854148418 0.0455307364 0.11115057 0.0544578135 0.0360678583 0.0398137569 0.0514812209 0.0629364699 0.157751113 -0.0630130768 -0.0467872992 0.104552008 -0.00756954914 0.128961414 0.152928904 0.0538875759 -0.0584964044 0.000610977411 0.106909499 0.167631388 0.0725584775 0.129740968 -0.0139085511 0.102276772 0.0839342475 0.0176016726 0.0198159665 -0.0576478094 -0.0319991671 -0.0245914981 -0.0934077576 -0.00444747973 0.0162859596 -0.12718007 0.137412518 -0.0712438971 0.0353779495 -0.00824063458 -0.0929021388 -0.0211561657 0.00593618024 0.0581243485 -0.0115015805 -0.117641151 0.0745487809 0.100706778 0.101716518 0.180298716 0.0615622588 0.190501958 0.0251207165 0.161565259 0.0331474617 -0.049629122 -0.0350433774 -0.177207738 -0.0606218576 -0.135168135 -0.0552285984 0.0633888692 0.169191226 0.181376621 0.123600326 0.0766487271 0.0379917473 0.0728779733 -0.0138807297 -0.126598462 0.0824816525 -0.0179684516 0.0607266538 0.127557591 0.130710021 -0.0497126617 0.097561419 0.0444984324 0.00906473491 -0.107714869 0.109389283 0.0244740434 0.0977818221 0.0717759356 0.146900296 0.0456320979 0.0432121679 -0.068095766 0.035988193 0.16688697 0.015179188 0.193963483 0.030294802 0.0684091449 -0.154407337 -0.0253650546 -0.1035157 -0.123490132 0.160163686 0.000832796795 0.0173495747 -0.103751779 -0.114844963 -0.0266452879 -0.10764344 0.057379473 -0.0908356607 -0.0109218499 0.0242156517 0.107376277 0.00434230454 -0.0106718605 0.126825973 -0.076470606 0.082616128 0.043287307 -0.0409609638 -0.0944999158 -0.102769725 0.0562386662 0.110875674 0.075079143 -0.0874663591 -0.087224707 -0.0344407968 -0.032368429 0.139833078 0.138399825 0.0838530734 0.145841986 -0.0942437425 0.0514989197 0.106409363 -0.105719045 -0.0327457897 0.0023922089 0.0723857582 0.115679517 0.0641390905 -0.0389708839 0.064969562 -0.11146944 -0.0840641856 -0.00882211328 -0.0985186249 -0.0100410283 -0.140838861 -0.0820496976 0.126737103 0.0637906492 0.185262144 0.0401138254 -0.0199363641 0.0733033046 0.0741309002 -0.0405171663 0.119358622 0.131731167 0.00470401347 -0.0874768347 -0.0030607495 0.154426917 0.142330453 0.0776753575 -0.0136618298 -0.0559839308 0.117518134 0.0162534118 0.105629325 0.0662130266 -0.0396728814 -0.0103532532 -0.0827013478 -0.0040447861 0.113319181 -0.0071705617 0.118163139 0.0378845818 0.0424246453 0.148900077 0.0901416466 -0.131045297 -0.0908931792 -0.00706362771 0.118041806 0.0408021808 0.0160984807 0.114142895 -0.0700615197 -0.0494136475 -0.0595068522 -0.0467700139 -0.169063121 0.0931548029 -0.0394937769 0.0162656307 0.10976477 -0.124994159 0.152687699 -0.161219954 0.0349471532 0.00292883557 0.0899900794 0.0686580092 -0.0421929248 -0.0581205003 0.015063826 -0.0568712726 -0.147363365 0.0534492135 -0.0798066407 -0.0166791826 0.0615924746 0.0440535769 0.157255575 0.0443781093 0.107450068 0.0306068957 0.111347824 -0.0176973268 0.0151626179 0.083994858 0.00934020989 -0.0776927471 0.142738372 -0.0590152331 -0.0490930192 -0.103396282 -0.113522559 0.0619038753 0.14320752 -0.162481412 0.0684234798 0.019900918 0.0254009217 0.104212388 -0.034442611 -0.0442203879 -0.0163948387 0.00209845603 0.00254264474 -0.0859125108 0.0658564866 0.0487911701 -0.10677994 -0.0656869113 -0.0264248922 -0.102019623 -0.102298513 -0.138458312 -0.119788498 0.0699746907 -0.127610922 -0.029055886 0.0234031677 -0.0221559554 -0.0785237625 -0.079463318 -0.00504159927 -0.0934635699 -0.128190622 0.116880015 -0.131823063 -0.0321453363 0.00674818456 -0.118852653 0.0121976882 -0.124695078 -0.0894826874 -0.0633899048 0.0750036389 -0.0270189941 0.0216782093 0.119181171 0.139441684 -0.0148586482 0.0138805658 0.0725070536 0.0154770464 -0.126432493 -0.0768562183 -0.0687850416 -0.0399501249 -0.0991529524 -0.0160450432 -0.0420119353 -0.0326361097 -0.0698527694 0.0993200317 0.205627039 0.177876145 0.158109769 -0.0378685482 -0.0387157574 0.0318316072 0.0931719393 -0.0390335843 -0.093457289 0.158116326 -0.0205905512 -0.0280272551 0.0694964528 -0.00315693673 0.100278348 -0.115536571 0.134927243 -0.0945299864 0.157936096 0.0946900696 0.0962090343 -0.0132327564 0.0670364797 0.112801351 0.0329531841 0.00913766772 -0.157800838 -0.0242684614 -0.062707752 -0.111074157 -0.0969263613 -0.0453660265 0.0788833573 -0.10100504 0.0351482034 0.00255969632 0.146189213 0.176710948 0.076782994 0.0267225392 -0.0470640622 0.0270443261 -0.0651312843 -0.0475523248 -0.0839515477 -0.0822535306 0.0876104087 -0.0853568986 0.0850623995 -0.0537006631 -0.00454986189 -0.115827605 0.076602146 -0.0653875545 -0.0943169966 0.0738318786 0.00882227719 -0.105202228 0.0500551313 -0.0072239181 -0.095633924 0.0686312243 -0.155843362 -0.10586188 -0.0515479743 -0.0627304092 -0.119748496 -0.055315733 -0.034655381 -0.011634198 -0.0673866794 0.0497374982 -0.0230727922 -0.117348522 -0.0596318021 0.124789402 -0.100009322 0.125387162 0.125093237 0.11934261 0.122768745 0.00161180296 0.0404978395 -0.0511606485 0.025636822 0.0928659737 -0.119610175 -0.0845185071 -0.0342509151 0.0703661814 -0.0939814001 -0.0344806798 -0.0912777558 0.00682032388 0.121527597 0.152524486 -0.013022732 0.0765541866 -0.0932440087 -0.0872863084 -0.0211912636 0.0815037265 -0.0279196408 0.0894725025 0.0345439613 0.00409509987 -0.0809944794 -0.1393179 -0.125571921 -0.00232244516 -0.149977431 -0.000756583293 -0.107811421 -0.13097401 -0.124444515 -0.016273234 -0.0980345458 -0.0679222867 0.145107448 -0.0677620098 0.11634396 -0.150404945 0.0630804896 -0.0887408033 0.0502750538 0.0362975895 -0.000291265926 -0.00586632686 -0.0945255011 -0.106234178 0.140956268 -0.0830846429 -0.0355807208 -0.0545149483 0.0505581349 -0.00131463038 0.158409923 -0.0294266306 -0.075270161 -0.0552593507 0.110549971 0.0572320521 -0.139074802 0.079300411 -0.13124457 -0.0520654507 0.0863010138 -0.0589499101 0.0994329005 -0.0107528744 0.133117393 0.135243297 -0.100229755 -0.064115867 0.117229715 -0.0909496993 -0.123401761 -0.118980557 0.0693805069 -0.0614001341 -0.103551611 0.0602181554 -0.0570658669 0.0473706871 -0.115162462 -0.00543242693 0.0720573142 -0.0638199747 0.163928419 0.15177232 -0.107249737 -0.150279045 -0.124889374 -0.0754014552 -0.117463201 0.0907788277 -0.137130409 -0.112758525 -0.0632348582 -0.085662134 0.074386403 -0.0294712894 -0.103990085 -0.0954368263 -0.116161741 0.139300272 0.109896317 0.0364004523 0.0969301388 -0.0921835527 0.0688580126 0.0143036414 0.135515511 -0.10856048 -0.128424734 0.00355436816 -0.0129383691 -0.0427853577 0.0744294003 0.120447546 0.0322268978 0.111017898 -0.148288384 -0.0120353373 -0.0698817894 0.123846047 0.0900507122 0.0200054049 -0.135218769 -0.0566676334 0.000346322719 0.138647377 0.133002952 0.0393482894 0.0430621244 0.123535052 -0.0048245755 0.0777058303 0.105987355 0.0330017395 -0.0667480379 -0.0552513823 0.119488135 -0.0444536582 -0.0245578699 0.10310775 0.135614321 -0.088050127 0.101776421 -0.106960148 -0.0866610706 0.0718072876 -0.126476645 -0.124185599 -0.0585252866 0.0773498043 0.0260625742 -0.0329737999 0.050648436 -0.0158751626 -0.11962828 0.119259298 0.074308984 0.138976827 -0.159304231 0.0172711946 0.164110437 0.100464553 0.10389293 -0.0240411293 -0.0973320231 -0.119676456 0.00236897776 -0.0644378364 0.0506922454 0.19321233 -0.000225052238 -0.0394313931 0.110896409 0.132451773 -0.0674725696 0.106891982 0.0839640722 0.00246000011 -0.128063992 -0.106095694 -0.0592876561 0.0409409404 0.0347136185 5.23093404e-05 -0.0714386553 -0.00371642876 -0.109261386 0.150267109 -0.0990683362 0.151943013 -0.0184463765 -0.0465980396 0.0613862872 0.136278436 -0.00558510423 0.100865357 0.101894312 -0.0244332775 -0.125516385 0.0207252149 0.101681627 0.145940647 0.0516951084 0.16129303 0.178174466 0.067602627 -0.0624158829 -0.0751179382 -0.0219047312 0.052100759 -0.0685305297 0.0354404449 0.145376444 0.18646054 0.178686082 -0.0324649215 0.186916694 0.0407279916 -0.130371153 -0.0193989351 0.0320602059 0.0507811755 -0.113682859 0.0327428654 0.00616077147 -0.0258325189 -0.143563882 -0.00310279964 0.185173586 -0.0143889384 -0.0377997085 0.102268487 0.193651006 -0.0199139044 0.0859818161 -0.12391866 -0.0357881412 0.117275149 -0.00614784472 0.157511353 -0.0342883319 0.0726206675 -0.140140399 -0.0890319422 -0.084323287 0.181992263 0.0513049066 0.0995599255 -0.0906688347 -0.103707798 0.189509571 0.0850646123 -0.160007775 -0.125715733 0.119538076 -0.119608335 0.0870004892 -0.0289871693 0.0428543352 0.0881076306 -0.0739037469 0.150500178 -0.0330894664 0.120570533 0.0367856883 0.0812487155 0.0561989732 0.0205095038 -0.0794103295 -0.149736494 0.0125511289 -0.117065132 -0.107071228 0.0454606973 0.0796560869 -0.101957068 0.147831231 -0.0325231701 0.102706663 -0.00391825195 0.164131463 0.113403954 -0.121139087 -0.124457628 0.00445246696 0.040741276 0.135801449 -0.00480739959 -0.078587763 0.0388003998 0.0373185351 -0.0382047556 0.123435661 0.0475043617 0.00739693642 0.0977098569 -0.0144506181 0.113023907 -0.134157673 0.115016133 0.0749712959 0.122992218 0.128705814 -0.0182231162 -0.0774768889 -0.100664325 0.0511561967 -0.0247491226 -0.0616593063 0.0696846992 -0.131600708 0.128312185 -0.0420887545 -0.0552831143 -0.132332042 0.0888988078 -0.0928973109 -0.0273143947 -0.00685594976 -0.0243961141 -0.0604439601 0.127762869 0.0312011689 0.0182463527 0.0971829295 -0.0156243443 -0.110507861 0.118185356 -0.0953080505 0.0230623633 -0.139256597 -0.0295829773 -0.00552763045 0.02986148 -0.0686590225 -0.092862606 -0.1297746 0.0351940989 -0.0635817125 -0.00291644037 -0.0894218385 -0.081991896 -0.0788865611 0.123435721 -0.07980977 0.127385929 0.0179787278 0.14100419 0.0584084392 -0.11898651 0.0776305497 -0.0203871056 -0.0342618525 -0.0426038876 0.0608503371 -0.0924751759 -0.110731475 -0.0939228088 0.0693204403 -0.0963335559 -0.136444792 -0.0746414661 -0.0768960863 -0.0328734815 0.121303841 0.0307305455 -0.0240531266 0.0461567193 -0.112294145 0.0924198776 0.0726142228 -0.0232265219 0.0855844915 -0.0904331505 0.00588195026 -0.0725407153 0.0315330774 0.0881674886 -0.0255857036 0.00970177352 -0.0484308302 0.0706667453 -0.0180258229 -0.0909893364 0.0912033021 -0.0541153103 0.118749335 -0.000519677997 -0.12067198 5.66840172e-05 -0.0196669474 -0.0159197301 -0.120875023 0.0988306701 -0.0475680456 -0.0561974943 0.0541714281 -0.138189748 -0.00213968754 0.0791497976 0.0153562725 0.102331981 -0.02512566 0.0891461223 -0.100474566 0.0814501047 -0.0774096027 -0.0491994061 -0.0873599797 0.125588104 0.194157138 0.021343857 0.153216049 0.153526738 0.0659974441 0.00557104684 0.00512425043 0.0593339801 -0.0341665149 0.1543639 -0.000350087881 -0.0057605654 -0.0569509007 0.0794611201 -0.0399132632 -0.053406354 -0.021354856 0.0188272204 0.161608189 -0.124593504 0.0364353582 0.0259574521 0.0545204356 0.123703361 0.0302464068 -0.180545017 -0.0880055279 0.0392708704 -0.0457140729 0.0489293262 -0.0629897118 0.0735282153 0.0903664082 -0.0868221596 0.0773085654 0.0447509512 -0.0666414499 0.10026215 0.0482167453 -0.0492483862 0.194690838 0.0770569816 -0.142567039 0.114170618 0.0906017646 -0.0648275763 -0.0698935315 0.0423483737 0.0252110269 0.0347312838 -0.0560424551 0.155089974 -0.0118696066 -0.154669881 0.122313514 -0.0593367517 -0.133725002 0.0684605017 -0.086332038 0.15027903 0.00808770955 0.0856792182 -0.103054002 0.144213781 0.0522497185 -0.012934139 -0.090749681 -0.111638978 -0.15779312 -0.120520085 0.00437208824 -0.0104053728 -0.0667165816 -0.126051918 -0.0548642427 -0.104130857 0.0505597442 -0.103951007 0.0535476543 0.126115173 -0.126277968 -0.0643455386 -0.0392601117 0.0434453227 0.0778148323 -0.0601691268 -0.0518789664 -0.104312316 0.0010818392 0.0459540337 0.0427468866 -0.053878624 0.139487013 0.0611597076 0.0600839928 0.11611075 -0.0964443162 0.0624526814 -0.120941721 0.132605001 0.0708762035 0.163541496 -0.00394374598 -0.166257143 -0.203483716 0.0616421737 0.116930254 -0.0280899294 0.0485812873 -0.219869539 0.00187383592 0.0102478564 0.108631112 0.063601993 0.0346559994 0.0576099493 0.129458979 0.0913215131 -0.0457242802 -0.128046185 0.0772113949 0.0461370759 -0.16218935 0.031650953 -0.0577221215 0.060324017 0.11425402 -0.0406978093 0.181004599 0.122253641 -0.0693835318 0.092224963 -0.00997300446 0.137522176 -0.100897603 -0.0125190523 0.0379933044 -0.0922655612 0.0624521673 -0.0938842148 -0.0446401648 -0.0100792432 0.153803915 0.114698537 -0.0192592535 0.0803508684 -0.0886118263 -0.0518604182 0.0633063093 -0.051035013 -0.110467285 0.0447706729 -0.00330133177 -0.0912512243 0.00899628736 0.165726572 -0.0454223789 0.101888083 -0.0568365306 -0.076063931 -0.0752097219 -0.0845429227 0.0309042297 0.0270464383 0.124918491 -0.0662075952 -0.108895019 0.139057159 0.140161321 -0.119610265 0.136644498 -0.0796718523 0.00887751393 -0.178879619 0.0901582688 0.138248637 0.0798882693 -0.0129555576 0.0835241526 0.112452 -0.0344961025 0.00462428341 -0.129168198 0.120342299 0.0168783814 0.0924949348 0.104666054 0.0520613231 -0.048391819 0.0304543953 -0.0920903161 -0.0853118896 0.0968189314 0.144676819 0.0184051823 0.129063278 0.158408046 -0.0152725829 -0.0893933401 -0.0740989223 -0.0408062041 0.000471571781 -0.126842275 0.0290431026 -0.00776752643 0.0354271829 0.137151405 0.0211606342 -0.075007841 -0.0753669515 0.046267733 -0.09437415 -0.137393638 0.113058127 -0.026870288 -0.0587519705 0.117264286 0.00866104662 0.0201778039 -0.0951031819 0.018844882 0.0590609238 -0.0709743276 0.0873017982 -0.105027102 0.0530570112 -0.133408979 0.0909369588 0.0397072695 -0.126006484 -0.0984181166 0.130100951 -0.0197343528 -0.14802596 -0.0323335156 0.0627914593 -0.115520857 -0.0452975444 -0.012140125 0.0625165701 0.12852025 0.137867913 -0.0958045647 0.0133615816 0.107896157 -0.0483738929 0.129055023 0.116109088 -0.077403754 -0.0633423328 -0.0807941109 -0.0901692063 0.135869384 0.115573078 0.0980086252 -0.0180905815 0.00574294198 0.0841204077 -0.114126891 -0.0070350226 0.12609975 -0.12341994 -0.0101188719 -0.0531954169 0.0926595107 -0.14045234 0.0385289043 -0.00951111317 -0.130858913 -0.0142143071 0.106903538 -0.108551912 -0.125400409 0.058905676 -0.117564946 -0.0387115702 -0.120323576 -0.00476152822 -0.117001377 0.0521154404 0.0897176862 -0.143047303 0.0822072178 -0.0936514139 -0.0156190991 0.0703094602 -0.109612264 0.10537225 0.10860981 0.0861182958 0.0131997541 0.0654514953 0.0550837517 0.0355030596 -0.127356902 0.0202751011 0.0479572453 0.0155448811 0.0376787409 -0.104008965 0.138826773 0.128562942 -0.0176690705 0.00616015308 0.0954742804 0.168244436 0.0313249305 -0.00562194875 -0.013728708 0.0743624717 -0.0685951263 -0.118008956 -0.0987153798 -0.0248185005 -0.140825942 0.0919594541 0.0747423917 -0.0905265957 0.129529849 -0.124772102 0.0051275813 0.0195230469 -0.146458313 -0.0511724278 -0.00252351165 -0.0710987002 0.164692074 0.0975965038 0.0976115763 -0.017378984 -0.0336411037 -0.00410315767 -0.111187756 0.0705890879 -0.0971891209 -0.170355156 -0.0228465442 -0.000340677885 -0.111434825 0.0483258702 0.0879553556 0.0596619062 0.0922146142 -0.0954236314 0.100458361 0.0672615394 -0.0278729852 0.0144202998 -0.0134725468 0.00500577223 -0.0834878609 0.0704662427 0.0280400775 -0.173883215 0.0107927518 0.145330369 -0.00825130939 0.0123181194 0.104186043 -0.00961343665 -0.0443730503 -0.12468195 -0.127869114 0.0712913722 0.0349406078 -0.0263537578 0.140321776 0.0573507696 0.0558672026 0.0422607958 0.0942431912 -0.134432197 -0.0805723518 -0.117681436 -0.0301251039 0.0112107592 0.0862491578 0.0813791007 0.15245752 -0.104849078 -0.1278539 -0.0234719608 -0.0375554711 -0.00391989155 0.0951362252 -0.124997646 0.101080559 -0.0823273435 0.114658192 0.0812243894 0.0979240239 0.0534090586 0.11992234 -0.0614060089 0.0227183215 -0.0045140041 -0.0936731175 0.164146602 0.109354012 0.0560285486 0.0972351655 0.0452851653 -0.0221107267 -0.143891752 -0.0367256775 -0.100730948 -0.0520177893 -0.0628575087 -0.0123458058 -0.144427627 -0.0617014915 -0.0792786926 0.105423264 0.118998893 0.065015249 0.103419602 0.078821741 0.11923746 -0.0886161029 0.0995480195 -0.00256725191 0.00349370553 -0.0529144071 -0.0110389693 0.137066856 -0.177615538 -0.0219782609 0.0117756883 -0.0622062907 -0.031752415 0.109670192 0.00176507875 -0.128339231 0.0967004448 -0.0175355524 -0.0179675464 0.00220880075 0.0363271013 -0.0472624972 0.0390423499 0.0431711748 -0.0524883382 0.137009606 0.032474678 0.0447325036 -0.109758742 0.0141501743 -0.0879177302 0.0324584693 0.0984169021 0.0776358694 -0.0186196659 -0.0178685524 0.136504993 -0.130911753 0.120253332 -0.00131494773 0.0500290655 -0.0261307955 -0.115568407 0.0599436909 0.0793855786 0.013578156 0.164593741 -0.0608182624 0.00377323222 0.144239753 -0.0365212336 -0.00670494884 0.113193937 -0.0333096795 -0.0992462263 -0.0314201638 0.121462323 -0.00150117278 0.152934536 -0.0595164932 0.155643508 -0.0155162774 0.113217972 0.0924211666 -0.0591561124 0.119596377 -0.148361132 0.0774345249 -0.0543995574 -0.0430051051 -0.0587892085 -0.0859734565 0.0093586091 -0.139499143 0.144523352 0.0625231117 -0.0972201005 -0.0138707748 0.105667144 0.0957431421 -0.0930925608 0.114664152 0.0465527698 -0.0664715692 0.103240147 0.116609365 -0.192610845 -0.152009219 -0.0707181469 -0.123133332 -0.0165981147 -0.119874239 -0.123395704 -0.134802729 -0.0183263794 -0.00162356615 0.0698149651 -0.0478172004 -0.0292479843 0.0124004614 0.0228632949 0.125499591 -0.128627047 -0.0394836068 -0.0904139578 0.0717693791 -0.0241678786 -0.0282474756 0.066885747 0.172793537 -0.0325833708 0.134693041 -0.0837737098 0.028767975 0.149519458 0.10379082 0.134075478 -0.00300905108 -0.11726708 0.00835976377 0.0115354434 -0.104070403 0.0869700015 0.0493195616 0.173674643 0.136860088 0.133943602 0.0349466503 0.0715380386 0.0455492027 -0.0717399567 0.0613892823 -0.071349673 0.103834003 -0.0662872195 0.110759154 -0.0086634336 0.11282818 -0.0787081271 0.0525955185 -0.115244508 0.017306909 -0.148093 0.11725767 -0.0918413401 -0.0415653959 0.0675327182 -0.142755657 0.122713141 -0.0754952356 0.119042411 0.120726988 0.0600856133 0.0850800574 0.042756021 -0.0042983531 -0.0966215879 -0.109561965 -0.121179365 -0.121256597 -0.0916649252 -0.0139565729 0.0183815174 0.010678432 -0.0070024007 -0.10860841 0.123465493 -0.0865222588 0.0993401259 0.0353338942 -0.122070476 -0.00103206933 -0.147107095 -0.0779001042 -0.0985540375 -0.0854975283 0.0685408339 -0.065476723 -0.0296396669 0.0578391589 0.0765029639 -0.0989598259 -0.0449470505 0.0685051531 0.0537158102 0.0139079243 0.0583296567 -0.0594620258 0.0189818889 -0.0988531634 -0.00361028314 -0.0409312546 -0.0480199158 -0.0213640556 -0.126316875 -0.118182555 -0.134643987 0.0825530589 -0.0812493861 -0.00928412378 0.0827149451 0.014478147 -0.124131575 -0.13848491 0.113321409 -0.0203420967 0.132658973 -0.0168279931 0.0254632235 -0.0577695444 -0.125243694 0.0185761452 0.125729159 -0.0470989868 0.137575284 0.00578674674 0.125307932 0.13194342 -0.110660031 -0.108901128 0.0635878146 0.139960542 -0.0849670395 0.0299605131 -0.0711956099 0.0486410856 -0.0859787986 -0.125518829 -0.0788395777 0.113387808 0.172473475 0.125997916 -0.03521844 0.00814832374 -0.0874923393 0.0611119755 -0.103455245 0.0358751714 0.0404794477 0.116629399 0.107773907 0.0883051604 0.155530751 0.0984854996 -0.0652066395 0.0862129629 0.0566241778 0.0940288976 -0.0396610685 -0.054708723 -0.0403634794 0.0204263702 -0.0830316693 0.175091997 0.0323943421 -0.0326925032 0.187265396 -0.0557819456 -0.149056599 0.161892369 -0.0292865653 -0.106854089 0.113595374 -0.0478481576 0.0871280357 -0.0546426699 0.109687231 -0.0883881673 0.152132541 0.0782102272 -0.0743416622 -0.0343025103 0.151285902 0.0897574127 0.0613243282 -0.0363124833 -0.0416660458 0.0365589708 0.00544850901 0.0712229908 -0.0174090713 -0.00367248501 0.100005753 -0.0259860251 0.0931316465 0.0641765073 -0.0520533472 -0.000773876556 0.113493264 -0.0614270456 -0.126095414 0.11269249 0.00773805752 0.0544681847 -0.0156176239 0.0190839916 0.00293012918 0.0550577864 0.0748387203 -0.0842898712 -0.0772737563 -0.157246128 0.00333786267 0.0245691296 -0.0825911462 0.0128215477 -0.0858282223 -0.132421732 -0.0927961841 0.144973248 0.0598813556 -0.0359893106 0.000982378377 0.100832321 -0.106284365 0.0759487748 0.124556273 0.105182365 0.00680022268 0.0535307154 -0.0955361351 0.0177737772 0.0095509449 -0.00627064146 0.033846356 0.168117985 0.0789029747 0.152832642 0.175960913 -0.00220050896 -0.0862830505 0.106605045 -0.0212400369 -0.0751578733 -0.0164428316 0.0700538829 -0.0992731154 0.129148081 -0.0179386213 0.0452752709 -0.0637316629 0.0592104197 -0.128685504 -0.0348353833 -0.087284103 -0.0663602129 -0.0031849267 -0.000142063553 -0.0840348825 0.0924766883 0.0673238337 0.167500377 -0.0353617668 -0.0333009921 -0.112182476 -0.04946943 0.134662643 0.139826789 0.156980231 0.0751472116 0.0695004016 -0.0762207955 0.156616062 -0.0323073752 -0.074322626 -0.0840249732 -0.0568689369 0.0383799225 0.12136513 0.0337613001 0.150748312 -0.0912267268 -0.0950863957 -0.0684669167 0.0555381961 0.00922425464 0.0324769616 0.154439181 0.00271727936 0.0754308924 -0.137119815 -0.0415384322 0.107144803 -0.125383273 0.053086549 -0.0475495011 -0.00223423541 -0.128673628 -0.0492692962 -0.0113832206 -0.0116038918 0.104703367 0.0881138444 0.137240604 -0.005506441 0.00293928385 0.0956130475 -0.0784134567 -0.021586366 -0.0949726552 0.109912977 0.0546887219 0.0080575645 -0.0473530963 0.101009175 -0.0167142078 0.0431778133 -0.0919825733 0.0523650348 -0.136390731 -0.0124782622 -0.0131750405 0.0803216249 -0.135611996 -0.139048174 0.121725962 0.0170050114 -0.0948430598 0.126605704 -0.12657319 0.0280110091 0.0484481603 -0.101917908 -0.131059453 -0.00414025784 -0.000507161021 -0.0291253105 -0.0539416969 -0.132729664 0.118548885 -0.119953021 -0.106853649 -0.0724168047 0.0265188962 -0.0701930001 -0.0523263291 0.0513340086 0.0227334052 0.134236738 0.00679840147 -0.0369084999 0.101770133 -0.0643866509 -0.0391958281 -0.11996039 0.0662431717 -0.0149399638 0.113427982 0.00900928676 -0.115386441 -0.0923655182 -0.125217244 -0.105257966 -0.127515703 -0.115915604 -0.117300279 -0.0824699104 -0.00401163101 -0.0243823603 -0.071768783 -0.0823556334 -0.116988376 -0.0874817073 -0.0767723396 0.0525470376 0.0697348416 0.0105717331 0.116433874 0.119623169 -0.0428111032 0.0553773344 -0.107416034 0.131372139 -0.0292251855 -0.064143002 -0.129900947 0.0492804348 -0.0171842128 -0.13053751 0.0456082523 -tensor_6bias 50 --0.130149469 -0.0166715905 0.0930550545 -0.00245699566 -0.106651746 0.0490312241 -0.022929607 0.0280555151 -0.149067715 0.102508798 0.00938428845 0.150981218 -0.114400074 -0.0645027235 0.016750779 -0.0841871202 -0.140838712 0.0354661271 -0.127782494 -0.107863024 0.0691528246 0.0463019311 0.0961098671 0.0680775866 -0.118705533 -0.0768498629 0.17632094 0.119572431 -0.0184698328 0.0619278774 0.0973391309 0.0654409006 0.0840726122 -0.0982169956 -0.0271483194 0.119829573 -0.0558238514 0.0868603587 0.109693505 -0.126328036 0.169493452 -0.153565153 -0.0748259053 0.0136530614 0.00735191396 0.121958517 0.00247201324 -0.0304538812 -0.0688641742 0.0533529967 -tensor_18weight 500 -0.116833255 -0.0357756764 -0.131794453 -0.0954782292 -0.0199575797 -0.0554031618 0.0123106642 -0.0748193115 -0.138343185 -0.0409799181 0.00820469763 -0.000622143503 0.103817098 0.133209154 0.108685024 -0.0963258296 0.133699819 -0.0743798465 0.105768584 0.101406492 -0.0850842893 0.088313885 -0.0203158874 0.0809838101 -0.00829087198 -0.00621365244 0.12420509 -0.0834524781 0.013258785 0.000458776922 -0.0094107436 0.121913455 -0.112981685 -0.0701991916 -0.0657723844 -0.0241640378 -0.111835979 0.0305915046 0.0958627611 -0.123314679 -0.0531556047 0.0353454947 0.122457325 -0.188562363 -0.0805713162 -0.0883217677 0.137407482 -0.0494341888 0.08294186 0.02592903 -0.102925614 0.112401806 -0.0666541457 -0.0743903071 -0.124930732 0.00989535823 0.0654063374 0.0936208814 0.00587140396 -0.133220345 -0.0864736214 0.129711837 0.00181314978 -0.11009489 -0.142534941 0.112804607 0.0828809589 -0.0675114542 -0.0837594494 0.080936946 -0.0578315705 0.000171717635 -0.115505785 -0.00781203434 -0.044651553 0.0236158818 -0.0261275116 0.0364638008 -0.0263226833 -0.0818104967 -0.0312857106 0.0161777474 0.0981943533 -0.142439932 0.130368665 -0.148819655 -0.0904635265 0.142308936 -0.0558655635 0.119650826 -0.123948567 0.071270369 -0.0475144461 0.0499968566 0.0238669682 0.0490803383 0.102086172 0.0440850668 -0.0912592411 -0.124338679 0.0205278974 0.0803509951 -0.127337903 0.0695622861 -0.0565674454 -0.0611764155 0.0246511605 0.147374704 0.0300100464 0.031006122 0.0128832478 -0.134186521 -0.0788531825 0.0231011659 -0.077104196 -0.0899467021 0.083257556 -0.016190676 -0.15599066 0.0265589394 0.0970405489 -0.101687469 -0.119606331 0.101642758 0.0926929563 0.0224016327 -0.118740149 -0.0145147676 -0.121801361 0.0961833745 -0.0375055596 -0.0604088642 -0.0904211402 0.0308890697 0.0637984425 0.0605207421 0.0125890784 0.101664178 0.0726759285 0.0591030382 0.0281341467 0.0179437492 -0.0888262913 0.0439237058 0.0959485695 0.0477892607 0.138858929 -0.0815726668 0.0842805654 -0.0488859788 -0.127596661 0.0637440085 0.0945658982 0.0379101187 -0.133902624 -0.0156785361 0.153879091 -0.0837965533 0.112203002 -0.109656185 -0.121323109 -0.0539827608 0.157840356 -0.140984669 0.103252746 0.0117826462 0.0724756718 0.1193185 -0.0168017652 0.105931647 -0.15716891 -0.0412402935 -0.182400733 0.175201252 0.0334252864 0.190847382 -0.0840521902 -0.102074817 0.0166378226 -0.070603177 -0.0926473141 0.0471240357 -0.0813179836 -0.0973169506 0.049886927 0.107353233 0.0245145429 -0.122061022 0.0877110511 0.0779518932 0.181554541 0.00650133053 0.148282856 -0.167027533 0.0817222595 0.166063353 -0.071306996 0.0401937515 -0.0894560814 0.0982646197 -0.0373243652 0.0289797336 0.0392166823 -0.108381942 0.119098619 -0.0920399055 -0.0729553699 -0.124035373 -0.0866058767 0.132396668 -0.0131686293 -0.0622706711 -0.115531176 -0.129241109 0.0608382747 0.0826291889 -0.0870855898 -0.153687358 -0.00150228257 0.114700183 0.093991451 0.0560563877 -0.0242470428 -0.03554409 -0.0501358062 0.0884645432 -0.0462445691 -0.160832793 -0.0499034822 -0.0424251109 0.15960142 0.00967518892 0.013454861 0.0476650223 -0.0562032312 0.00298618712 0.113581337 0.0738084391 -0.137588665 -0.0807389989 -0.0702914745 0.0433799401 0.130559713 -0.0844176263 0.00879538152 0.0190126356 0.0929833725 -0.0415338278 -0.0416321158 0.0581194386 -0.106194869 0.0854856074 0.105097309 0.0867655277 -0.110841736 0.13861914 0.00394579815 0.0424983464 -0.0553477593 0.0576893315 -0.0487310477 0.00869362801 -0.0946266651 0.0686361194 0.0094735641 -0.0982639343 0.0865717679 -0.0489508957 -0.0480820388 0.0424392 -0.0348532163 -0.145720199 -0.0116074253 -0.0465409979 0.0409410596 0.137870952 -0.141640723 0.133919835 0.0830813125 -0.0514443479 -0.144528806 -0.0606587119 0.0772298053 -0.000756907742 0.0295330584 -0.00934765488 0.0772825181 -0.120189674 -0.0941238254 0.110182583 0.020738909 -0.110578194 -0.170464888 0.135421559 0.0186651032 0.157670006 0.106957033 0.113828443 0.032888636 -0.085790351 -0.102287576 0.10205999 0.11301367 -0.0841406286 0.0869232267 -0.0806331262 -0.0432705954 0.0882454589 -0.127744198 0.0729970783 0.183013499 0.0513928235 -0.160567686 -0.0651886687 0.0733133778 -0.140486658 -0.119877644 -0.0233747195 0.0682742521 -0.0181181505 0.0523737594 -0.114034481 0.178959042 -0.108594783 0.0531802028 0.00544614438 0.122413933 -0.107881032 0.00437956769 -0.0753047615 -0.0751520917 -0.0669195428 0.140085652 0.107123025 0.0215605646 -0.0108890682 0.126112461 -0.0248530898 0.0944449008 -0.11579188 0.0103179337 -0.147988439 0.0894878879 0.155256197 0.0593105108 0.0212335344 0.108353369 -0.0329438969 0.0173103362 0.113536589 -0.0336008444 -0.0386447273 0.0362053365 0.0621379763 -0.0990284234 0.00793749839 -0.0617283881 -0.0743452683 0.179765821 0.114959568 0.136922091 -0.1003832 -0.0692859069 -0.139016584 -0.13847138 -0.0461068004 -0.0357935503 -0.175952822 -0.0971040502 0.0884984359 0.00901553407 0.173032984 -0.0787108466 0.0656532124 -0.0365875959 -0.0772555619 -0.101957574 0.177314684 -0.184264794 0.0541368276 -0.173181415 -0.0607216991 0.0584572963 -0.0959036872 -0.0192932636 -0.140759885 -0.0871745721 -0.0414703935 -0.128599197 -0.134148136 -0.0330905467 0.086126022 0.0754621923 -0.0512827821 0.0647300407 -0.0423469543 0.103672571 -0.10058222 -0.0269276202 -0.0845367238 0.148252413 -0.127518728 0.120773628 0.0699376613 0.085009709 -0.0772422925 -0.00348520023 -0.0487586632 0.0242007188 0.0718890578 0.0988076255 0.0587318242 0.0960896015 -0.0790796131 0.0568241999 0.0869796574 0.0882544219 -0.0609133728 -0.193863526 0.117342524 -0.0339369737 0.115510337 0.0176041406 -0.134604976 -0.0149109662 -0.0460692905 -0.0518316031 0.154208392 0.116131343 0.000284732843 -0.124516778 0.00545642432 -0.1934973 -0.0553306863 -0.114772283 0.0383958407 0.065391317 0.0921707079 -0.112537354 0.138822451 -0.112784393 0.163916111 0.141898572 0.144761667 0.043000266 -0.0156551208 -0.13070862 0.0155345816 0.0829150677 -0.15498811 0.0502647795 0.106921747 -0.0415367335 -0.101776689 -0.0270393789 -0.150163233 -0.00523975072 0.12342082 0.030272549 -0.0634030774 0.110089242 0.078154169 0.141164288 -0.0665735304 -0.133532166 0.0205077082 -0.0799736828 0.00293931179 -0.0775768757 -0.0609018579 0.104522519 -0.0473734476 -0.000608845323 -0.139011964 -0.12750718 -0.113618098 0.0852759406 0.0522349291 -0.011812062 0.103033014 0.116778359 -0.0851101875 0.0191278923 -0.138369411 -0.0144041777 -0.033769384 0.0952177495 -0.184691101 0.00995114446 0.0508734547 -0.161694378 0.103683837 0.106144048 -0.0914210454 -0.10774231 -0.0468717627 0.0950566381 0.0999391824 -0.0267179832 0.114936009 -tensor_0weight 5000 --0.0508145355 0.0380447619 0.063999176 0.0305916369 0.0178726781 -0.0604492612 -0.0922251716 -0.0409420505 -0.0806181803 0.0253173634 0.0200065672 0.025728466 -0.0917467773 0.103931934 -0.0364619642 0.0943374634 -0.00828016549 -0.00586269284 0.0867723376 -0.0814111456 0.031561438 0.0127995471 -0.0175799523 -0.0762633756 0.0174684227 0.0652227029 -0.0757252946 -0.0175922979 -0.0378516056 -0.0103352945 0.0671745986 -0.00129433826 -0.0402865373 -0.0769478306 0.136584729 -0.00320164161 0.0293592662 -0.131899893 -0.00832800474 -0.0754740536 0.0523712561 0.00832175463 0.0861478224 -0.0513150692 0.0660690591 0.0819229931 -0.061753273 -0.0584596395 0.00335742347 0.0352997482 -0.0355550982 -0.0571611226 0.0267518349 0.00366023136 0.0501797497 -0.0911384001 -0.0553132854 -0.0707477331 -0.00369775807 0.0324063897 0.0952493548 -0.0701338053 0.0869731754 -0.0122592403 0.0579397976 0.0811071023 0.0882389173 0.00392138492 -0.0282733813 -0.07796707 -0.0237656292 -0.0976018459 -0.0302748028 0.0959793851 -0.0774898157 0.0740917549 0.0638118461 0.078004308 0.0606729016 0.00807148404 0.122843958 -0.0412954316 -0.00570290349 0.048803661 -0.0925534815 0.0642913654 0.0318598822 0.0163798314 -0.128930375 0.10925965 0.0126452744 0.0192803536 0.0565545857 0.0464702807 0.0887314975 -0.0057315547 -0.0403685123 -0.0338817462 -0.048792094 -0.0464581065 -0.0035396677 0.00428326242 -0.00884059165 -0.0119973514 -0.000725717517 -0.00789349712 0.118645795 -0.0580181517 0.0644906759 0.00912526064 0.0169304255 0.124032162 -0.0209737495 -0.0644200072 -0.00610294472 0.0443046205 -0.111826301 0.0924093947 -0.0439966656 -0.0174338557 0.0350687169 0.00473201321 0.0256146453 -0.0102232145 -0.0740443543 -0.0562746376 -0.014960424 0.0814036652 -0.116801761 0.100059807 -0.103689127 -0.0274910927 0.0271945633 0.0108639002 0.10785304 0.119615123 -0.0933286771 -0.104783587 0.0308681801 -0.0486758314 -0.109117366 0.0569621176 -0.0481622331 0.00365207065 -0.0219343584 -0.0347234085 -0.0862182751 -0.0892119408 -0.0102323415 0.0456935875 -0.0520036221 0.0333958827 0.0777817443 -0.0517655611 -0.0659507513 -0.0258211251 0.0767518133 -0.0409224853 0.0908992663 -0.02203927 -0.0486994721 -0.0106065404 0.0736719891 -0.0421070047 0.00211916398 -0.0954323933 -0.0184283517 0.0945980549 0.0464413576 0.0754498392 -0.037175186 -0.00513185887 -0.0806778446 0.0640999004 0.0459177829 -0.0637460798 0.050385993 0.0309800953 -0.0565693192 0.123325884 -0.0541292951 -0.103475936 0.131021678 0.0500673242 -0.0464580432 -0.0415508747 0.0403000787 0.0370427035 -0.0539194234 0.115986116 -0.0146925198 0.0295080431 -0.0609974898 -0.0897742435 0.0866930038 -0.0403378308 0.0923621878 0.0294523649 0.0361655056 0.0205234103 0.120485581 0.0700325444 -0.0497162156 0.043903362 -0.123235166 -0.00563619565 0.0633756816 0.0214825068 0.0333110169 0.0104085831 0.00706916722 0.0849041864 0.110511042 -0.0315861739 0.0258784778 0.0600173473 0.0242875703 -0.0698327497 -0.0123227434 0.014183823 0.0316864885 -0.0905837119 -0.166954413 0.0110382149 -0.0523379371 0.113217518 -0.0749479383 -0.0839105621 0.06123152 -0.123284116 0.00467087328 -0.0400639065 -0.139171645 -0.0208464172 0.0824865252 0.0881877691 -0.0531907566 -0.0462760702 -0.110711597 0.0575970635 0.0207520071 -0.0592331663 -0.0876662433 0.0696238056 0.0587312393 -0.146805972 0.0465192385 -0.0876265243 0.0274952278 -0.0920811594 0.0425466485 0.138803825 -0.11095649 0.035078045 0.0631543472 0.0814108402 -0.0799154192 0.0390180871 0.076176472 0.0414959937 -0.0411920138 -0.013391098 0.00715481211 0.0667355806 0.0209038183 -0.0535365157 -0.000487437297 0.0508622043 -0.0740626454 -0.0971553922 -0.00739993062 -0.11939621 0.0771144629 -0.0776564106 0.00858938135 -0.103641413 0.0731763914 0.00908944476 0.0822138935 -0.0353183523 0.0420758463 0.00362044154 0.107373729 0.0974787101 -0.101556815 0.0841847733 0.0912442133 0.125659168 0.0618594773 -0.0642373115 0.0193936136 0.101438187 0.0530573241 0.0676667318 -0.00218354817 0.0432167687 -0.0871621072 -0.0426511392 0.0288807489 0.0790897906 0.0490392447 -0.0205203649 -0.0993364006 0.00468417443 -0.0547306687 -0.0277090929 0.00697927317 -0.0244437791 -0.0157332867 0.106168769 -0.0359168798 0.114783011 0.152111039 -0.0253520384 -0.0015796381 0.03375398 -0.104952097 0.0092763612 0.101215295 -0.00308345142 -0.0368209258 -0.0473044775 -0.00817228947 -0.109114319 0.105732635 0.0244474001 -0.0146100083 0.0529635809 -0.00840851665 -0.0632252246 -0.0520593971 -0.00865435693 0.0344991274 -5.80968299e-06 -0.133371904 -0.151106805 0.0796020627 -0.00727936905 0.0341606252 -0.0332682915 -0.121840335 -0.152285203 -0.0688880011 0.0230131447 0.000283442176 0.0609894954 -0.004379577 0.0477737971 0.044799611 -0.132041544 -0.0921159685 0.0794112161 0.0724173859 0.0694021881 -0.0325237289 -0.0596630126 -0.128212959 0.0867897272 0.0183652658 0.067165792 -0.0221667513 -0.0792030767 0.00673970953 0.0961405337 0.11915601 0.0162419658 -0.0958381593 -0.0221719481 0.066273272 0.0103854984 0.0839003772 -0.0880922079 0.0691054389 -0.0436538383 -0.0678017363 -0.0862348899 -0.0580505431 0.0340274572 -0.0189464837 -0.0844241232 0.077873528 0.07532157 0.0911468565 0.130459666 0.0642754659 0.103514485 -0.0523621738 0.0538226627 -0.00427193614 -0.0198291782 0.0464040674 -0.0794681758 -0.0358173288 -0.0710784718 0.108343065 -0.0409613326 0.0320960544 0.053875234 0.00935616158 0.0279227011 -0.0595730767 -0.0895934626 -0.054435689 0.0687097013 -0.0623276383 -0.0781896859 0.0710855275 -0.0379823111 0.0614629425 0.107129268 -0.0969881415 -0.116216652 0.104508013 -0.0730313659 -0.0942338929 -0.124592021 -0.0121723814 0.0757561401 0.00725453952 0.027494695 -0.0790883899 -0.0104121519 -0.0122909518 0.0885993391 0.00961995777 -0.0863305554 0.0516466871 -0.00846583862 -0.137650937 0.053744074 0.0191885531 0.099622637 0.119871758 -0.0234789476 -0.0225552637 -0.0628033355 -0.061706692 0.00870011281 0.0219527185 -0.113005184 0.0864791349 -0.0586110726 -0.0858683884 0.0617091358 -0.0387163647 0.0250992496 0.0188102666 -0.0987309664 0.0387692712 -0.0278170835 -0.0702976808 -0.036741849 0.0252645276 0.0743944049 0.0373597182 -0.0650147647 0.0886150151 -0.0378745385 -0.0721595287 -0.100263052 -0.024431048 -0.00138329086 -0.0156793948 -0.108034611 0.00560034066 -0.10105747 0.10377124 0.108116172 0.106484957 -0.00357731106 -0.103540003 -0.162499279 -0.0292240772 -0.13454926 -0.0578239672 0.0473558456 -0.0877546594 0.0137864761 -0.0327536836 0.0507721342 0.0252886489 -0.092969656 -0.046330668 0.0129592251 0.0318424702 -0.0836347714 -0.00133580307 0.0577662215 -0.130686596 0.0925165117 -0.0197680425 -0.0594279207 0.081254214 -0.025833251 0.106694445 0.103731573 0.0476561114 -0.0746863931 0.0867198333 0.0718293041 -0.0795527846 0.0907836407 -0.0875569582 -0.0436345451 0.0336315818 -0.101902887 -0.112922281 0.0268265437 -0.0239662174 0.108922079 0.041044455 -0.00692772307 0.0482088998 -0.0167634431 -0.0713200569 -0.131396279 -0.0818632841 -0.0646765679 -0.00467614038 -0.068184495 0.0581986308 -0.12290591 0.0856338814 0.0330237187 -0.113662779 0.0768672228 -0.0640005991 -0.102779485 -0.0699071512 0.109070554 0.0373121388 0.00894282851 0.0210740287 0.102456108 0.00209105411 0.0643166751 -0.0748509914 0.0103702946 -0.00475171115 -0.124941736 0.0627204254 -0.110363327 -0.0701798648 -0.00204091449 -0.0581695102 0.0710774362 0.0637491271 -0.112383977 -0.0604935288 -0.0444468074 -0.0884831399 0.0787647441 -0.057589367 -0.0092884656 -0.0068281414 -6.82513783e-05 -0.00189695833 0.0291572809 0.0887888893 -0.10821224 -0.0333320834 -0.0257991888 0.0457047522 0.0474029109 -0.0698928088 -0.12633343 -0.0517579019 0.0700997636 -0.0429069959 0.0199789684 -0.0108658681 0.00652803527 -0.00151343702 -0.0620038249 0.0806239918 -0.0406728946 -0.0541682765 -0.0738097504 0.106633566 0.118564427 -0.0846382231 -0.0406942107 -0.0214116126 0.021105893 0.00434125355 -0.0575985499 -0.0204750691 -0.0223995987 -0.108478487 -0.0789667591 0.00276051858 -0.0364289954 0.0240258034 -0.00772039779 0.0677978322 0.0111008026 -0.0301737618 0.129933059 -0.0297325328 -0.121423602 -0.00256420486 -0.0767344758 -0.0345042236 0.0232742243 0.0518034101 0.0377323851 -0.0785427988 0.0944864005 0.0168189276 -0.0450433195 -0.0179200061 -0.0332794897 -0.115497865 -0.079460144 0.0748219565 -0.0902453661 0.0155278947 -0.0175510496 0.095810093 0.081910409 0.0412754081 -0.124072641 -0.0311682243 0.0492392965 -0.0202937964 0.0275281016 0.0286301002 -0.0205335319 -0.06336198 -0.00144966797 -0.0174041037 -0.116765253 -0.0784229636 0.0826164782 0.0420248657 -0.0860279575 0.0445345417 0.0170288365 -0.0538485646 0.0218434893 -0.126645058 -0.081135571 0.100772187 0.074350059 0.0520832427 0.0342816785 -0.0332369693 -0.00494507421 0.0594232231 0.0195060819 -0.0653662607 -0.0566894747 -0.049552016 0.0946275666 0.0667197555 -0.0115687326 0.0609822571 -0.0733285174 -0.00757924188 0.128872409 0.131651253 0.0883550048 0.00453105802 -0.0755265802 -0.0273298975 -0.075551331 0.0423105136 0.103586905 -0.103956595 -0.0853345916 -0.0501238741 0.0979430974 0.0415611826 0.0830030888 0.026601227 0.0730280057 -0.0635615513 0.0683744699 -0.0418914109 -0.0580942109 -0.0672050193 0.000723240606 0.0774079859 -0.0247790851 0.0417027809 0.0464081317 0.0150757832 0.0990941375 -0.0262722876 0.0383368991 -0.0866433606 0.053820353 0.000491182785 0.0509168692 0.0996452123 -0.109139279 -0.0142310057 -0.0809439868 0.0759590417 -0.0160349142 0.0490121357 -0.096037291 0.0512820296 0.0240419395 -0.0778301433 -0.00461494876 0.0313165486 -0.0526363626 0.0636812896 0.0332127437 -0.0893361941 0.107702576 0.0753764287 -0.134968281 0.154497638 -0.0106210969 0.0807469338 0.0397658274 -0.0412878655 0.0725319758 -0.075096637 0.0352239423 -0.156253964 -0.105903931 0.00186598103 0.0204177406 0.0137510747 -0.0850754306 -0.0996614769 -0.137975514 -0.0964332893 -0.0970748141 0.0658250079 -0.0284603387 -0.0586091056 0.0547327399 -0.0909201056 0.0791378096 -0.135371462 0.0970040932 -0.0691698939 -0.0478290841 -0.091066964 -0.10993892 -0.0587934963 -0.149786964 -0.0152382096 0.104548037 -0.0258558169 -0.144351274 -0.0335272104 0.0226420816 -0.0596394576 -0.0499719083 -0.0401447415 -0.137909144 -0.0354104489 0.0517158546 -0.00912801269 0.100223139 0.0372407772 -0.0557585917 -0.00336286239 0.0683526322 -0.040298298 -0.00263097975 -0.0374882258 0.0522439405 0.0507735275 0.105002061 0.0763192996 -0.0607291535 0.0252055712 -0.00846379343 -0.0764852315 -0.0580886006 0.0643623322 0.0687340647 -0.0175391026 0.0497902818 -0.0412122346 -0.0626358017 -0.0582311451 0.126354679 -0.118473426 0.151346073 0.0104045104 0.0904658511 0.0403522402 0.00989431608 -0.104035281 0.0668206066 -0.0350457989 0.0594084747 -0.0234730225 0.0567279682 0.0705103427 0.0155637255 0.00617892295 0.0591375902 0.10290321 0.0125923716 0.0783741623 0.00137256691 -0.00307283737 0.0503848121 0.10381522 0.118850879 0.128660917 -0.053519316 0.0977203473 -0.0134721575 -0.0389264151 -0.00525255827 0.0452782214 -0.0551993214 -0.10694126 0.027695125 0.0864779726 0.0454558991 -0.0506804623 -0.0287189651 -0.0546144284 0.120786496 -0.0527668484 -0.0774859414 0.102176331 0.0673900619 -0.0448943712 -0.0719371215 -0.0406077392 0.0517419763 -0.133232012 -0.0570902154 -0.09013246 -0.0748804808 0.00943455193 0.0882416517 0.000705939427 0.0691983029 -0.0305666197 0.0502307639 0.0774589181 0.0290872231 -0.103126198 0.0543247163 0.0888695046 -0.0432999581 -0.0238669831 -0.0651162937 0.0898748636 -0.0334561318 -0.0923917145 0.00535089429 0.0831253678 -0.032534346 -0.103109762 0.0489915684 -0.0154016791 -0.0483072698 0.0992657989 -0.0456443615 0.0638154149 -0.00041857746 0.0412595235 -0.0256175622 -0.0011343424 0.0302553996 -0.0492172204 0.0441855341 0.0358452648 -0.125684917 0.0641204044 -0.101313218 0.0406814888 0.0231520366 0.00894289184 -0.0159130525 -0.0403623842 -0.0126857739 -0.0646654069 -0.0864315107 0.0479207449 0.0227875356 0.0891341716 0.0144964764 -0.128592268 -0.0647967756 -0.0491824746 -0.122899771 0.0843127072 -0.0399818346 -0.0702486336 0.0469990969 0.0585947372 0.00991726387 -0.0545531549 0.121398546 0.0267390348 -0.0134512298 0.0329682231 -0.0672333017 -0.0224784035 -0.00435023708 -0.0271258652 0.0712630972 -0.0160659477 0.0995363668 -0.0256949402 0.103435107 0.109910071 0.00650324021 -0.0404900536 -0.0908767134 0.0118982857 0.00520248339 0.0329482853 0.0144852586 -0.0797013938 -0.0785156786 -0.114622436 -0.0149816191 -0.0634922013 -0.0747183189 0.0377447829 0.00633793836 0.0832202658 -0.0870476142 -0.0112469308 0.051385209 0.00177763787 0.0805689245 0.0667984635 0.119763464 -0.0189604852 -0.0689202473 -0.00829955377 0.0841114894 -0.0166632887 -0.101568498 0.0870780572 0.0787321255 -0.101076506 -0.0728867874 -0.0815497339 0.059538722 0.0476107113 -0.0611895993 -0.055862911 -0.00502554746 0.0184646137 -0.0100589432 -0.141160175 0.0608552545 0.0207750183 -0.0828769058 -0.0782217011 -0.0249421597 0.0649304986 -0.0759224221 0.0226793531 0.0345480256 0.101637982 -0.0291147213 -0.020399509 -0.0961149037 0.0607593879 -0.0901033953 -0.00980376825 0.0093408674 0.0903950557 -0.0326510593 -0.0616331063 -0.0332476608 -0.0641225353 -0.0496507026 -0.058669664 0.117607869 -0.0409576073 -0.00356686814 -0.105142437 0.0766613707 0.0395114012 0.0188095663 0.0634850636 -0.0737257972 -0.0844153538 0.118897498 -0.000630921393 0.080352664 0.00662139896 -0.0893025771 -0.0714181289 0.081619963 0.0111359404 -0.0571513996 0.0548180155 -0.0636223927 0.125711203 0.0851431414 0.130260974 0.0836031362 0.061986275 -0.028846303 -0.0287329499 0.0502533987 0.115427487 -0.0506522879 0.127979293 0.119268231 0.0850080177 0.0331578441 0.0409094281 0.0090124933 -0.0136618558 0.0948067382 -0.0672471449 0.0505564883 0.032799989 0.0633241385 -0.0469509736 0.0506216548 -0.0372176617 0.0645158365 0.149505928 0.0132820019 0.0121845976 0.0295179803 0.0295598768 -0.130403206 0.0423673615 0.0379888043 -0.0185889266 0.0913859308 0.0504159145 -0.0536566004 -0.064247027 0.0357843451 -0.00891068671 0.0950773582 -0.116974562 -0.0360760242 -0.121625684 0.103534453 -0.110155627 -0.109326176 -0.0306907389 -0.124297231 0.0215684474 -0.107538059 0.115768477 -0.0631534979 -0.107648998 0.00836135633 -0.0481221005 -0.0226832405 0.00744933914 0.0239705071 0.00856848713 -0.0518919192 -0.0672201142 -0.0423557498 0.0152753228 0.0322034582 -0.0436891429 -0.0355248898 -0.0221560691 -0.0227099117 -0.0872905031 -0.074751161 -0.0961238891 0.0214987211 -0.0765815899 -0.101568431 0.0197522994 0.0158146303 0.0358287059 -0.0310186576 0.054503344 -0.0471081249 0.00175969047 0.0102003291 0.0548275784 0.0608831719 0.00927542709 -0.00995576289 -0.00546212913 0.124199063 -0.0787529647 -0.107979171 0.0664112717 0.00175410474 0.0996535346 0.053399168 0.0650362223 0.0413330421 0.059269011 0.00307723135 0.0968322679 0.0304244794 0.0847681016 -0.0587718002 -0.0920936614 0.0963051766 0.0730310529 0.075302057 -0.101675689 0.00834253523 0.0334894434 8.25827228e-05 -0.0221394673 0.0630398169 0.0403992832 -0.0181807 0.0733471513 -0.0055750059 0.0181060694 0.1169772 -0.00306291087 -0.0245710369 0.0374747738 0.0355481431 0.127457261 0.06669911 0.033534728 -0.0313876085 0.058364775 -0.0700249672 -0.0348450616 -0.0787659734 -0.111428857 -0.0750032812 0.1067295 0.0295113139 0.022092022 0.0761882439 -0.0214715526 -0.0454636477 0.0440265127 -0.0405539833 0.0178954173 -0.0918944478 0.0349099524 0.0980099589 -0.0593721792 -0.0717693344 0.0749724507 -0.10822311 -0.10527648 -0.0456449613 0.00945392437 -0.113418877 -0.0248292517 -0.151771814 -0.0317451209 0.00303221145 0.0136932479 0.0757390037 -0.0645068213 0.110142581 0.0331983566 -0.0726855695 0.0410212204 -0.0837602541 0.00736812409 -0.0960764661 0.0659725666 -0.0506423712 0.108358607 0.0074415463 -0.0579753295 0.0222589932 0.0219781511 -0.0769435242 -0.00365759665 0.0658315271 -0.0195193309 0.0876873434 0.0829789042 0.030799007 0.0445269085 -0.087823227 0.0490200967 0.0495685935 -0.0617967919 -0.00453193625 0.103787176 -0.0256911721 -0.0746461451 0.149433792 -0.00853996538 0.0359981731 -0.0535804741 0.10725081 0.0878978521 0.0258817542 -0.0147519195 -0.0875247493 0.0177521463 0.0229451209 0.0438379906 -0.0674143359 0.0837276876 0.0518606342 0.0602514297 -0.0148247061 0.0175807085 0.0104981009 0.0398374051 0.016279107 0.0897895545 -0.010169927 0.12621972 -0.152914077 0.102994591 -0.00934717152 -0.0707922205 0.088611111 0.106939681 0.112134047 -0.0540277697 -0.054023616 -0.0951209962 0.0558281131 -0.0773286074 -0.000430493499 -0.0220108796 0.00224742503 -0.042162884 0.0229496341 0.000386319705 0.0903915763 -0.0727334097 -0.0566792227 -0.0469854027 0.0666792765 -0.0901913494 0.0639531165 0.00190761709 0.0819069371 0.0437930077 0.073981382 -0.10753461 -0.0635947138 0.0397201367 0.0639339834 0.0106142825 -0.095933184 -0.0258502234 -0.151261196 0.0201133601 0.0523358956 -0.113775507 -0.0635734051 -0.0209280569 0.0180092286 -0.0952379927 -0.0805232748 0.0792436674 0.116160475 0.0405516624 -0.0603361167 0.0921702161 0.0638613254 -0.0797907561 0.0562291071 -0.0404303297 0.0192060955 0.0931882188 -0.0454974994 -0.063482672 0.0183900204 -0.0941224843 0.0311275516 -0.0276973266 -0.0228528716 -0.0103476569 -0.0013043856 0.0595675893 -0.0146932686 -0.0967626795 0.0205185581 0.0111512868 0.0304273423 0.0346512347 0.0115508316 -0.0314554684 0.0335132703 -0.0399859101 0.0783086121 0.0110251317 0.050887987 0.0386743098 -0.018033972 0.0640587211 0.0695254728 -0.0274955798 0.0315612257 -0.0987086147 0.0660334751 0.108960167 0.0362012573 -0.0556706525 0.0763316229 0.0343016721 0.0549547151 0.0566200167 -0.00617094245 0.104899995 -0.0148995249 -0.0461326651 -0.052078858 0.113826625 0.042423591 0.0696527734 0.0174295567 0.0255777556 0.0323791206 -0.085186027 -0.0352433883 0.0130573669 0.116727203 0.0527772866 0.0953754038 0.0984134078 0.0301339664 0.0283296034 0.0112838252 -0.041340284 -0.106966309 0.0208709706 0.0510318168 0.0411410108 0.0704910904 0.113985598 0.0480646491 -0.101439185 -0.101105615 -0.0554792546 -0.0963118672 -0.0833592713 0.0804136619 -0.0818424746 -0.0130467992 0.0993848965 -0.0514523238 0.0999550074 0.102077879 0.00966593996 -0.00935996324 -0.0179428924 -0.0360591672 0.00619822368 0.0243546553 -0.142853007 -0.0114681982 -0.0543433689 0.0218674429 -0.0637027845 0.0662505031 0.11204917 -0.0893480182 0.0857268497 0.103264339 0.0781002343 -0.0893782303 -0.0274790041 0.0431495346 0.0856630653 -0.12378367 -0.0509530865 -0.0479679741 -0.0808392987 0.0511769354 -0.00993785262 -0.0495909974 0.00732931681 0.107190818 0.0212429408 0.0919175819 -0.0032403795 -0.0621873438 -0.0842421055 0.0878323093 0.00147393253 0.0229070615 -0.0386694148 -0.0345502682 -0.0645541772 0.14156653 -0.0889476463 0.0902122259 -0.0681383684 -0.0405545346 -0.0987435952 0.0225519631 -0.118827663 -0.111183643 -0.0112256492 -0.00946287438 0.0775573999 0.0200256836 -0.0373974107 -0.078532733 -0.108547017 0.0992447287 0.0162392482 -0.0711892024 0.0401137359 0.0209429767 0.000362629071 0.0647842437 -0.0358259976 0.00750721199 0.0491359942 0.0709332824 -0.105451792 -0.0134563902 0.0872533396 -0.0307084043 -0.118991949 0.0960125998 0.0121480636 -0.036713779 0.0374878086 0.0718258396 -0.0660520568 -0.00429979758 -0.055313319 0.018989075 0.0844045654 0.0639191419 0.0425145887 -0.0436811857 0.0248131063 0.0507366285 0.00984115712 0.0211421121 -0.0417334475 0.127702236 -0.142305464 0.038462583 -0.100248791 -0.0598390587 0.0798201784 0.0749086887 -0.0129145803 0.0493668057 0.0832006335 -0.00326930895 0.0621138252 0.116234139 -0.0619600303 -0.0258555952 0.00560154766 -0.00271366001 -0.0680233538 0.039063748 -0.114920385 -0.0542362481 0.0695442334 0.0281284824 0.0585357882 0.125471935 0.0688281953 0.0719351396 -0.0179130882 -0.0254238006 -0.00948760845 -0.0995621756 -0.0127528915 0.0291331895 -0.0169871729 -0.00137848861 0.126049355 0.0243894756 -0.00514754048 -0.0438758358 -0.070057936 0.00142127706 0.0820695385 -0.0231800079 -0.0708072856 -0.0734865814 -0.114026025 -0.0061859726 -0.0585030317 0.0943298936 -0.0582126155 0.064423196 0.0419933088 0.0116295256 0.0170936771 0.0498891808 0.0110197524 0.0411308594 -0.0257459451 0.0114618847 0.0878219977 -0.0317848064 0.0811458603 0.01887214 0.00988883246 -0.0506531522 0.0625907555 0.0145452367 -0.112982243 0.0802996382 -0.0328567512 0.0700641721 -0.00277703465 -0.0246732663 -0.0414474681 -0.0930275917 0.0817583874 -0.0246985424 -0.0693705902 0.0860790238 0.0245301407 0.028220322 0.0357720293 0.0410393327 0.0705156475 -0.063267082 0.050686691 -0.0218410157 0.0550663397 0.0759022906 -0.0350831598 -0.0160008334 -0.115162447 -0.0647135377 0.0396890379 -0.0345642604 0.0103187198 -0.0589025803 0.0834977105 -0.0107147945 0.0380949751 0.0866653398 -0.0723311082 -0.0372112989 -0.000454910012 0.0213319007 -0.00432507833 -0.0310348179 0.0425887331 -0.0940774977 -0.0323967934 -0.0242477451 0.117995851 -0.0160061121 0.0213480443 -0.0668758824 0.114949614 0.0316681191 -0.0759480372 -0.0610279627 0.0633142143 0.0236565657 0.0845542625 0.00935758371 0.0250929277 -0.0281674396 -0.0359582417 0.0694757774 0.056437064 0.016289724 -0.043686077 0.0887322947 0.000600125699 0.0521455668 0.0419055298 -0.0610189848 -0.0224667937 0.0316987857 -0.0323978439 -0.0178262964 -0.0366154872 0.0907478258 -0.0856860802 -0.110066622 -0.0315983742 -0.0946494043 -0.0222084317 -0.0352201238 0.0455912501 0.0811657757 -0.0895951316 0.0279459916 -0.0952548608 0.113056384 0.00558312191 0.050939288 0.124181278 0.0341638587 0.00255426345 -0.033172816 0.0153816594 0.104887553 0.0244834907 0.0457413457 -0.0520596057 0.095031105 0.0351452902 -0.11665196 -0.0497119017 0.050630711 -0.0861758068 -0.0872503743 -0.0496218018 -0.00381143531 0.109498873 -0.0175776016 0.00528071402 0.0259748194 0.0909558833 0.0579428524 0.139967725 0.0764526948 0.00463831052 -0.0771861747 -0.144396409 -0.104723662 -0.0137682576 0.0223192684 0.0313319825 -0.019306751 0.0563000366 -0.00657232618 0.034467455 0.0391030945 -0.0310320668 -0.0617044605 -0.126123548 0.0184416007 -0.051189024 -0.0356684178 0.0274483245 -0.0450351276 -0.0650538877 -0.000781424344 -0.0433340222 -0.0677636564 -0.0434984639 0.0460208468 -0.0124574052 0.045673795 0.0250319857 -0.0097975824 -0.000288532581 -0.0693829432 0.0817056447 -0.0367149822 -0.0803469568 -0.0272673164 -0.0125355599 0.108721487 0.0555210412 -0.0362726599 0.00188711134 -0.0225252602 -0.0467403233 0.0223246478 0.0924254134 -0.00607204111 0.0348412544 -0.0419691056 -0.0352974981 0.120286591 -0.0532627963 0.0599474981 0.117449939 0.0250896253 -0.0453546159 0.0333019607 0.0678343773 0.0618110187 0.0790082738 -0.013288267 0.0124899093 -0.0627008379 -0.0927575454 0.0872658491 -0.0858765841 0.0804509819 0.115199946 -0.116047971 0.114141606 -0.000370875583 -0.0547132045 0.0655369386 -0.105160132 -0.0587072149 0.00996344257 -0.0588719957 0.143944472 -0.0636086613 -0.0625388771 0.050297644 0.01688735 0.0503490344 -0.0187371671 0.0263831038 0.0351513959 -0.0622758158 -0.0289025009 -0.00445907749 -0.0819463283 0.0783530101 0.00172135397 -0.0108682076 0.053622894 0.0898650363 -0.125441834 -0.0469661765 -0.0740193054 0.0146140624 -0.074739024 -0.124391489 0.000910399249 0.100878544 -0.00938480534 0.10746365 -0.10205555 0.0822874457 0.117170572 0.065103583 -0.0468601808 0.0443411134 -0.0220601298 -0.0349924974 0.0565963052 0.024442032 -0.000596265076 -0.0503311418 0.0310966447 -0.0986445844 -0.0381193534 -0.107975848 0.041932255 0.0274684485 0.060344439 -0.0951578543 -0.00214851787 -0.0242667589 -0.00569425896 0.058796335 0.106092222 0.0197916087 -0.0124082975 0.0247668065 0.0231674556 0.0468560532 -0.000621114043 0.0964491889 0.0251123365 -0.0552343167 0.119750619 0.043985635 0.00931171793 -0.0136433262 0.091603227 -0.0712718666 0.0786479861 -0.0408394635 0.0966731384 -0.0660808533 0.0769225433 -0.0086235553 -0.105327964 -0.017321486 0.0972045138 -0.077172406 0.0514651127 -0.0781937093 -0.108713485 0.112203546 -0.0802456141 0.121202722 0.11901883 -0.0931790471 -0.0164292976 0.0312756896 0.105686158 -0.0783906654 0.0468474701 0.0110720228 -0.00267141312 0.0711446702 -0.0328070559 0.0175967477 -0.0356302932 -0.0124149965 0.0686402246 -0.0505678542 0.129400566 0.0214219112 0.0196880996 0.104359493 -0.0519865453 0.052242592 0.00997835957 -0.0990768448 -0.0456322841 0.0230734646 -0.0203887951 -0.0376775041 -0.07378342 -0.0275467373 -0.0691813529 -0.0821307749 0.0593699105 -0.0246762205 -0.101402849 0.0678628758 -0.00588039402 0.106908754 0.00991031248 -0.115229808 0.0721573606 -0.0429049321 0.0701546818 -0.0851534382 0.0652838498 -0.0788848251 -0.0332299247 -0.0408851914 0.00320880138 0.0196518935 0.0832488984 -0.0366014726 0.01875652 0.053820096 0.0153092891 0.0467731841 -0.0368329771 0.0111917052 -0.0291276965 0.0631685331 0.0357577875 -0.0179604348 0.00486189499 0.0305900779 -0.0209680013 0.0740240738 -0.0749756619 -0.121885069 0.032916151 0.00262444629 0.00849013589 0.0662304983 0.0742840394 -0.125835225 -0.0522070974 -0.113991506 0.0744321495 0.0978048667 -0.00498304795 -0.0719037652 -0.0263758246 0.0775782466 0.0118285939 -0.0350849591 -0.0356183834 -0.0106398668 -0.0223848727 0.0236225128 -0.120488241 0.0512224138 -0.041531492 0.0656389818 0.0879166201 -0.0274794661 0.0920548141 0.0264546964 0.0468961522 0.0466408059 0.0399114974 0.0449604504 -0.0700372905 0.0427690521 0.100792646 -0.0325727239 0.0341325775 -0.114281707 -0.00742708845 0.0433078147 -0.10298638 -0.104879826 -0.0632601455 -0.0102007883 -0.0802601725 -0.00294449297 0.00281117624 0.104717933 0.0612074584 -0.0467934757 0.118006982 0.117171124 0.106652826 -0.0225161687 0.0956271738 0.0270829834 0.0200848412 0.0168362167 0.0149010466 -0.0440483093 0.117436014 0.0664134845 0.0302254353 0.0535751954 -0.00848081987 0.0130929723 -0.0325898565 0.128652498 -0.0109613249 -0.00605653459 0.0190438107 -0.0259028617 0.0401356928 -0.00171622215 -0.104202524 0.0845206603 0.12883538 -0.0919445157 -0.00195987965 0.0736826509 0.0480313748 0.0636631101 -0.0408567712 0.0155776199 0.0579566024 0.134289131 0.0326339938 0.0374059007 -0.0366481617 -0.0377640799 -0.0160350259 0.0126323858 -0.0398559012 -0.0692037791 0.00400359975 -0.106625289 -0.0896666497 0.119961634 0.129376546 -0.0542201884 -0.0679891706 -0.0174552612 0.0752889439 0.116622798 -0.122125328 -0.0475201905 0.0421101414 0.00309556024 0.0322735868 -0.097082302 -0.0326796286 -0.0467596054 -0.0276475735 -0.088789694 0.0212633777 0.0486687906 0.108680114 -9.16656572e-05 -0.0739132911 -0.00859406963 -0.0290659312 -0.0827777684 0.151205987 0.0135993576 0.0095570432 -0.142162323 0.00178637984 -0.0175982956 -0.00341362623 -0.116277464 0.127863139 0.155240506 0.0902651772 -0.0665329844 -0.0343229175 0.00592056988 -0.0689622238 -0.0882099047 0.0423216335 -0.0418753251 0.0649065152 -0.148054436 -0.126529023 -0.107144743 0.048180446 0.0964411348 0.0283801127 -0.147997066 0.0730884373 0.0641160384 0.103855938 -0.0519286469 -0.0627045557 -0.123388886 0.106936358 -0.100495324 0.036348857 -0.0662566945 0.0681579113 0.0571975075 -0.0754548088 -0.0399843156 0.0368984528 -0.0353834778 -0.0517093278 -0.123962395 0.0263090748 -0.130231589 0.0996464565 0.0178089179 0.0458062775 0.0963696018 0.0762125254 0.0340860561 0.109043621 -0.0622775555 0.113345571 0.110095598 -0.100886367 0.0148935774 -0.123453058 0.0149683403 0.0882795379 -0.0308197234 -0.00579763902 -0.0442597829 -0.0558761358 0.00445035286 0.0967673883 -0.0510170944 0.1171581 0.0859833658 0.00351574784 -0.0282143541 0.0262071025 -0.0563719533 0.0486262627 0.0520373955 0.0674998388 -0.0777793005 -0.019211974 0.0729704723 0.0623332597 0.0411960185 -0.00379213877 -0.0160260908 0.101385273 -0.07236664 0.118454322 -0.0279059727 -0.00111757044 0.108530454 0.0313402973 -0.109885067 -0.00746698584 0.0517579988 -0.102587014 -0.062045224 -0.0723215193 -0.00753403036 -0.0194992591 -0.055590637 -0.110146999 0.0563573688 0.000793169835 -0.0437380224 0.037614204 -0.103893019 -0.11840263 -0.0892521739 0.0177615266 0.0299307667 -0.0603615977 0.0103125488 -0.0940437391 -0.0742155388 -0.00279134372 0.116551526 -0.0507049747 -0.0112416446 0.0206989124 -0.0475890413 0.00135824515 0.0360365659 -0.0638581216 -0.110917598 -0.0285419766 -0.0785639212 -0.00673839869 -0.072663039 -0.0943017006 0.0209225155 0.0458435677 0.06708619 -0.00773984846 0.130092591 -0.0302911103 -0.094658874 -0.106029265 0.0598360002 0.0363203026 -0.0587395169 0.0218387116 -0.00121726026 0.0923015103 0.0268146414 0.00547261769 -0.0118542481 -0.0527351797 0.0381134599 0.0549164079 -0.0742723569 -0.00661152741 -0.0885568187 -0.146828458 -0.145066977 -0.0526844971 0.0974245518 0.0119285621 -0.148422763 0.138095111 -0.0548562445 0.0224515107 -0.033984974 -0.0918067098 -0.0412526764 -0.129727185 -0.091969721 -0.0195525698 -0.0304857362 -0.114038028 0.123106226 0.0100026429 0.0864370763 0.0507619679 0.00284729549 -0.00105298625 0.0372510068 0.0406655185 0.035562437 0.0691316351 0.0814873502 -0.0958798006 -0.0228097066 0.0195914619 -0.027246682 0.0997626036 0.0630631745 -0.125293136 0.0745200738 -0.0511293188 0.0464217141 -0.067329675 0.018248735 -0.10921976 -0.0231118333 0.0425507538 0.0270119589 -0.0716171041 0.0184950344 0.0490458496 0.0652568191 0.0191503335 0.0326661766 0.00589203555 -0.0994252041 0.0639910772 0.0935874581 -0.0105717117 0.0074147 -0.0258986168 0.0828858837 0.00915369298 -0.031141039 -0.0613915138 0.0385154858 -0.098638989 0.0779575929 -0.0105742011 -0.0758871809 -0.0109963436 -0.00425893022 -0.0998037308 0.0786853656 -0.00168336509 -0.0925417468 -0.125538707 -0.122153223 0.0821714997 0.124297135 0.0863585621 -0.0707112625 -0.0507845916 0.0522913001 0.0209270567 0.0663688928 -0.0528395213 0.120615751 0.0676541924 -0.024552837 -0.0572560132 -0.0213594604 0.10777957 0.101967193 0.13353315 -0.0903856754 0.000669586763 0.0565047972 -0.0825800672 -0.062248636 -0.014425775 -0.0738483593 -0.00286239828 0.0884365365 -0.105007574 -0.0629888326 0.0934715867 0.110788323 0.0860318914 0.00210579997 -0.0725004748 -0.127494186 0.0919124186 0.110653833 -0.0781571791 0.00416795025 -0.110430084 -0.0848361403 0.00444030436 0.116966464 -0.0922116861 -0.0228395946 -0.00113955385 0.00864992663 0.0542619862 0.0738494545 0.0541707687 0.0777184516 -0.0970832705 0.0126359928 -0.0184956864 -0.0622084662 0.0451156609 0.079605639 0.012876017 -0.0658479407 0.0148149095 -0.116397806 -0.084576413 -0.100534178 0.0414143018 0.0586240441 0.0751088932 -0.0757061094 -0.0277423412 0.134908676 -0.033364512 -0.0541506857 -0.0223149844 0.0424581356 -0.0582377762 -0.0225137156 0.0737239867 0.0508049503 0.0808460936 0.0816969201 0.0865024626 0.0526327603 0.00781754963 0.0650917143 0.0286054742 0.0803678334 -0.0147822825 0.0908693671 0.107531264 0.0507709011 0.0359385237 0.0059293001 -0.00380560011 -0.00963426009 0.0474996306 0.0564068668 -0.0188577659 0.0513748489 0.0650842935 -0.0713231862 0.0369577892 -0.0133927027 -0.0248449575 0.0464835763 0.0194541477 0.0589518249 -0.0366000384 -0.00920657907 0.0802554563 0.107316345 -0.0738855079 -0.041999802 -0.061523933 -0.0305218492 0.156987384 0.0346348062 0.0126057826 -0.0152376043 0.0502441861 0.0664728656 -0.0426487103 -0.0210334025 -0.0939696729 -0.0341476351 0.0202106778 -0.0405531637 -0.0474039763 -0.122032635 -0.0849575177 0.0358551703 0.0997392237 0.0705704316 -0.0954606384 -0.0823383406 -0.0335653499 -0.121191315 0.013300227 -0.0529904217 0.0114150038 -0.0746903941 -0.03891664 -0.0819836557 -0.0729287639 -0.0179754216 -0.0102402242 -0.0607133955 -0.0891000032 0.00797273777 -0.022605991 -0.0809449404 0.0195738394 -0.134260491 0.0545250103 -0.0856008008 0.0678676516 -0.0582477748 0.0922091454 0.0154068666 0.0158708468 -0.12760596 0.0564289317 -0.0613937639 -0.0670805797 -2.70218661e-05 0.0686709732 -0.0105099222 -0.0782819167 -0.126448721 -0.0540316962 -0.00444437843 0.0426672585 0.0511715114 -0.0664667189 -0.117823824 -0.0813703611 0.0417935252 -0.075663574 -0.0227859747 -0.0927959681 0.00258619245 0.0769350082 -0.0262786634 0.15649274 0.00548388064 -0.0489241667 -0.028820714 -0.11300534 0.0169768985 0.0593335219 0.107664488 0.0454386249 0.127441257 0.0917166471 0.118455522 -0.089602381 -0.00173335895 0.039862778 0.030519424 0.0692663789 -0.0869349092 0.0913121849 0.0777036697 -0.103544652 -0.0235558059 -0.00459441263 0.0603882074 0.092305325 -0.0165223666 -0.0444068499 0.0616826303 -0.0684993789 -0.0543850996 0.02927945 0.0985921547 0.0364675447 0.0572966635 0.130570352 -0.109130152 0.0495605171 -0.0749507546 0.0501365066 -0.119342029 0.0564106107 -0.0683375373 -0.10465467 -0.10835091 -0.0725807771 -0.061554186 0.0320482478 0.0128553994 0.0586562306 0.0384311117 0.0629496649 -0.0201265886 0.00698905718 0.0514870435 -0.100664191 -0.0316986516 0.00712753553 0.0836560577 -0.129708961 -0.0514545329 0.127032861 0.0874415487 0.0538004041 -0.12563847 0.0540097728 0.0164953042 0.0776124969 0.0573409572 0.0104391398 0.0330088995 0.0108658904 -0.0540808886 -0.0545005389 -0.0912899747 0.039707467 -0.0845631137 0.00952329952 -0.132783711 -0.00215431862 -0.00721834227 -0.0975917131 0.0458213203 0.0200680383 0.0877264142 -0.037273623 0.0161861442 0.110501729 0.0362346061 -0.0118159521 0.06564527 -0.0172634032 -0.0569092482 -0.0533066876 0.049190376 -0.0601144843 -0.0810378492 0.0477174371 0.0174570587 -0.0335904099 -0.133734539 0.0280133393 -0.0691580176 0.0767375976 0.138999552 -0.120073155 -0.0186303947 -0.0229670238 0.110347301 -0.0162759181 -0.125030503 -0.0610758066 -0.0921585709 0.0763650015 -0.0880154371 0.0487306491 -0.0280137099 0.013374065 -0.106856197 0.0210058708 -0.0297262985 -0.100724012 -0.0436595678 0.0109883668 -0.0853584632 0.0911873952 0.0432055667 0.00453142403 0.0127071068 -0.0517662428 0.0676092654 -0.0881505087 0.0098452922 -0.0823895931 0.026693739 -0.0401734523 -0.0659064725 0.0980170965 -0.0178305618 0.0141226156 -0.0129480297 -0.0210044179 -0.052819252 -0.00334462686 -0.0292001851 0.102371179 0.0348540843 0.0185807701 -0.0405740775 0.0680675134 -0.0530634858 -0.0756660923 0.000960345787 0.0799955055 -0.00204092567 0.106765963 -0.00071719382 -0.0700320154 0.0350265689 -0.0876352489 -0.0223301966 0.0634540915 0.046514608 0.00813020207 -0.0424726084 -0.0671557188 -0.0563678183 0.00290626287 -0.0465417765 0.0410110131 -0.0751074255 0.0387596823 0.0907988921 -0.0267684907 0.0381636135 -0.0729710087 0.111388907 -0.00196558004 -0.0716333836 0.00468423311 0.0505745411 -0.0488672592 -0.0919072255 -0.0417954288 0.0445425287 -0.133938208 0.0828562379 -0.0701478645 -0.086428687 0.031541761 -0.0582731441 0.0219943449 0.0624012351 0.0130704865 -0.0401517116 0.0552026331 0.0567754321 0.0710184798 0.00758881867 -0.0124910641 0.0352796912 -0.0257061403 0.0308687277 -0.0213920511 9.48071975e-05 -0.0383036099 0.0287866332 -0.0110332398 0.0567016639 0.0603689998 0.00948372204 0.0733415633 -0.057091359 -0.0315212272 0.0307328366 -0.047477711 0.0429863371 -0.0265197102 -0.0657150894 0.0344378874 0.00179678167 0.010823926 0.0920330659 0.0393380597 -0.117785789 0.0551497154 0.0594726615 -0.0610899888 0.0493407547 0.0192096289 0.018806411 -0.0479772612 0.084946245 -0.0363575257 0.0180488937 0.0628424212 0.047983963 -0.00609665224 0.0490711816 -0.0524433553 0.0299023781 -0.134415284 0.0352817513 0.112402901 -0.00522935297 -0.108842514 -0.100159943 0.0536962375 -0.0482212268 0.033140216 0.0652505681 -0.0243575778 -0.00743648084 0.109651551 -0.0922743604 0.00548604131 -0.00986081176 0.0220172554 0.0101632392 -0.0740579367 -0.127274752 0.123685867 -0.03647862 -0.0687318668 0.00219690241 0.0450170375 -0.115679674 0.0136389351 0.103024855 0.103029318 0.110668465 0.00388179533 -0.0727895275 0.0803632215 0.0280900523 0.0280285254 -0.0467234924 -0.0731821135 -0.130512729 -0.0447000824 -0.107333377 -0.0769113675 -0.0171804633 0.0346588232 -0.0489323661 -0.0676056147 -0.105796985 0.0340438746 -0.0489065722 0.0679880902 -0.138374269 -0.10602235 0.0318014435 -0.116686605 0.0709820092 -0.0534674339 -0.00963871367 0.117157362 -0.0614339933 -0.0393194258 -0.0876544267 0.00427854154 0.0532199927 -0.044476755 0.000165691803 -0.0572972745 0.0502985567 -0.0602571145 0.12552923 0.0100918449 -0.0934075043 -0.0230401549 0.0295511037 0.0494234338 -0.0848071203 0.0259556789 0.079481706 -0.070821397 -0.0908804163 0.0531463176 0.0720867738 -0.0683894381 0.0868811682 -0.0569350533 0.0212230869 0.0892836973 0.0608907081 0.0264557358 0.117087588 -0.0282123219 0.0711553991 0.10003607 0.0398537852 0.0925926194 0.0799961835 0.106195562 0.0835256651 0.0742167607 -0.1288362 -0.0119199455 0.0209458005 -0.0807825178 -0.090900667 0.0211038124 -0.0566857532 0.031491559 0.0864643902 -0.151924461 -0.0301744267 0.0409735925 0.111272551 -0.0702080205 0.0960132927 0.108262971 0.0476099811 -0.00588667253 0.0851650238 0.010426431 -0.0125091802 -0.101221651 -0.0594466254 0.0449221432 0.0909607708 -0.0161867402 -0.0196655095 0.0530647635 0.0182610322 -0.0709991604 0.00761050964 0.0762544423 -0.0591728203 -0.0941646695 0.024339376 -0.0626933351 -0.103930928 -0.0321634226 0.0614973754 -0.118808359 0.0537795126 -0.0431177206 -0.116265662 -0.0131834941 -0.00236911164 0.0999807268 0.110797547 0.0114548178 -0.0998885259 0.145602047 0.111171857 -0.0988338813 0.0828444064 -0.00907499064 0.00587745896 0.071559459 -0.13120684 0.060727559 0.0683118403 -0.0836969465 -0.0657875538 -0.0527593195 -0.115409821 0.0975560099 0.0509091392 -0.0899974853 0.0956521481 0.0244503263 0.0126064662 0.114975713 -0.0405928865 -0.0483787097 0.0214200635 0.00839174818 -0.046142336 0.0722193867 0.0103485761 0.107258148 -0.0378706008 -0.0450719856 -0.0208163224 -0.00753858453 0.0927842855 -0.0761416107 0.0222469252 0.0221501626 0.103600152 0.00779794529 -0.0233703442 0.012515164 0.0401047952 -0.0354013927 0.0374246277 0.0105700931 0.0782879516 0.0528782457 0.0609663725 0.121859848 0.0579176694 0.0506275222 0.089213632 -0.118543468 0.037998043 -0.0670093521 -0.13410008 0.0411561765 0.0282420814 0.0700198412 0.0285238512 0.00444778334 -0.0674335882 -0.0760352165 -0.0417404994 -0.00990704726 0.0137323095 0.0390490703 -0.05264882 -0.0782701373 0.123745263 0.0234257653 -0.0788318142 0.024272114 -0.0218193699 0.0922509953 -0.00793454051 -0.0463180654 -0.113730133 -0.0577475242 -0.00455238903 0.0886773691 0.0554309674 0.0941009894 0.129545763 -0.021742925 -0.081449911 -0.0626695529 -0.0395893045 -0.036148537 -0.0715967566 0.00607152935 0.0371897854 -0.0568351522 -0.0322895311 -0.104940452 0.0889158696 0.0984952897 0.0380211174 0.109986477 0.00241002371 0.0807410851 0.0208322443 0.0305939745 -0.109555371 0.0584459454 0.0174565129 0.0446334742 -0.00203529699 -0.0710110068 -0.00075355859 0.00535989506 -0.00548237702 -0.0412316248 -0.10585098 -0.0147320451 -0.000358470366 -0.0302088298 0.0840577036 0.0291063283 -0.0138413198 0.0101552876 -0.0291738547 0.0731139556 -0.00815887749 -0.117785364 -0.0678437576 0.0784235671 0.0205320921 0.0843389407 0.0271183364 0.000885073736 0.0346361846 0.0954925418 0.130852431 -0.0751837641 -0.0253316611 -0.0976730809 -0.0676677674 0.0476101562 0.0662705749 -0.0301036816 -0.049003385 0.0106137209 -0.11252144 -0.0744634122 -0.0980421826 -0.0166393481 0.0481715277 0.0189415459 -0.0880238637 0.104637556 0.10711097 -0.0564402714 -0.0601721779 0.0159785729 0.0403848588 0.0139113516 -0.108862996 -0.00510752294 -0.0115773957 -0.0455890708 -0.0705545172 0.0851743072 -0.0565754622 0.0915891081 -0.0679899007 0.140255541 0.0228361152 -0.0286569875 0.0132950023 0.0172942225 -0.0447521694 0.0719969049 0.109762378 0.0197884869 -0.0228357762 -0.0507639088 0.0672996938 -0.0723399743 -0.00699901069 0.103675373 0.00931620412 -0.0457025245 0.0969348028 -0.0169527791 0.0237304047 -0.0895861909 -0.0194170661 0.0993531495 0.00229117088 -0.0562044792 0.0336305238 0.0490789376 0.0386500955 0.0127196591 0.00393643929 0.00232719886 -0.0829996243 -0.121718653 0.0753233731 0.0201976635 0.082195513 0.0600713976 0.0539501272 -0.0779756531 0.0625429153 0.00583441136 0.00761622144 0.115591303 -0.0334634334 -0.0245005973 -0.00851792749 0.0614887588 -0.0673773736 -0.0610792227 -0.0935594514 0.0109041268 0.0797310621 -0.054385256 -0.0951922908 0.0503217317 -0.105528668 -0.111157358 -0.0676904768 0.0699280798 -0.0395813137 0.0542365499 -0.0423914567 0.0401284434 -0.108586438 0.106151514 0.0741012841 0.0261538271 -0.0234557595 0.025597224 0.128329813 0.075639084 -0.0326176621 0.0483325124 0.0441246293 -0.00211445754 -0.0714289173 -0.028952891 -0.0652696118 0.10838379 0.0806302279 0.0361339003 0.000588985044 -0.0501025841 -0.0767339468 0.0711772069 -0.109505966 0.0615409054 -0.00328358519 -0.0599714369 0.0810240135 0.0322509259 -0.105258301 0.00425739167 -0.0571172498 0.011786839 0.0552534238 0.0161224175 0.00839114189 -0.0516668111 -0.0663074031 0.0675223991 0.00408511516 -0.0934429094 -0.00482452614 -0.0146074528 0.116948992 -0.0514550433 -0.0092106685 0.0192382932 0.0676550567 -0.107134365 -0.0554183982 -0.0378831327 -0.00111221685 -0.111972146 0.0370764211 -0.025367327 0.0421501771 -0.034930028 -0.133550882 0.0460590795 -0.0408849232 0.0172165278 -0.0828626677 -0.142174855 0.0368294381 -0.0315607302 -0.0813754499 -0.0344028175 0.024872696 0.115811288 0.0284592416 -0.0183729436 0.0680122226 0.096686542 -0.0504275933 0.0555682927 -0.0117671303 -0.106590241 -0.0779706761 -0.0753313005 -0.01689367 -0.0120249027 -0.121162862 0.0904329047 -0.0305291414 0.115454301 -0.0964366719 -0.00301298662 -0.0242908541 0.0638555288 0.130841374 -0.10721194 -0.108135305 0.0741211176 0.0291729122 -0.113760702 -0.0604273416 -0.0709419549 0.139797956 -0.0342961662 -0.0201933645 -0.112033077 -0.0611329861 0.0254594646 0.0681489855 0.0438128486 0.0619565509 -0.11234884 0.0307241976 0.0981715992 0.00958520174 -0.0568824336 -0.0283987094 0.0636719465 0.0751391798 -0.114547461 -0.0748261958 -0.0586201847 0.000263712311 0.00999936834 -0.0185962841 -0.08149115 0.105373196 -0.00710947951 0.000694327406 0.0461735427 -0.0157357287 -0.0240037851 0.0580982715 -0.100919247 0.111699469 -0.0659800097 -0.00773917325 -0.0232742541 -0.0999335274 0.0753451958 -0.051228717 -0.0892171562 -0.0358730741 -0.0996832997 0.0939411744 0.00339663634 0.0281091705 -0.0894726738 -0.00704634562 -0.07571394 -0.0520362742 -0.0228929147 -0.0211675484 0.0902847946 -0.0659550056 0.11681138 -0.0221742485 0.0613481849 -0.0157842189 0.0737548843 0.00965575501 -0.0838649422 0.00177340093 0.0528827235 -0.11357832 0.100755192 -0.0534734391 -0.0358427912 0.144596636 0.0647218004 -0.0423597619 0.0605341755 0.0732165053 0.0211333074 -0.132864833 -0.0364422947 0.0478251725 0.0119115161 -0.00161979138 0.0571164563 -0.0192272216 0.00423192605 0.0322174877 -0.0759943277 0.0137448525 0.00333114085 0.0397117473 -0.0238687452 -0.0832956731 -0.0279709753 0.0938207209 0.0343491249 -0.0871218666 0.0408064276 -0.116285831 0.0795068964 0.0848761573 -0.046004314 0.0709750503 -0.10165219 0.113097928 -0.0288509876 0.0324093103 0.120680496 -0.0231527574 0.0672659576 0.0315056667 -0.0114693092 0.0518258587 -0.0120203597 0.0803814754 0.034682408 -0.0337615535 0.0386820808 -0.0665814355 -0.0311845411 -0.0300064813 0.000839714077 0.0999553874 0.0476576835 -0.0808109865 -0.0593067668 -0.0203568004 -0.0215366837 -0.0306633245 0.0436415672 -0.0116597321 -0.0583168492 -0.0336035974 0.0782463998 0.019496616 -0.113910265 -0.100025997 0.0590599142 -0.0404247232 0.0250362512 -0.114405021 0.0491880253 0.0765769333 -0.0574881397 0.0377197377 0.0666196123 -0.0882478282 0.12657164 -0.072820656 0.0688454062 -0.0431665219 -0.0397588946 0.0174000375 -0.0294575971 -0.113417856 0.0438238829 0.0227670204 -0.0407737307 0.00265934458 0.0681627318 0.0419177152 -0.0144360522 0.0160792209 0.0458587408 0.0618349649 0.0176743343 -0.0137938112 -0.0633563772 0.0310720019 0.0681380481 0.0212434102 0.0676606894 0.041460555 0.110738382 0.0473630205 -0.0378823988 0.0510912016 0.00387570239 0.0408548079 -0.0602159686 -0.129621208 -0.00288634188 0.00880218763 0.091095008 -0.132332921 0.0251508802 -0.0712097511 -0.103759281 0.0195465796 0.0568811819 0.0538945012 0.140887022 -0.0393595546 -0.00443352573 0.0130884098 0.0407299697 -0.0129098492 0.120743103 0.02760542 -0.129898503 -0.00857560523 -0.0720292479 0.0491493791 0.102326475 0.0921848789 0.0479474291 0.0605254434 0.0442970507 -0.0229930989 -0.101877302 0.120247759 -0.0461608209 0.0228956696 -0.0468554012 0.0931479931 0.0344555005 0.01930671 -0.0351826884 0.0757239461 0.0770438984 -0.0225172918 0.0203145165 0.0127433063 -0.0512304567 0.070727922 0.011212891 0.0631789118 0.061186403 -0.0620558858 -0.0242845789 -0.0357322469 0.0807544664 0.0694836825 0.0275604576 0.048231598 0.0312998705 0.0098650381 -0.0849438533 0.00338348607 -0.0562642589 0.062496379 0.00795244705 -0.00989409816 0.0154066579 0.0518637821 -0.103680417 0.00535006076 -0.104885489 0.0388466492 -0.0735442638 -0.0991858095 -0.0114390533 -0.024811225 0.0624354333 0.106003806 -0.0726236701 -0.106910177 -0.0611559413 0.0201094151 -0.052110929 0.0187292732 -0.0280273762 -0.101260066 -0.143803522 -0.1252902 0.0355423726 0.041179236 -0.126104265 -0.0216143429 0.0806514397 0.00608616043 0.0657909364 0.0178345367 -0.0923066512 0.0481731519 0.145438254 0.0159616042 -0.0456462018 0.0941475853 0.0632876828 0.0367626883 0.0247407742 0.0601012856 -0.0355465524 -0.0118422816 0.0488038473 -0.0545568913 0.0373688899 -0.0515505187 -0.0396510959 -0.0605122671 -0.085121952 0.0596127883 0.105902717 0.0220958665 -0.0255203731 -0.0148762362 -0.0768131837 -0.0578792021 -0.0949795991 0.0773940459 0.0814553499 0.137127966 -0.116018936 0.0563674271 0.0888326541 0.0284422096 -0.110917278 0.0353827216 0.0380769633 0.12019825 -0.00778515963 0.0705309212 0.00951496419 0.0804332197 0.0100569949 -0.0600129589 0.0635915622 0.0929165035 0.0890567824 -0.0334398523 0.0183780789 0.0171072353 -0.0848544464 0.0573717169 -0.0625854135 0.00517629972 -0.0316587314 -0.00222206302 -0.139464319 0.00396719109 0.119108282 -0.0407875292 -0.0145511776 0.0634373575 0.0286066066 0.0339107104 0.0838994458 0.156156093 0.0932729319 -0.067164138 -0.0970614329 -0.000510855229 0.108286127 0.151279747 -0.153703973 0.035030935 0.0742894635 -0.0494455397 -0.0341568068 -0.0577272587 -0.0769041032 -0.0278417245 -0.0176225342 0.0874658376 -0.0257863011 -0.0709038004 0.0103997458 0.105501436 -0.132797644 0.0622315481 -0.0974398479 -0.0254051387 0.0495131202 -0.111515976 0.103808023 -0.101846233 -0.0294793397 -0.131958127 -0.0533692092 0.109044902 -0.0826396644 -0.0222143289 -0.0188204758 -0.0809235647 -0.0443305187 0.0731882006 -0.0607132837 0.108671054 -0.099729836 0.0198555607 -0.14591822 0.0282850396 -0.0493627414 -0.0674319044 -0.132230341 -0.0341024846 -0.0808820575 0.0899107382 -0.0263775121 -0.103454776 0.0222094338 0.00426623598 -0.084072873 -0.0383605286 -0.0547198616 0.0559252352 0.0470217839 0.0677336454 0.0497331806 0.0631156936 -0.00398747297 0.112397343 -0.166542813 0.137340739 0.0250983089 0.0890974551 0.119683884 -0.0544718653 0.0518688969 -0.0565655194 0.0104118874 0.0981469452 -0.0510016531 0.00272554019 0.0927296504 -0.0194716733 0.110565722 0.0546717308 0.0536676086 0.064474754 0.0118900863 -0.11696136 0.0142176412 0.0417189002 -0.0839173347 -0.0281918701 -0.0403215103 0.02901816 -0.0981693715 -0.0701962784 -0.00782805495 -0.0462877564 -0.0264666826 0.0648322478 0.0252208374 -0.0529760942 0.0255852453 -0.0645134747 -0.0154027175 -0.0532422848 0.0832066536 0.0396291837 -0.0148703549 -0.00148318615 -0.0615470037 0.0724665746 -0.0584392287 -0.026484957 -0.0585906915 0.0238307714 -0.0229600184 -0.0852913335 0.038450405 0.103452124 -0.033543352 -0.101024874 -0.0779693052 0.0964624882 -0.0119416546 -0.0178287029 -0.0222403612 0.0204786509 0.0163842663 -0.045447167 -0.0765725672 0.0971594155 0.041313909 0.0470899418 0.00305234478 -0.119385377 0.0464745872 0.0310937278 -0.131466374 0.0710817575 0.0100257266 0.068332687 0.0689313188 0.0125061376 -0.0347266309 0.0975375995 -0.00572358584 -0.0388996676 0.0531310216 0.137647584 0.0694899485 0.00679214392 -0.00121487537 -0.0537946038 0.0458747223 0.0680655316 -0.0407916345 0.0546781644 0.0273533594 -0.12560609 -0.0675602481 0.0385174453 -0.0973455235 -0.00201383908 0.0485716909 -0.0049632024 0.00582174305 0.0641960278 -0.0848527774 -0.0219417512 -0.15150407 -0.0783323124 -0.00965964142 -0.0608184524 -0.0130811296 0.0480553694 -0.0386784896 0.0999142677 0.0546924099 -0.1222317 0.0141203087 -0.0226628929 -0.0910275429 0.0631851926 -0.0745201185 0.0876418352 0.00886597019 -0.047717195 -0.128835618 0.0425309427 -0.0195610169 0.0638543293 -0.129029542 -0.0131098628 0.0650595948 0.0454248711 0.0853424594 0.0140957199 -0.0202541035 0.0317879245 -0.143378869 -0.00423101289 -0.131989092 0.0190712065 0.0510258228 -0.0865119174 0.0910336599 0.0548940673 -0.105332822 -0.0292321127 0.00560635095 0.0586736389 0.00505677052 0.110722467 -0.107276112 0.110618874 -0.0691331774 0.118226469 -0.0460344777 0.0930353999 0.0951236412 -0.0392442942 -0.112528846 -0.0722931251 -0.0136691937 -0.0806170553 -0.0411446244 -0.041987583 -0.00134162803 0.0213002544 -0.0701876432 -0.0885930806 0.0525745451 0.0624400154 0.0732306167 -0.0200780723 0.0234855395 0.00717696035 -0.147869304 0.0787881762 -0.0716938302 0.0857125446 -0.0352597944 -0.0805057809 0.0154540623 -0.00925941207 0.0756853744 0.0621306412 0.00842974614 -0.0371917672 0.108534172 -0.035098426 0.0504793301 -0.020088356 -0.0706505328 0.0162423179 -0.0752581954 -0.134370595 -0.015700357 -0.0898832977 0.0254374146 -0.0247301795 0.130348459 -0.035687875 -0.0680520609 -0.0444837064 -0.0601570755 -0.0513698831 0.111533344 -0.113860339 -0.0939767584 -0.0477326587 0.0567156076 0.0808228627 -0.0376587808 -0.114645995 0.0915196016 -0.019314684 0.0117936404 0.0774793029 0.0794394761 0.0432011634 0.0209889244 0.0314350612 0.0340264812 -0.104788385 -0.00981875602 0.0270214248 -0.0868451148 -0.0589688234 -0.0642679632 0.063923806 0.0117191905 0.0869612917 -0.0395875722 -0.0766618028 0.0934183374 -0.0996760055 0.0946377516 0.0345660634 -0.0784306899 0.07895834 -0.0232259352 0.0685211644 -0.0316710509 -0.0698057264 -0.011367701 -0.0852755904 0.0756148174 0.112687804 -0.0221658461 -0.0638062581 -0.0995947495 -0.148020685 -0.130782247 -0.0682474449 -0.0904296935 -0.0312870853 0.00539993821 0.0133634834 -0.0529328249 0.11267703 -0.0565492623 0.00755324587 -0.13352786 0.0963837281 -0.0480984338 -0.0886128098 0.0665832683 -0.109329402 -0.0235391576 -0.057248909 0.0689797029 -0.06789276 0.055123087 -0.0196565576 -0.0988758132 -0.0760087073 -0.0214166064 -0.0119032441 0.0697430596 0.00237821974 -0.0490270369 0.117983244 0.0743466169 -0.0153463781 0.101554446 0.103069983 0.0187342204 0.06316486 -0.117018297 0.0541630685 -0.100673176 0.0376137197 -0.0421548113 0.0645036548 -0.0168326087 0.0727319941 -0.0281656329 -0.117367707 0.116008684 0.0423757844 0.0988826826 0.0217613652 -0.0824515149 -0.0351628885 0.0812880173 0.147239089 0.0299862716 0.0364563912 0.145076081 0.105126604 -0.0210315958 0.0879319981 -0.0583813041 0.0593444556 0.106604464 -0.0204098541 0.0887133107 0.0528428815 -0.0444989093 -0.157084122 0.054473713 0.150896385 0.028199533 -0.0832022205 0.0864810869 -0.0489188135 -0.00317808706 -0.138337359 -0.0614818409 -0.0958974063 0.16130729 0.0542741828 -0.0278767291 -0.0963605344 0.132825524 -0.0308959335 0.0545662679 -0.0319377147 -0.0980552807 0.0630095378 -0.0429899767 -0.0260642897 -0.0517068692 -0.0561511219 0.11457511 -0.060042996 -0.0527282394 0.0207744949 0.023834521 0.0296360757 0.0525115617 -0.114589319 -0.147910029 -0.0456607491 0.0715667382 0.0993826538 -0.0107942242 0.117987439 -0.0284947716 -0.0709881261 -0.0903323144 -0.0224822853 -0.157054216 -0.0552059412 -0.0338664018 0.0750938728 -0.065085113 0.0211203843 0.118827477 -0.0167396851 -0.0932219103 0.0603475198 -0.0151796769 0.00819401443 -0.103917979 -0.0764359087 0.0163631905 0.0167195648 0.10436935 0.031059213 -0.010204182 0.0322529972 -0.0338583738 0.0547566526 0.0993093476 -0.0449988134 0.0820005462 0.0658240914 -0.0828819126 -0.0934411883 -0.00780287059 0.0153802652 -0.0455549546 -0.021676302 -0.00396145368 -0.121144004 0.0972423553 -0.0255077239 -0.0110973027 0.120320976 -0.0416977331 -0.0117513239 -0.105017632 0.0385619588 -0.0398330316 0.0233246256 -0.0271476638 -0.0675703511 0.0597647466 0.0618401542 -0.0964857414 0.060786169 0.00302257249 0.0607231446 0.0319902562 0.0811921582 -0.01984399 -0.00189587893 -0.139521733 0.0102705099 0.0954400972 0.0966984481 0.0283194389 -0.045280274 0.0892768875 -0.052237168 0.0279194918 0.0737474114 -0.000261810783 0.112771511 -0.00738663413 -0.0538329072 0.0805022269 0.0928170681 0.0922827125 -0.12284258 -0.0546792485 0.0152943293 -0.0549117215 -0.0439201444 0.148618817 -0.0444157384 -0.014640267 -0.0463561974 -0.141159236 -0.0240516476 -0.0309760477 0.103102759 -0.0665320605 0.0416155756 0.0295819342 -0.072761029 0.106963806 -0.0282385554 -0.0605240837 0.0600927584 -0.0168974958 0.0207078587 -0.0489886738 0.0462408178 0.0563473664 0.0989545807 0.0286699794 0.00622357149 -0.0955138803 -0.089370057 0.0498380885 0.111254118 0.0486598499 0.0335665718 0.00651514437 -0.137515217 -0.00872137025 -0.0768070891 -0.103112787 -0.0138399014 0.0693487599 -0.000350349292 -0.132112339 -0.054441724 0.0118631627 0.0867957175 0.0417998731 -0.125937298 0.0554638319 0.0775117502 0.0927276611 0.0790062174 -0.107764006 -0.00206389814 -0.0824461728 0.0240072217 0.0872223303 0.0261813533 0.0419610031 -0.110041335 0.0405180678 -0.0859660432 0.0439100154 0.0232283361 -0.0124879908 0.0721851513 -0.078385748 -0.0744791776 0.0467836894 0.0291622393 -0.00720773824 0.0299567468 -0.0575182885 0.0597681552 0.0921448171 -0.0661888644 -0.00482166698 0.0675290599 0.0166998263 -0.0955132842 0.085087046 0.102876 0.0426338613 0.0119831273 0.0855897442 0.0225691516 0.0364397429 0.0160135124 0.00990157295 0.0420151539 0.0316322856 -0.0585764237 0.0824364498 -0.0413012579 0.135568514 0.034757603 -0.0277830604 -0.034982793 0.0369454138 0.0193050615 0.0799474046 -0.0846258327 0.0366695002 0.0598423779 0.0799307451 0.136157006 0.128120825 -0.0201004725 0.0452948473 -0.06387043 -0.0197872147 -0.0240808073 0.0412721485 -0.0553675219 -0.106740355 -0.035156589 -0.0776401386 -0.104280807 0.0636275262 0.143016845 0.0158824641 0.0104194768 -0.0614338666 0.0736046582 0.0777402669 -0.0251369067 0.08835724 0.0629755557 0.100526057 0.130498186 -0.0605841354 -0.107173443 0.0618615188 -0.0027110891 0.0448608994 0.0324240513 0.13815707 0.0885208547 -0.0184885561 0.0110004703 0.0750818923 -0.123291738 -0.0332586765 -0.00227115862 0.0191539656 -0.00745699275 0.0960062817 -0.00826996565 -0.11104311 0.00985418726 -0.00825903285 0.0663968921 -0.022775976 0.120236516 -0.0306508504 -0.126447007 0.0266145803 0.0663856491 0.0159932058 -0.00567367487 0.0217537843 -0.129739061 0.0966290981 -0.120227985 -0.0869013295 -0.0486435518 0.145977855 0.0870844871 0.08520028 -0.0570347048 0.0191150215 0.0577304959 -0.0748146251 0.138433784 -0.00482775643 0.0784191266 -0.00595876481 -0.089486897 -0.0807763785 0.0747055635 0.0123625547 -0.0272405632 0.0675494596 0.0216415282 0.0251738597 -0.055193793 0.00323623535 0.104679525 0.00744761759 -0.0563779734 -0.00321181351 0.0755025595 0.0668580309 0.0714727044 0.0588193573 -0.0533336736 -0.027081253 0.0995806679 -0.00321418745 -0.0934964344 -0.0121698389 -0.0306112021 0.049315121 -0.0717256963 0.0284800846 -0.0465604663 0.0592573173 0.0975120962 -0.0522723123 0.0236058217 0.03004965 0.0192594938 -0.0153996143 0.0517514087 0.0202556662 -0.036583852 0.105843432 0.0923823789 -0.108679689 0.104115218 0.0757252499 -0.0786331147 -0.108693816 -0.0475629792 0.0984940901 -0.014999046 -0.0789110363 -0.0634896383 -0.124007449 0.0513020195 0.0337021165 0.00936586969 0.0974761024 -0.0039249598 -0.0539403148 0.0093635805 0.064329423 0.00841173995 -0.0369432382 -0.0830086768 0.0733837709 -0.0366025865 -0.0928544104 -0.0144924261 -0.0935278535 -0.0608592965 -0.102837108 -0.105442159 -0.0516982377 0.0344825126 -0.0649234951 0.0406593382 0.0177465007 -0.129067734 0.0989128351 -0.00798356999 0.0440664552 0.0535025857 0.0184675008 0.0339980274 0.0178082474 0.0744322464 -0.0364990495 -0.00785736833 -0.0367429368 0.110767066 0.043253459 0.0398505144 0.0362781025 -0.0534571707 -0.0138610825 0.0870700777 0.0741645619 0.0842578635 0.142304018 -0.0709979832 -0.0997136533 -0.0118433814 -0.0195060018 -0.0260943621 -0.0851690397 -0.0610457137 0.0346727297 0.038758263 -0.026763279 0.00496497378 -0.0248329956 -0.0392976888 0.0639327541 -0.0116053829 0.0389414802 -0.0765374303 -0.00860751234 0.0580505244 0.058850836 0.0729101896 -0.0168091431 0.0493836068 0.0378085151 -0.0690903589 -0.0735144988 0.0763928369 -0.0370460264 0.0515179113 -0.0130856326 0.0213577785 -0.0343368538 -0.0334647931 -0.0814459473 0.0176412053 0.0479053147 0.00514184404 0.0467168912 -0.114557423 0.0326536633 -0.122538239 0.0797366053 -0.0220797621 -0.0939437151 -0.0356741399 -0.129558548 -0.0728810504 0.0334232114 0.0454140641 0.0641237572 0.0639395788 0.0806245655 -0.0568198524 0.113629669 -0.0269122235 0.0319497921 0.0329489671 -0.0914393291 -0.0630809143 0.0387656465 -0.0879159197 -0.0275259484 0.0394459814 -0.0404246971 0.0497982427 0.0524061657 0.00315544894 0.0564953573 0.126382247 -0.0345166884 0.0698444024 -0.0992106721 -0.0982451588 -0.107451998 -0.0527341142 0.123538248 0.024919359 -0.0131305484 -0.0894226953 -0.115056708 -0.0378506444 0.0621916279 -0.122512206 -0.0171847306 -0.0232702196 -0.103842773 -0.100498989 0.0790945068 0.0964731276 -0.0845638365 -0.0501652695 0.0643050224 -0.0128263319 0.0967387334 0.0712623745 -0.0615100749 0.0906366855 -0.0492125414 0.0326652825 0.126826495 -0.0253421534 -0.0376052111 -0.0545951948 0.0150278658 -0.120675765 0.0287395269 0.0592594892 0.0814532936 0.102257699 -0.00316688977 0.0232737195 -0.0206201728 -0.106220126 0.0464446917 0.0267721917 -0.0964898616 0.000449200714 0.058107879 -0.0119522484 0.119053274 -0.0157440305 -0.0229303446 -0.0538970679 0.0736326724 0.0061632446 0.084165886 0.0724296197 -0.144264609 0.0839208364 0.0139587959 -0.0285230391 0.0120399361 -0.11818894 -0.104979657 0.0247651879 0.0166639592 0.00610556966 -0.0571998879 -0.127792791 -0.0762260929 -0.0623565726 -0.0835529417 -0.0365828983 0.0787034184 0.0618906133 -0.0250335261 -0.0716648474 0.0972160697 0.0754901916 -0.0548195727 -0.0320330486 -0.0405605473 -0.0662167147 -0.0892478526 -0.0222243164 -0.0625544339 -0.0201768409 0.118638895 -0.0409025624 0.0845576376 -0.107005633 0.0590842962 0.034769319 0.00361982756 -0.0245255139 0.0547625758 -0.115161017 -0.0553721972 0.0778890774 -0.041641593 -0.0323726982 0.0310737193 -0.0700510889 0.015053235 0.160222828 0.10772761 0.037959829 -0.0358525217 -0.111562051 0.0241074972 -0.12357261 -0.12483231 -0.0926482156 -0.0271823723 -0.113680638 0.108206771 -0.116128989 0.00913964305 -0.0308017898 0.0424275286 0.122627713 -0.0544019043 -0.132333323 -0.046340026 0.139717042 -0.0182990991 0.068831861 0.0823961869 -0.043938648 -0.038030766 -0.0475954749 -0.0332984775 -0.058541622 -0.0173970181 0.00698842388 -0.00505919755 -0.0264520328 -0.075296253 -0.0592983365 0.0703864917 0.0332557037 -0.083748579 0.0185417943 0.0330124721 0.081373781 -0.0521305203 -0.0828755274 -0.0191443413 -0.0275393687 -0.104940005 -0.0373899266 -0.129006848 -0.0350307822 -0.0594692267 -0.086783044 0.0255106967 -0.112269998 -0.0306032244 0.0352828279 -0.0759110004 0.0436218269 0.0371037386 -0.0105048856 -0.0273989253 0.0408369228 -0.0481947623 -0.0490162857 0.0253327843 -0.0900856555 -0.10654892 0.0296242032 -0.0569313392 0.00557792746 0.0511694998 0.0238375813 -0.0494668148 0.00665589096 -0.0544650368 -0.0221331436 0.122494169 -0.00346396537 -0.03750135 -0.0583185181 -0.0697296709 -0.00882709585 -0.0139009692 -0.0581789836 0.0511611365 -0.0665547177 0.0625669286 0.0951525047 0.0109070055 -0.0445890985 0.0510924347 0.0549049266 0.076408051 0.0454293936 0.00382474251 -0.0298265554 0.00967820082 -0.0501072742 0.0321529955 0.0609736592 0.0241174586 -0.0162564274 -0.0707081556 0.144424051 -0.0120626008 0.0290390942 -0.0303947851 -0.0576401316 -0.0760076717 0.0552736968 0.0407108702 -0.0295744073 -0.097608991 0.0594237335 -0.0941474885 0.0194068011 -0.0656058192 -0.0635675117 0.0444326811 0.0656519532 -0.0508928746 -0.0729632974 0.0443925709 0.0649101809 -0.0789977983 0.0154495686 0.0545210727 0.0447072089 0.126279771 -0.0358525813 0.0973265097 0.0260252561 0.0660820976 0.103842169 0.0981507078 0.0491054058 -0.0901521593 0.00591290556 0.0812497959 0.00227644946 -0.0607588552 0.0970650539 -0.0110606086 0.0776812136 -0.0386007279 0.119623892 0.0970067903 0.0679384917 0.05710252 0.0563185252 0.120259158 -0.0155343693 0.11131572 0.0304788649 0.0158111248 -0.0598068163 0.0854219869 0.0570583344 -0.0570600703 0.0287855826 -0.0342741273 0.036824815 -0.0501024202 -0.0268743541 -0.0634012967 -0.0412885621 0.128790557 0.00406311126 0.042762816 0.00955149718 -0.0193585306 0.0519001707 -0.039887663 -0.0505587868 0.0825586244 0.159575224 -0.039045386 0.0544076897 -0.0779607669 0.0380125828 0.0408898331 -0.0760996863 0.0313064456 -0.0805607736 -0.0574796721 -0.0846826658 0.113500386 -0.0871631727 -0.117222093 0.0810274109 0.051653102 -0.0653802082 -0.00290928991 -0.0630526915 -0.119209491 0.0347142629 0.0174591336 -0.0608103834 -0.0927200988 -0.0334013142 -0.0835639536 0.11650601 0.0233004745 0.0682244599 -0.0430421382 0.028754117 -0.0900809765 0.012021089 0.0624547713 -0.105882496 0.0918491483 -0.0683888867 -0.0233582761 -0.0216962695 -0.0297207572 -0.0362710096 0.0270867273 0.0460449308 0.0642470419 0.0419084579 0.096854955 0.0901691094 0.0210975073 0.0876087993 -0.0793926194 -0.0800184235 0.0771968812 0.00599401817 0.0235112216 -0.124454454 -0.0479293279 0.0157109667 0.0773659572 0.00740274787 0.0359256268 0.0233147927 0.0827361718 0.0162823889 0.0475280918 0.0248643626 -0.0826426297 0.0601135641 0.119723722 -0.0729700252 -0.115519248 0.0721688643 0.105871052 -0.0132921757 0.08133322 -0.0257968605 -0.0480753519 0.0262065995 0.0138876187 0.0765962973 -0.0963439941 0.0358153284 -0.0679376945 -0.0568544529 -0.0514913723 -0.0568194948 -0.0500201248 0.040343143 -0.0777348354 0.0369281098 0.0772028044 0.00108942436 -0.00182866259 -0.0662001669 0.0198386908 0.045538079 0.0673875585 0.0710163489 0.0381334536 -0.0855338573 0.0286879074 0.151627332 -0.0448625647 0.0633926764 0.0938242897 0.0526457354 0.0111530349 0.097349681 0.0770600736 0.0608735308 0.0969019234 -0.0362519659 0.00380012137 -0.0779155269 0.00992168486 0.0114733698 -0.000200923663 -0.0576153658 0.083698988 -0.147824839 0.0438186601 0.0710815191 0.028712105 0.04132507 -0.0103248488 -0.000444060774 -0.0523407757 -0.0500133485 -0.0338968448 0.0263319649 -0.04361872 -0.0277711656 -0.0168558471 0.0467232168 0.0177872274 0.0654023588 0.0379033573 -0.0607685857 0.00964797754 -0.120628364 0.0780472904 -0.0843662843 0.030455105 0.118488185 0.0485005565 0.0704616383 0.100928433 0.0408400409 -0.0665229484 -0.00970364176 -0.0212464705 0.0161203556 0.147231802 -0.0107072778 -0.0776233077 -0.0784361213 -0.0594038591 0.0418931209 -0.000451631407 0.112979718 -0.0695450008 -0.0122077055 0.00301642111 0.0691217184 0.0310368631 0.0258781705 -0.0280865338 0.0467126593 0.00575158047 0.00231607817 -0.0486097038 -0.119848073 0.0345518552 -0.043744415 0.0102821859 -0.0540690646 0.138281018 -0.00140106888 -0.0396243855 -0.0496274233 -0.0120704891 -0.0222738367 -0.0162870642 0.014834349 0.00513116037 -0.0692124441 -0.0776496753 -0.120830476 0.0161017329 -0.0370132066 -0.0627025366 -0.108544096 -0.0967517719 -0.0203821119 0.0064773499 -0.13926363 -0.0478015207 -0.00240246905 0.0692687705 -0.0158551876 -0.0689087138 0.104410864 0.0010006387 0.0992425233 0.0829349607 0.0481005087 -0.0365891643 -0.150370061 0.0162225049 -0.0743764937 0.0161576103 -0.0280919597 0.0834880769 0.0755537376 -0.0628848672 0.0377435535 0.0462861024 0.0946150273 0.037716087 0.0241654012 -0.0828214586 0.0492958799 -0.0326795466 -0.0770705715 -0.140277237 -0.0159451012 -0.109213002 0.0240502506 0.0428965315 0.0193452798 0.0742191896 0.0848924294 -0.0308462307 0.0752010345 -0.0391890407 -0.00412439182 0.0942446142 0.00469598826 -0.0910378024 0.000205826393 -0.0885702595 -0.10040123 -0.0408986025 -0.0709256157 0.0822851807 -0.0281461775 -0.0398267582 0.0393168256 -0.112278141 -0.0479413383 -0.0573235005 -0.0405642055 0.122419089 0.0270299502 0.00841662008 -0.0121508595 -0.0198617335 -0.000879280851 0.00153166568 0.0554407202 0.00480276532 -0.0682227761 -0.0328659527 -0.0611312203 -0.142565057 -0.00181693793 0.0861305967 0.0628089532 -0.0740653574 -0.0160195958 0.0974677652 -0.101770975 0.0444948077 0.0841920972 -0.0720307156 -0.00300135929 -0.069788307 -0.0935182795 -0.0931349918 -0.10534066 0.00375205046 0.0325018056 0.0886859596 0.0366864353 -0.0217052139 0.0339041911 -0.0782768726 0.00892924238 -0.0385166742 0.0129920086 0.0826299712 -0.044218149 -0.0138300406 0.00623118551 0.0313761942 0.126124471 0.0703845546 0.0405096114 0.0779507384 0.056386482 0.0744233653 0.0530518629 0.0425919332 -0.0413660035 0.0031353673 0.0289649554 0.0164382402 -0.011526701 0.0496848971 0.119253859 -0.111141384 -0.036666058 -0.0288353041 -0.00853391178 -0.0851149112 -0.0335021652 -0.0633720756 -0.0867637545 0.0132676009 0.0459979139 0.0367925242 -0.0876119509 -0.0333126523 -0.0919199139 0.0108301183 0.117218599 0.107971512 0.0295869391 0.0267735161 -0.0267074816 0.0445350818 0.111544669 0.0392179638 0.0937596411 -0.00281381886 0.0201951191 -0.0501904786 0.0335953049 -0.0029760797 -0.0398072712 -0.0350824408 0.0377969109 -0.0179890636 0.0425337292 0.0354793109 0.0247561317 -0.0294101313 -0.0284981932 0.028481964 -0.0429043695 -0.053075958 -0.126678079 -0.085693188 0.0219913088 -0.103943169 -0.0727316067 -0.0124961985 -0.126258418 -0.0539888591 0.0462416373 0.0975957662 -0.00795640703 -0.021866478 -0.0801899433 0.0211574696 -0.00211753859 -0.0770760551 0.106369033 0.0529551283 0.00306034461 -0.153457433 0.0278290957 -0.0596790686 -0.04930925 0.072257936 -0.067070365 -0.0302931052 -0.115538754 0.138532385 0.0228340477 0.154491559 -0.0212814286 0.000630576746 -0.0118962303 0.0624279119 0.0366596803 -0.104068108 -0.0649591386 0.0323766321 0.0730391443 0.0661889538 -0.0391814969 0.0793058872 -0.0105679389 -0.0682836398 -0.0303012673 0.122320741 -0.00334193907 -0.107540131 0.0638230518 -0.119354151 0.108916059 0.0184885114 -0.0397466794 -0.074820742 0.0587450974 0.111003347 -0.0857884958 0.0496451035 -0.175095469 -0.0324501954 0.0114386939 -0.083123289 -0.0846996307 -0.0342796296 -0.0571025424 0.126033574 0.0537063144 0.0963928178 -0.0292254034 0.0303790402 0.0882642195 0.0210448559 0.0844297558 0.0784011334 -0.067198731 0.0598029867 -0.115000091 -0.0233332999 0.11517673 0.0587579273 0.0726984292 -0.0598884225 -0.0596332885 0.10184852 -0.00164783257 0.0644008815 -0.0344729498 0.00754436292 -0.133005932 0.0759031922 0.00654394785 -0.00963001978 -0.172101706 0.0105558978 0.176982358 -0.0497983247 -0.0037973139 -0.0655243993 0.0364305004 0.0520126633 0.016448427 -0.00652270019 0.068141371 0.0190387368 -0.0741908997 -0.0276863649 -0.0545798913 0.0041190316 0.0235041492 0.0122208726 -0.0720304623 0.0136137297 -0.0269483216 -tensor_10bias 50 -0.12787357 0.017543152 0.122975975 0.0730041191 0.0510178655 -0.00993559696 0.139933825 0.15092434 0.0684130192 -0.0333705768 -0.184260622 -0.13440612 0.109378524 0.111376524 -0.10483826 -0.0250708181 0.120549299 0.0411001481 0.183845177 0.135748357 -0.00771392835 -0.12025056 0.085442692 -0.0513125733 0.136845529 -0.0145230526 -0.0895486251 -0.0252410602 -0.00896273553 0.0933182612 -0.108676046 -0.104239464 0.170086652 -0.0341263078 0.0728005916 -0.0453254506 -0.100045033 -0.110129185 -0.00771265198 -0.119152002 0.1214706 0.101130307 0.0332861841 0.0142126186 -0.010599345 0.109234303 -0.0182705577 0.177162722 0.0691059828 -0.0739419758 -tensor_2bias 50 --0.0447338857 0.0537877791 0.0785957575 -0.0634338111 0.153481558 0.148676842 0.0265698414 -0.0261984505 -0.0751923025 -0.0352455713 0.0932889804 0.113871664 -0.0193461645 0.175267622 -0.0770687833 0.157511786 0.0196232703 -0.0737266392 0.0872744098 0.116388358 0.168398216 0.0425802097 -0.102230035 0.0693789497 -0.0855393335 0.126388997 0.0205914602 0.140580684 -0.00234525092 -0.0295791756 0.0197821874 0.0661892593 0.166472748 0.149337456 0.0513125136 0.00068877294 -0.0757507607 -0.0540507101 0.134943455 0.0256511811 -0.0943378955 -0.0261238459 0.0309584048 0.111188456 0.169084176 0.136096522 0.0985386074 0.0480017625 -0.0471420884 0.122215845 -tensor_6weight 2500 -0.0649253875 0.129901871 -0.0820776671 -0.0164463595 -0.0272229239 0.0591965616 -0.118314907 -0.037768431 0.0372078121 -0.105141595 -0.140254259 0.0649844706 -0.112917937 0.141195908 0.140458569 0.0553426445 0.0367731303 -0.0505450144 0.0507215112 0.114758804 0.115806922 -0.0424669459 0.0370975286 -0.14095898 -0.104349688 -0.007835567 -0.0608764365 -0.0330444127 -0.12756449 -0.104601666 -0.0191679522 0.00627362728 -0.0662557259 0.0937368721 -0.101459384 -0.0692796931 -0.0512177646 -0.126805127 0.0393478721 0.0119376034 -0.0574386194 0.100259379 -0.10315454 0.109866068 -0.02667135 0.130284503 -0.127174616 -0.0201597661 0.0414076746 -0.122587755 0.126039341 -0.115497321 -0.126209974 -0.00932627916 0.0310982913 0.0501976013 -0.0105512738 -0.117707536 -0.116891071 0.117860749 0.0559653193 0.0531298667 -0.0543823317 0.106951609 0.0151336193 -0.0444077402 -0.112000868 0.0114103854 0.0838644654 -0.012747705 -0.0791340023 -0.0889710411 -0.0655299723 -0.0225159228 -0.00320497155 -0.0662335902 -0.0993035883 0.137778953 0.105412766 -0.116872713 0.0578503758 0.0725949556 0.0382958353 -0.0512723327 -0.00722907484 0.0786679238 -0.116880074 -0.0138037503 -0.0500161424 -0.133497417 0.0958063304 0.0558829457 0.0326671302 -0.0238390192 0.0845869035 -0.0934950113 -0.0433793738 0.0942181498 -0.045510605 0.0947668105 -0.106258683 0.0446187519 -0.0900780708 -0.0834366232 0.191142887 -0.100739747 0.171907842 -0.0254000407 0.138836846 -0.0700232163 0.114825904 -0.143776864 -0.0321323685 0.0355321914 -0.178224027 0.119957708 -0.0752720386 0.127894193 0.164032444 0.065395847 -0.063121289 -0.0970638469 0.102740057 0.0505844206 0.0253012329 0.0821145922 0.180317059 0.136325151 -0.103746325 0.126737922 -0.0877246112 -0.0697940513 0.0607301034 0.0686804578 -0.0175086763 0.0285665393 0.147603065 -0.159169093 -0.058806546 0.101134196 -0.0185775906 -0.113093227 0.0278050229 -0.0363715962 0.123531096 0.105049185 0.0325903893 0.101475507 0.175050184 0.0439927392 -0.0129783954 -0.103368133 -0.094232142 -0.133218303 -0.10637027 -0.126878336 0.100644603 -0.0823836327 -0.0993345156 -0.0921484306 -0.00233977009 0.0756816864 -0.0497992188 0.044235874 -0.100462228 0.0119998753 -0.0844490379 -0.0331858918 -0.0446389243 0.042482052 -0.126429394 -0.105036467 0.0468023382 -0.0696351752 0.0628612116 0.0562251285 -0.0864542499 -0.0504873767 -0.057342425 0.107809477 0.103574097 0.0706402957 0.0782148615 -0.112125456 0.0768203884 0.0012682596 -0.124097727 0.114557318 -0.0111420928 0.0438492894 -0.0157870948 -0.129962921 0.115011618 0.0792783797 0.0613046065 -0.000343024731 -0.0795636103 -0.0708794519 -0.0101428293 0.0629758537 -0.0162976906 0.111654803 -0.134260848 0.00456416048 -0.129808471 -0.0437678993 -0.0731499866 -0.156290948 0.176469311 -0.134536281 -0.0936101675 0.094726339 0.129458129 -0.00281856535 -0.0142846275 0.00348282605 0.129408911 0.125073373 0.153636366 -0.0143775577 -0.013238579 -0.0172810107 0.0421338268 0.116808861 0.0514435619 0.13204819 0.0942413136 -0.012623366 -0.0874075145 -0.0010379689 -0.162753403 -0.0148045626 -0.0110199554 -0.0829107389 -0.0709493682 0.162264898 -0.0466960482 0.115680397 -0.0569904298 0.0977253392 -0.0407817513 0.163954467 -0.0335706919 0.145685494 0.122499764 -0.0530293435 0.160302415 0.00654218439 0.0903446525 -0.0116685461 0.0239315517 -0.0313074701 -0.102479123 0.0804489553 0.0174044427 0.0801673904 -0.0707507953 -0.0458744019 0.0368017294 -0.158817649 0.0533084273 0.0464035608 -0.0136327893 -0.026964413 -0.0722962692 -0.0277424678 0.193694353 -0.00919557363 -0.0336900316 -0.00418696925 -0.0529568717 -0.00187929883 -0.00698451232 -0.0436371192 0.0323710404 -0.019839149 -0.0511180982 -0.110972911 -0.0133787924 -0.00690555479 0.104938939 -0.038326323 0.0560517721 0.138403684 0.143514618 0.199766785 0.14532347 0.0941502005 0.0855569765 0.0256890338 0.0689958632 -0.0572427884 -0.00418164022 0.0580582805 0.150297597 -0.122072354 0.176015973 -0.120600596 0.119270205 0.106842689 0.108840823 -0.0772350207 0.128743961 -0.0015650976 0.0175431371 0.053713128 -0.117410287 0.0328807086 0.0287136007 -0.104569376 0.0721085593 0.0677165911 -0.0558042675 -0.0673747733 0.115988277 -0.122426286 0.0186466724 0.101494573 -0.029576974 -0.115950264 -0.0865741 0.0563799553 0.107808612 -0.0450687222 0.0710128173 -0.0514423363 0.0430348404 -0.0574421734 0.0800841525 0.0757694393 0.10702318 -0.0222116411 -0.0559151433 0.0379136428 -0.0136397472 -0.125272736 -0.12881507 0.0900285095 0.0889691934 0.121225074 0.0771746784 -0.0660418868 -0.044440113 -0.122758932 -0.109487474 -0.0582289658 -0.104467168 -0.00918032415 -0.0209672842 -0.0869374499 0.168161795 0.108111799 -0.0880761966 -0.0135405827 0.178589284 0.003923479 0.0852129236 0.161241695 0.00760242762 0.06472487 0.0908324644 -0.109867044 0.13713856 -0.0345446207 -0.144439176 0.0468028821 0.136207759 0.122578613 0.0340208001 -0.105203725 0.0250524748 0.106788099 -0.121437281 0.181704462 0.11812605 0.0816245601 -0.101409554 0.158797711 -0.0405994244 -0.106579058 0.0417435579 -0.0245459247 -0.00784720015 0.0369141363 -0.070102796 -0.0140520735 0.180028707 -0.0340496227 0.0966045856 -0.0815079585 -0.0375775248 -0.173401833 -0.0957172289 -0.189357907 0.0151246237 0.0324664675 -0.0768369883 -0.106799647 -0.0305638388 0.0201060958 -0.053941071 -0.0226951279 -0.0301792286 -0.0753694102 0.106556229 0.00628629327 0.0264616497 0.115733989 0.0310344063 -0.0524785519 0.0871863812 0.1207719 -0.0298178941 0.152269572 -0.13239485 -0.0819777101 0.0469505712 -0.0912657976 -0.111869723 -0.0653776079 0.10464593 -0.0256920718 0.12280155 -0.143135741 -0.00778760947 0.03016074 0.0972794741 -0.0641395524 -0.0162782986 0.0504767261 0.0761293843 -0.0471233875 -0.0866800919 0.0426621437 0.0164198168 0.111198299 -0.150085554 0.0248084236 -0.0389914848 -0.0365719572 -0.138500616 -0.0784377009 -0.107607454 0.0207631979 0.0907824636 -0.0914271027 0.0534422696 -0.112685621 0.0665683895 -0.0469377451 0.0247338824 -0.0177221745 0.118170217 0.113025144 0.0234410614 0.104385503 0.0654341355 -0.10872592 0.128927425 0.196715385 0.0276464783 -0.0738130882 -0.081564039 -0.00269559864 -0.126407489 0.015476441 -0.045586586 0.0332736522 -0.0798867643 0.135938272 -0.162508756 0.0983785167 -0.0764289424 -0.0560759567 0.0814144537 -0.031941954 -0.121607453 -0.0935366377 -0.0972638801 -0.0318852663 0.134761959 0.00468478957 0.0771510676 0.0787510574 0.164012611 -0.0312081948 -0.0129511952 0.0929201245 0.128727853 -0.00758869387 0.0151306689 0.0861001238 0.106875338 0.0643666014 0.153492779 0.0107787019 0.0601070002 -0.0477736481 -0.131303728 0.00165647722 -0.159763634 0.0611100607 -0.0269413907 0.0301383473 0.118319333 -0.114341162 -0.143750668 -0.106911905 -0.0885151848 0.160572648 -0.0470729731 0.0245884079 -0.0456172712 0.0757794902 0.0562509894 0.0297678653 0.0527246483 0.0166134071 -0.108542152 0.142919838 0.127158552 -0.0228688288 0.00676658237 -0.03869633 -0.0931294337 -0.00328914542 -0.0614178069 -0.0198070854 0.145518914 -0.0294807851 0.0692162439 0.15985842 0.0560066774 -0.0942831039 0.0402628109 -0.118215956 -0.116073422 0.0202833321 0.117826007 0.122413464 -0.0271829292 0.0389408059 0.0934228823 0.0398765206 -0.00495207263 0.0981794819 0.115069546 0.0594924539 0.0624140352 0.0753316805 0.0130726891 0.00351743586 -0.118038118 0.133946255 -0.0532785915 -0.111061007 -0.0136450082 0.0968498662 0.133393183 0.149615765 -0.126794592 -0.107227415 0.167891011 -0.0144322244 -0.181450546 0.0244579148 -0.0923274755 0.157411754 0.050326366 0.143469214 0.00917230081 -0.0694648325 -0.0583085977 0.100404061 -0.0703162327 -0.132603139 0.0277496353 0.182791844 0.0298265126 -0.14978756 -0.0095058633 0.177655354 -0.0389893278 -0.0960298106 0.055750493 -0.0944034085 0.175231501 -0.151938185 0.0563026294 -0.126313433 -0.137585253 -0.11282815 0.0335017443 -0.016390631 0.0258972906 0.149925053 -0.0161783621 0.132413134 -0.129700065 -0.0751069337 -0.0137014491 -0.126565307 -0.0802877396 -0.127848729 -0.0448123366 0.00571359694 -0.0442490689 -0.0026283646 -0.0133119607 -0.117010497 -0.032991223 -0.0752329901 -0.0423538461 0.0337411128 -0.101852775 0.102702036 -0.113081135 0.128210023 0.0527718291 0.0711361766 0.046200335 0.112589262 -0.0602141693 -0.124360792 -0.049823273 -0.140881091 0.116494343 -0.137485832 0.0550901145 -0.0324928425 -0.101916127 -0.0462415516 0.0865442455 -0.119312339 0.0382132456 -0.0243112519 0.101194464 -0.10621307 -0.0587359108 0.107364364 -0.0826650411 0.112274796 0.0253867805 0.0701454431 -0.043696586 -0.0748712718 -0.0725907981 0.0644025356 0.0884814113 0.0663292259 -0.129587308 -0.0319217071 0.0338242948 0.115189984 0.0245237201 0.0201187134 -0.0739658847 -0.0454444066 -0.0267900527 0.0743228644 -0.134670675 0.0116872936 0.123525247 0.00718687475 0.139177337 -0.0978305936 0.0739517361 -0.0291812122 0.0807204247 -0.140549108 0.00840865076 -0.0133223087 -0.0685992762 -0.0170855597 0.060691461 0.0238291025 -0.141307816 0.0849160701 0.0482466817 -0.0244439244 0.0211740434 0.0507029444 -0.069623448 -0.0391115323 -0.045335494 0.105534464 -0.0210918859 0.0410889536 -0.119236276 -0.0102088749 0.0296808928 -0.111803085 0.0251688212 -0.0522222742 -0.134250998 0.112514332 -0.0292918608 0.114655808 -0.115933761 -0.0447240621 -0.0562940501 0.115107387 -0.0417959876 -0.0358452164 0.128562316 0.123079613 0.0867616385 0.0504442304 0.085063085 -0.0750186294 -0.0415927172 0.0159885045 0.0309951119 0.0242125411 0.0228883941 0.128811404 -0.0658345073 0.0893866047 -0.0262501985 -0.0197901707 0.0398271419 -0.00843849033 0.0776178464 0.0806626081 0.168270662 0.015441413 0.0647286773 -0.0716274977 0.0758225247 0.114696413 0.142221808 0.103615619 0.0212591253 0.140274763 0.00738972286 0.159614474 0.11493472 -0.0833858475 0.0133725926 0.0502345115 0.138931051 -0.0143997408 -0.135814145 -0.0122304466 0.157529533 -0.150415257 -0.0632499009 -0.0106943063 -0.0938702598 0.163158879 0.13341108 0.106037788 0.1496768 0.177437797 0.087329708 0.137258947 -0.0137388939 0.0762795284 0.0370195433 -0.0747531578 -0.092746526 0.0398157351 0.0443542562 0.0983223766 -0.00542128552 0.0799729377 0.168658942 0.125600606 0.150951058 0.117274851 0.0738498569 -0.0982450694 -0.011585433 -0.00457595475 -0.0337975733 -0.0616223812 0.0883765817 0.146805629 0.0442404337 -0.101139419 -0.059554819 0.0444233194 0.0295815617 0.0203016624 0.0470338352 0.00290740328 0.0758937672 0.0288642086 -0.0832545534 0.0548138246 -0.00573976338 0.0907851085 0.0382896215 -0.137567922 -0.0848902464 -0.0355325341 0.0280306078 0.0849616677 -0.0109465634 -0.0933749229 -0.0489923954 0.131554142 -0.0105491728 -0.0911042765 0.0896382779 0.107579067 -0.029194802 -0.118035324 -0.0691957697 0.0260686129 -0.117240146 0.0314605832 0.10417594 0.0173794031 -0.10924159 0.00410650671 0.12347053 -0.021081768 -0.0583038926 -0.076368995 -0.0559989214 -0.12317574 0.126255885 0.124372408 -0.139102474 -0.127438575 -0.0832829475 -0.0507567972 -0.0409637913 0.0168262422 -0.109306589 0.0518526733 0.0749200583 0.00206166506 0.0649633855 -0.0586098135 -0.00433701277 -0.140350699 0.0938716233 -0.089609772 -0.0619740263 -0.0610454977 0.0776864439 -0.0440377593 -0.0523070544 0.136881992 0.111145541 0.0935858637 -0.130629882 0.0228392035 0.0660683215 0.0564527586 -0.0145275388 -0.056871783 0.140726104 0.0382112935 0.0346260034 -0.0959678069 0.145820111 -0.0788428187 0.130337492 0.106305443 0.186199993 -0.0118903993 0.114453636 0.0458821617 -0.0491925776 0.0321561061 0.0618102029 -0.16807498 0.146204278 -0.0881870687 -0.169820085 0.0581149757 -0.0209829025 0.000727858045 0.0668258667 0.0809662268 0.0593013167 -0.154004052 -0.0266895164 0.131010324 0.0933532268 0.136942223 0.0960304737 0.127566546 0.128763124 -0.129231334 0.0490520634 0.0179415178 0.035261184 -0.179191247 0.134654313 -0.191801935 -0.076531738 0.0557464883 -0.0514609776 0.030970484 -0.0304086115 -0.058471296 -0.107087307 -0.0737263411 0.0960866362 0.0616026595 0.00334342872 0.0160897672 0.115088649 -0.129959434 -0.0453715175 0.106996052 0.0485980026 -0.0609982088 0.0606777444 0.0854022726 -0.0109910937 0.0280183572 -0.106572933 -0.00772281922 -0.0217049569 0.142191678 0.078674458 0.068385914 -0.0397756584 -0.0448649749 0.0790037736 -0.0683723092 -0.134903669 0.0462144762 -0.0944194347 0.14962922 0.0367264152 -0.075939849 0.151242435 -0.0653834939 0.0671074167 0.0147493538 0.13696453 -0.0275645163 -0.0429917164 -0.0180217978 0.0253212303 -0.0417146906 0.0207910389 -0.0281672105 0.130631521 -0.109785154 0.0733767524 -0.109265648 -0.0798736662 0.0224359911 0.208666578 -0.0645421147 0.0355885737 -0.073725976 -0.0510966443 -0.0937370732 0.173772439 0.0993817151 0.00306298863 -0.195579961 0.052579727 0.127555981 0.0955225378 0.0206778944 0.0144746751 0.130441144 0.0313935652 0.00892100483 0.080054298 -0.128953949 0.0751526803 -0.0949046835 -0.153239205 -0.0463347062 0.016422227 0.0674657375 -0.0140186697 0.064172186 0.202651188 -0.165430844 0.0656619221 -0.0430362485 -0.197136238 -0.0389609933 -0.12942259 0.0315187573 0.0998861641 0.0155031411 0.0358207226 0.168374822 0.0940297097 0.0293072574 -0.0722433701 -0.0128252115 -0.0433789827 0.059830334 0.167342469 0.05525738 0.00795800146 0.177529857 0.0210485943 0.047749389 -0.0363491178 0.168270051 -0.100355022 0.0292338673 0.175140589 -0.127292693 0.162490025 0.0100361016 0.154595226 0.0616088361 0.136025682 -0.00410753815 0.0369135141 -0.143811956 0.0958657786 0.144568652 -0.00905292854 0.130941108 0.0106995432 0.0483372957 -0.0231650397 0.036639642 -0.0617889985 0.0236214604 0.0238810871 -0.0795606971 -0.110024542 0.174338296 -0.0911057219 0.0656976923 0.0863363743 0.0683924854 0.134093165 0.145337448 0.116067648 -0.0847840905 -0.0767683238 -0.0150442421 -0.0229843333 0.0828322992 0.0535647161 0.0319587328 0.068530798 -0.0646711886 0.197244614 0.0427581631 0.0388010144 0.162918717 0.136511028 0.0195802618 -0.0968718901 0.167434052 -0.0834559351 0.0702522248 0.163521126 0.110413931 0.161692828 -0.0881290808 0.148896158 -0.128931329 0.0255813021 -0.0889823139 0.157743439 -0.0732447058 -0.0442789234 0.0533142164 0.133719116 -0.116840921 0.0800347999 0.189877659 -0.135516554 -0.0575624406 -0.0097662257 0.119637571 -0.074548699 -0.0714714378 0.126038283 0.1195461 -0.09768942 0.0303867999 -0.123445861 -0.0530549176 0.107548378 0.106309928 -0.0313007124 0.183906198 0.0751518011 -0.0633003265 -0.0617225319 -0.0701497793 0.0320757441 -0.0290392973 -0.0253149793 -0.0470200963 -0.0478345975 -0.120073162 0.201239541 0.142304704 0.0925019607 0.148831651 -0.167674646 0.123002127 0.106455177 0.0328564122 0.18806994 -0.116831504 -0.00451909332 0.108785309 0.157465339 -0.00134878256 0.168126434 0.0580710471 0.0837541148 -0.0657100528 0.158608526 -0.0463683493 0.0946896747 -0.104266793 0.0244341511 -0.0714015439 -0.0990499556 -0.0860033333 0.145062909 -0.0333383344 0.142448917 -0.00225598761 -0.0131941633 -0.149845496 0.00207266607 0.0925255567 -0.182044104 0.00203921902 0.178830191 0.135419115 0.127062351 0.119150542 0.120787822 0.0427289233 -0.102054872 0.0916266441 -0.0503866151 -0.0314327143 0.113203667 -0.14366518 0.12766479 0.0501433946 -0.0380674638 0.132927895 0.147104084 0.129884318 0.0988519117 0.0387863517 0.0734434873 0.0411540642 -0.027659202 -0.13669847 0.083362028 -0.0450929962 0.145056829 0.0885054395 -0.0165824685 -0.0861969367 -0.0862592608 -0.160450995 0.0212117564 -0.104402281 0.143013418 -0.0506607853 0.121090904 0.00905802753 0.111442901 -0.143552661 0.0210310649 0.0612097643 0.00359729188 0.0227075666 -0.0815051943 0.155096367 -0.0119450046 -0.0233580228 -0.0038536794 -0.0880303755 0.164003551 0.1600402 -0.016360864 -0.0836358368 0.0851199031 0.0105815725 -0.121088877 0.161806434 -0.0379569791 0.0800513998 -0.0538180247 0.153429583 -0.0247538723 -0.00772412121 0.120341845 0.0548929647 0.114107296 0.0127800889 -0.0710391551 0.134522244 -0.0879234001 -0.0632987469 -0.0650375783 0.0809550807 0.137545347 0.0396288 0.186278701 0.110111617 0.143173963 -0.176478416 0.160997689 0.0144827925 0.0872319192 -0.0407468043 0.114270978 0.0436847992 0.0258595552 -0.0514572188 -0.0362136886 0.130494818 0.126685143 -0.0894779786 0.117681846 0.173565581 0.174748227 -0.15385066 0.149053425 0.160555586 0.0397729799 0.156005859 0.110312633 0.104156397 0.161141351 -0.0919019654 0.015511048 0.0107473964 -0.0837544352 -0.0176889747 -0.10078945 -0.0619383864 0.160746276 -0.087044619 -0.0232165866 -0.0215495545 0.0582484603 0.0864141285 0.175924376 0.0442700647 -0.0247930624 0.0347629003 -0.161288068 -0.0290379301 0.170908287 -0.117735907 0.110525407 -0.115487754 -0.000686930609 0.130876914 -0.0291782003 -0.192795917 0.127867773 0.126315489 -0.07262256 -0.098871097 0.0209841039 -0.19527556 0.116880774 -0.02486692 -0.00237640645 0.143660888 -0.016016813 -0.0697216764 0.175688595 0.0232482143 0.0199046992 -0.103963897 -0.0378533229 0.0388961881 0.00533542689 0.0628525913 0.159435913 -0.0747304037 0.0978682712 0.164278746 0.077385895 0.109259471 -0.0799139515 -0.0421864092 -0.0443351157 -0.133975893 0.0834283531 0.093928501 0.00520775095 -0.0434011891 -0.0435828492 0.138147533 0.106794529 0.093232654 -0.077764377 0.16267027 0.051492583 -0.0966648981 -0.0458262265 -0.0408286341 0.0238162875 -0.00872587226 0.153415054 -0.0966666192 -0.0194769856 0.151141167 -0.132202849 0.17568706 0.0875745118 -0.00695692096 0.0846608803 0.0842222869 -0.00846964866 -0.133651823 0.0813971162 0.0544089861 0.101662867 -0.166373864 -0.112454981 0.137616843 0.140390456 0.0915882215 0.10989771 -0.0496877804 0.154562473 0.0789823458 0.0279520545 -0.0192710813 0.025512537 -0.00114545715 0.0528355576 -0.0804974213 0.130488142 -0.0450717099 0.00189470535 -0.126931518 0.00184863445 0.0691755414 0.0959887952 -0.00365662854 -0.0239975173 -0.000226317745 0.162838191 0.110088706 0.103135742 -0.0143095907 0.0685937479 0.039006602 0.181053951 0.0662889108 0.142534971 -0.0225376673 -0.0523421951 -0.0925690904 -0.00610838691 -0.0569295287 -0.0691444948 -0.0351942256 -0.0200236402 0.0384809263 -0.00329685421 -0.15174298 0.1632265 -0.191212401 -0.169024199 -0.093971774 0.115878142 0.0936368257 -0.0726782456 -0.0567203537 0.127668455 0.0460995883 -0.0191945117 0.18582131 -0.171271384 0.0437021852 0.062035732 0.0159470849 0.0150196124 0.00918887649 -0.0672063157 0.0613921694 0.0558371395 -0.172685817 0.0529843457 -0.179647043 -0.00943551958 -0.0415023826 -0.0244376082 -0.0472054332 0.153094694 0.143580258 0.0942730904 0.156098858 -0.00754955551 0.0512687974 0.138893977 0.0646209419 0.00226254459 0.133554146 0.0259827524 0.110805348 0.0725759491 -0.131094366 0.12708883 0.0314303264 -0.0524304323 -0.032248389 0.163754046 0.0906126276 0.00314503536 0.103355683 -0.022527555 -0.1250837 -0.143783137 0.0596455783 0.0511251315 -0.0954806134 0.17346862 -0.00509193866 -0.0772540048 -0.0803210288 0.173364595 0.167615995 -0.129515707 0.0145245409 0.0466810837 0.0946052521 -0.0887519196 -0.0918630585 0.154023126 0.182059482 0.122924969 -0.0969166085 0.0428368933 -0.0473706648 0.0871873423 0.0173784196 -0.0468124636 0.130918413 0.115169801 0.106101029 0.0267140083 0.171541661 0.117503718 0.0674298778 0.0793930814 -0.0995452777 0.0986198336 -0.0477845483 -0.0891349018 -0.110497288 0.149275228 0.0541292913 -0.0509323142 0.00657417579 -0.00849667098 0.0782996938 -0.000425429258 0.0927700475 0.0596327335 -0.0792194828 0.048249729 -0.125496924 -0.119564533 0.0140337572 0.154170945 -0.175600752 0.0509903021 0.0491141193 0.151463166 0.0498116091 -0.0577821173 0.0124854716 0.0519152619 0.000966675114 -0.0199240129 -0.0589309931 0.000340196391 -0.0851683021 0.0118466569 0.109990321 -0.0261993259 -0.0374022834 -0.0214411858 -0.077557683 0.0687204972 0.0663195103 -0.0442392081 0.0338341743 0.13567619 0.180690661 0.19239752 0.107011527 -0.0798124969 0.0309492871 0.0260094907 -0.125474811 0.0975558758 -0.171736181 0.121255443 -0.0812420845 0.174648881 0.0337508172 -0.0655879006 0.168462068 -0.123068273 -0.14526248 0.1509289 0.149049625 0.0172713008 -0.0775876939 0.125850379 0.0576170236 0.0959700122 -0.0350637622 -0.0413426161 0.198388338 0.06012512 -0.18112573 0.0456633084 0.123411685 -0.135381892 0.0592928678 0.0492700674 0.192084178 0.00668479549 0.00347893289 -0.0798124969 0.160300285 -0.0158643834 -0.097056821 -0.00595153868 0.193585619 0.129847378 -0.0445784479 0.154722676 0.0128285876 0.114035919 0.0366068296 0.0581881292 0.0526427999 -0.0453962088 -0.0249866024 0.147942722 0.048362948 0.0718180016 -0.129099786 0.0695572644 -0.040164955 0.151449472 -0.107218184 0.0813755468 0.0611639321 0.120362371 0.00170552568 0.0150107937 0.0923141465 0.179166928 -0.0595131889 0.0748501047 0.024664795 -0.072850123 0.0498956218 -0.118837982 0.11913538 0.1241147 -0.0298356991 -0.0732461885 0.137327462 0.150715679 -0.144629672 0.0296867546 0.0185879748 -0.158391654 -0.0696423948 -0.0815559775 0.120456815 0.174756512 -0.0714245588 0.100912079 0.109141059 -0.0181489885 -0.189933077 0.0589498132 -0.146864727 0.0246144049 -0.0326956324 0.0814622864 0.044614289 0.0344069004 -0.0722796917 0.0347998254 0.00988415256 0.074375473 0.0236355383 0.186613545 -0.0229948368 -0.0627373829 0.058446534 0.0801035017 -0.0250811949 0.0163063705 -0.0360587984 -0.0412077829 -0.131415576 -0.14121896 0.183651194 0.0538982339 -0.0825245678 0.0530949496 0.129799366 0.077988103 -0.163070917 0.131275401 0.115696557 0.0255096387 -0.0695977584 0.149488509 0.110933349 0.0595859699 -0.105136663 0.139630318 0.13104403 0.140138745 -0.101875864 0.0968326181 -0.0490331948 0.0320329145 0.0932519361 0.111740142 -0.0153519753 -0.0669102296 0.0104083447 -0.0649985299 -0.154267743 -0.0946161672 0.139226034 0.107407138 -0.0765753686 -0.0474209748 -0.111844584 -0.0410924852 0.00278180838 0.108596429 0.014437899 -0.120850071 0.101168439 0.0475970656 0.110533401 -0.0760123357 -0.0803952068 -0.0215338543 0.109282747 0.0477782488 0.0887209475 -0.0328624696 -0.0277395248 -0.131564692 0.0674616843 -0.0144642591 0.13782452 -0.0827166885 -0.0459428355 -0.0465939641 -0.0978194177 0.137472615 -0.0644845441 -0.093579635 -0.079621926 0.0540327132 -0.0426073149 -0.0682768524 0.140229478 -0.0689926222 -0.116822943 -0.0883634388 -0.0420724526 0.0797011107 0.134867147 -0.0124301612 -0.0311987475 0.076223284 0.0785176903 -0.0510006249 -0.08932513 0.0967391878 -0.136143774 0.124550089 -0.119794972 -0.106707312 0.0434878916 -0.000768460974 0.083400093 0.123351663 0.154955849 0.0239652898 -0.00470558135 0.0155227007 -0.155885831 -0.0280565098 0.128090873 -0.0347218178 0.0469225496 0.066305764 -0.0798357874 0.0677081048 -0.153243482 0.0412665345 0.15851365 0.0430604853 -0.0530885011 0.131436363 -0.0623488314 0.0265644994 -0.130693406 -0.0925032496 0.167786196 -0.00228108512 -0.051090654 -0.129197508 0.125834614 -0.0826043189 0.0495859832 0.13765806 0.140279785 -0.100200407 0.078553237 0.102651939 0.0530582368 -0.105640791 -0.0712560862 0.0563652664 0.0500995256 0.110330448 0.0879531652 0.0794132054 0.0128588937 0.139566243 0.00905480981 0.0769669786 0.0616210736 0.0383987278 0.024789568 -0.0215452202 -0.0754719898 0.103158571 0.021371033 0.157727793 -0.168305516 0.041431915 -0.205217093 0.0685112029 0.11518427 0.0901029781 0.0836623907 -0.00306673371 -0.078299813 0.0937599093 0.0358634107 0.150480777 0.017379215 0.0400344506 -0.0467984006 -0.0435465574 -0.0746275187 -0.12713474 0.110726796 0.163420781 -0.100556083 -0.00550368195 -0.10327252 0.044071883 0.0337789692 -0.0129005229 -0.0913272351 0.132832885 0.147079349 0.100901216 0.134497017 -0.0322105363 0.133053601 -0.0325982273 0.141311869 0.0040314584 0.151371911 0.181470856 0.0484154783 0.164058596 -0.0128529146 0.0409421511 0.159602627 0.101342879 0.149882555 -0.0492368788 0.172365248 0.124329507 0.0683217645 0.0930551067 -0.0814763457 0.147788212 0.00853961147 0.0389146842 -0.000336691737 0.163021743 -0.084802106 0.0986582115 -0.0116979126 0.0385086611 -0.0496010855 0.0737678558 0.103331998 0.161403298 0.0173213035 -0.103028946 -0.0950937942 -0.0377868973 -0.0620894209 0.13404268 0.0146548431 -0.0653266087 -0.0033960822 -0.112761199 0.0226024743 -0.177061707 0.109000698 0.045506943 0.101955965 0.0158496425 -0.0690437183 0.104365595 0.0359109081 -0.122470014 -0.0659879521 0.0467208475 -0.0753396451 0.0523877777 -0.0585377291 0.100402929 0.119433776 0.0242477674 0.0617414936 0.182905495 0.157282576 0.0866737887 0.107341088 0.114345349 0.00848616753 0.0763099417 0.0206906293 0.0617443733 0.0259690173 0.107850946 -0.111641936 0.133501753 -0.169169813 -0.0887352601 0.089083977 0.156513289 0.0230403095 0.000902002619 0.0383367911 -0.0300379787 -0.146975219 -0.00701804645 0.131880164 -0.0454387777 0.0733794197 0.173866943 0.0410080142 0.19769071 0.0897455812 0.0198194478 0.00869395584 -0.0264868997 0.0861539766 0.123009734 -0.0185853429 0.16686818 -0.0672833547 0.0305484533 0.132848471 -0.127947524 -0.1613774 0.0643686131 0.0070268726 0.0036489605 -0.189245149 -0.0304792393 0.113094799 0.130098417 0.118080013 0.127857327 0.0940245837 -0.165752977 -0.0374614373 0.109492496 0.0428666584 0.170740604 -0.115685873 0.0148922838 -0.116838083 0.111455843 -0.0632996783 0.0108929574 0.0726874396 0.0742699429 -0.0629896522 0.113437019 0.199758425 0.0475728512 0.137889087 0.19803226 0.0400452688 0.0794214979 -0.108014926 -0.00188282889 0.111494496 0.0771949738 -0.116306648 0.0865728483 0.0771485493 0.0915202647 -0.0908453912 0.0911061615 -0.0472535603 0.136873767 -0.037476372 0.129080757 0.173227653 0.176956236 -0.117876649 0.0886662453 -0.0194631983 0.140326738 0.0929994658 -0.0285486728 -0.123725995 0.0545314588 -0.132062644 0.196129248 -0.0776121169 -0.0292998273 -0.0817124322 -0.123064265 0.0644138977 -0.0409719124 0.0910102725 0.0774317682 0.0588561557 -0.0303226635 -0.114509314 0.00717926025 -0.0146975368 -0.0139649464 -0.0769111067 -0.0884687155 -0.0844886228 -0.0546910986 0.0992946401 -0.0306005422 -0.0368665494 0.0252984539 0.0552819744 -0.0180559643 -0.0461472273 -0.059688963 0.0529744141 0.105257906 0.135227516 -0.136654019 0.0433159433 0.0750075579 -0.143905401 0.0697793365 0.0171793997 -0.0880545825 0.0440685079 -0.135759518 -0.0708841234 -0.137341917 0.0642284378 -0.0825591236 0.0998160243 0.104954824 0.0703029931 -0.00554473838 0.0652662367 -0.137622833 -0.0849017501 0.0079975808 0.0469577163 0.0332614519 -0.0239423085 0.0571367703 0.125478789 -0.0188843291 0.0104151899 0.0507268719 0.0427310057 0.182245687 -0.0464136638 -0.0774840489 0.0790423155 0.0158217624 0.174919963 -0.167722598 0.00393518014 -0.141249925 0.0320646316 -0.0712961331 0.18195422 -0.101946741 0.205521435 0.0143015096 0.190244779 0.0565855652 0.143080652 -0.0879745483 0.0268129539 0.0264821127 -0.0976307765 -0.0719135925 -0.0931720287 0.0752973855 -0.0940701365 0.0943753496 -0.00160595321 0.00526125729 -0.0494134016 -0.0277267974 0.114400044 0.0121099204 0.0469762683 -0.0178804994 0.175322458 -0.0936195925 -0.0206507854 0.0129827568 -0.127164483 -0.0533081368 0.0902868807 -0.089850314 0.0812181607 -0.0252427552 -0.0260248482 -0.0263420995 -0.128954813 -0.144561514 -0.0969642028 0.0840708092 0.105219595 -0.0315751806 -0.133927286 -0.0635263324 0.0815265328 -0.103957005 -0.0656396598 -0.0624658093 0.027983008 0.0192227215 -0.0915314779 -0.0996872336 0.0151820509 0.00491440995 -0.0790896341 -0.148336604 0.106279097 0.057419382 0.034870699 0.100479744 0.00237061502 0.0768525749 -0.12644136 -0.125458911 0.112800233 0.00162200222 0.0578222498 -0.056215629 -0.0922449976 -0.158906817 -0.0518889986 -0.100536995 -0.0453334972 0.00646515191 0.0148057342 0.0331344642 -0.00636346964 0.0370892994 0.00641168654 -0.0307880603 -0.0186160952 0.0293306652 0.0952301919 -0.12958698 0.117998272 -0.0704888254 0.00443183212 0.147841737 -0.0992462039 0.0764997005 -0.0257688798 -0.0460406169 0.0839670599 0.120056614 -0.0614700243 0.113699906 0.0346624181 0.180427715 -0.0145217599 0.168693572 -0.00197043363 0.191886678 -0.0972156301 0.0206416119 0.0345100351 0.0903015509 -0.100287922 0.0303347614 0.136919394 -0.0126191778 -0.115950003 -0.0293597691 0.0265962426 0.00261192676 0.0278086904 0.158691257 0.0234635379 0.117342651 0.0816714615 0.0948666632 -0.0849409848 -0.114143133 0.0362917073 0.070062004 -0.0524370112 0.167162567 0.104840927 0.124661915 -0.138633773 0.19061929 0.0486695245 -0.00107917748 -0.0845123231 0.046763584 0.0243339688 0.0911204591 -0.113943458 0.00347187044 0.0777205974 0.095806241 0.0292435624 0.127136692 -0.0182037577 -0.0450141095 -0.0123331165 -0.0598197915 0.152919352 -0.131715685 0.164068297 -0.0793498456 0.00121658249 -0.0503176786 -0.0856561065 0.0431076214 0.0459455065 0.167714477 -0.00190150819 0.0480769761 0.0142310113 0.0774440318 0.0504581034 0.197599128 0.172974482 0.0960050672 -0.0724410191 -0.0655787885 0.0426691361 -0.0474077053 0.0671926141 -0.0111915339 -0.0694714338 -0.0728770122 -0.0334699675 0.0879241973 0.0191930141 -0.0492004342 0.170004874 -0.136069939 -0.0839288905 0.121699564 -0.0373032577 -0.0790554881 0.0212189052 -0.0723486841 -0.0706750974 0.164014727 -0.1265852 0.180671826 -0.0538335219 0.135076165 -0.082566984 0.00627529481 0.0355592817 0.146791458 0.0428247713 -0.0218269154 0.113299571 0.15928854 0.109753877 -0.0433866642 -0.0531712547 0.121344112 -0.0599708892 -0.140954524 -0.0652005821 0.107553594 -0.0420940556 0.0391891636 -0.0892334729 0.0449264199 0.038767308 -0.00515921181 0.00227128062 -0.0260546599 0.145359293 0.0675093085 0.165128261 0.107131146 0.145455942 0.150093794 0.182555065 0.0683342069 -0.0751166418 0.00099511235 0.136942998 -0.0637786239 -0.118969493 0.0861738697 0.121482879 -0.0593939386 -0.0681066886 0.137257427 0.142178074 0.110687025 0.104999736 0.00519723399 -0.0884702951 0.0194963887 0.146859735 -0.00592712127 0.0192816481 -0.0416031592 0.10512694 0.0102964779 -0.0616582707 0.13753584 -0.00928659178 -0.0823482201 0.114266947 0.0528262816 -0.0983823165 0.0188455041 0.0611194335 0.100222267 0.00196855632 0.0390211269 -0.139956653 -0.0327276476 0.156153634 0.0125370612 0.0344246514 -0.0589949451 0.0921387449 -0.144850284 0.0212448426 0.144581348 0.0431137607 -0.0885965675 0.0854236633 0.00550921075 0.172450885 0.123434886 -0.185906976 0.132565111 -0.181601852 -0.10362874 -0.184760764 0.130670205 -0.0472870953 -0.10729944 0.132553771 0.0137786418 0.0876799598 -0.0260619633 -0.0263571106 0.123666577 0.123572513 0.00393577246 -0.0911321938 0.149641573 0.0127057144 0.040043395 0.0847804174 -0.0459438674 -0.177438155 0.00510600302 -0.00431553461 0.138086572 0.116237916 0.168006837 -0.0157650076 -0.00970370602 0.0802516192 -0.0514502637 0.132704586 -0.0417737029 -0.0193822831 0.166242853 0.0419458486 0.15848121 -0.00174845546 -0.0805547163 0.00697086425 -0.171336144 0.0593196638 0.0595933609 0.110469177 -0.0882590115 0.050680656 0.0118347788 -0.0319386683 -0.0632662848 -0.0177531485 0.0838051289 0.0470289141 0.178859159 -0.0346439704 0.0504389554 -0.0520837195 0.00219774805 -0.0491009764 0.0503517203 0.107992731 0.0384831354 0.0872439444 0.167825118 0.0623564459 0.0223074984 0.0821516067 -0.00865145214 -0.0457197949 0.0970128179 0.0120575717 -0.0556218661 -0.116809532 0.074401699 0.0880089849 -0.0123710101 0.007505944 0.135203391 -0.146449044 0.0852448419 -0.132378265 -0.109845184 0.149791658 0.015171879 -0.158416107 -0.0637820587 0.16275458 -0.0364229716 -0.143408865 0.127545208 -0.0622910671 -0.139478207 0.0405872539 0.0932571068 0.0956263393 -0.00292709633 -0.100080743 0.137279779 -0.0495060496 -0.0749291778 -0.0744291395 -0.0862122774 0.0235699266 0.109829761 0.0802345648 -0.123428002 -0.135655686 0.115854591 0.186653689 0.104481116 -0.0934653729 -0.107345767 -0.0480583683 -0.112480521 -0.0674405769 0.0481690913 0.0844945163 0.102531567 -0.132132486 0.137842521 0.00775253773 -0.0610849336 -0.032591112 -0.0524423793 -0.0668133944 -0.113737375 -0.000165238976 0.114906386 -0.1328713 -0.0835750252 -0.088781476 0.018294096 -0.0263542235 -0.0792298913 0.0685598254 0.0419423133 -0.0260287449 0.13109158 -0.138066247 -0.0322780311 -0.0882859379 0.0807678401 0.0512416959 -0.123070188 -0.00298701227 -0.0796232373 -0.104369447 -0.117494076 0.00122408569 0.0332989395 0.0664115399 -0.0739870965 0.0106086135 0.109527692 -0.0934588537 -0.0895289928 0.0728636533 -0.0278315544 0.0639105886 0.0930453986 0.0494588055 0.017094126 0.112311125 -0.00386948418 0.0680094063 0.0254231635 0.0913507342 0.16692546 -0.0122418981 0.108312286 0.0950310752 0.101287387 -0.105884947 0.030036103 0.00558372587 0.109446019 -0.0987028182 -0.0895694122 0.118367992 0.0662995502 0.114169754 0.0966514125 -0.0286930036 -0.0851531997 0.128677562 0.124861382 -0.100621521 -0.128018498 0.0673300773 -0.0310823116 -0.0784357563 0.0379403606 -0.0306251384 0.0655758083 0.0960387737 -0.152080312 0.136492133 -0.101761207 -0.0275989529 0.0933943838 0.0766497627 -0.0804210976 0.143909901 0.143697292 -0.0849372372 -0.10959392 -0.0742666796 0.125293195 -0.0966164172 -tensor_14weight 2500 --0.0543760806 0.0856281444 0.0533403084 0.0177523084 -0.0268334541 -0.0549559146 0.159062862 0.172800139 0.109722741 0.0875528008 0.0125674438 -0.0810011849 -0.068577148 0.170207128 -0.135173365 -0.0806247443 -0.0548967347 -0.0452914089 0.0365853943 0.129278928 -0.0377073251 -0.17943646 -0.00266921567 0.0811229944 0.0154373068 -0.0359650813 -0.0855926052 0.127574399 -0.1265679 0.04885903 0.0561187416 -0.112507693 -0.139889583 0.170207158 -0.097494632 -0.0187973343 -0.0904997438 -0.0484883524 0.031168703 -0.055549074 -0.0741278306 -0.002624318 -0.117438287 -0.0157258548 -0.0880523771 0.114648446 0.0272049736 0.103814438 -0.0217095967 -0.140518233 -0.0760676265 -0.110887714 -0.0115829725 -0.00750160404 0.0959720686 0.0384376198 0.061359182 0.0955482125 0.101260468 -0.0115174651 -0.013766964 0.0398462117 0.166129872 -0.0850986466 0.140506133 -0.103672192 -0.154903129 0.0968019962 0.066429466 0.0431276001 0.147400737 -0.00412948243 -0.0342022404 -0.0535201877 -5.71517012e-05 0.0244176984 0.0832642242 0.176724657 -0.0719986036 0.172275752 -0.114797458 0.0914949924 0.0334078744 0.0464251973 -0.00394226797 -0.0035392812 -0.0278604105 0.0514154881 -0.0311339442 0.021156881 -0.0213947129 -0.0683914274 0.0251719803 0.0944593325 0.12849097 -0.049127765 -0.0469818637 -0.0983457267 0.13893728 0.0303975027 -0.0299507454 -0.0138533115 0.139151528 -0.135961041 -0.0921831578 0.0593009293 -0.0144180804 -0.0136186779 -0.0715967119 0.0790341347 0.00953520834 -0.0408776402 0.101040825 0.00309920311 0.0447804034 0.0982600003 -0.0721947402 -0.118167073 0.0333673507 -0.0950507745 0.10244967 0.08306925 0.0455361456 -0.122597888 -0.0647362471 0.00561864674 -0.136176527 0.0647586584 -0.122481212 -0.0205618665 -0.094566375 0.0131596476 -0.117649406 -0.110489279 -0.0717473105 0.103288978 0.0714375228 -0.0784455761 -0.105901703 0.0811899006 -0.131345108 -0.0233812556 0.102898851 -0.00886622071 0.0682659149 0.129993364 -0.0971994996 -0.0193270147 0.00360363722 0.121052161 0.00784411095 -0.123100765 -0.0609981082 -0.135147735 0.0461434908 0.117215686 -0.0296066701 -0.0148467962 0.054072503 -0.118035108 -0.13138777 0.0103239622 0.0106298085 0.00161406794 0.121524885 0.106862329 -0.0696737245 0.122207746 -0.129250824 0.0716361329 0.117990665 0.0917533413 0.0275282189 -0.124964394 0.123115174 0.0490060188 -0.0750153661 -0.0502910502 -0.0452317111 0.101086549 -0.101995051 -0.112885557 -0.0476158895 -0.0509889536 0.0219939649 0.00387603301 -0.0764786229 -0.0421580113 -0.0788122267 0.084515363 0.0346965827 -0.01090011 0.0382516384 -0.00645032525 0.129111394 -0.0737728179 -0.00789030734 0.11321111 0.00651154015 0.000951979193 -0.0776003599 0.0253983736 -0.0880478546 0.111969553 0.0747581348 -0.0281555094 0.0477269702 -0.0837645158 -0.00260412018 0.0995940417 -0.0253548026 0.0838286281 0.037731003 0.0643470585 0.0464969426 -0.102055438 0.00463358313 0.0325008184 -0.00370962941 0.0821173638 0.0869908333 0.032846041 0.0313670263 -0.148173332 -0.177075326 0.014351381 -0.0347749256 0.0631445199 0.0138477925 0.141796917 0.0031752775 -0.0240941141 0.115030944 0.0497418977 0.0109222829 0.0674659908 0.010190879 -0.0980509967 0.107191958 -0.0665694326 -0.0149048567 -0.135567963 -0.0943998545 -0.0724455938 -0.108684249 -0.117758349 -0.0431607552 -0.0478789434 0.0548663996 -0.0874581188 -0.12479274 0.0178123116 0.070239827 -0.0386666693 0.134508371 -0.0741510987 -0.045267418 -0.104734987 0.0435491502 0.0155023336 0.062136706 0.103647709 0.00290234643 0.064395614 0.0177004337 -0.0480007231 -0.110428169 0.0850054473 -0.0885846689 -0.0500162207 -0.0616900064 0.148497447 0.0951149315 -0.0552124381 -0.14905256 -0.0889345855 0.0241270382 -0.0488678627 -0.00426269416 -0.0119903926 0.141092837 -0.0797038823 0.0120936269 -0.0693103597 0.0249975473 0.145910755 0.0371512882 0.117824383 -0.0573362373 0.0323375016 0.10749159 0.0636148006 0.0273176879 -0.0183407739 -0.115713961 0.00984863937 -0.042138014 -0.118756019 0.0761800632 -0.132937029 0.119471751 -0.0354485847 0.127721861 0.121748939 -0.0444656201 -0.116685092 0.0304207485 0.0655196533 0.0813344195 -0.107156277 -0.0844279304 -0.0894685909 -0.127893046 -0.0939210355 0.0843395889 0.0614806749 -0.0614846796 0.0341131836 0.0421788543 -0.105776869 -0.122536495 -0.129194289 0.12389411 0.0393402874 0.0495846197 -0.119962715 0.106891051 0.0449610613 -0.145153821 0.0629368573 -0.108572282 -0.0517346151 -0.0622508824 0.0323888771 0.0450324118 0.0440010354 -0.0938819498 0.0203166902 -0.122573078 0.0831483901 -0.0707751289 0.0610891283 -0.0958172753 -0.0424687862 -0.107412554 0.126818612 -0.0554413795 -0.122475646 0.0243445728 0.0513011068 -0.0796121135 -0.0414110497 0.0280717909 -0.0394670665 0.0598106235 0.0484230518 -0.126170114 -0.0951998904 -0.078391239 0.0402923077 0.0229541957 -0.0937744156 -0.0477972776 -0.0179067627 0.0602110922 0.0934107453 0.139090851 0.0272798836 -0.0181408152 0.0339401662 -0.0496698096 0.123755589 0.077883482 0.0388832986 -0.119654641 0.140062913 -0.02580522 -0.0365974084 -0.0948570818 0.0476925224 -0.106507264 -0.0877594203 0.0167225003 0.0365579128 0.0707214922 0.0943449885 -0.0219054744 0.0260573626 -0.0470213518 0.139553711 -0.0147360563 0.0706477165 -0.078761287 -0.0111072361 0.0549765974 -0.0400532633 -0.0153049231 0.0315274298 -0.0705541149 0.0631048977 0.117633738 -0.0514981188 -0.134041414 0.0551473498 -0.0794123039 0.00670406362 -0.0401185192 0.00911470596 -0.0211872291 0.116496786 0.155161962 0.0629097223 0.138448417 0.0936189666 0.0627936721 0.0648671389 -0.129390776 0.0583335906 -0.0174725447 0.0610876642 0.163619712 0.0462206006 -0.0404846109 0.0467165858 -0.149703398 0.0884451717 0.0297990069 0.0904366821 -6.66035776e-05 0.089948453 -0.164717227 0.0440124683 0.0429885276 -0.0889559984 0.0580933429 -0.0497451164 0.0140721994 0.123201773 -0.0521491505 0.0792684183 -0.116658807 -0.0728405491 0.138154134 0.0858280063 -0.0885532424 -0.0259025618 0.143468827 0.11027436 0.130449191 0.0466446765 0.0738923401 -0.0459300056 0.135415688 -0.0519030988 0.0363911055 0.100617178 -0.0735667422 -0.056867335 -0.100770339 -0.0581379086 -0.0582638234 0.057107687 -0.0833413973 0.117787801 -0.121084802 -0.0609023273 0.0458093919 -0.0386206284 0.120702438 0.152693301 -0.0653539896 0.00243751518 -0.116231412 0.129159972 0.123322234 -0.00450206548 -0.0729444399 0.0853474438 -0.145202518 0.0369620174 0.0304967947 -0.0492551252 -0.130056858 0.0221843477 0.0469832569 -0.056170959 -0.146923915 0.0244862288 -0.0021409702 0.0949956179 0.134217575 -0.0556118563 -0.106579103 -0.0108840466 -0.147231668 -0.0594046339 -0.0605274215 0.00136603415 0.127606124 -0.115885407 -0.00178258657 0.0252946466 -0.0912591442 -0.0576305799 -0.0229029693 -0.085684374 0.165293708 -0.0522565134 -0.0692233294 -0.0375391915 -0.0102695916 -0.104804181 -0.0526487827 0.150056034 0.0551703274 -0.0257776212 -0.0228184611 0.073696211 -0.0467144549 0.0735779107 0.0948753133 0.144433752 0.166210935 0.150882557 0.0348055102 -0.0166522712 0.161574543 -0.0576791242 -0.0115333898 0.0468233787 0.0915934965 0.0572047532 0.00290581165 -0.0225567296 -0.0246865228 0.0868225098 -0.00762603246 -0.11816176 -0.12570864 -0.0243588239 0.0893646181 0.0507476032 0.0117150992 0.10665486 0.121998012 0.0696426779 0.0685170516 0.025851354 0.142432615 0.146065772 -0.0138009675 -0.129955053 -0.000132796747 0.0203777198 0.181162477 0.0261075366 0.168559924 -0.0609995425 0.168947399 0.0293546468 0.0149049358 0.0582519248 -0.04978792 0.103416584 0.0590672493 -0.0700641274 -0.14282304 0.0278612077 -0.131275356 0.14719297 -0.0653766692 -0.0751730502 -0.0065545626 -0.0380778089 -0.157771811 -0.0827088878 0.0777130723 0.198460281 -0.0469098203 0.134435102 0.14349848 0.0417049713 -0.167793706 -0.0996251702 0.0797272176 0.122879468 0.179209173 -0.0471446738 0.168509901 -0.0920644701 0.000451093569 -0.129801482 -0.0594977811 0.00711449794 -0.102573976 -0.109216064 0.148805737 -0.0950382799 -0.00647751195 -0.109716304 -0.055815164 0.0108515322 0.0906130522 -0.0650295168 0.0893351659 0.114572234 -0.106268756 -0.0118306447 0.113306493 0.0420926064 -0.143582255 -0.11182075 -0.0272862986 0.0896898583 0.0240881741 -0.0932913795 0.0246650521 -0.138226554 0.0727393776 0.0228461325 -0.16916123 0.00246544858 0.0807503536 0.0973562822 0.0370443426 -0.135211006 -0.0732924193 -0.00324719655 0.00122735673 -0.0426253006 0.0464077778 0.0109117776 0.0653063208 -0.0427299105 0.0784161389 -0.0572868735 0.105860651 -0.097189337 0.000937802775 0.0585776716 0.141582102 0.0805247277 0.0409072042 0.00662690401 -0.0858124942 -0.0805532038 0.0404491127 0.124266788 -0.106811218 -0.0971105546 -0.0997086912 0.0074750483 -0.108586416 0.0607502013 -0.054395549 -0.0843265578 -0.0900614634 -0.0676774904 0.0206092894 0.099438563 -0.0570041686 0.0200342685 0.0236357749 0.0572907329 0.0950599462 -0.125204116 0.0311794877 -0.0876096636 -0.136037469 -0.0375309587 -0.0594457537 -0.113332778 -0.0978064537 0.0964330435 -0.0265298411 -0.00542576611 -0.0438456684 -0.0523090437 0.0630306751 0.124709442 0.0266276151 0.0232248306 -0.0171631426 0.106016204 -0.110088825 0.08274737 -0.110974953 0.0963929445 -0.0833926201 0.0303138644 -0.0306184739 -0.00307349861 -0.11504256 0.1075629 -0.131726444 -0.0421231985 -0.138194129 -0.113239586 0.0452417433 -0.0149982423 -0.0436716527 0.0988965183 0.0192198902 0.0135216201 0.00940239057 -0.11294537 0.0857888535 -0.120034076 -0.108530715 -0.101037055 0.0667439774 0.0855601728 0.0677483305 -0.0134334378 0.120776698 -0.116636701 0.0875215456 -0.0949789584 -0.0514179617 -0.115705922 -0.0596454814 0.0422541201 -0.0999356657 0.0502830669 -0.0702968836 -0.123627275 0.106365606 -0.0744836628 0.0104168141 -0.0793894753 -0.114666551 -0.00283100014 0.140118852 0.0356186256 0.054495573 0.102265559 -0.0458586551 -0.0236852318 0.106435075 0.00892684981 0.11433281 -0.115178108 -0.131405771 0.117208794 0.0424666107 0.134797171 -0.143703952 0.0569373965 -0.0665611774 0.0933629125 -0.0201621354 0.111485049 0.00233875564 -0.0812246799 -0.0269328542 0.0232816096 0.0677310228 0.117872521 -0.141205952 -0.00658942759 -0.0309162736 -0.144758567 -0.057528194 -0.0684359372 -0.0633766428 -0.0364208929 0.152422816 0.0387307405 0.0868177786 0.0241200123 -0.0501802117 -0.03670137 0.0153368488 -0.0590804406 0.0290142465 0.100567661 -0.0803031549 0.037419185 -0.0459126569 0.0960116088 0.116186179 -0.0661039278 -0.0716232583 0.0593420751 0.0900740027 0.132992968 -0.025843842 -0.062321458 0.0100088529 0.00727820396 0.0946147069 0.111916468 -0.0648906529 -0.0192210414 -0.10582228 -0.052964583 -0.0713335574 -0.100110069 0.0656400323 -0.0618378446 0.0341230631 0.105089828 -0.0501025058 -0.138664886 -0.115556583 -0.105643809 -0.0142834401 -0.0338118225 0.0103544462 -0.0796577036 -0.00744031509 0.0366418958 -0.13744548 -0.12394321 -0.133015111 0.139994159 -0.096086286 -0.0779372826 0.0771979392 0.127238646 -0.0140574072 -0.0333673917 -0.0900884196 0.0653517544 -0.0381353125 0.124770477 -0.12113288 0.0970005691 0.107545584 0.114952408 0.0286091883 -0.109182179 -0.0630336329 0.0627928153 0.0373910069 0.110793836 0.0872234032 -0.110715158 0.0479132868 0.068171978 0.0977038071 -0.0969489664 0.0069321245 -0.138224244 -0.1087984 -0.0156357884 -0.0806711093 0.0635136664 -0.088648513 0.0915248096 -0.0295681208 -0.0889791846 -0.0202619806 0.0473107845 0.0719934851 0.0844703317 -0.0181293488 -0.0325784534 -0.0223496631 0.0800980031 -0.0469706431 0.12284486 -0.0163284689 -0.0720243454 0.0100721121 0.012039721 0.0458173305 0.00532619655 -0.0372635648 -0.0938430429 -0.0808144957 -0.0140093267 -0.139559567 -0.0209952146 -0.0627007261 0.0597438067 0.105400652 -0.034525536 -0.0433830321 0.00658106804 -0.113124847 0.039323777 -0.0219132751 -0.0291076973 -0.0714975595 0.0354093611 -0.0999722928 -0.0223256275 -0.00125360489 0.131300226 0.0749686807 -0.103176132 -0.00177618861 0.100545034 -0.0482359231 -0.130312055 0.0585651398 0.0481558293 -0.050323084 -0.0714227259 0.114079475 0.0674445853 0.0338538028 -0.0207888857 -0.0817157254 0.0414048955 -0.00719799427 0.000449810963 -0.0368338116 -0.000197023153 -0.128918022 -0.120564923 0.00234631728 0.017666148 -0.0760105997 0.129530162 0.0278998706 0.0785642117 -0.0846611708 0.142189592 -0.0866099969 -0.034735851 -0.0374385677 -0.141838074 -0.0461979173 0.0792662352 0.0883275494 0.0821309313 0.0135414349 0.0762536079 0.162841812 0.11422585 0.0459163897 -0.101808973 0.124757119 0.10234201 0.121778518 0.124219798 0.15815866 0.120062478 -0.0342520848 -0.0277299657 0.00312125683 0.0146831786 0.0419388674 0.12670289 -0.0661896765 0.00485484302 -0.0807016641 -0.102789596 -0.10495542 -0.0415844247 0.117631674 0.0792787224 0.121481225 -0.121911712 0.0956676602 -0.13807556 -0.071042493 0.10285683 0.134201437 0.0446345471 -0.0243865289 -0.0965441614 -0.0743445978 0.0868661553 0.0650995299 0.0439964831 0.0520170368 0.122856326 -0.00557545433 0.0643403828 -0.130216479 0.075305514 -0.0704696178 0.0523242615 0.132898629 0.0556151196 0.0135608455 0.106630892 0.00448958855 0.0294760223 -0.130943984 -0.115232065 -0.0185228847 0.0750679225 0.0868396237 0.115859844 0.0128320716 -0.0767292604 -0.0757243782 -0.109670304 -0.022307232 -0.0409514084 -0.0354676992 -0.101452865 -0.0788824335 0.0799969286 0.139659941 -0.080234088 0.0067355819 0.0488539226 0.134902641 -0.044175718 0.0824501589 -0.150373846 -0.134587288 0.0352238007 0.116991237 -0.0220136195 0.114933126 -0.114182681 -0.0119344881 0.0254181288 0.110809639 -0.139513344 0.0670420676 0.100754023 0.0473007746 -0.110368282 -0.0900191069 -0.0120764263 0.0714306533 0.122893341 -0.0579950325 -0.069827266 -0.0631239116 -0.0197088365 0.0283315647 -0.0101505062 -0.012342534 -0.136444777 -0.110550106 -0.0110609038 0.0534135252 0.0715058818 -0.135529175 0.0218331032 -0.0914941207 -0.117715605 0.0637661964 -0.119437411 -0.0767295882 0.13249214 -0.0307924412 -0.0124762207 -0.0491118282 0.115513906 -0.0599435866 0.0437990949 -0.0970950872 -0.126184925 0.00789543986 -0.0699488521 -0.0769708008 0.0143007189 -0.125161707 0.0508386642 -0.0768451542 0.0126496255 0.0037975586 0.0945261717 0.030873267 -0.0632951036 0.121244743 -0.124326058 -0.0519415066 -0.00867667794 -0.0128302025 0.0473873913 0.134872839 0.0456339866 -0.141257316 0.131795123 -0.129008144 0.0524923205 0.111486077 0.00192398916 0.0257397145 0.0104135079 0.0229955614 -0.0101489769 0.0377998948 0.0529350787 -0.0652860105 -0.0964240208 -0.119346842 0.102311134 -0.0513100103 -0.0956246778 0.0625582039 0.10400553 -0.0152444094 0.0996984094 -0.11846026 -0.0332592428 0.0517609864 0.124776825 0.0835027397 -0.0161721092 0.0544919521 0.0960061252 -0.0852253288 -0.115617849 0.132461503 0.0126986802 -0.0718445331 -0.0552118719 0.0423579067 0.00376538932 0.109214559 -0.060116075 0.00753490627 -0.11368005 0.00783166289 -0.0537703261 0.0794192106 -0.0919727385 0.00208424032 0.111719355 -0.0963476151 0.0950013399 -0.108282149 -0.12641567 0.12435104 -0.110969186 0.0644554049 -0.140637219 -0.0633735061 -0.0338808447 -0.123940453 -0.012826249 -0.0421397537 0.123598143 -0.038868092 -0.0173738599 -0.0128029287 -0.0981713384 0.100576788 0.075251177 0.0625472218 0.0764244497 -0.0654502288 -0.0070194602 -0.0901699513 -0.018791154 0.109865949 -0.0268438831 -0.0740915313 0.10593608 0.150855407 -0.0649960041 0.05053664 0.131901428 0.0731908754 -0.109051332 -0.00465088245 -0.0684918538 0.146911919 0.00280831754 0.14333044 0.108945541 0.0746650323 0.112723231 0.0545722842 -0.120427899 0.0842576474 0.0266837925 0.0456104651 0.0698814988 0.047249984 0.190266967 0.163280755 -0.0185097642 0.0825716704 0.031122379 0.064069435 -0.164239749 0.0375948921 0.0769198686 -0.155088678 -0.104684114 0.075281471 -0.00940326042 -0.0429962575 -0.0142965838 0.0120388716 0.0730001554 0.00848747697 -0.107716456 -0.0337854624 -0.133317709 0.128731623 -0.108297765 -0.0142049389 0.133316174 -0.115573399 0.0836417973 -0.0845889002 0.0138655473 -0.0655629039 -0.0104014426 8.64409303e-05 0.1608392 0.0675673187 0.0179192871 -0.0236063488 0.046505671 -0.0392516479 0.0720221549 0.0741254017 0.0510466658 0.149464175 0.0988350585 -0.0124777406 -0.0114185531 -0.143563926 0.116806343 0.0688097924 0.145737663 0.0330820084 0.103331283 0.189101636 -0.0580817536 -0.152595311 0.10088592 -0.012149916 -0.153301135 -0.0369912386 0.0269313119 -0.100157224 -0.0723579377 0.0605176054 -0.16903989 -0.140898824 -0.124555223 0.149031043 -0.112365574 0.0937826708 0.0157145858 -0.100749768 0.0515762866 -0.0710803419 -0.0686863139 -0.0993681699 0.136043593 -0.0661427677 -0.00102904439 -0.0144443447 0.00527101662 0.0664612353 -0.125710681 -0.0143222958 0.0327278823 0.00587114692 0.0143621564 0.0739389807 0.0117307007 0.00317768753 0.131541565 0.0450980216 0.0562246889 -0.112021118 -0.121544585 -0.0703852251 -0.0653774664 -0.049726896 -0.0712407231 0.0384204239 -0.0756127983 -0.108454555 0.0761180222 -0.108996943 0.031490311 -0.110682026 0.0954553038 -0.0480172858 0.026041314 -0.0700769648 -0.0236957669 -0.105962321 -0.0145984888 -0.0221198499 0.080327794 -0.0255625173 0.0917616338 -0.0579084232 -0.115254268 0.0298689604 -0.033957921 -0.00849801302 0.101248682 0.0688192248 -0.033151634 0.0967501849 0.125775561 0.0594263077 -0.0315592438 -0.0751049966 -0.0152497739 0.0194816925 -0.025003599 0.110108972 0.0337932631 0.0466330573 -0.108177759 0.0923204646 -0.127821535 0.132012337 -0.0206957385 0.160091415 -0.0121085728 0.103845544 -0.00710585574 0.106360584 -0.0573629886 -0.116646938 -0.0393697619 0.111357979 -0.00339666428 -0.0565832593 0.187762156 0.0738954321 0.00436372962 -0.0201180782 0.0962937772 -0.0409735553 0.126787856 -0.0257689413 0.142943844 0.0456875302 0.0157842282 -0.125038773 -0.127323419 -0.0811595023 0.127563968 0.0147216143 -0.0188120566 -0.037811175 -0.0973046944 0.155958325 0.107728779 -0.0499815643 -0.132879764 0.0703277811 -0.0151593685 -0.00903364085 -0.12080054 -0.0394929722 -0.05078182 -0.0835544169 -0.0767298788 0.111453474 0.0164176039 0.0738900974 -0.0237710364 0.108378887 0.00188849773 0.0275815967 0.0885725319 0.0609878637 0.0426038019 -0.0367399938 0.00715940725 0.0550841689 0.0243325494 -0.182480216 0.0272398591 0.0657472908 0.0879313201 0.0918957889 0.156981304 -0.0646059811 -0.119247735 -0.047599677 -0.116651364 0.147660568 -0.0381035991 0.0582092069 -0.147984505 0.19525826 0.122297406 0.0312584154 0.0444355682 0.0738523602 -0.00197054748 -0.0539330691 -0.101374164 -0.0567407869 -0.083611846 0.0657852963 0.0735184103 -0.0166746452 0.0735289529 0.0353927538 0.0238035768 0.0864389837 -0.0453045592 0.0350245051 0.0573615246 -0.117907874 -0.0528462976 0.0303292908 0.0802146792 0.0530198105 0.0752285719 0.0932139829 -0.126501501 -0.0627672225 0.0169230103 -0.0626025647 0.0349148065 0.109626763 -0.105911814 0.0788237303 0.0676720217 -0.153280228 0.0910756141 -0.0301282536 -0.000840488705 0.106123164 0.0890205577 -0.0100000529 -0.0720009357 -0.0945366025 0.10201738 0.139737338 -0.00469925907 -0.00698884297 0.108112089 0.0135092204 -0.127867475 0.0966705605 -0.00785964262 -0.138336435 0.0638243333 0.0331486128 0.0260607041 -0.0856452286 0.0471999496 0.0917534381 -0.0968203172 0.137134388 -0.108194001 -0.0242078379 -0.0838816911 0.0852543861 0.101658776 0.0793644413 0.0438847691 0.048443377 0.0175982714 -0.0791520029 -0.126521289 0.120813206 -0.0748142153 0.0292773098 -0.0915241987 -0.0191392377 0.070805788 0.0297760516 0.0540979952 0.0455519557 -0.104483157 -0.127842084 0.00549553335 -0.00101320446 -0.0383768007 -0.0929412916 -0.0527148545 -0.121787742 0.116296932 0.127205387 0.0215808749 -0.00527906418 -0.0950953811 -0.102384314 -0.136119738 -0.135512173 0.0213526934 0.131827787 0.0118881762 -0.0137285888 0.0550298095 -0.101517551 -0.0358831212 -0.0636179894 -0.0305602103 0.000535279512 0.0445587337 -0.14124018 0.108244672 -0.0400930718 -0.081310682 -0.133354321 -0.00902658701 0.0237516761 0.136513993 0.0469265431 -0.12061704 -0.0962344632 -0.100439511 -0.0504943989 0.0647302195 -0.00468132785 0.0335817374 -0.0337964632 0.134859458 -0.0318156444 -0.0735609904 -0.087305516 -0.00295244693 0.143023223 0.0555585437 -0.0434618294 -0.0989003032 0.0798072517 0.0347367227 -0.0889567286 0.0454466157 -0.0994898081 0.0186923463 -0.173386604 0.0882037133 0.137024656 0.0983027816 -0.120416664 0.192227334 0.0853054151 0.0870762393 -0.0379575193 0.161280155 -0.0570422672 0.151813626 -0.106967077 0.166952252 0.101883747 0.054235056 0.189217716 -0.0840895325 0.070606254 0.0933529139 -0.0497438386 -0.0975164622 0.156465441 -0.061189115 -0.0255476627 -0.00462962687 -0.0117694922 -0.026365703 -0.0826264173 0.10701783 -0.0981103182 0.0933794603 0.112419106 -0.00185976818 -0.0279008057 -0.0761715546 0.160778821 -0.00510247052 -0.0529871248 -0.00726129953 -0.0403804705 0.106054351 0.0701956153 0.160074636 -0.126040637 -0.0446393117 -0.0562781654 -0.0765028149 0.118484885 -0.00363161834 0.0893984511 0.0428721681 -0.0862490907 0.161411509 0.110493377 -0.125039488 0.160013914 0.138771698 0.00862341002 0.120823577 0.124806479 0.126176298 0.0108156186 0.103124447 0.0837469697 0.150882855 0.0736863688 0.00851425901 0.0157211907 0.0169511139 0.0302761346 0.0810550079 0.0379871763 0.138916358 -0.0554511286 -0.134623349 -0.0141197927 0.102847748 -0.109416708 0.116542891 0.0628927052 -0.0400933027 0.104376331 -0.119453713 0.0155786276 -0.0215776712 -0.00744922459 0.0798532814 0.0639959276 -0.0196548998 0.0446766913 -0.0535386354 -0.0242897347 -0.0326042622 -0.0356833786 0.091811657 0.119531497 -0.00566391647 -0.108836398 -0.0138843209 -0.0777539462 0.0244711339 0.0302925706 -0.0840600431 -0.135451585 -0.0993905663 0.0911848098 0.0254171342 -0.104574814 -0.0337190852 0.123146519 -0.00662016869 0.112501815 0.115908071 -0.0144283175 -0.0765947551 -0.070567295 -0.0816542134 -0.063042447 0.0821659714 -0.106859922 0.0445039272 0.0489273965 -0.054737024 0.0737254471 -0.0994004011 0.0907932669 -0.0525529906 0.0760397166 -0.0459865108 0.115764856 0.089792937 0.0262587946 0.0727124959 -0.0986016765 0.134460911 0.120390087 -0.106480896 -0.126752883 0.0600169674 0.0778978691 0.18399404 -0.116155103 -0.0270620678 0.0360745415 -0.0706986636 0.0219121743 -0.0485235155 0.0116395457 -0.0190455988 -0.031040974 0.138535559 0.0049945279 -0.0669544563 -0.135656506 0.111046769 0.0884723812 0.0641905293 0.0139932213 -0.0747538805 0.14979732 -0.0598235726 -0.0681837425 0.078919284 -0.0920129493 0.0927841365 0.0622474365 -0.0725972429 -0.113833509 -0.106715776 -0.127413034 -0.0326030068 0.0898154825 0.0540499836 0.120726503 0.000129148364 0.122900732 -0.000325784204 -0.152520627 -0.0064624548 0.073874481 0.0152352303 0.0961310565 0.0109456517 -0.0507575348 0.120654956 0.126830235 0.108187631 -0.163000211 -0.17243591 0.0882807449 -0.00232086889 -0.132580787 -0.132526517 0.101748489 0.118433878 -0.034390375 -0.0734529495 -0.163235143 0.0954085439 -0.164447442 0.0220291484 0.165916741 -0.0268109124 0.00910670217 0.0109529579 0.0204264484 0.0675835386 0.0192554276 -0.019607991 -0.139865518 -0.0490590185 0.116421953 0.133383304 -0.00608086493 0.0666911826 0.010473121 0.0839324743 -0.00665302482 -0.0341572762 0.0905544311 -0.0330039002 -0.121482521 -0.13752155 -0.00984864868 -0.00438012183 -0.0152192581 -0.0799304917 -0.144058108 -0.0689092726 0.0797125772 0.049202282 0.121467397 -0.059979789 -0.0338185877 -0.0875877663 -0.0352213718 -0.0265744999 -0.000970848021 -0.0542924628 -0.0339916125 0.0772038847 0.072251454 0.0386321284 -0.123490326 0.115714893 0.00240401109 0.138920873 -0.0890034363 0.0679262504 -0.0547012426 -0.0577247515 -0.00683979178 0.0310920458 0.0301014595 0.09850014 0.0825214908 0.137729675 -0.101440713 0.0277171135 -0.0100952508 -0.106422052 0.0132092983 0.104450844 -0.0620612726 -0.0807258561 -0.0243801288 -0.0753299445 0.0706419945 0.133234069 -0.119514674 0.126184896 -0.0232823435 -0.107439861 0.0486271791 -0.0467080846 -0.0273494851 -0.0494388938 0.106704935 0.0911619067 -0.0630275458 -0.12553288 -0.00862511992 -0.028438285 0.0349335819 0.140132353 0.0860453546 -0.0957376212 -0.00533922017 -0.0604479536 0.105011344 -0.0739720687 0.0970580429 0.0160997361 -0.0723680109 -0.0773093924 -0.100641474 -0.000383406878 0.0533034801 -0.0397997424 -0.094927974 -0.0186026245 -0.128123358 0.0100442469 0.0157190859 -0.0655204803 -0.0211179629 -0.0938301831 -0.0268794596 -0.0193795785 -0.0539523363 0.0891814232 -0.0723926127 -0.0894492418 -0.103800982 -0.0962850004 -0.0336188897 0.0200818777 -0.0712372959 0.0486632138 -0.0899597034 0.0786281079 0.0739620626 0.000252395868 -0.123112433 0.0357136726 -0.123496763 -0.10789144 0.0340666659 0.0580845289 0.129886597 0.0481477603 -0.0664139464 -0.068032667 0.0435736328 0.120462291 0.119476132 -0.123401277 -0.0684902221 0.0760027915 -0.00998137705 0.0795442387 0.0158706605 0.0744519681 -0.0714682937 0.0881680399 0.0666808859 -0.0570629239 0.164650321 0.12301676 0.0870940611 0.0813848004 -0.026452858 -0.0121077476 0.129477188 -0.142631516 0.0968390107 -0.0411611088 0.0438095704 -0.0174164046 0.143360704 -0.0702998862 -0.00564636895 -0.129426509 0.129657581 0.00313778641 -0.0574785173 0.130360812 0.0483710952 -0.119206332 0.0171343237 0.0760408044 0.0280918181 -0.0732154027 0.110174745 0.0896861851 0.00575533276 0.101032197 -0.0848289058 0.0622318983 -0.0934585631 0.13217181 -0.145480588 -0.151452094 -0.193723321 -0.0797042996 0.0565548502 -0.103730097 -0.0197361708 0.0303085633 -0.077303797 -0.0265100189 0.0391752571 -0.0126148164 0.0999564305 0.0248864293 0.120508894 -0.133522972 -0.0817446709 0.168698058 -0.0242357664 0.125314549 0.138810694 0.0583302379 0.117092818 -0.00391758466 0.132906526 0.00617161999 0.0204244088 0.0222095568 -0.0337266289 -0.120259523 -0.0814958364 0.0759975687 -0.127410248 0.107059687 0.0443736836 -0.111485079 -0.0593480803 0.0116199553 -0.0279748887 -0.114313811 0.138794228 0.0156804174 0.0343504995 -0.0126132518 -0.0769002363 0.0819693729 -0.0879027769 0.129266858 -0.089684993 -0.0613807291 -0.141623229 -0.0882831067 -0.00330040953 -0.0679773539 0.0915297493 -0.137266099 -0.130465984 -0.0945152789 0.00593703426 -0.116638407 -0.0219550729 0.156748876 0.180117995 -0.0130009502 0.0716223866 -0.144065097 -0.0823530853 -0.018947104 0.0158558208 0.0430184379 0.154572129 -0.0570289902 0.0835271254 0.117485747 -0.0653187782 -0.0945697576 0.0832378045 0.128256038 -0.074221611 -0.0555155501 -0.0284128729 -0.0774558261 0.0875912234 -0.000803266244 0.0153772263 0.105136067 -0.156031758 -0.0866245553 0.0865259767 0.013942048 0.0376075655 0.0092022717 0.137353256 0.0442829132 0.0658835545 0.12299668 0.0346786119 0.100729421 0.0517841168 -0.0898612216 0.0358949974 -0.0126487454 -0.150891528 0.118704185 0.165351018 -0.0962797925 0.14023003 0.103115313 -0.0602911599 0.110436209 0.0308854431 0.121604525 0.127166167 -0.0259905457 0.128405809 0.0925664902 0.120217182 -0.110264599 0.0578555204 0.0608986728 -0.0673596784 0.0434045941 0.0300820656 0.00743610319 -0.0426555723 -0.0631315857 -0.102573559 0.159559608 -0.0250314325 0.151789397 0.0174076445 0.0587394796 0.0256275982 0.0982463285 0.113202661 -0.0388134755 -0.175611705 0.00276806951 0.100146458 -0.0734548494 0.0292181689 0.0794109702 0.163403466 -0.0469043329 0.0134403957 0.010670647 -0.0128954323 -0.123306222 -0.105166018 0.0782779232 -0.0463915803 0.0925515667 -0.153734311 0.0399706028 0.0324561 -0.136016384 0.121838123 -0.0339740776 0.0617321283 0.0716827065 0.0507700294 0.163305402 -0.0263362825 -0.0235168263 0.156056419 -0.0918754488 0.0799345896 -0.0889241397 0.132214025 -0.0339573547 -0.0231025834 -0.0893127769 -0.15716745 0.0421844684 0.164029196 -0.0144499643 -0.0524246357 0.0808350593 -0.0515551828 0.12877433 0.123672612 -0.00602019066 0.108041525 0.0685625225 -0.107800402 -0.106587365 -0.0706622899 0.079738766 0.00206816196 0.111752108 0.164152429 -0.140735298 0.0629734248 -0.139009103 0.147802591 -0.0249149017 0.0117471283 -0.0343583301 0.100054584 0.0977551788 0.0404288657 0.126847446 0.172036812 -0.0862832591 0.0989949033 -0.0108139813 -0.109132327 -0.065063715 -0.05474668 -0.0446046479 0.0215636939 0.0535516292 0.0117583256 -0.0911338031 0.08406578 -0.0239859279 -0.0490008146 -0.0228198916 -0.0266752448 -0.0871018842 -0.143308818 -0.000947127643 0.0332983695 0.111087874 -0.112454593 0.166085541 0.138097584 -0.11190179 0.0368833952 0.0595792085 -0.08562731 0.159958228 -0.0567510165 -0.139039397 0.142708972 0.0837232322 0.0328567959 -0.137172282 0.0749840587 0.0762316734 -0.121910147 -0.0499757975 -0.0399817154 0.0123820901 0.0685181022 -0.0794972554 0.130480066 0.0485983491 0.138348088 0.115582928 -0.139361694 -0.124933064 -0.116998814 0.0428138971 -0.119468078 0.0110614747 -0.0100474358 -0.0216016769 0.0101694763 -0.0820546895 -0.0777691826 -0.117070802 0.126368955 -0.134564951 -0.0302673057 0.110644177 -0.117660195 -0.00458803773 -0.134814233 -0.0293745026 -0.0682334229 -0.095479995 0.0648405999 -0.0339591131 -0.0932905525 -0.123183005 -0.0931407288 -0.134305537 -0.00308911502 -0.0195324719 0.131572172 0.0200927258 -0.0235931352 0.0316516161 -0.0269541889 -0.0461172685 0.0184912682 0.0468815118 -0.138456166 -0.0628102869 -0.116835825 -0.0204005614 -0.0558201149 -0.0682484955 -0.0157806873 0.0899439305 -0.100633815 0.0933804959 -0.0987777337 0.0174719673 -0.0371658802 -0.0836207345 -0.145877808 -0.0866134912 0.113749318 -0.0686557889 -0.10567008 -0.0482730195 -0.0070918831 0.114946 -0.134217158 -0.118984662 0.158714101 0.108982846 0.0504443385 -0.141264856 0.0717000142 0.104913875 0.138390988 0.0620018132 0.116844647 -0.0917929709 0.00455025444 -0.0148221394 0.090433605 -0.140797302 -0.127093479 -0.139803812 -0.0659283474 0.0552696697 0.0945735574 -0.097406134 0.0754362494 0.068106018 0.124531001 -0.0909646004 -0.102133028 -0.115647264 0.0507812947 -0.0703440532 0.124720164 0.138866737 -0.100535631 0.0667068362 0.00467189308 0.116699241 0.00265486818 -0.0454894938 -0.0559370294 -0.13688907 -0.0905043259 0.0473077707 0.00356861111 0.119102031 -0.00279730256 -0.0290972441 -0.0549311638 0.0690060258 -0.108683005 0.038172666 -0.0376584455 0.0284418333 -0.0453239642 -0.0188913643 0.10330762 0.0158650316 0.0302619878 0.10599114 -0.0192918926 -0.00512768747 -0.067353636 -0.0088639101 -0.0262610465 -0.00164695387 -0.0329788141 -0.0782948136 0.0144974366 -0.102364071 -0.0231709629 -0.053929802 0.0872491896 -0.0339207873 -0.0452650562 -0.0555231161 -0.106613263 0.0545223877 -0.104694769 0.0733768344 0.117440388 0.0850411355 0.127571523 -0.0451369807 0.127713785 0.0604759753 -0.116016053 0.120935522 -0.0490187742 0.0145258456 0.12456093 0.0472011603 0.0200307388 0.213931099 -0.0325213335 0.100024901 0.150687665 -0.0330869481 -0.0743881986 -0.0423107482 0.0197958369 0.144206196 -0.075991191 0.00704598008 0.169469431 -0.0728433281 0.00913137291 0.00888578407 -0.0482634306 -0.0623912066 -0.0938192755 0.0339379199 0.119007394 -0.0949489996 -0.114306375 0.0466704629 0.01765888 0.12498638 0.0581458732 0.0168336164 0.109576389 -0.0280811246 -0.0985747352 0.0206970666 -0.0964695066 -0.0664414242 0.110378399 0.139610469 -0.12254858 0.005280599 0.00761935115 -0.128316179 -0.0374653786 0.0578353852 0.127879024 0.0134341568 -0.0500954539 -0.00337731675 -0.119695731 -0.11888539 -0.00329445861 0.062248569 0.123474449 -0.0654902309 -0.0125339935 -0.0127297472 0.106315874 -0.117941201 0.101428568 -0.0125857871 -0.0383322537 0.0596774332 -0.13507621 -0.0973153785 -0.126249328 -0.0674846619 -0.0139419707 0.120396413 0.105357513 0.0358138867 0.154476896 0.111231543 -0.171673253 -0.0643624365 -0.0841967613 0.0120489495 -0.110221028 -0.0037632389 -0.0754774585 0.10185004 0.094084166 -0.123280331 -0.0452301428 -0.119065173 0.00748275174 0.0470008291 -0.0596443266 0.110938832 0.078253679 0.0948193073 -0.00295168161 -0.0287760254 0.158084825 0.0492092073 0.0768529922 0.0546108335 -0.0640769675 -0.0777091384 0.102904022 -0.0749624521 0.129996344 -0.050066106 0.140702903 0.135439858 0.0565205291 0.039585311 0.131971121 -0.0433706567 -0.00613029394 -0.0415827632 -0.0896254405 -0.0250180494 0.0855293274 0.0162106231 0.0830451697 0.0412258208 0.109798126 0.0821261331 0.119376883 0.123270549 -0.109921172 -0.069528237 -0.128147811 0.00981930271 -0.0908927023 0.162675932 -0.115118623 0.0430796407 -0.0360173024 -0.142209709 -0.0679464936 0.125360489 0.0772604644 0.159708619 0.136423364 -0.113330379 -0.0995218381 0.0548840612 -0.0670673028 -0.0882866234 0.0123610795 -0.0497058704 -0.0615072772 -0.0382896513 -0.114018604 0.139805213 0.139648527 -0.0281077586 0.0916863829 -0.00357940956 0.119387031 -0.038923528 0.099045448 -0.056032382 -0.132692307 0.0513692684 -0.0116330124 0.0760553926 -0.0958063975 0.133466452 0.0711750537 -0.0614313632 -0.0412404239 0.0541786365 0.063894175 0.0131827295 0.0888437405 0.0340017416 0.00108662061 0.102465764 0.172270909 -0.110579006 0.0245050453 -0.113299966 0.0307509303 -0.00821371656 -0.0823507458 0.12988846 0.0906711072 0.0899582058 -0.0237735175 0.0522135533 -0.0568510592 -0.0833672881 0.0160184987 0.0457749926 -0.157355502 -0.137885511 0.0371456817 0.140265986 -0.0514193922 -0.0789706931 -0.0262926575 0.031272471 0.0584937558 0.088243857 0.0618854538 0.105974808 -tensor_16weight 2500 --0.00417222502 0.0333769061 -0.068073824 -0.00174406881 0.0395693518 0.163498551 0.085880965 -0.0441546589 0.0277523138 -0.0394380651 -0.108085141 0.0367854051 -0.088741377 0.110496983 0.137496606 -0.0574882254 -0.0685930923 -0.118896537 -0.0459423698 0.128555194 -0.00706961751 -0.109601662 -0.0728417113 0.0407270938 0.116901517 0.037543118 -0.0457625464 0.105285026 -0.0170973707 0.0741245896 0.0285330229 0.0602779202 -0.0999675766 0.0917604342 0.00275715417 0.0221613981 0.0221239924 0.0220769197 0.0309871975 0.141381815 0.102749333 0.0140401116 -0.160013482 0.0494507849 -0.109416723 0.0506694168 -0.0603474639 0.122836456 0.102406837 0.102601565 0.0129987504 0.0918339491 -0.0100188255 0.0104814339 -0.0403004438 0.0710072964 0.139566272 -0.0848197117 -0.065366447 0.0172738302 0.0708059147 0.0956877321 -0.101862162 0.0278298706 0.119774833 0.109509952 -0.0190650206 -0.0558079928 -0.0829644129 -0.0505085252 0.0924009234 -0.0197391063 0.120741382 0.109388441 -0.155889794 0.0402384102 0.0444546603 -0.0963738933 -0.0165153071 0.0370764881 0.152952656 0.173049316 -0.104738578 0.153395116 0.0572723225 -0.0417025536 -0.0367592871 -0.113802627 0.127408341 -0.0777374357 0.0151805067 -0.0934357792 -0.0254224017 -0.0842629671 0.102764659 0.0851573944 0.119558379 -0.0993547663 -0.0726170391 -0.0864863023 0.0344553739 -0.0371179581 0.014597863 -0.101272777 0.0389231592 0.0477042645 -0.135611862 -0.0279283747 -0.0736898407 -0.00949966908 0.0327468514 0.011397168 0.0977702439 0.0774219632 -0.0666735023 -0.120168492 0.0698120147 0.0764941722 0.00528292358 0.111364022 0.0231147856 -0.0114662051 -0.0972312316 -0.046190843 -0.0422428921 0.0343527198 -0.0671815872 -0.0542576611 0.0252622664 -0.133999184 -0.036504671 -0.10330338 0.0781079531 -0.0724790767 -0.0193377137 -0.106740713 0.119743183 -0.131887868 -0.00206248462 -0.0915471017 -0.0368748158 0.00305576622 0.101493865 0.126230076 -0.0354850665 0.0502597541 -0.00101481378 0.066304937 -0.0897568762 -0.0490704626 -0.104472384 -0.0435468704 -0.083782576 0.00601604581 0.113205031 -0.0322454944 0.0402761698 -0.0695916861 -0.139957666 -0.0615144074 -0.0179994181 -0.0612879917 -0.0798030049 0.105225846 -0.0033620894 0.0714047551 0.0813372284 -0.0406201556 0.0420547277 -0.0629295483 0.128933236 -0.0379166752 0.0688687265 -0.0884593129 -0.12569578 -0.0999955758 0.0851953328 -0.105392635 0.0814247131 -0.123030953 0.0994129926 -0.0500591323 -0.109111317 -0.13911283 -0.0404232666 0.0469972044 -0.135043383 0.0236423463 0.00703085959 -0.0505910367 -0.0494552478 -0.135939568 -0.125193208 0.0693392456 0.128731236 -0.0279947668 -0.00343213975 -0.0427332819 0.137931898 -0.120300733 0.11937128 -0.0187009424 -0.0802288204 -0.13997142 -0.0681145191 -0.123841763 0.12888445 0.0320565253 -0.0545828864 0.0154529363 -0.109894589 0.127666876 0.0496545732 -0.0170144811 0.117039517 0.111107305 -0.140118808 -0.00819459558 -0.0746041089 -0.104405858 0.0340925604 -0.0303836614 0.115028903 0.082659781 0.0525604337 -0.0737821385 -0.0581655875 0.0645884275 -0.121426933 0.0243149996 0.104953259 0.11160703 -0.134071976 -0.0780070424 -0.0556476638 0.0689132363 -0.0958281457 0.0529917628 -0.00589548051 -0.0485527515 -0.133214802 -0.0193034932 -0.0625650287 0.0658643395 -0.115375213 0.0658562183 -0.126765266 0.0663554519 0.105942756 0.0113557875 0.00450533349 -0.0318827182 -0.0382149518 -0.0586391389 0.16599071 -0.0995347276 0.0626212955 -0.0848238021 0.0312059093 -0.0671846345 0.114710093 0.111972772 -0.109937578 -0.033448413 -0.00338487327 -0.137714684 0.061901439 0.0263031721 0.0757300183 -0.0868174583 -0.115281321 -0.0186160952 0.022743687 -0.0695182681 -0.0389948115 -0.00921310484 0.146889284 0.121259861 0.123389371 0.0740807503 -0.0913022682 0.0926736519 -0.0748549104 0.0120129362 -0.0260141995 0.160688117 0.103861287 -0.0145337479 -0.0333565325 0.106497906 0.12470568 -0.0427022539 -0.0200853404 0.125489667 0.117662869 -0.00191673823 0.0511538982 -0.0245015733 -0.0231342129 -0.0423432663 -0.067036055 0.0543258078 0.0126409382 -0.13212578 0.0719170049 0.0198016949 0.103088938 0.0337647051 0.118756339 -0.106006429 0.108546667 -0.161683097 -0.0532578528 -0.0416240096 -0.0414204299 0.137367234 0.113782153 -0.00119005144 -0.124352522 -0.0775565729 -0.0454200171 0.110256732 0.138212636 0.139873043 0.0665704682 -0.0985324904 -0.132870167 0.025891345 0.102456145 0.131423756 -0.108975701 -0.0261260234 0.0348203629 -0.0914004669 -0.130643874 0.064868167 -0.073974885 0.102802224 0.176195145 0.0179768056 0.0108742332 -0.0181707554 -0.0102240648 0.00463731587 0.0538709089 0.00627785875 -0.117067128 -0.087891832 0.004855379 -0.016739469 -0.0725544542 0.0821630657 -0.0417728201 0.113972411 -0.0352853052 0.076194793 0.106641911 0.101013884 -0.0971745029 0.0243909303 0.029293431 0.0236056633 -0.0190717317 -0.12938638 -0.101987876 -0.0398474075 -0.0397941768 -0.0599071085 -0.0816488713 0.131114334 -0.072889857 -0.128262654 -0.048984535 0.0297204573 -0.071578376 0.0270867199 -0.0633577183 0.13518168 0.0259044431 0.0611634552 -0.0133587159 -0.00845749862 0.0748591572 -0.0559645891 0.0772884116 -0.0892494842 -0.124270409 0.0640116185 0.0149187753 0.112770371 -0.0819433481 -0.158321261 0.146862835 0.05744645 -0.136843622 -0.0534451306 -0.057807114 0.112765148 -0.121185474 0.0865707248 0.0242433939 0.00829058886 0.00682345033 -0.0391817167 0.118281983 0.0936698243 -0.0126834186 -0.0224663615 0.0807867125 0.0614369959 -0.0208024122 -0.0155477682 0.0223713629 0.133279428 0.0341431238 -0.0801266879 -0.0473638549 -0.0159126073 0.0619758293 0.104336567 0.0777632669 0.0014840191 -0.0106922537 -0.089225471 0.139960572 0.060204789 0.0285251942 0.00940582156 0.124438897 0.0746723562 -0.0786366537 -0.140947089 -0.0317693353 0.0952398479 0.0504078493 -0.101314582 0.00230675936 0.028885033 0.0213491581 -0.138435528 -0.0281476919 0.108943664 0.00568072731 -0.021763064 0.115609825 -0.0683022514 0.0327104814 0.0758552849 0.11111246 -0.0849706307 -0.0847819373 -0.000146973485 0.0806944817 0.0748187006 0.00227210205 0.0243628193 -0.0767735019 -0.0275015086 0.0700375587 0.0457462482 0.0988681763 -0.033178322 -0.0504397713 0.0441949666 0.0874261707 -0.192612246 -0.0348819122 0.0727340132 -0.0881135315 0.0686711743 -0.0988578126 0.0592095368 -0.0447203517 0.0680712909 0.132170856 -0.0182013065 -0.0532437004 -0.0237953365 -0.0845318213 0.0437362194 0.0721768141 -0.0437071882 -0.0137181133 0.117270313 0.0595132113 0.174431637 0.0367056727 0.16878359 -0.0222882591 0.0592969358 0.0360841192 -0.0704026446 -0.0345776901 -0.0942229927 -0.165675908 -0.0193019863 -0.000750561245 -0.0503743216 -0.0973385572 0.0120092537 -0.168930963 0.162473217 -0.114793286 -0.0891378894 -0.0980757028 0.100445837 -0.0628618151 0.046582222 0.0680130422 -0.110214941 0.127800643 -0.0861949921 -0.0599781014 0.0208107978 0.0531936698 -0.00385034014 -0.0491617396 -0.0419875681 -0.00234212819 -0.044506561 -0.0653151795 0.0169184674 0.0848835111 0.144753113 -0.0450268537 0.0625728816 -0.022600282 0.0092583932 -0.0980294049 0.0949492604 -0.108607598 0.0273391213 -0.0572451502 -0.0307708569 -0.0600267388 0.0144662457 -0.122474261 0.0097047314 0.00327231945 -0.0497302189 0.143207729 0.110742435 0.116505228 -0.0606399626 0.130554333 -0.0482001454 0.146989092 0.0189287849 0.074033089 0.0732528344 0.0350786448 0.0295372307 0.131620392 0.0493342653 0.0956929848 0.0703327283 0.0766540915 -0.0670498535 0.105273128 -0.05142162 0.0384206101 0.0971984193 -0.116058186 -0.120040804 -0.0705016181 -0.10306605 0.100996941 0.104779199 -0.0833229199 0.0132066812 -0.131049663 -0.0160818845 -0.118435718 -0.0491212681 0.0483348295 0.0081577599 -0.0334057733 0.0873719454 -0.0852230042 0.1210372 -0.0395233259 0.0784760267 -0.0274248198 0.099436149 -0.081705071 -0.015222121 0.110525087 -0.0580340363 -0.0512353852 0.0699884966 -0.134925202 -0.00844763592 -0.0294794999 0.0049456358 0.0870961398 0.112957731 -0.12726216 -0.0223451219 0.053539414 0.0455328077 -0.0087382691 -0.0701403543 0.0741796196 -0.122097038 0.0159640387 -0.0312917195 0.10192579 0.0948835909 0.0600303523 -0.116103448 0.0967111215 -0.186777875 0.0394800454 0.0645573735 -0.00990641117 0.0682907104 -0.0206053555 -0.0376345441 -0.0776938945 -0.0846702084 -0.0824906975 0.020499425 -0.0137960762 0.180186674 0.0354575664 0.0700841099 0.0222972054 0.0324955657 0.130303159 -0.0262751058 -0.184336275 0.0281189717 -0.138265505 -0.0036136366 -0.0606828108 -0.013456936 0.112634584 0.105258964 0.176689893 -0.169644877 -0.0470958985 0.132185519 -0.175565004 0.0531695932 0.0104851555 0.0826452076 -0.145328104 -0.0922671333 -0.164871365 0.142606691 -0.0212450475 -0.0627536103 0.120814189 -0.0105575472 0.0765462518 -0.0688535571 0.0809823424 0.00905969739 -0.0526664332 0.0274341255 -0.065106079 0.106400639 0.0910326689 0.0318324715 0.00195610523 -0.0523022339 -0.0163692534 0.0768652707 -0.0259682089 -0.0942348465 -0.126349121 -0.122505806 0.0306945741 -0.113110162 0.0605172664 0.0169282854 -0.0441042334 -0.139085665 0.0421898365 0.0137649477 -0.0915340632 0.0716675818 -0.0818142742 0.0774355978 0.132591441 -0.0364979431 -0.112088569 0.025617823 0.0622905344 0.137828872 -0.128434256 -0.0762574747 0.039005056 0.0113735795 0.0853985548 -0.110001206 -0.122525297 -0.118933201 -0.0976378173 -0.00780165195 0.0592657812 0.127340347 -0.116591275 -0.00909214467 0.0285629407 0.137352273 0.0194081701 0.0885308981 0.157478809 0.00182641763 -0.111265883 -0.0360108428 0.0904895365 0.0231916904 -0.134599373 0.123875104 -0.0176251531 0.00123633444 0.118751198 0.0944036767 0.00726477336 -0.00553962262 0.0213748366 0.0605549626 -0.0671228841 -0.11890097 0.0446490161 -0.124758892 0.0125705721 -0.0418758988 0.0201296303 -0.0815314054 -0.0722059831 -0.0872977003 0.17252858 0.0650849119 0.105454117 -0.0630111396 0.0785340741 -0.0949152634 0.120920762 -0.00637194049 -0.122229263 -0.04851266 0.0649544969 -0.0367785469 0.0145572387 0.0296253487 -0.104165144 -0.0307542253 0.0980055779 -0.0429146662 0.0307714939 0.0965365469 0.0424363613 -0.0620633438 0.037256062 0.0731356591 0.0377854705 0.126446977 -0.0181031153 -0.10487172 -0.012096405 0.0115436465 0.00484970212 0.0365195423 -0.0648169369 0.0427874923 -0.0382243469 -0.131412312 0.0129759014 0.014330104 0.104675427 -0.0191158354 0.110161588 -0.0690229386 -0.134754956 0.0209327489 -0.137074128 0.101832643 -0.0448981151 0.0525557846 -0.0926448479 -0.0648545772 -0.118553191 -0.0258715078 0.0647533536 0.0853616297 0.0266875774 0.0280805677 -0.133945882 -0.131097019 0.0535267889 -0.0883116797 0.0746518523 0.0716847479 0.0854772329 -0.0682440624 -0.0383327305 -0.0687909126 0.0661910623 -0.0651709512 0.0676450729 0.042929098 0.0129889995 -0.0627579913 0.026080206 -0.0745798126 -0.110127226 0.0564449728 -0.0521587133 -0.0703866705 0.0996105373 0.0995502174 -0.0150131434 -0.093635723 -0.0350378379 -0.0970593914 0.122739777 -0.0746073425 0.00775636733 -0.0259913579 -0.0691226423 -0.0844953358 0.0796677917 -0.0647141263 -0.126704842 0.0870190561 0.110861555 -0.0944047272 -0.00367192924 0.0355183631 0.118708417 -0.0181086287 0.10595347 -0.00600086153 -0.00448402762 0.0546585321 0.0393356681 0.140889272 0.0358275473 -0.0797418952 0.0966726542 0.102656618 -0.0845542178 0.000538542867 -0.138633057 -0.103164904 -0.0017221421 -0.0267990902 -0.0042184745 0.0618429407 0.0372477919 -0.0600601025 -0.120651938 0.018602442 0.0741632134 -0.0556408912 -0.0401281454 0.0216008872 0.0986668468 -0.0385047868 -0.00614350522 0.0835534334 0.0581347793 0.0706503093 0.0540118366 -0.0558184311 -0.163868651 0.00549533684 -0.0525678769 0.0940028876 -0.0198096931 0.0326584801 0.0405591354 -0.0935613215 -0.0739186257 -0.032372281 0.131545618 0.076330319 0.0784656629 0.096952945 -0.0555465668 0.127968788 0.0291817039 -0.161188528 -0.0909612328 0.0471343175 -0.050026428 0.0184959229 -0.139395848 0.0397638716 -0.18348141 0.112185314 -0.0655662641 -0.0470221415 -0.164569005 -0.121025704 -0.0150856273 -0.00368866767 -0.0741992891 0.059470281 -0.0445875078 0.0394076109 -0.129359409 0.0192312244 -0.0108757932 0.0112697631 -0.146348611 0.159908772 0.0195346791 0.00573998271 0.106009968 0.0103269666 0.0211634543 -0.0780764073 -0.115655147 0.102710932 -0.0541914441 0.0466746576 -0.106848881 0.10031607 -0.134285003 -0.0872690454 -0.135360599 -0.0295483619 -0.105432019 -0.0230201259 0.0187110342 -0.10428597 0.184378833 -0.0646381974 0.0479477942 0.0756504536 0.101597495 0.10669633 0.0338435024 0.0337592065 -0.136492506 -0.0870145112 0.0574491024 0.0298079327 0.0783730522 -0.0157561749 0.0467197858 0.103480637 0.0719107315 -0.10105852 0.0388642065 -0.0440139845 -0.0164328683 -0.0481204912 0.14119412 -0.136799589 -0.0650587231 0.00970490556 0.0506416559 0.0616328567 -0.00483906409 0.00724408031 0.165032029 -0.124474898 -0.149660811 -0.0778745487 -0.127463415 -0.0629397258 0.0866350383 -0.135488585 -0.120086707 -0.0116316313 -0.0172481909 -0.033788152 -0.00575722754 0.0619941019 0.109511442 -0.1259799 -0.0296196118 0.0126680005 -0.00631114235 0.0279259682 -0.000546666677 0.046820391 -0.0707993954 0.0746022463 -0.0110969217 -0.106299125 0.0408700407 0.0607301854 0.0142738195 -0.117608964 0.125323534 -0.0797425956 -0.147771716 -0.0951133072 -0.0761706829 -0.0271511003 -0.0952035114 0.149354368 0.0850981027 0.12360011 -0.0736458525 0.0565205403 0.0375516564 -0.0330046788 0.0698090419 -0.0682012588 -0.108341932 -0.0916303098 0.0491649024 0.128733024 -0.0175282191 -0.0851026475 0.121217623 0.0644281209 -9.09119844e-05 -0.00583170354 0.0807056576 -0.0556110144 -0.030019151 -0.151016012 -0.0976619869 -0.0704679191 -0.140273213 -0.131401047 -0.0380841792 0.105461046 0.115149468 0.0569063798 -0.0500233844 -0.120028786 -0.0609620214 0.0207634512 0.121349677 0.0845320895 -0.00625681877 0.130006418 0.10405767 -0.0260085575 0.00623696856 -0.00945841614 0.100506075 0.0220730081 -0.125443459 0.0532133728 -0.120297104 -0.11440815 -0.0461563803 -0.0888359398 0.013649012 -0.00815679412 -0.070658952 -0.00767463259 -0.120047957 0.120935254 0.0590654165 -0.0607035644 0.0469796248 0.10308367 0.17886214 -0.0048859301 -0.0231490359 -0.127134889 -0.0811686739 -0.0198651105 0.0723841488 -0.00580265373 0.0251238793 0.0345673561 -0.150929868 0.0692640245 -0.0050682174 -0.0870390087 0.0347174555 0.118515827 0.0921180844 0.0585382432 0.102678254 0.0418618806 -0.0638227612 0.0212153941 -0.048158478 0.067370899 0.134428993 -0.0895267203 -0.137082666 0.0383201912 0.0807136148 0.0119578699 -0.00565120764 0.051708404 -0.0704574063 -0.0806446597 -0.0455211401 0.128211096 -0.152886659 -0.126107663 -0.174904436 0.170342699 0.0492694043 -0.016915286 0.0414748713 -0.0318201743 0.103975341 -0.0692306831 -0.0701901168 -0.136825696 -0.104924172 0.0634303093 -0.103916064 -0.106038429 -0.0103737917 -0.102413662 -0.0131952306 0.0928339362 0.0422227457 0.0485036634 0.100036606 -0.0334572345 0.0251479615 -0.0170369614 0.105127081 0.0789836645 -0.125357226 -0.0810341984 -0.119952716 -0.087011233 -0.136233181 -0.145835862 -0.139367864 0.0879707336 0.118222609 0.0654330924 -0.00252592564 0.0332888886 0.0847103074 -0.147119001 0.13717629 -0.0333771855 -0.0392536968 0.140056893 0.161733225 0.170547694 -0.13669911 -0.126283079 0.136217371 0.0222201925 0.100318342 0.0159461573 -0.151088864 -0.0795981139 0.0589573346 -0.00727232778 -0.0690927505 -0.00238380092 0.00583820418 -0.0911569446 -0.131048679 -0.117706373 0.0648147985 0.00487408834 0.117360242 0.170240089 0.118482806 0.0110958666 -0.0510480367 -0.0351133123 0.059696164 0.0868533999 -0.02283502 0.157529563 0.0911459476 -0.130911916 -0.0573937744 -0.116225123 0.0283462927 -0.0848590583 -0.00989816617 -0.0507650711 0.00583241554 -0.00466799736 -0.0543093197 0.123912387 -0.0726056844 -0.0514792576 0.178007007 -0.0574177206 0.0999287218 0.08989916 -0.0277903583 -0.144622609 0.0931683257 0.140252993 0.0123335114 -0.119432166 0.001549048 -0.00325248647 0.096072562 0.162840962 0.0217829365 0.122066244 -0.0385645702 -0.026368469 0.0513184667 -0.0859575272 -0.0242716596 0.00956724584 0.0109226704 0.0818789154 0.0224322379 0.135026619 -0.04657121 -0.104234524 0.0428646505 0.0151414573 -0.0313612148 -0.00296355784 -0.0544822216 0.129524395 0.0263857096 0.0820539892 0.0624815822 -0.0385384262 0.111337319 -0.0279020891 0.0234304219 -0.138835654 0.0737862438 0.0558309704 0.106782034 0.0583992153 -0.0247403383 -0.128191724 0.0889156908 -0.0910850763 0.0517035276 -0.0938775688 -0.0293845311 -0.120674253 -0.0451591834 0.0491400808 0.0255339593 -0.109320991 -0.0378039181 -0.0312750563 0.0803458393 -0.139691994 -0.00930851698 -0.0976287797 -0.114196926 -0.133310482 0.141642928 0.105140746 0.119025633 0.0663658231 0.0742790997 -0.0807274282 -0.049172353 0.0325890929 0.168383315 0.0958275571 0.0830662847 0.13090989 0.11515788 0.0930101275 -0.135337189 -0.01777054 -0.0463960879 0.121989891 -0.0663873479 0.0556551777 -0.0777027011 0.0780093744 0.0564488322 0.0328528732 0.0221739244 0.0690468401 0.165196538 -0.0979238898 0.0833290517 -0.0583337545 0.171756044 0.0485463925 -0.0788506195 -0.133651629 -0.0622514226 0.146948621 0.00967819989 -0.10787762 -0.0629694313 -0.0672841221 0.0454268195 0.153374791 0.0254830644 -0.107513458 -0.0417494588 -0.128024951 0.0972493291 0.0891800448 0.0753429011 0.0632758364 -0.0226834938 0.0985851139 -0.0661747381 -0.0974865481 -0.116021931 -0.0705863535 0.0088367667 0.11096172 0.00588925183 0.0319114141 -0.0778467134 -0.0426055863 0.0243966766 0.00911161304 -0.125340477 0.0958841443 0.0809076428 -0.0837767944 -0.0955800563 0.0739903226 -0.0197054464 0.0984792486 0.0248065591 0.013472463 0.0424549654 -0.115080677 0.0406080261 -0.0103811678 -0.00163237448 0.124697939 -0.0250684526 -0.109149016 -0.0914341062 0.0849946067 0.114163592 -0.114370003 -0.0137929916 0.0836040005 0.113473631 -0.0115816081 -0.00375672383 0.145472392 0.115759097 -0.0200298876 -0.0216319505 0.0506976917 0.146452606 0.00109191891 0.0250434522 0.125239 -0.070178017 0.112422191 0.128775164 0.0972625315 -0.0562150516 0.0652283952 0.0582313985 -0.0210614875 0.0146073569 -0.0898632482 0.00922326744 0.143344715 -0.154285237 0.120073751 -0.0218681768 -0.0313799977 -0.0713483468 -0.11174649 -0.0150706414 -0.0665470064 0.0984330028 -0.0607178248 0.00206361711 0.144247591 0.114495434 -0.0467930511 0.0812650472 0.126196146 0.155254051 -0.117046624 -0.00681339065 0.110044703 -0.130152121 0.0844703615 -0.103106052 0.07677605 0.136585757 -0.00327930111 -0.0542289279 -0.0169889219 -0.00491619762 0.00191336707 0.123525828 -0.0764942095 0.062074706 -0.118892998 -0.152141303 0.128834948 -0.00888511073 -0.128381923 0.166257232 -0.118136637 0.104106829 -0.00407020096 -0.0890831947 0.0344172269 -0.0881365165 0.00957617164 -0.0420764349 -0.11916101 0.0259462018 -0.12878786 -0.11673443 0.0500368178 0.123366237 -0.118328013 -0.0965624005 -0.124413118 -0.0506703407 0.10639628 0.118844062 0.136438951 -0.096905753 0.0549481958 -0.0769049451 -0.0224119276 0.0397996418 0.0951755494 0.113283962 -0.0366398245 0.106758043 -0.153018385 0.116750017 0.0242051464 0.0555575825 -0.144076809 -0.123421922 -0.0913296789 -0.0159076471 0.0744441524 0.129337355 0.0706739873 0.0101508312 0.00735373795 0.127268285 -0.108703181 0.0155255729 -0.0134423403 -0.0967226699 0.0832142085 -0.174966842 0.130565166 0.0653796941 0.0878034979 -0.0906267166 0.0458128788 -0.00101685664 0.0771127343 0.0380266793 0.0676126033 0.119121142 -0.113756225 -0.044930473 -0.0590678304 -0.114111833 0.0717334226 0.121548906 0.141148821 0.145141944 -0.0386701077 0.0908232033 0.0557151176 0.0785376281 0.0665990561 -0.026476074 0.0890998095 -0.0765963942 0.00731649436 -0.0525127202 0.0552615821 0.0508936346 -0.0130886035 -0.136101693 0.0808141679 -0.0865692198 -0.0861371309 0.120797709 0.14255853 -0.0736281499 -0.110956788 -0.0413296558 0.132970884 -0.128850982 -0.0727135316 -0.0477897227 0.0840527937 -0.0905833915 -0.115051724 -0.0579792745 0.111140922 0.10985522 -0.0506750494 -0.0162282735 0.0716301352 0.0544709153 0.12016838 -0.0960830227 0.0585822612 0.0589075349 0.0988149717 0.0618340187 -0.0847306624 -0.0631039515 0.0553748012 -0.111075364 0.0433333553 -0.0188259855 -0.112079769 0.0795213431 -0.0475602113 0.13167055 0.022298038 0.134109125 0.0383149795 -0.0405841693 -0.120256543 -0.104634985 -0.0977489278 -0.020790996 0.105706528 0.133288711 -0.00679467805 0.107927233 0.132649049 0.184538066 0.0214475244 -0.0248829424 0.0847060084 0.059696611 0.0894906074 -0.0552571602 0.110768363 -0.0777745917 0.0256617665 -0.0267863441 -0.011260841 0.167986751 -0.0874950364 0.191600099 0.145262077 -0.0928313509 0.0274879988 -0.0514709651 0.09432742 0.120864995 0.0269548818 0.00873297453 0.0555894822 0.0474308766 0.10841053 0.0247467272 -0.0354869291 0.0167415757 0.130794838 0.130202994 -0.105830118 -0.0619872436 0.118844941 0.0582736246 -0.0529267974 -0.122846067 -0.00995433331 -0.0649164692 0.0907461941 -0.0287418552 -0.0419690758 0.0771300942 -0.0686131641 -0.137708366 -0.0504630506 -0.0929714441 -0.105465375 0.128161833 0.0110401921 -0.102053583 -0.137259245 -0.106930703 0.116477638 0.0865236223 0.0225237608 -0.10337396 -0.141124219 0.0859540254 0.124468133 0.122546621 -0.13006793 0.0637582615 -0.111786462 0.0577507317 -0.104083821 -0.0128176091 0.165550634 -0.0433670357 -0.0197649412 0.0431121029 0.0775365457 0.0529541112 -0.094568193 0.121677496 -0.0553900301 -0.111113667 0.0546647757 -0.102778159 -0.11795371 -0.0317932889 -0.0803981498 0.0593941696 0.0496674031 0.115015298 -0.0528469123 -0.139373824 0.0843660235 0.139188275 -0.00414940715 -0.0868603438 0.13721244 -0.111397579 -0.159685537 -0.082697615 -0.104306176 0.126542285 0.105239764 0.107114151 0.177761674 0.0128696105 0.0826148763 0.058955878 -0.104517892 -0.0758889616 0.09818995 -0.0577240698 -0.0954538211 -0.00908805989 -0.0742976665 0.0133352363 -0.0486188494 0.110651046 0.142552137 -0.115153261 -0.0301514734 0.063916415 0.15691933 0.0455034524 -0.099480927 -0.0727156326 0.0517707281 -0.00254492369 -0.0865125954 -0.0609851852 -0.0927319676 0.0186524875 0.0648534745 -0.106417365 -0.12526378 -0.0807953104 -0.112901188 0.0167454872 0.0380413979 0.0550592877 0.121599279 -0.110051796 0.02049114 -0.113943994 0.0120341163 0.054577291 -0.0282275155 -0.0312832892 -0.14506188 0.0298683271 0.0238241255 -0.109261803 0.156495273 -0.132307068 0.0377000831 -0.194822595 0.0180359278 0.0903957933 0.0792595521 -0.112396851 0.181946352 0.131865382 0.131673768 -0.0266070645 -0.0772341341 -0.152235314 0.00981780421 0.021913426 0.128900573 0.0193908215 -0.0176257994 -0.0472896919 -0.0430161357 0.00791153312 -0.00393258035 -0.0942965969 -0.121386364 0.0414898992 -0.108187407 -0.0423817933 0.0773015916 0.0430543721 -0.140809581 -0.0302241296 -0.0291004777 0.137758568 0.118799791 0.0682975501 -0.0604750663 0.0137153119 -0.132459 -0.127778888 -0.0843767822 -0.0346240923 0.109940037 0.0166125298 -0.0546926185 0.0908764154 0.021722734 -0.098706007 0.0907239914 0.0898113251 -0.0827805996 -0.0991119295 -0.140943363 0.0492345244 -0.0119397789 0.14030765 0.103607342 0.0925647169 0.0516325533 0.0379345268 -0.0236282125 -0.12176773 0.0705950111 -0.0243805572 0.0545013547 -0.0887916982 -0.114718959 -0.0397065133 -0.0364263579 0.0644538626 -0.0560083427 0.00641170144 -0.115581721 0.0916448981 0.165714934 -0.0800587684 -0.139642954 -0.147206143 0.14433305 0.121563576 0.124653377 0.00290819933 -0.00689320266 -0.134743109 -0.0199477952 0.025890775 -0.097605601 0.144976184 0.0240392517 0.106600702 -0.0212116838 0.141039237 0.0561088696 0.0311461743 -0.0972645953 -0.072422564 -0.00339891389 0.0796980709 0.104762718 -0.000878902618 -0.132795557 -0.0110478699 0.0181016214 0.033460509 0.16719529 0.0324967206 -0.0523595363 -0.0596223697 0.00182852231 -0.0620629825 0.0593552999 -0.0177798048 -0.134050861 0.091143012 0.0613186061 -0.0134655545 0.0728626028 0.11032062 -0.110704623 0.113462664 -0.125822559 -0.0217845738 -0.0629318058 -0.110271119 0.0102175586 0.0384269953 0.041528672 0.117570601 0.0818911791 -0.172721431 -0.0826341584 0.143604502 -0.0946950018 -0.0604351908 -0.145674467 -0.0840071887 0.124765471 -0.0483974069 0.0704860687 -0.0513613969 0.102030285 0.0974786878 -0.122555755 0.0484942682 0.120798931 0.0460326076 -0.158095554 -0.174045458 0.0302520841 0.00143720582 0.109413154 -0.089490898 0.0403371304 0.0639648885 0.0195339117 0.0851712674 0.0805694312 0.14608863 0.0028625133 -0.0460382551 0.133578837 0.0474288128 -0.00900612772 0.0885348395 -0.0367066041 -0.0723806694 0.11516311 -0.118180014 0.173740089 -0.146156281 0.0717909113 0.0275212824 -0.00858630054 -0.0898427665 0.0496289805 -0.00977645814 -0.000550225377 0.107040554 0.00615952769 -0.0177549198 0.00348424283 0.0556790046 0.00946377218 0.0532293469 0.065482147 -0.00725790858 -0.0178769901 -0.0931927189 -0.00408215867 -0.0455512553 0.0916866064 -0.0685584769 0.0127655407 0.153180107 -0.111894742 0.0765284225 0.0260704271 0.166822523 0.0185323507 0.0651063919 -0.0187719949 0.124089167 0.060685847 -0.0775963366 -0.0125377784 -0.0656027719 0.0646102652 -0.168868825 0.0715800896 0.0879634544 -0.0288382769 0.106939703 0.0860616416 -0.147091925 0.0701762438 -0.154209405 0.0168217402 -0.0449583791 0.150799006 -0.0704786852 0.0240108818 -0.0355115384 0.0358552337 -0.00473457575 -0.0508537143 -0.141223907 0.0694443733 0.13417317 -0.0782108009 -0.101270229 0.0261989981 -0.0686776191 0.0585096031 -0.0421225578 0.112916127 0.086612314 -0.0666391104 -0.0785085559 0.0723554343 0.0139737725 0.0708736479 0.104420915 0.0537221283 0.0645926893 0.0457919091 -0.0992759913 -0.0648162961 0.0873944908 0.135357454 0.0170427561 -0.111893699 0.118674949 0.0242431909 0.131864205 -0.0160110891 -0.0652568489 -0.136016116 0.109248772 -0.0484858006 0.0772848427 -0.14131242 -0.0729296431 -0.0393306315 -0.136528164 -0.0325899497 -0.138895422 0.0920817852 -0.00506219268 -0.0218565464 0.178741023 0.000478784292 0.131854966 -0.0545493215 0.0215138886 0.00403119158 0.0818745121 -0.0696112365 0.0600213483 -0.0925471112 -0.0903580412 -0.0477291904 0.107568391 0.00575149059 -0.0782493427 -0.139606044 -0.104121134 0.000853225589 -0.0665216073 0.0340940468 0.089630641 0.105736643 0.0541669875 -0.0017753446 -0.0978966951 0.0146369338 0.148612946 0.0921797678 0.136579424 0.0460715145 0.0911454633 -0.128372431 0.0109238625 -0.0441332199 -0.0767628923 0.0842499733 0.0932835937 0.0461514592 -0.0389378071 0.0933182985 -0.0622017495 -0.150734246 0.0955395252 0.0519403517 -0.0935698077 0.066866152 -0.0563694201 0.0773465633 0.130469427 -0.0873394459 -0.0453016274 0.108205438 0.113830492 0.0172546878 0.142020121 -0.154692054 -0.0341549404 -0.0286441967 0.109917641 0.0980234817 -0.0668107346 0.184589744 0.0403014049 0.0617943406 -0.0170874391 0.165723398 -0.129302576 -0.0549281389 -0.0933095962 0.0417284593 -0.0645934939 -0.110736929 0.0152139366 -0.0576914884 0.14489913 0.0873956382 0.122595996 -0.100744717 -0.0429090895 -0.0427098498 0.00572537677 0.0862406343 0.0775141418 -0.114131004 0.0538300015 -0.133984268 -0.0513852537 -0.10520786 0.0823375359 -0.154663965 0.140852332 -0.12760137 0.0852351859 0.0137166381 0.107333735 0.0452813096 -0.0948897004 0.151388273 0.0713874474 -0.128167778 0.0409513377 -0.0138035901 -0.124753937 0.0356289335 0.0602589771 -0.0651701242 -0.0210708473 0.103122458 0.0906714946 -0.00217249128 0.122800052 -0.181580707 0.116333932 -0.0186058432 -0.0243001878 -0.113380618 -0.118654899 -0.0473651513 -0.0539538004 0.0831884071 -0.146917343 0.087518312 -0.0194211677 -0.0846155882 0.113477409 -0.136310667 -0.0191066563 0.143494949 0.112203613 0.114737526 -0.111948609 -0.00443618372 -0.0773533285 0.136251882 0.0225571636 -0.0362712741 -0.0346994996 0.114932373 0.0432204269 -0.0263436884 0.107738495 0.076974012 0.0996974856 0.119173273 -0.000754479493 -0.120190717 -0.194255233 0.0754887313 -0.098057352 -0.0132691478 -0.179493576 0.0841401368 -0.0858590156 0.0584929027 -0.0300332811 0.123049341 -0.0501776785 0.0645771027 0.147005603 0.0247138739 0.0643707812 -0.163929448 0.106429175 0.0111500323 0.00196518004 -0.136184648 0.0189471096 -0.0814004242 -0.045225054 -0.0670835301 -0.0842032209 0.0329777375 0.0835169405 0.0539242215 0.0254002046 0.0783123672 -0.00774729624 0.0359950811 -0.0802180246 -0.107105985 0.00407534558 -0.00570665905 -0.0215339214 -0.0752450898 0.0079908995 -0.051493451 0.0471975654 0.137056708 -0.0891998485 0.0102625685 0.074707374 -0.105349854 -0.013545217 0.0585588515 0.123852111 -0.0915067494 -0.0506767295 0.146273047 -0.0317402482 0.0230212267 0.0921593308 -0.147391126 -0.102736384 -0.100691356 -0.0281435605 0.0503647961 0.0425743312 0.0990781933 0.119182095 0.0459855348 0.0305577312 -0.123558313 -0.0513050854 0.0112928003 0.00887593627 0.126145825 0.00219893456 0.0915814042 -0.109302096 -0.15622583 -0.13981463 -0.00616870821 -0.0105179995 0.104944795 0.0233698469 0.0621656775 -0.134819224 -0.0790077522 -0.0676694512 -0.118123025 0.0649551898 -0.0268536378 -0.128346384 0.0998368114 0.0453064777 0.0186656322 0.082455948 0.0146208499 0.0852921382 -0.034911897 0.0727281496 -0.0483980887 -0.121127099 0.0403252244 0.063668929 0.0448285639 0.119639181 -0.047936447 0.0020842955 -0.093942605 -0.104747362 0.0033461235 0.034542352 0.0912363082 -0.0369631052 -0.0812179595 -0.0106220422 0.00214873254 -0.0435321555 0.0224560201 -0.136056453 0.0154713243 -0.0294029415 -0.030266948 0.0689932555 0.0570425093 0.098155126 -0.0518679842 0.119383372 -0.138322964 -0.00904059596 -0.106911823 0.103631556 -0.103188537 0.00235909224 -0.0282520652 -0.0132444557 0.0874274075 -0.0390356034 -0.0197433233 -0.0990931988 -0.0221592877 -0.0945370346 0.0292591304 0.0865644217 0.0864166915 0.100083038 -0.0889186412 -0.115894549 0.102965675 0.0463272929 -0.134802818 0.0737079829 -0.129007578 0.0247757733 0.0989635438 -0.11343433 0.134549081 -0.0447648764 -0.0838385448 0.0168119203 -0.0835036933 -0.0518646985 0.0785566717 0.0304447822 0.0878861994 -0.0602087826 0.0966850668 0.00694012549 0.150681928 -0.0779627338 -0.0904122442 -0.0970596001 -0.129838735 -0.050377138 -0.120469227 0.0891878232 0.0898670554 0.0890591964 0.0588300936 -0.00137163699 0.122232124 -0.0888908282 0.091505006 0.0884710699 -0.00118646026 -0.0513543747 0.120835468 0.10668838 -0.123527482 0.126270682 -0.0198479965 -0.136215568 -0.120502472 -0.11477147 -0.0835626945 0.112296805 -0.10101068 -0.0431131609 -0.0186134148 -0.0433320589 -0.000781024981 0.104710743 -0.0381170735 0.0791320279 0.0748666972 0.121473044 -0.0812482983 0.12976031 -0.0290680751 -0.00869207084 0.134282753 -0.124510139 0.0872084498 0.0703281984 0.044497788 -0.0438723378 0.0582895428 0.0864354447 0.00991265662 0.0587073565 0.0693353862 0.0946473181 0.132393822 -0.0691181794 0.114411987 0.0981440097 0.0721710473 0.0940810665 0.0939522609 0.0250950288 -0.00488990825 -0.0450708121 0.00825079717 0.0511284098 -0.0437048152 -0.0188587848 -0.0834606662 0.0222276002 0.115497097 0.0385790952 -0.0981816575 -0.0865187049 -0.0519928411 0.0127416467 0.0403454639 -0.102931798 -0.0622832291 -0.176352426 0.0319424681 0.115145199 0.137701288 -0.160753131 0.0574897826 0.029381264 -0.106967442 0.135685384 0.0374634452 -0.0107224155 -0.0305843167 0.0789548606 0.0807328969 0.116793022 0.0109361755 -0.116716683 0.0137034692 0.0524121523 -0.0414179638 -0.0828967467 -0.131809413 -0.08836063 0.0339427441 0.00510858931 0.102733135 -0.0078022778 -0.0649038255 0.113678351 0.0853035152 -0.0652009472 -0.105958655 -0.0926674381 -0.124525182 0.0650760978 0.0134403473 -0.0214727018 0.0792901665 -0.0695419237 0.00138363184 -0.0841140822 0.00448186696 0.011020178 -0.0218410827 0.124228075 0.135860786 -0.0192789454 -0.10124626 -0.0194821134 0.0967154875 0.14184539 0.0926100463 0.0980347469 0.00184068806 -0.095286414 -0.123941429 0.025722323 0.11833895 -0.141884103 0.117436834 -0.0285835806 0.129173145 -0.0291574448 0.0875629336 -0.12026988 0.108618274 -0.0268362314 0.140304998 0.0205702633 -0.0184959918 -0.0276578963 -0.131651908 -0.0468064025 0.100602806 -0.0214187056 0.121467933 0.132986173 0.0596923828 -0.100520998 0.0469026566 -0.0225379243 -0.124888517 -0.0528000593 -0.119780593 0.103240386 0.0283819586 0.00888063014 -0.0159017146 0.0695591718 -0.0311708003 0.0383356661 0.0305758268 -0.0846125856 -0.133274123 0.0758237839 0.0325065106 0.0458696932 -0.101240918 -0.0143970549 -0.0982608497 -0.111203283 0.0112188458 0.0285433233 0.0977355093 0.0379961878 -0.0932414755 -0.137481123 0.0168030262 -0.130169049 -0.125621766 -0.0568111241 0.0422300845 -0.025392022 -0.0273560993 -0.0488719195 0.00497778924 0.0536593273 0.0217000954 -0.0605572015 -0.0740404874 0.117026515 0.142124146 -0.102213182 0.112507164 0.077157028 0.0520981103 0.105322793 0.126890764 0.0796572641 0.029000476 0.0621110015 -0.112644024 -0.111841545 0.114291631 0.119438633 -0.146059752 -0.140381634 0.12208508 -0.0593487211 -0.123670183 -0.0897043496 0.0858117491 -0.00866622385 0.081977047 0.0776127875 0.0893446133 -0.0835789144 -0.0501656011 -0.00162425637 0.113464631 -0.0252724476 -0.103041545 0.113613687 0.101202324 0.100906543 -0.126668274 -0.0983274356 -0.0464979149 0.151394174 0.0900189281 -0.0734032542 -0.0592180528 -tensor_12weight 2500 --0.0283639673 -0.0221012682 0.133199289 0.0563756078 0.0816197246 0.0497825928 0.0042012888 -0.0795029998 0.0960107818 -0.0432373807 0.121810496 -0.0352233462 -0.00614713971 -0.0204377715 0.0226566363 0.0332049951 0.0991061553 -0.101053268 0.0533379503 0.106147163 0.0264711361 -0.0589281917 0.0324204192 -0.105546504 -0.00826245081 -0.0233381912 -0.0656121224 -0.107647173 -0.0529786088 -0.0139337238 0.0566528141 -0.0541924387 -0.138034001 0.00951558538 0.0105655985 0.127043515 0.0221604444 0.0460083932 -0.053250242 -0.078631945 0.0428880826 0.0788966715 0.0724399239 -0.0252473392 0.105300531 0.00663374923 0.103679053 -0.0542423241 0.00125868618 0.0159070492 0.0878015533 -0.0953141749 -0.00558294635 0.121205859 -0.0582696423 -0.0979145169 0.129265517 -0.135002077 -0.0146291191 -0.122696914 -0.081385985 -0.0635085255 -0.0441100746 -0.0792573318 0.0985748172 -0.0419243202 -0.0581838712 -0.0535519533 -0.142513841 -0.0591027141 -0.137103438 -0.106035508 0.0540995859 -0.00673660636 0.0947093591 0.080907613 0.101540178 -0.0970778465 0.044728864 0.00199462846 -0.138367772 -0.0271480009 -0.141949549 0.133446857 -0.0284297504 0.11598064 -0.139251098 -0.11250446 -0.0144105088 0.0116322637 -0.0658263117 0.00760472333 0.00838519912 0.0230650119 0.107505634 0.0946382433 -0.04299891 0.0719103888 -0.102656573 0.0685082525 -0.0726309568 0.0508401543 -0.164903283 -0.00989604276 0.0877222568 -0.0414222702 0.0155824088 -0.163643703 -0.127258003 -0.036953602 0.144812256 -0.0623725951 -0.0524286106 -0.0514523163 0.149055287 0.151022196 -0.0676416233 -0.00197385135 0.114244364 -0.0560362265 0.0799408183 -0.0222610235 -0.0891805142 -0.00829313789 0.074500069 0.118379205 -0.0137284929 -0.00101503101 0.153622374 -0.0226939153 0.00704781711 -0.0985070691 0.0757866055 -0.0784802958 -0.0075249183 -0.158581018 0.000263459544 -0.0682379603 0.152031437 -0.0480913743 -0.127841696 0.0475812815 -0.0938547626 -0.0747125596 0.108499467 0.138318345 0.149815157 -0.167665958 0.0575088561 0.0208468586 -0.0716508031 0.0452511907 -0.137918025 0.0906910673 0.0187588073 0.024338223 0.0748084113 0.102134176 0.0844082534 0.0736728013 -0.0625494495 0.158075228 0.167300254 0.124639682 0.105899356 0.0630086958 0.131153017 0.112854511 -0.00915049016 0.166930482 0.0416866131 -0.123647667 0.0432391763 0.10396602 0.110268258 0.113732249 -0.0752861872 0.174172521 -0.102077015 -0.0965104327 0.0681533068 -0.00983810425 -0.106261119 0.0500092097 0.0385275073 0.023127025 0.0940388143 -0.0235950071 0.0871616006 0.0998354703 0.0826614797 -0.100802064 -0.0113284597 -0.125418305 -0.0762608498 0.0396648198 -0.0408090092 -0.11062713 0.0179210901 0.030816406 0.0176397078 -0.0463601053 -0.0776805356 -0.0160311256 0.157598197 -0.0891788378 -0.0539689176 -0.095866017 -0.06246797 -0.0310623348 0.0908927321 0.0408000574 0.0765677392 -0.0167786833 -0.0754223838 0.0956952795 0.114675142 -0.108119257 -0.0605223328 -0.024680933 0.0278924722 0.0077753365 -0.0282166582 0.0750903338 -0.0393958427 0.124427982 0.0725398734 0.143909976 0.0277911164 -0.0190528724 0.00227800012 -0.0566477478 0.00833356939 -0.0737289861 0.0828023031 -0.0422451794 0.111898191 -0.134707883 0.19176881 -0.0650343299 0.153366417 -0.132717431 0.0561521985 0.112367474 -0.107373334 0.0557826981 0.139610589 0.0258726813 0.102975905 -0.141169146 -0.0615244322 0.0394206047 -0.0578120835 0.00785397924 0.0674745068 0.0853137672 0.175648615 -0.0636080429 0.155097261 0.111320585 -0.172565565 0.0217328835 -0.0781033784 0.132392913 0.0711862743 0.0581634976 0.186962739 0.0883171558 0.0591275692 0.0622355938 -0.00711058732 -0.0328418538 0.0393970683 0.0599646643 -0.0935084671 0.11538434 0.00435523642 0.0496765226 0.120068103 0.150854304 0.125003979 0.10406445 -0.0222269446 -0.0942721814 -0.05046333 0.0410163514 -0.0161274076 -0.033829648 0.0972495377 -0.143943429 -0.0443067737 -0.0550568588 -0.0995942876 0.055709511 -0.0130466223 0.16307734 -0.0587146431 -0.0876527801 -0.0571654439 -0.0653208718 0.0995535851 0.110708371 -0.117773779 -0.0629195049 -0.0506410673 0.104000725 -0.0097075915 0.039908275 -0.169400364 0.116414152 0.0159012116 -0.115527548 0.0599115118 0.0376613848 0.0347734354 0.149321213 0.0256413259 -0.0118766362 -0.00494059315 0.154646352 -0.0279375352 0.0956456512 0.128097624 -0.0107601006 0.102062821 0.0818155333 -0.00417978317 0.169782162 0.075039342 -0.132441014 0.136600628 -0.074167937 0.182788104 0.0683773085 -0.158134758 -0.153111815 -0.131043464 0.0933924839 0.149934262 0.0977778062 -0.123312928 -0.0546211079 -0.0884571895 -0.123066284 0.03769508 -0.0408919007 -0.0261739939 -0.0946909636 0.0763799548 0.10528186 -0.0239403676 -0.00170964003 0.107443631 0.0624295175 -0.041471377 0.0583784059 0.101691283 0.172578365 0.147721156 -0.00853029452 -0.0682957247 -0.128606334 0.108250916 0.110467754 0.0698471293 -0.10638088 -0.134169206 0.0335389711 0.0807912797 0.10768722 -0.0748209134 -0.0619279668 -0.0597048439 0.0238682833 0.0770351812 0.133448511 -0.116575532 -0.00418985356 -0.014257974 0.186535001 0.0766483098 0.0578167289 -0.1063518 -0.0533722229 -0.0255306661 0.0375191718 -0.0521368645 -0.0643548667 -0.100525327 0.087843962 0.0360278301 0.186081558 0.0779006332 0.124324918 -0.0178796574 0.0964507908 0.000115471157 0.121352643 -0.0145074725 0.136342749 -0.0524556786 -0.0543406755 -0.0231247786 0.0547336638 -0.110653035 0.00989100244 0.0201757103 0.168474302 0.0595019609 0.140253812 -0.0207650233 0.156596705 0.0423379913 0.0540750362 0.0326289497 -0.0725854859 0.120222606 0.0588764995 0.0232573915 0.156475991 -0.107084453 -0.109426022 0.120157138 -0.0353143811 0.109706894 0.0043564043 0.138297886 0.168600574 0.147256792 -0.00588848395 0.0737749934 0.0574953109 0.0112638986 -0.125408784 0.0741967931 -0.0695395544 -0.0118542425 0.0154474955 0.0319254026 -0.0753778815 -0.0449277535 -0.00750545319 0.170274869 -0.131167576 0.135590628 0.122695968 -0.0971804634 0.0734618902 -0.00931429863 0.0365690589 0.0510447621 -0.0520833731 -0.0832352266 -0.101591244 -0.0607022047 0.0549951196 0.127605066 0.0541120619 -0.0588162467 0.0998720974 0.0493049473 -0.0581545606 0.0780114084 0.0665595233 -0.0931976438 0.038903445 -0.0382999554 0.0369430333 -0.085522607 0.0509717613 -0.0687565655 -0.0897999555 -0.0971511528 0.138259694 0.116990075 0.00449900329 -0.0872562379 -0.138075918 0.096727103 -0.0454748869 -0.00835196674 0.0760231018 0.0369116217 0.0443069041 0.0586204678 0.134296641 -0.131746709 0.05054304 0.141017333 -0.0719901621 -0.0659282431 0.109382346 -0.129540086 0.118722782 0.0131379962 -0.10180755 -0.0842798278 -0.0826065615 -0.0181476772 -0.0728539303 0.0718309358 -0.0300796181 -0.120240092 0.133931667 0.0222150292 0.0798185244 -0.107362755 0.105332904 -0.135004058 -0.11729826 0.147456452 -0.0890139267 0.0949609131 0.13500765 0.126173526 0.11857067 -0.000773293898 0.0894765258 0.0475671589 -0.0154326251 0.15092206 -0.037826851 -0.049774427 0.103259467 -0.0269829631 -0.0261214655 0.0183449704 0.0224278755 -0.157012105 -0.0851792991 0.0859342813 0.112071827 -0.0393440984 0.137702033 -0.0219873022 0.115037508 0.0346440673 0.0699992254 -0.0236694999 0.164631814 0.0094958609 -0.153284445 0.040881291 -0.108448327 -0.110614225 0.0260498095 0.0183986761 -0.116771467 -0.00523093343 -0.0689058378 0.0228717029 0.034788698 -0.081005007 0.0377451777 -0.0307365786 -0.0821703076 0.0325528234 -0.14126493 0.0695137903 0.0801631436 0.0886292234 0.183120564 -0.0694635212 0.161076441 0.0139303654 0.108437546 0.0401087664 -0.0091292914 0.00868863612 0.141585931 0.156644121 -0.0755528286 -0.0417527556 0.00487269135 -0.0350182503 -0.126364589 -0.024675725 -0.0234753042 -0.00169406319 0.0142500447 0.0215916336 0.0767152011 0.0652107447 0.159517407 0.103661276 -0.0556443408 -0.0293452106 -0.0731868073 0.002619609 -0.0566306897 0.00929513387 -0.197862059 0.116043128 -0.00975279883 0.105451569 -0.0839810297 0.0331301987 -0.181696653 -0.0319507346 -0.00473482907 0.0933903456 -0.0281781182 0.00192525925 0.0756578669 0.160195202 0.0697703436 0.146909162 0.0564890578 0.00285601406 -0.0287020877 -0.0564336739 -0.0819117948 0.0810552388 0.165045336 0.147315055 -0.0212504752 0.10172905 0.0923355818 -0.0141675817 0.150093243 0.179681987 -0.114264801 -0.0956551284 -0.00638729148 -0.128433809 0.0788437128 -0.119181894 -0.0507362969 -0.115742147 -0.0738439783 0.0803774595 -0.0840121433 0.138585642 -0.0423021615 0.137064368 -0.0672038868 0.117374197 -0.142620116 0.0667178184 0.140385941 0.104921885 -0.107778296 4.28000058e-05 0.124074519 -0.114285842 0.0608676858 0.176241919 -0.0267177299 0.0773821026 -0.010434255 -0.0313907899 -0.0211786404 -0.0771455616 -1.91580984e-05 0.0954339802 -0.0113247158 0.103564784 0.17941153 0.166724324 0.0164197646 0.0291595794 -0.0204665605 -0.0382782891 -0.0668093562 -0.123122104 -0.132946268 0.160618961 0.0747604817 0.0837294608 0.169379547 -0.0834754705 0.0670293868 0.125993848 0.0960132033 0.00812351983 -0.0131131131 0.076757364 0.160401285 0.107536592 0.0723732188 -0.00439980626 0.0320614874 0.147804722 -0.123346433 0.138982892 0.0878069773 0.143579617 0.0674099252 0.0150862103 0.107429564 -0.0661900267 0.169957653 0.0881028622 0.0399612524 -0.0500341803 0.153780296 0.166395634 0.137741536 -0.106945679 0.0822954476 0.0795641989 0.135367706 -0.0886543989 0.00912791491 0.0881962925 0.00199659169 -0.0546213388 -0.0906977132 0.079190582 -0.0895646214 -0.135372177 -0.114347287 -0.0691716 -0.00326307118 0.0402559191 0.104741633 0.139453545 0.0585349798 -0.0482536629 0.0699717999 0.0140683651 -0.0911885202 -0.136688069 -0.0421462804 0.0418486297 -0.119360626 -0.132552981 0.139314875 -0.0619060844 -0.0730924681 0.0170933753 0.0174552351 -0.100706309 -0.0780835152 -0.0697649717 -0.0582113415 -0.0707645416 -0.104700163 0.0463543534 -0.00485464931 -0.0724216402 0.0457475185 -0.0357155427 -0.00419606268 -0.119270869 -0.0787308067 0.0156891048 -0.0961950272 -0.0582991764 0.0977203697 -0.0429823883 -0.0821526423 0.143807203 -0.0634690672 0.00810478069 0.0317232311 0.0819439888 -0.0359285586 0.145277902 0.0952888206 -8.69644427e-05 0.148746893 0.0504151061 0.130737811 -0.0782369152 -0.0521331243 -0.0100645348 -0.0266241431 -0.137800336 0.114405535 0.00846977346 -0.113788694 -0.0912239552 0.120060176 -0.0522767827 0.0220687613 0.069416903 0.0120533248 -0.050613381 -0.0949904099 -0.0282441527 -0.00488385558 -0.099041827 0.137989372 0.0805989653 -0.0629852638 0.0974306986 -0.0589808449 -0.0508747622 0.075892739 -0.112869091 -0.0507352911 0.153549612 -0.071098201 0.101641163 -0.0416162089 0.0734882876 0.0855877772 -0.0345149338 0.127689764 -0.0378296189 -0.0469297916 0.164389119 -0.0796598718 -0.118202388 -0.0146005005 0.168985084 0.160650715 -0.0404448994 0.155802146 0.025944557 0.139599308 0.0971967429 0.0117399581 -0.0171507876 -0.142275631 0.0839506909 0.0660035759 0.125143692 -0.0461359918 -0.116108619 0.098188132 0.119050659 0.0910999924 -0.117082357 0.0748248175 -0.0653233901 -0.0601715185 0.0412754826 0.105733104 0.0728395283 0.0162961781 0.131876916 -0.115422845 0.144570231 0.0887707174 0.097047694 -0.0720524788 0.0252056289 0.0271244925 0.130636543 0.0491141453 0.0332921445 0.113790326 -0.0568653941 -0.0698323101 -0.10968978 -6.08155533e-05 0.0675321668 -0.137371317 0.0749645829 -0.0216469616 0.0328196287 -0.141031533 -0.122319311 0.131579712 0.0271315724 0.12323828 -0.0254372582 -0.0824730098 -0.0256547406 0.1272306 -0.0528855324 0.12240018 -0.0112800747 0.0355034769 -0.0888580233 0.0970521122 0.0183172673 -0.0275907442 -0.122296013 0.0405875295 -0.112721227 0.0605996549 -0.124520272 -0.0803802982 0.0134015977 0.0546580702 -0.13796401 -0.0714464486 0.0842663348 0.124747202 -0.0913373977 -0.0796824321 0.125632361 -0.0140991956 0.139319405 -0.103938892 0.0555772036 -0.100091144 0.0524645001 -0.0437719002 -0.0133467764 -0.129776016 0.133946911 -0.0986202359 0.0850319415 -0.110841952 -0.061344333 0.00772999227 0.0248650014 -0.0392874405 0.133872464 -0.0498681031 -0.0235028919 -0.0904225931 0.0328456573 0.0908727348 0.063584283 0.0274092723 0.116862483 -0.0850711316 -0.00758437719 0.0703060776 0.0831446201 -0.130965278 -0.0187913738 0.0873690993 0.0466382094 0.10954687 0.132400319 -0.0466739088 -0.0960940421 -0.1589448 -0.0499225333 -0.14365828 0.147253916 0.114190049 -0.0489069223 0.00201576366 -0.0865763724 -0.141105622 0.0361168049 0.156169817 0.160132021 -0.0354782976 0.0171889104 0.00112317502 0.112893924 0.078825742 0.100391164 0.0169844106 -0.031357035 0.103728026 -0.0463408977 -0.00959085487 0.131976262 0.0656702071 -0.0224215165 0.0772374868 -0.102550328 0.130415276 0.0524659678 0.0614057928 -0.0569102988 -0.00330987363 -0.123422012 -0.0507996455 -0.111704901 -0.0851848572 0.0836961493 -0.0112077333 0.13896206 -0.0878892019 -0.0824800953 0.0448645279 0.0262479223 0.0815070346 -0.0392298363 0.0658551753 0.0590564199 0.137629583 -0.0485812463 -0.0806304142 0.0890918598 0.0108571658 0.0944422483 -0.0846425742 0.160814211 -0.0429416671 0.0703043491 0.0728700608 0.194119066 0.0854097977 -0.109207563 -0.0898918658 -0.0273189656 -0.116638198 0.0686772019 -0.109993316 0.124169983 -0.197762132 -0.0250630938 -0.0492028296 0.0160446316 0.174763769 -0.0346757248 -0.0763854831 -0.0731105581 -0.0112034082 -0.0417329669 0.0718449354 0.0531028807 0.0267944783 -0.125297725 0.0641857833 -0.0335034095 0.0876890123 -0.0756414309 0.023246035 0.0138799511 -0.00875826553 0.0961356089 0.0716646761 -0.0198832899 0.0437015556 0.0223694257 0.0413542055 -0.0606194325 0.0904049575 0.146203548 0.128786162 0.0143855959 -0.145219877 -0.0717869774 -0.0981839523 -0.0615584739 0.148408964 -0.102737933 -0.0149649279 0.109365121 0.0478291512 -0.10187453 0.00699617295 0.146613404 -0.033152815 -0.0324295312 -0.124276176 0.149597749 0.0922028646 0.157599032 -0.135646835 0.135939449 0.101161912 -0.0470223129 0.135992065 0.049443569 -0.173461407 -0.0964307934 -0.131441608 0.0878081992 -0.0803490132 -0.0302332956 -0.103354827 0.0431390628 -0.102742799 0.0908333436 -0.0526512675 0.0258093104 0.0369244777 0.0257188752 0.133202627 0.0297276527 0.149491176 0.151769906 0.0164624956 0.0616860129 -0.0109535027 -0.118965067 0.0411789566 0.1403061 -0.0148792714 0.0120514603 -0.0927679688 -0.0414527878 0.0476270206 0.0865987465 0.157360256 -0.00801904406 -0.0541693904 -0.0268263221 -0.109232292 0.0128868064 0.0780390874 -0.124661535 -0.0210585389 -0.14193961 0.111578748 0.036265783 0.0652774721 0.0353158712 -0.150081992 -0.0131793777 0.0812528357 -0.119403362 -0.0613792799 0.160303533 -0.191477299 0.0978740901 0.0488265157 0.0381616503 -0.057727918 0.0255572535 0.0918723196 0.0639780238 0.0967227072 0.128719524 -0.0644618943 -0.0716221407 -0.113643073 0.0597443692 0.133907422 0.132179052 -0.0287083741 0.161785662 0.0266503692 -0.0227328409 0.116025813 0.098081924 0.0945273861 0.121004365 -0.128996223 -0.16734314 -0.116783001 -0.125158042 -0.124759458 0.0583286844 0.0625823587 -0.14607048 0.0436232202 -0.0646699741 -0.0939132422 -0.0846700892 0.171912223 -0.00973419473 0.1652738 -0.0948991999 -0.061686486 0.168850407 0.0989598855 -0.111697182 -0.0287689064 -0.0542520583 0.0386746228 0.02370058 0.0738422796 0.0875272676 0.0808269605 0.13247247 0.017513141 -0.0532009676 0.0261921436 0.0148328589 0.12498486 0.0354332887 0.035208758 0.136238024 0.0110940281 -0.0153260306 0.0743869767 0.0171744954 -0.0469010063 0.0871724337 -0.119166553 -0.0623017251 0.0695702434 -0.0897610858 0.0566469952 -0.0424713381 0.0625269711 -0.063430272 -0.138267055 -0.0862576067 0.0633616149 0.0599713437 -0.0952118188 0.120180786 -0.0877415165 -0.0423083492 0.142752334 0.0664141625 0.0153184086 -0.11723125 0.0551554859 0.0941181779 0.0712193325 -0.103319407 0.0191577971 0.122941971 0.10161072 0.0123700919 0.133175269 -0.0438123122 0.00521303201 -0.0965816975 -0.0765956715 0.157058403 0.0214360245 0.0272503048 0.0736062229 -0.127181187 0.0797498822 0.00283462019 -0.0299958158 -0.177072033 -0.10098806 0.0592009835 0.111474186 -0.0195805114 0.0541746095 0.0449817032 0.0997726023 -0.0977167487 0.119134799 -0.0316534825 0.000842235982 0.10191527 0.117535733 -0.0239665229 0.126424983 -0.123411469 -0.075183101 0.0330423154 -0.0681335926 -0.0103695542 -0.0856132656 0.0537110977 -0.102201037 -0.077232644 0.0817721263 0.0940563828 0.0121499747 -0.0706892982 0.0818995014 -0.0357823521 -0.0557577461 -0.0687677711 0.0249025971 0.115824074 0.0820207819 -0.0299545322 0.154968947 0.146601692 0.0333185792 -0.0947523862 0.11622557 -0.00900522526 0.164223433 -0.00459069014 -0.0108394325 -0.102431804 -0.000589489937 -0.149016678 0.094403781 0.00958791561 -0.139076263 -0.00828526635 -0.0485995077 0.0503224507 -0.106602147 -0.0803155676 0.0535347834 0.134317338 0.116844706 -0.0210482683 0.153106958 -0.0599929169 -0.0479789935 -0.00114973786 0.0614023507 0.00115412858 -0.0867474899 -0.0715186149 0.0718893707 0.0815631598 0.0764374584 0.120969631 -0.0238302015 -0.0586408377 -0.172944322 -0.032943733 0.0524964482 -0.116896465 0.0810932145 0.0334346704 -0.146538407 -0.0801583529 -0.101430289 -0.0612037107 -0.113821179 0.00876645837 0.0436918586 0.00689579546 -0.126817837 0.0449476391 0.00970517192 0.120329946 -0.130458891 0.0930926949 -0.0741954371 0.117795736 -0.0513594598 -0.0421553478 -0.0799744502 0.139152512 -0.132767022 -0.148529813 0.0808773115 0.0915669501 -0.137642413 0.179301977 -0.0657531545 0.111276798 0.13818717 0.180973396 0.148477748 0.00805056468 -0.0999653786 -0.0432874598 0.14668797 0.00481138518 0.0158720016 -0.111080863 0.0309790596 0.0831020325 0.0419512913 -0.0200633295 0.0992910117 -0.0361859724 -0.0418536253 -0.0347556248 -0.055278711 -0.129016384 0.156187266 -0.0416366123 -0.0245922692 0.0343578979 0.0859458372 0.101438627 0.00994137488 -0.069522813 -0.0426072292 0.0350656547 -0.103549249 0.0265573412 0.0763911158 -0.102405414 0.0613944381 -0.122291684 -0.030453749 -0.0144146821 0.0219928026 0.128770724 -0.0348007977 0.176855937 0.114892505 0.0656236783 -0.00231774151 0.150578141 -0.0806376413 -0.0191067625 -0.112510107 0.0331190526 0.0922281742 -0.130172268 0.0489657484 0.0407428741 -0.112448193 0.131103024 -0.0824060962 -0.0440546162 -0.135340303 0.167595923 0.0455297045 0.146565259 0.106871687 0.0179066844 0.14257127 0.0931942984 -0.0602279194 0.114411339 0.0802091658 0.183811814 0.0961534381 -0.0782056525 0.0361536555 -0.0492672026 0.0595217533 0.107877143 -0.0258457195 -0.0297816172 0.11304047 -0.0520307608 -0.106525473 0.0179495215 0.050986968 -0.00489270268 0.0162587743 -0.137507111 0.0951974392 0.0486419611 0.117301539 0.0375222899 0.00458820676 0.00400207192 -0.0281724483 0.160230845 -0.0574774742 0.0331582278 0.0722234622 -0.0535472929 0.0631503314 0.0969888195 0.140119597 0.0477451496 -0.175573215 0.0213595796 0.0623874292 0.0219977442 0.123139918 0.0430315174 -0.0433251262 -0.107093088 -0.112723738 -0.000674394774 0.0406252891 0.0568238758 -0.10773351 -0.0489739999 0.0599360615 0.126524165 -0.0252056345 0.103718482 -0.00571722444 0.0458063446 0.163875476 -0.00559463073 -0.129122823 0.0351556465 -0.12400604 0.0866204128 -0.00551600335 -0.0378636234 -0.128071412 -0.0678838044 -0.0604350902 0.135361746 -0.0829056799 0.00692036748 0.00260563195 -0.0532157905 0.124754503 0.0440538712 0.0327834561 0.182058588 0.110708781 -0.0718415529 0.0142884664 -0.0505664833 0.111684635 0.0205106754 -0.115548059 0.0633464828 -0.099902004 -0.144687116 0.102363184 -0.0373336896 0.0391648747 0.108540595 0.171795741 -0.0586603731 0.135946795 0.0958023518 0.0913395435 0.104659595 -0.104977675 -0.136726558 0.101383127 -0.00297953957 -0.0918395743 -0.0204569325 -0.130254254 0.0839903727 0.0175899137 0.113330193 0.0912543014 -0.0047330535 0.123540469 -0.0720442235 0.039481502 0.163159758 -0.122090541 0.161992833 -0.116110057 -0.0345729962 0.180961043 0.13387315 -0.0200760309 0.0601475984 -0.124806568 0.155271858 0.0845254213 0.0596974678 0.126717508 0.0584589094 0.104401402 -0.0844913498 -0.106263012 -0.0435930751 -0.0393355973 -0.043946974 0.0409472808 0.0387777314 -0.0833237693 0.0790580288 0.00874889921 0.122026242 0.106449321 0.00690521859 -0.0539527871 -0.0490253009 -0.116141111 -0.0438183956 0.0866069868 -0.0664470792 0.162248239 0.161943093 0.104601003 -0.0271019768 0.0825799704 0.00950320065 -0.0404817834 0.129946649 0.114401944 -0.0974406078 -0.144606605 0.0822113976 -0.133282393 -0.0866103172 -0.161681429 0.0105247656 0.0808934346 -0.0963236913 -0.130843952 -0.0294511542 0.14981015 0.0511926003 0.119163141 0.0719184428 0.014378503 -0.0375215076 0.100146711 0.00126785447 0.159019947 -0.0868258625 0.0822456554 0.00716631068 0.0106834266 -0.0192997064 0.0986597613 -0.0639843941 0.0297827087 0.0358716287 -0.0587924942 0.0603018999 0.0780824944 -0.123953499 -0.0754288808 -0.00476129353 -0.0413157828 0.0502767749 -0.0340033025 -0.111571126 -0.100440137 0.147668496 0.0673740879 -0.0353276059 0.0640556961 -0.0988995805 0.136604816 0.140064046 0.0980866402 -0.0750294924 -0.137589633 0.043476589 -0.114663213 -0.0333486758 -0.0707642436 -0.0460274704 0.114950918 -0.0877812207 -0.0162663646 0.150960758 -0.126444697 -0.042077966 0.0971751958 0.0925146341 -0.0311614387 0.0668840557 0.114941254 0.0032322011 0.110740408 0.0407327525 0.127984926 -0.0671080053 -0.0279953331 0.0831837729 0.0103864418 -0.0418696329 -0.0798007697 0.00768780895 -0.0546894707 -0.0398143195 0.118723847 0.116604052 0.0362069607 -0.120131537 -0.0460622348 0.0910601467 0.161261335 0.132019728 0.0777907819 0.12057399 -0.0873341486 0.0300714727 -0.0391077697 0.139219776 0.034336701 0.0798435882 0.0177026708 -0.00687919557 -0.0386889726 -0.0851943567 0.120803818 -0.0706116483 0.0470289737 -0.00083668204 -0.0218703523 0.0288076419 -0.0316865966 0.111179002 0.088308543 0.0760286823 0.104716919 0.00950780511 -0.0447407067 0.098795779 -0.116593644 0.103690639 -0.0872254521 -0.0867426321 0.0982015431 0.170969442 0.0893125832 -0.052779343 -0.103205182 -0.0188849494 0.148354068 0.135574684 0.16014275 0.0423907042 -0.123855539 -0.0422023758 -0.0901324898 -0.155949607 -0.0723659694 -0.0916374624 0.0385438688 0.129645094 -0.0276183728 -0.0442992523 0.0822724849 0.12342529 -0.0557483733 -0.0643166676 -0.137550637 -0.0250330232 -0.111662775 0.175575733 0.015148961 0.0261206627 0.134495571 -0.0594918132 0.0758288354 0.060908068 -0.0519245639 0.0473323725 -0.127921104 0.0161799341 0.0626626536 0.0191916153 0.0295425896 0.00972762611 -0.0193439405 0.0721613467 0.0690132603 0.145383656 0.139039531 0.0707440674 0.0853963941 0.121891946 0.0830445886 -0.139111638 -0.046770677 -0.0729267821 -0.0370384678 -0.122914143 0.0188236907 -0.0119272592 0.131827608 0.0751277357 0.0265672356 0.102637075 0.000820508925 0.128561452 -0.113339409 -0.0267119724 -0.0484277457 0.0841864794 -0.0932453424 0.112891175 -0.0558898747 -0.118770018 0.107176155 -0.0351163447 0.0367854685 -0.0528843328 -0.0932976678 0.132041663 -0.0217739437 0.0115754455 0.0903767347 -0.0251083001 -0.0478500426 -0.122446015 -0.110440329 -0.0738081709 0.13881968 0.0556323677 0.113088712 -0.0600917004 0.0295646787 0.0281529948 -0.0955312327 -0.0372477174 -0.0413099937 -0.12244685 -0.0883221477 -0.0363030881 -0.0330142006 0.0267371461 -0.0137264878 -0.110913046 0.0730738714 0.186447442 0.190215573 0.109426335 0.144610614 0.092832081 0.0243124962 0.082650587 -0.0580552481 -0.0164464172 0.132549599 -0.16663675 0.176283285 0.0554265827 -0.0576385558 0.0210192334 -0.0107073868 0.0469353348 0.0421484709 0.20231232 0.112845972 -0.0282869264 0.0477520488 -0.0603832416 0.0570014864 0.14945437 -0.123605035 0.00659620762 -0.0773272812 0.139960304 -0.0898624659 -0.0288678277 -0.073680222 0.0669173375 -0.125848636 -0.0638360605 0.00877119321 0.146627396 0.100049771 -0.174804077 -0.0694195628 0.114080206 -0.035090182 -0.0557439104 0.0807942003 0.0843401998 0.0177999288 0.0371674523 0.035731107 -0.111021757 -0.093117848 -0.0678854063 0.0266885534 -0.00953813922 0.089507781 -0.106526807 -0.0295186415 0.00943453331 0.142027363 0.108211689 0.106748775 0.0579309165 0.0385677479 -0.100708753 0.151549757 0.0247560851 0.0252912976 0.0531664081 -0.123913847 -0.14173685 -0.0242508408 0.159124956 -0.113823667 -0.0568166599 0.0248767957 0.0976887569 -0.0333233364 0.0985455513 0.0487232953 0.0828748867 0.0737239048 -0.00177626917 0.0483965725 0.0487547442 0.0349344462 0.0642146096 0.135932118 0.139095858 -0.161683232 0.147549808 -0.0745932832 -0.0598408207 -0.0111973844 0.052948419 -0.133784652 -0.0361436382 -0.0810343474 0.113400467 0.0375890285 0.0919118151 0.165626541 -0.0969523787 -0.116375238 0.156546161 0.160234615 0.152540177 -0.0588590465 -0.082201235 0.151506081 -0.0245612338 0.108619809 0.0955060497 0.0378499888 0.0223950893 -0.0736716762 -0.0403732657 -0.112797014 0.153787464 -0.00461600721 0.0319394395 0.0467294194 0.0903732851 -0.155461773 -0.0431794003 -0.0157981049 -0.0684597194 -0.0530542284 -0.0374011174 -0.0614936724 -0.00120179285 -0.105804361 0.0348305702 0.146401137 -0.103180595 -0.0832952708 0.0653216466 0.112658091 0.107945248 0.107463151 -0.0334763043 0.137352556 0.0405373275 0.0836709291 0.0841100141 0.000271841243 -0.0795930102 0.115181461 -0.0337604173 -0.128911823 0.123057336 0.0785609409 0.0277274642 0.143650293 0.0181832965 0.126039356 -0.0717210472 -0.0782386661 -0.207162619 -0.0587309264 0.0823272541 0.11590194 -0.15862602 -0.110215195 0.119876325 -0.03551469 0.203044742 0.0244795568 0.10723605 0.0298106819 0.174269021 0.143131137 0.145505443 0.0387773216 0.0399972759 -0.0434709154 0.174700931 0.0863626674 -0.00607159734 -0.00721096992 0.181514055 -0.174008265 -0.106141508 0.0943884626 0.0748455301 -0.08260189 0.128811941 0.09916839 -0.0107264379 0.174222261 -0.116252743 0.108514942 0.0142451078 0.0903487951 0.0105539095 -0.00275873509 -0.0601309314 -0.103395417 0.0936504826 0.00442700088 0.0223080404 -0.0448008254 0.0983777866 0.0909713805 0.0360316932 0.00507651642 -0.0769937262 0.0967232734 -0.0641804114 0.122599706 -0.113460898 -0.107624725 0.117271051 -0.00697429758 0.0493416227 -0.127027497 -0.0720096231 -0.0119818123 0.0336597078 -0.0459525064 -0.0465637259 -0.0834386647 0.0638463795 0.0122022033 -0.0931649208 -0.00553575298 -0.0875594765 0.0826876462 -0.00695849955 0.00258775055 -0.128275678 0.0674737692 -0.0166282598 -0.101550914 -0.000357478857 0.136971578 -0.0781561136 0.0165070295 0.130231693 -0.0846520886 -0.0524368845 0.13351126 0.0329737216 -0.0871247053 -0.0810154378 0.127794713 0.0551171899 0.0123283863 0.0219221041 0.0886786133 -0.0899260417 0.113652535 0.092415683 0.148293018 -0.0763472393 -0.00241494132 0.0385320969 -0.0792235285 0.119529434 0.116140589 -0.00117489253 -0.0458484106 0.022996638 -0.0169208553 0.122886755 -0.123275951 0.00158014265 0.0272646137 0.0736831352 0.0474004149 0.114665695 -0.0364573114 0.021729935 0.107678957 0.0608051606 0.109061301 -0.0183101986 -0.143050343 0.011449188 0.0253396928 0.0116122691 0.169679016 -0.108423777 0.114302441 -0.0180101134 0.0376329049 0.000675914111 -0.0989059806 0.00174778071 0.0729235336 0.0997067988 0.0798274875 0.00286698341 0.0484239347 0.0732191578 -0.0997895151 0.0131101757 0.0049995631 0.0331563503 0.0908577889 0.0351340324 0.0972493589 -0.140762478 0.0144873261 0.0925962031 0.042752862 -0.0921942815 0.123944506 0.103000984 0.134866908 -0.0654911175 -0.131940141 -0.124962308 0.121972367 -0.0777393058 -0.0134515613 -0.0593911484 0.0903083831 -0.0194513649 0.125025287 -0.0718345344 -0.028370142 -0.137873486 -0.138855338 0.102305382 -0.114035651 -0.00128389895 -0.0209189281 -0.0363460779 0.0348549187 0.0462828428 -0.135960251 0.046998933 -0.00560566783 -0.00562353432 0.0511512756 -0.097703442 -0.000405952334 -0.102428362 -0.13385509 0.0778306574 -0.126189426 0.00569581985 -0.0901096463 0.0396448672 -0.0905233249 0.0647575855 -0.0692101866 -0.118010342 0.140668467 0.0754639357 0.0287674516 -0.127176955 0.0508546382 -0.0297820792 -0.034435086 -0.0525137484 0.0414588898 0.125622496 -0.122770369 -0.103983626 0.0979629755 -0.134100616 0.0132061988 0.136052623 -0.101130173 -0.125888899 -0.0912302136 -0.000320419669 0.0951493084 0.0250687301 0.0693171024 0.0681221336 0.0920942575 -0.0242754743 -0.126860991 0.00760373473 0.0472807437 0.00129759312 -0.0417886227 -0.135421842 -0.0420329869 0.109683141 -0.0780478939 0.0869462043 -0.0336673781 0.068866685 -0.0332275853 0.0885529816 -0.0637842491 -0.0287733674 0.0399621874 0.00726474822 -0.0887221545 -0.026667513 -0.10050422 -0.0866817236 -0.0743763596 -0.0557404384 -0.0415508077 -0.0706221908 0.03670571 0.0769552216 -0.136574149 0.12560834 0.0611953586 0.0578964055 0.0321745314 -0.108904168 -0.0603623837 0.106482595 0.0418807454 0.0567226261 -0.0480391793 -0.0818945691 -0.0918800607 0.0692870989 -0.0831394047 -0.140842602 -0.0133125484 0.0388532393 -0.0127856918 -0.0663905591 -0.0192305241 -0.00343641825 0.0277077034 0.0341987535 -0.0332082435 0.0317000449 0.133374199 0.0840312392 -0.00916420575 -0.0913549662 -0.131094456 -0.114135049 0.0324433446 -0.0922504738 0.0968189761 0.0722978711 -0.0336492881 -0.135974228 -0.0138650686 0.0959312618 -0.0392885059 0.0773027018 0.0320036188 -0.136021271 -0.096613884 -0.0733416006 -0.0797348469 0.198495209 0.0582847074 -0.0810181722 -0.0431594327 -0.0618753731 0.0181986317 0.078358449 -0.0357146375 0.0401486307 -0.0250369273 0.0634938627 -0.0496993251 0.020140117 -0.127322808 -0.0666764453 0.138859123 0.0398158766 0.110776573 0.142119944 -0.034179695 -0.0179098602 0.0632333979 -0.00735516846 -0.071974054 -0.118237272 0.097171858 -0.107004285 0.0833490044 -0.0937577263 0.0946442783 0.0788892806 0.0437900051 -0.0310971122 0.0778285116 -0.0993157849 0.0229949653 0.11270339 0.16689226 0.162221476 -0.0412555002 0.173148394 -0.0327887125 -0.121510416 -0.0140555762 0.0903469846 -0.029039843 0.068566218 -0.12506409 0.0382451862 0.0248380303 0.0277039558 -0.14029333 -0.126389086 -0.00850690901 0.0435213149 0.0944143683 0.13669233 0.010785535 0.131652728 0.08942011 -0.0438658521 -0.090351522 0.0258171856 -0.0345944241 0.0610096753 -0.0412340835 -0.0557757616 -0.114867412 -0.0179175213 0.00409558415 -0.0384710208 0.100451782 0.141252503 -0.0818129927 0.0703791678 -0.070404999 -0.110884957 -0.109055385 -0.0898616537 -0.0513886064 0.12470524 -0.13137272 -0.0607609376 -0.0753680915 0.0404375494 -0.0389348865 -0.0476250499 0.139109835 -0.0529488549 -0.123520866 0.11238347 -0.0333348513 -0.0331748128 -0.042967163 0.098916471 0.100189671 0.113453075 0.0968655795 -0.0717557892 0.00738480687 0.0515316948 -0.0553591289 -0.00930176955 -0.00901313685 0.175364062 -0.0196713433 -0.0502335988 0.0383971296 0.121085942 0.103061765 0.15212594 0.116526209 -0.0264002159 -0.094560124 -0.0376541167 0.147969663 -0.0978502855 -0.00200880459 0.0312320646 -0.0133588314 0.0813049003 -0.029433284 -0.0262242351 -0.106563412 0.0297239125 0.0302417222 -0.0301722232 -0.0122521063 -0.0121933669 0.0207778811 0.118457362 0.0412592813 0.133936256 -0.0869062468 -0.0865229592 -0.0799478814 -0.0991789475 -0.0158145819 0.120537013 0.0608306117 0.17652306 -0.0725468472 0.126066044 -0.0148094278 -0.0349133536 0.0138158342 -0.00439210236 0.0949555784 -0.0591577739 -0.0189778134 0.0861846507 0.024233669 -0.0585409105 -0.114641324 -0.0379219502 0.0835855305 0.0618880242 -0.00448146462 -0.0475826338 0.0992924273 -0.0171841681 0.130379245 -0.115067512 -0.0295631811 0.132417724 -0.00352632999 -0.0539170504 -0.0408100188 0.0476579517 0.0435158908 -0.108436733 0.0848499984 -0.0134641975 0.103017956 0.113764629 -0.0229989663 -0.0402461812 0.10478355 0.105660841 -0.103532135 0.0136412531 -0.113981269 -0.130932122 0.020966202 0.103488669 0.0354949832 -0.0729100034 -0.0167546049 -0.0969255865 -0.0415144786 -0.0227924958 0.0969979763 -0.0482411981 -0.0222034156 0.0317669511 -0.0238891691 -0.0465090126 -0.049510397 -0.00603993051 -0.0495383143 0.0391993932 0.0514197163 0.0751582161 -0.0600364245 0.149168238 0.00608866569 0.145530403 -0.0471902825 -0.00813056901 0.0724131763 0.02852365 -0.0375874527 0.0505036674 0.162053078 0.061635565 -0.0772141367 -0.101029664 0.135133624 -0.026366543 0.0173085034 -0.196915612 -0.094210051 -0.0830252171 -0.0152556822 0.0760191381 0.11165338 0.015817089 0.0882711783 0.0494683385 -0.135736912 -0.0937291756 0.155131117 0.0703033358 -0.101866767 -0.136783585 -0.0569891371 0.0305453669 0.0333361402 0.0399238467 0.0434939228 0.0999660268 -0.106167085 -0.0346824601 0.1533719 -0.0761708841 0.0984655321 0.0181172937 -0.0619307309 -0.00992794242 0.070495829 0.154349118 0.0747213513 -0.191178009 -0.087587826 0.0466681495 -0.128632694 0.0855400562 -0.12334384 0.01812369 0.155531704 0.11304646 0.132001072 0.196818605 -0.0396067835 -0.0600948259 -0.0705011338 -0.113130942 -0.11137788 0.0562906638 0.0935193449 0.0702943429 -0.135742277 -0.0983269736 -0.164341018 0.0970739946 0.0331884474 0.0538573004 -0.0459236056 -0.0607440993 -0.057828702 -0.11368376 0.115170449 0.144811422 0.046080593 -0.116181828 0.0888293982 0.149672315 -0.085598737 -0.0972040147 0.101113833 0.145851657 0.0576108471 0.0922829062 0.0284254942 0.0193997808 -0.000928774185 -0.0930889547 -0.0205265954 -tensor_16bias 50 --0.142527401 0.168874308 -0.0887828916 -0.0631441548 -0.0660232753 0.0544182472 0.0641550943 0.0606994219 -0.0223076269 0.102970138 0.0258652028 -0.0809393153 -0.0276761875 0.0546831638 0.128886178 -0.0795307532 -0.0189131647 -0.120750055 0.17368494 -0.0492844023 -0.0578795224 0.0671775565 0.0123591702 0.132642269 -0.0525798798 0.0173827056 0.0659839064 -0.0958536118 0.0788409114 0.0107072359 0.0492983423 -0.02552481 0.0519438088 0.137576833 -0.115964495 -0.0723876506 0.111166283 -0.119082645 -0.0866416842 0.0962954909 0.110601142 -0.0136935199 -0.138656154 -0.0623757802 0.0855045691 -0.12340495 0.0683390722 -0.0176106151 -0.0413498878 0.0233600959 diff --git a/Linear_16.hxx b/Linear_16.hxx deleted file mode 100644 index 8d831c0..0000000 --- a/Linear_16.hxx +++ /dev/null @@ -1,658 +0,0 @@ -//Code generated automatically by TMVA for GPU Inference using ALPAKA of Model file [Linear_16.onnx] at [Fri Apr 11 14:16:45 2025] - -#ifndef SOFIE_LINEAR_16 -#define SOFIE_LINEAR_16 - -#include -#include -#include -#include -#include -#include "SOFIE/SOFIE_common.hxx" -#include - -using Dim1D = alpaka::DimInt<1>; -using Acc = alpaka::TagToAcc; -using Queue = alpaka::Queue; - -namespace SOFIE_Linear_16{ -struct Session { - -// initialized tensors -auto deviceBuf_8weight = alpaka::allocBuf(devAcc, 2500); -auto deviceBuf_8bias = alpaka::allocBuf(devAcc, 50); -auto deviceBuf_4bias = alpaka::allocBuf(devAcc, 50); -auto deviceBuf_2weight = alpaka::allocBuf(devAcc, 2500); -auto deviceBuf_0bias = alpaka::allocBuf(devAcc, 50); -auto deviceBuf_12bias = alpaka::allocBuf(devAcc, 50); -auto deviceBuf_18bias = alpaka::allocBuf(devAcc, 10); -auto deviceBuf_14bias = alpaka::allocBuf(devAcc, 50); -auto deviceBuf_4weight = alpaka::allocBuf(devAcc, 2500); -auto deviceBuf_10weight = alpaka::allocBuf(devAcc, 2500); -auto deviceBuf_6bias = alpaka::allocBuf(devAcc, 50); -auto deviceBuf_18weight = alpaka::allocBuf(devAcc, 500); -auto deviceBuf_0weight = alpaka::allocBuf(devAcc, 5000); -auto deviceBuf_10bias = alpaka::allocBuf(devAcc, 50); -auto deviceBuf_2bias = alpaka::allocBuf(devAcc, 50); -auto deviceBuf_6weight = alpaka::allocBuf(devAcc, 2500); -auto deviceBuf_14weight = alpaka::allocBuf(devAcc, 2500); -auto deviceBuf_16weight = alpaka::allocBuf(devAcc, 2500); -auto deviceBuf_12weight = alpaka::allocBuf(devAcc, 2500); -auto deviceBuf_16bias = alpaka::allocBuf(devAcc, 50); - -//--- declare and allocate the intermediate tensors -auto bufDev_18biasbcast = alpaka::allocBuf(devAcc,160); -auto bufDev_38 = alpaka::allocBuf(devAcc,800); -auto bufDev_14biasbcast = alpaka::allocBuf(devAcc,800); -auto bufDev_34 = alpaka::allocBuf(devAcc,800); -auto bufDev_22 = alpaka::allocBuf(devAcc,800); -auto bufDev_2biasbcast = alpaka::allocBuf(devAcc,800); -auto bufDev_24 = alpaka::allocBuf(devAcc,800); -auto bufDev_0biasbcast = alpaka::allocBuf(devAcc,800); -auto bufDev_6biasbcast = alpaka::allocBuf(devAcc,800); -auto bufDev_4biasbcast = alpaka::allocBuf(devAcc,800); -auto bufDev_16biasbcast = alpaka::allocBuf(devAcc,800); -auto bufDev_8biasbcast = alpaka::allocBuf(devAcc,800); -auto bufDev_26 = alpaka::allocBuf(devAcc,800); -auto bufDev_28 = alpaka::allocBuf(devAcc,800); -auto bufDev_10biasbcast = alpaka::allocBuf(devAcc,800); -auto bufDev_30 = alpaka::allocBuf(devAcc,800); -auto bufDev_32 = alpaka::allocBuf(devAcc,800); -auto bufDev_36 = alpaka::allocBuf(devAcc,800); -auto bufDev_12biasbcast = alpaka::allocBuf(devAcc,800); - -Session(std::string filename ="Linear_16.dat") { - -//--- reading weights from file - std::ifstream f; - f.open(filename); - if (!f.is_open()) { - throw std::runtime_error("tmva-sofie failed to open file " + filename + " for input weights"); - } - std::string tensor_name; - size_t length; - f >> tensor_name >> length; - if (tensor_name != "tensor_8weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_8weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 2500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_8weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_8weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_8bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_8bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_8bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_8bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_4bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_4bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_4bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_4bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_2weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_2weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 2500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_2weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_2weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_0bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_0bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_0bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_0bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_12bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_12bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_12bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_12bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_18bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_18bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 10) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 10 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_18bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_18bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_14bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_14bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_14bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_14bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_4weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_4weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 2500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_4weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_4weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_10weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_10weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 2500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_10weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_10weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_6bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_6bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_6bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_6bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_18weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_18weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_18weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_18weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_0weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_0weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 5000) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 5000 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_0weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_0weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_10bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_10bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_10bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_10bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_2bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_2bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_2bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_2bias"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_6weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_6weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 2500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_6weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_6weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_14weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_14weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 2500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_14weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_14weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_16weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_16weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 2500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_16weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_16weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_12weight" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_12weight , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 2500) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 2500 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_12weight[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_12weight"); - } - f >> tensor_name >> length; - if (tensor_name != "tensor_16bias" ) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor name; expected name is tensor_16bias , read " + tensor_name; - throw std::runtime_error(err_msg); - } - if (length != 50) { - std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is 50 , read " + std::to_string(length) ; - throw std::runtime_error(err_msg); - } - for (size_t i = 0; i < length; ++i) - f >> tensor_16bias[i]; - if (f.fail()) { - throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor tensor_16bias"); - } - f.close(); - - auto hostBuf_8weight = alpaka::allocBuf(hostAcc,2500); - std::memcpy(alpaka::getPtrNative(hostBuf_8weight), tensor_8weight, 2500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_8weight, hostBuf8weight, 2500); - auto hostBuf_8bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_8bias), tensor_8bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_8bias, hostBuf8bias, 50); - auto hostBuf_4bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_4bias), tensor_4bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_4bias, hostBuf4bias, 50); - auto hostBuf_2weight = alpaka::allocBuf(hostAcc,2500); - std::memcpy(alpaka::getPtrNative(hostBuf_2weight), tensor_2weight, 2500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_2weight, hostBuf2weight, 2500); - auto hostBuf_0bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_0bias), tensor_0bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_0bias, hostBuf0bias, 50); - auto hostBuf_12bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_12bias), tensor_12bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_12bias, hostBuf12bias, 50); - auto hostBuf_18bias = alpaka::allocBuf(hostAcc,10); - std::memcpy(alpaka::getPtrNative(hostBuf_18bias), tensor_18bias, 10* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_18bias, hostBuf18bias, 10); - auto hostBuf_14bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_14bias), tensor_14bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_14bias, hostBuf14bias, 50); - auto hostBuf_4weight = alpaka::allocBuf(hostAcc,2500); - std::memcpy(alpaka::getPtrNative(hostBuf_4weight), tensor_4weight, 2500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_4weight, hostBuf4weight, 2500); - auto hostBuf_10weight = alpaka::allocBuf(hostAcc,2500); - std::memcpy(alpaka::getPtrNative(hostBuf_10weight), tensor_10weight, 2500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_10weight, hostBuf10weight, 2500); - auto hostBuf_6bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_6bias), tensor_6bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_6bias, hostBuf6bias, 50); - auto hostBuf_18weight = alpaka::allocBuf(hostAcc,500); - std::memcpy(alpaka::getPtrNative(hostBuf_18weight), tensor_18weight, 500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_18weight, hostBuf18weight, 500); - auto hostBuf_0weight = alpaka::allocBuf(hostAcc,5000); - std::memcpy(alpaka::getPtrNative(hostBuf_0weight), tensor_0weight, 5000* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_0weight, hostBuf0weight, 5000); - auto hostBuf_10bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_10bias), tensor_10bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_10bias, hostBuf10bias, 50); - auto hostBuf_2bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_2bias), tensor_2bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_2bias, hostBuf2bias, 50); - auto hostBuf_6weight = alpaka::allocBuf(hostAcc,2500); - std::memcpy(alpaka::getPtrNative(hostBuf_6weight), tensor_6weight, 2500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_6weight, hostBuf6weight, 2500); - auto hostBuf_14weight = alpaka::allocBuf(hostAcc,2500); - std::memcpy(alpaka::getPtrNative(hostBuf_14weight), tensor_14weight, 2500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_14weight, hostBuf14weight, 2500); - auto hostBuf_16weight = alpaka::allocBuf(hostAcc,2500); - std::memcpy(alpaka::getPtrNative(hostBuf_16weight), tensor_16weight, 2500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_16weight, hostBuf16weight, 2500); - auto hostBuf_12weight = alpaka::allocBuf(hostAcc,2500); - std::memcpy(alpaka::getPtrNative(hostBuf_12weight), tensor_12weight, 2500* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_12weight, hostBuf12weight, 2500); - auto hostBuf_16bias = alpaka::allocBuf(hostAcc,50); - std::memcpy(alpaka::getPtrNative(hostBuf_16bias), tensor_16bias, 50* sizeof(float)); - alpaka::memcpy(queue, deviceBuf_16bias, hostBuf16bias, 50); - -//---- allocate the intermediate dynamic tensors -//--- broadcast bias tensor 0biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_0bias,{ 50 }, { 16 , 50 }); - auto hostBuf_0biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_0biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_0biasbcast, hostBuf_0biasbcast , 800); - } -//--- broadcast bias tensor 2biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_2bias,{ 50 }, { 16 , 50 }); - auto hostBuf_2biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_2biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_2biasbcast, hostBuf_2biasbcast , 800); - } -//--- broadcast bias tensor 4biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_4bias,{ 50 }, { 16 , 50 }); - auto hostBuf_4biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_4biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_4biasbcast, hostBuf_4biasbcast , 800); - } -//--- broadcast bias tensor 6biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_6bias,{ 50 }, { 16 , 50 }); - auto hostBuf_6biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_6biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_6biasbcast, hostBuf_6biasbcast , 800); - } -//--- broadcast bias tensor 8biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_8bias,{ 50 }, { 16 , 50 }); - auto hostBuf_8biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_8biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_8biasbcast, hostBuf_8biasbcast , 800); - } -//--- broadcast bias tensor 10biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_10bias,{ 50 }, { 16 , 50 }); - auto hostBuf_10biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_10biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_10biasbcast, hostBuf_10biasbcast , 800); - } -//--- broadcast bias tensor 12biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_12bias,{ 50 }, { 16 , 50 }); - auto hostBuf_12biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_12biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_12biasbcast, hostBuf_12biasbcast , 800); - } -//--- broadcast bias tensor 14biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_14bias,{ 50 }, { 16 , 50 }); - auto hostBuf_14biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_14biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_14biasbcast, hostBuf_14biasbcast , 800); - } -//--- broadcast bias tensor 16biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_16bias,{ 50 }, { 16 , 50 }); - auto hostBuf_16biasbcast = alpaka::allocBuf(hostAcc,800); - std::memcpy(alpaka::getPtrNative(hostBuf_16biasbcast), data, 800 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_16biasbcast, hostBuf_16biasbcast , 800); - } -//--- broadcast bias tensor 18biasfor Gemm op - { - float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_18bias,{ 10 }, { 16 , 10 }); - auto hostBuf_18biasbcast = alpaka::allocBuf(hostAcc,160); - std::memcpy(alpaka::getPtrNative(hostBuf_18biasbcast), data, 160 * sizeof(float)); - alpaka::memcpy(queue, deviceBuf_18biasbcast, hostBuf_18biasbcast , 160); - } -} - - - -std::vector infer(float* tensor_input1){ - -//--------- Gemm_GPU_ALPAKA - char op_0_transA = 'n'; - char op_0_transB = 't'; - int op_0_m = 16; - int op_0_n = 50; - int op_0_k = 100; - float op_0_alpha = 1; - float op_0_beta = 1; - int op_0_lda = 100; - int op_0_ldb = 100; - std::copy(tensor_0biasbcast, tensor_0biasbcast + 800, tensor_22); - Kokkos::View kokkos_dev_input1((float*)std::data(bufDev_input1), op_0_m, op_0_k); - Kokkos::View kokkos_dev_0weight((float*)std::data(bufDev_0weight), op_0_k, op_0_n); - Kokkos::View kokkos_dev_22((float*)std::data(bufDev_22), op_0_m, op_0_n); - KokkosBlas::gemm(&op_0_transB, &op_0_transA, op_0_alpha, kokkos_dev_input1, kokkos_dev_0weight, op_0_beta, kokkos_dev_22); - -//--------- Gemm_GPU_ALPAKA - char op_1_transA = 'n'; - char op_1_transB = 't'; - int op_1_m = 16; - int op_1_n = 50; - int op_1_k = 50; - float op_1_alpha = 1; - float op_1_beta = 1; - int op_1_lda = 50; - int op_1_ldb = 50; - std::copy(tensor_2biasbcast, tensor_2biasbcast + 800, tensor_24); - Kokkos::View kokkos_dev_22((float*)std::data(bufDev_22), op_1_m, op_1_k); - Kokkos::View kokkos_dev_2weight((float*)std::data(bufDev_2weight), op_1_k, op_1_n); - Kokkos::View kokkos_dev_24((float*)std::data(bufDev_24), op_1_m, op_1_n); - KokkosBlas::gemm(&op_1_transB, &op_1_transA, op_1_alpha, kokkos_dev_22, kokkos_dev_2weight, op_1_beta, kokkos_dev_24); - -//--------- Gemm_GPU_ALPAKA - char op_2_transA = 'n'; - char op_2_transB = 't'; - int op_2_m = 16; - int op_2_n = 50; - int op_2_k = 50; - float op_2_alpha = 1; - float op_2_beta = 1; - int op_2_lda = 50; - int op_2_ldb = 50; - std::copy(tensor_4biasbcast, tensor_4biasbcast + 800, tensor_26); - Kokkos::View kokkos_dev_24((float*)std::data(bufDev_24), op_2_m, op_2_k); - Kokkos::View kokkos_dev_4weight((float*)std::data(bufDev_4weight), op_2_k, op_2_n); - Kokkos::View kokkos_dev_26((float*)std::data(bufDev_26), op_2_m, op_2_n); - KokkosBlas::gemm(&op_2_transB, &op_2_transA, op_2_alpha, kokkos_dev_24, kokkos_dev_4weight, op_2_beta, kokkos_dev_26); - -//--------- Gemm_GPU_ALPAKA - char op_3_transA = 'n'; - char op_3_transB = 't'; - int op_3_m = 16; - int op_3_n = 50; - int op_3_k = 50; - float op_3_alpha = 1; - float op_3_beta = 1; - int op_3_lda = 50; - int op_3_ldb = 50; - std::copy(tensor_6biasbcast, tensor_6biasbcast + 800, tensor_28); - Kokkos::View kokkos_dev_26((float*)std::data(bufDev_26), op_3_m, op_3_k); - Kokkos::View kokkos_dev_6weight((float*)std::data(bufDev_6weight), op_3_k, op_3_n); - Kokkos::View kokkos_dev_28((float*)std::data(bufDev_28), op_3_m, op_3_n); - KokkosBlas::gemm(&op_3_transB, &op_3_transA, op_3_alpha, kokkos_dev_26, kokkos_dev_6weight, op_3_beta, kokkos_dev_28); - -//--------- Gemm_GPU_ALPAKA - char op_4_transA = 'n'; - char op_4_transB = 't'; - int op_4_m = 16; - int op_4_n = 50; - int op_4_k = 50; - float op_4_alpha = 1; - float op_4_beta = 1; - int op_4_lda = 50; - int op_4_ldb = 50; - std::copy(tensor_8biasbcast, tensor_8biasbcast + 800, tensor_30); - Kokkos::View kokkos_dev_28((float*)std::data(bufDev_28), op_4_m, op_4_k); - Kokkos::View kokkos_dev_8weight((float*)std::data(bufDev_8weight), op_4_k, op_4_n); - Kokkos::View kokkos_dev_30((float*)std::data(bufDev_30), op_4_m, op_4_n); - KokkosBlas::gemm(&op_4_transB, &op_4_transA, op_4_alpha, kokkos_dev_28, kokkos_dev_8weight, op_4_beta, kokkos_dev_30); - -//--------- Gemm_GPU_ALPAKA - char op_5_transA = 'n'; - char op_5_transB = 't'; - int op_5_m = 16; - int op_5_n = 50; - int op_5_k = 50; - float op_5_alpha = 1; - float op_5_beta = 1; - int op_5_lda = 50; - int op_5_ldb = 50; - std::copy(tensor_10biasbcast, tensor_10biasbcast + 800, tensor_32); - Kokkos::View kokkos_dev_30((float*)std::data(bufDev_30), op_5_m, op_5_k); - Kokkos::View kokkos_dev_10weight((float*)std::data(bufDev_10weight), op_5_k, op_5_n); - Kokkos::View kokkos_dev_32((float*)std::data(bufDev_32), op_5_m, op_5_n); - KokkosBlas::gemm(&op_5_transB, &op_5_transA, op_5_alpha, kokkos_dev_30, kokkos_dev_10weight, op_5_beta, kokkos_dev_32); - -//--------- Gemm_GPU_ALPAKA - char op_6_transA = 'n'; - char op_6_transB = 't'; - int op_6_m = 16; - int op_6_n = 50; - int op_6_k = 50; - float op_6_alpha = 1; - float op_6_beta = 1; - int op_6_lda = 50; - int op_6_ldb = 50; - std::copy(tensor_12biasbcast, tensor_12biasbcast + 800, tensor_34); - Kokkos::View kokkos_dev_32((float*)std::data(bufDev_32), op_6_m, op_6_k); - Kokkos::View kokkos_dev_12weight((float*)std::data(bufDev_12weight), op_6_k, op_6_n); - Kokkos::View kokkos_dev_34((float*)std::data(bufDev_34), op_6_m, op_6_n); - KokkosBlas::gemm(&op_6_transB, &op_6_transA, op_6_alpha, kokkos_dev_32, kokkos_dev_12weight, op_6_beta, kokkos_dev_34); - -//--------- Gemm_GPU_ALPAKA - char op_7_transA = 'n'; - char op_7_transB = 't'; - int op_7_m = 16; - int op_7_n = 50; - int op_7_k = 50; - float op_7_alpha = 1; - float op_7_beta = 1; - int op_7_lda = 50; - int op_7_ldb = 50; - std::copy(tensor_14biasbcast, tensor_14biasbcast + 800, tensor_36); - Kokkos::View kokkos_dev_34((float*)std::data(bufDev_34), op_7_m, op_7_k); - Kokkos::View kokkos_dev_14weight((float*)std::data(bufDev_14weight), op_7_k, op_7_n); - Kokkos::View kokkos_dev_36((float*)std::data(bufDev_36), op_7_m, op_7_n); - KokkosBlas::gemm(&op_7_transB, &op_7_transA, op_7_alpha, kokkos_dev_34, kokkos_dev_14weight, op_7_beta, kokkos_dev_36); - -//--------- Gemm_GPU_ALPAKA - char op_8_transA = 'n'; - char op_8_transB = 't'; - int op_8_m = 16; - int op_8_n = 50; - int op_8_k = 50; - float op_8_alpha = 1; - float op_8_beta = 1; - int op_8_lda = 50; - int op_8_ldb = 50; - std::copy(tensor_16biasbcast, tensor_16biasbcast + 800, tensor_38); - Kokkos::View kokkos_dev_36((float*)std::data(bufDev_36), op_8_m, op_8_k); - Kokkos::View kokkos_dev_16weight((float*)std::data(bufDev_16weight), op_8_k, op_8_n); - Kokkos::View kokkos_dev_38((float*)std::data(bufDev_38), op_8_m, op_8_n); - KokkosBlas::gemm(&op_8_transB, &op_8_transA, op_8_alpha, kokkos_dev_36, kokkos_dev_16weight, op_8_beta, kokkos_dev_38); - -//--------- Gemm_GPU_ALPAKA - char op_9_transA = 'n'; - char op_9_transB = 't'; - int op_9_m = 16; - int op_9_n = 10; - int op_9_k = 50; - float op_9_alpha = 1; - float op_9_beta = 1; - int op_9_lda = 50; - int op_9_ldb = 50; - std::copy(tensor_18biasbcast, tensor_18biasbcast + 160, tensor_39); - Kokkos::View kokkos_dev_38((float*)std::data(bufDev_38), op_9_m, op_9_k); - Kokkos::View kokkos_dev_18weight((float*)std::data(bufDev_18weight), op_9_k, op_9_n); - Kokkos::View kokkos_dev_39((float*)std::data(bufDev_39), op_9_m, op_9_n); - KokkosBlas::gemm(&op_9_transB, &op_9_transA, op_9_alpha, kokkos_dev_38, kokkos_dev_18weight, op_9_beta, kokkos_dev_39); - return {std::vector(tensor_39, tensor_39 + 160)}; -} -}; // end of Session -} //SOFIE_Linear_16 - -#endif // SOFIE_LINEAR_16 diff --git a/settings-dev.cmake b/settings-dev.cmake new file mode 100644 index 0000000..6a8496f --- /dev/null +++ b/settings-dev.cmake @@ -0,0 +1,7 @@ +set (CMAKE_BUILD_TYPE RelWithDebInfo CACHE STRING "" FORCE) +set (CMAKE_INSTALL_PREFIX ../install CACHE STRING "" FORCE) +set (CMAKE_INSTALL_BINDIR bin CACHE STRING "" FORCE) +set (CMAKE_INSTALL_INCLUDEDIR include CACHE STRING "" FORCE) +set (CMAKE_INSTALL_LIBDIR lib CACHE STRING "" FORCE) +set (testing ON CACHE BOOL "" FORCE) +set (mathmore ON CACHE BOOL "" FORCE) diff --git a/src/SOFIE_core/CMakeLists.txt b/src/SOFIE_core/CMakeLists.txt index 7297957..de13b58 100644 --- a/src/SOFIE_core/CMakeLists.txt +++ b/src/SOFIE_core/CMakeLists.txt @@ -76,6 +76,7 @@ list(TRANSFORM sources_headers PREPEND "inc/") set(sources_cxx src/RModel_Base.cxx src/RModel.cxx + src/RModel_ALPAKA.cxx src/RModel_GNN.cxx src/RModel_GraphIndependent.cxx src/RFunction.cxx diff --git a/src/SOFIE_core/inc/SOFIE/RModel.hxx b/src/SOFIE_core/inc/SOFIE/RModel.hxx index 83a47af..dbee25b 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel.hxx @@ -162,6 +162,10 @@ protected: void GenerateInitializedTensorInfo_GPU_ALPAKA(); // generate code for the intermediate tensors void GenerateIntermediateTensorInfo(); + + // generate code for the temporary initialized tensors containers + void GenerateTemporaryInitializedTensorContainers_GPU_ALPAKA(); + // generate code for the dynamic tensors void GenerateDynamicTensorInfo(); diff --git a/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx b/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx index 0a615c5..073c6bc 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx @@ -29,8 +29,15 @@ enum class Options { enum class WeightFileType { None, RootBinary, Text }; -std::underlying_type_t operator|(Options opA, Options opB); -std::underlying_type_t operator|(std::underlying_type_t opA, Options opB); + +inline std::underlying_type_t operator|(Options opA, Options opB) { + return static_cast>(opA) | + static_cast>(opB); +} + +inline std::underlying_type_t operator|(std::underlying_type_t opA, Options opB) { + return opA | static_cast>(opB); +} class RModel_Base { diff --git a/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx b/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx index d183052..f25b66b 100644 --- a/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx +++ b/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx @@ -704,6 +704,11 @@ inline GNN_Data Copy(const GNN_Data & data) { return out; } +// Function to generate the code for declaring and initializing constant tensors +// This is for tensors which are not part of weight files and can be created from the Constant operator +template +std::string GenerateConstantTensorCode(const std::pair &t); + }//SOFIE #endif //TMVA_SOFIE_RMODEL diff --git a/src/SOFIE_core/src/RModel.cxx b/src/SOFIE_core/src/RModel.cxx index f877b00..b78ad43 100644 --- a/src/SOFIE_core/src/RModel.cxx +++ b/src/SOFIE_core/src/RModel.cxx @@ -12,13 +12,6 @@ namespace SOFIE { -std::underlying_type_t operator|(Options opA, Options opB) { - return static_cast>(opA) | static_cast>(opB); -} -std::underlying_type_t operator|(std::underlying_type_t opA, Options opB) { - return opA | static_cast>(opB); -} - RModel::RModel(RModel&& other) { fInputTensorInfos = std::move(other.fInputTensorInfos); fReadyInputTensorInfos = std::move(other.fReadyInputTensorInfos); @@ -534,43 +527,6 @@ void RModel::InitializeSubGraph(std::shared_ptr graph) { } -// Function to generate the code for declaring and initializing constant tensors -// This is for tensors which are not part of weight files and can be created from the Constant operator -template -std::string GenerateConstantTensorCode(const std::pair &t) -{ - std::stringstream strs; - std::string type = ConvertTypeToString(t.second.type()); - size_t length = ConvertShapeToLength(t.second.shape()); - // avoid using stack sizes for constant tensors to reduce compilation time - bool allocateOnStack = (length > 100) ? false : true; - - const T *data = t.second.data(); - - // and check if all values are the same - bool sameData = false; - // for non stack allocation check if data are the same - if (!allocateOnStack && length > 1) { - size_t idx = 1; - do { - sameData = (data[idx] == data[idx - 1]); - idx++; - } while (sameData && idx < length); - } - if (allocateOnStack) { - strs << type << " tensor_" << t.first << "[" << length << "] = " << ConvertValuesToString(length, data) << ";\n"; - } else { - strs << "std::vector<" << type << "> fTensor_" << t.first << " = "; - if (sameData) - strs << "std::vector<" << type << ">(" << length << ", " << ConvertValToString(data[0]) << ");\n"; - else { - strs << ConvertValuesToString(length, data) << ";\n"; - } - strs << "const " << type << " * tensor_" + t.first + " = fTensor_" + t.first + ".data();\n"; - } - return strs.str(); -} - void RModel::GenerateInitializedTensorInfo() { if (!fInitializedTensors.empty()) @@ -594,28 +550,6 @@ void RModel::GenerateInitializedTensorInfo() } } -void RModel::GenerateInitializedTensorInfo_GPU_ALPAKA() -{ - if (!fInitializedTensors.empty()) - fGC += "// initialized tensors\n"; - - for (auto &i : fInitializedTensors) { - if (!fUseWeightFile || i.second.IsConstantTensor()) { - if (i.second.type() == ETensorType::FLOAT) - fGC += GenerateConstantTensorCode(i); - else if (i.second.type() == ETensorType::INT64) - fGC += GenerateConstantTensorCode(i); - - } else { - // case of tensors which are read from a file - size_t length = ConvertShapeToLength(i.second.shape()); - if (i.second.type() == ETensorType::FLOAT) { - fGC += "auto deviceBuf_"+i.first+" = alpaka::allocBuf(devAcc, "+std::to_string(length)+");\n"; - } - } - } -} - void RModel::GenerateIntermediateMemoryPool() { if (fIntermediateMemoryInfo.total_stack.size() == 0) return; fGC += "\n//--- Allocating session memory pool to be used for allocating intermediate tensors\n"; @@ -674,55 +608,6 @@ void RModel::GenerateIntermediateTensorInfo() { } } -void RModel::GenerateGPU_ALPAKA_Buffers(){ - if (!fIntermediateTensorInfos.empty()) { - std::string tensor_declaration_block = ""; - - for (auto &i : fIntermediateTensorInfos) { - if (i.second.type == ETensorType::BOOL) { - tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(ConvertShapeToLength(i.second.shape)) + ");\n"; - // No pointer allocation needed for BOOL - } - if (std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == fOutputTensorNames.end()) { - size_t length = ConvertShapeToLength(i.second.shape); - - if (i.second.type == ETensorType::FLOAT) { - tensor_declaration_block += "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," + std::to_string(length) + ");\n"; - } - else if (i.second.type == ETensorType::DOUBLE) { - tensor_declaration_block += "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," + std::to_string(length) + ");\n"; - } - else if (i.second.type == ETensorType::INT64) { - tensor_declaration_block += "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," + std::to_string(length) + ");\n"; - - } - } - } - - if (tensor_declaration_block.length()) { - fGC += "\n//--- declare and allocate the intermediate tensors\n" + tensor_declaration_block; - } - } - // add also the dynamic tensors (only declarations, allocation will be done later) - if (!fDynamicTensorInfos.empty()) { - fGC += "//--- declare the dynamic tensors\n"; - fGC += "using bufDev_float = alpaka::Buf, size_t>;\n"; - fGC += "using bufDev_double = alpaka::Buf, size_t>;\n"; - fGC += "using bufDev_int64= alpaka::Buf, size_t>;\n"; - for (auto &i : fDynamicTensorInfos) { - if (i.second.type == ETensorType::FLOAT) { - fGC += "bufDev_float bufDev_" + i.first + ";\n"; - } else if (i.second.type == ETensorType::DOUBLE) { - fGC += "bufDev_double bufDev_" + i.first + ";\n"; - } else if (i.second.type == ETensorType::INT64) { - fGC += "bufDev_int64 bufDev_" + i.first + ";\n"; - - } - } - } -} - - // generate code for specific operator declarations to be defined in the Session class void RModel::GenerateOperatorDeclarations() { std::string strcode; @@ -748,18 +633,6 @@ void RModel::GenerateDynamicTensorInfo() { fGC += out.str(); } -void RModel::GenerateDynamicTensorInfo_GPU_ALPAKA() { - fGC += "//---- allocate the intermediate dynamic tensors\n"; - std::stringstream out; - for (auto & i: fDynamicTensorInfos) { - auto length = ConvertDynamicShapeToLength(i.second.shape); - out << SP << "if (" << length << " > 0) {\n"; - out << "auto bufDev_" + i.first + " = alpaka::allocBuf(devAcc," << length << ");\n"; - out << SP << "}\n"; - } - fGC += out.str(); -} - std::string RModel::GenerateInferSignature(bool isdecl) { // generate the infer signature given the inputs: eg. "float * tensor1, float * tensor2" // if (decl = false) generate only calling signature (tensor1,tensor2,....) @@ -795,94 +668,6 @@ std::string RModel::GenerateInferSignature(bool isdecl) { return rGC; } -namespace { - -std::string createOutputTensor(RModel const &rmodel, std::string const &name, bool isIntermediateTensor) -{ - if(name.empty()) return "{}"; - ETensorType eOutputType = rmodel.GetTensorType(name); - std::string outputType = ConvertTypeToString(eOutputType); - if (isIntermediateTensor) { - - if (eOutputType == ETensorType::BOOL) { - return "fTensor_" + name; - } else { - // need to check is size is the same(don't want to return a vector with larger size) - // in that case better to copy - return "std::vector<" + ConvertTypeToString(eOutputType) + ">(tensor_" + name + ", tensor_" + name + " + " + - std::to_string(ConvertShapeToLength(rmodel.GetTensorShape(name))) + ")"; - } - } - // include also dynamic tensors since the vectors can be allocated with a size larger than their output - // we need a special handling for bool type allocated as vector - auto outputLength = ConvertDynamicShapeToLength(rmodel.GetDynamicTensorShape(name)); - if (rmodel.IsDynamicTensor(name) && eOutputType == ETensorType::BOOL) { - return "std::vector(fTensor_" + name + ".begin(), fTensor_" + name + ".begin() + " + outputLength + ")"; - } - return "std::vector<" + outputType + ">(tensor_" + name + ", tensor_" + name + " + " + outputLength + ")"; -} - -} // namespace - -void RModel::GenerateOutput_GPU_ALPAKA() { - - if (fVerbose) - std::cout << "Generating main inference code for " << fName << std::endl; - - size_t outputSize = fOutputTensorNames.size(); - // assume output types are all the same - if (outputSize == 0) - throw std::runtime_error("TMVA-SOFIE: output size=0 are not supported"); - - bool sameOutputTypes = true; - std::string inferReturnType; // type return by infer function - ETensorType eOutputType = GetTensorType(*fOutputTensorNames.begin()); - std::string outputType = ConvertTypeToString(eOutputType); - fGC += "\n\n"; - if (outputSize == 1) { - fGC += "std::vector<" + outputType + ">"; - } else { - // if all output types are the same we return an std::vector - otherwise a tuple - for (size_t i = 1; i < outputSize; i++) { - if (GetTensorType(fOutputTensorNames[i]) != eOutputType) - sameOutputTypes = false; - } - if (sameOutputTypes) - fGC += "std::vector>"; - else { - inferReturnType = "std::tuple<"; - for (size_t i = 0; i < outputSize; i++) { - inferReturnType += "std::vector<" + ConvertTypeToString(GetTensorType(fOutputTensorNames[i])) + ">"; - if (i < outputSize-1) inferReturnType += ","; - } - inferReturnType += ">"; - fGC += inferReturnType; - } - } - - fGC += " infer("; - - fGC += GenerateInferSignature(); - - fGC += "){\n"; - - for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { - if (fVerbose) std::cout << "Generating code for operator .... " << op_idx << std::endl; - fGC += (fOperators[op_idx]->Generate_GPU_ALPAKA(std::to_string(op_idx))); - } - - fGC += SP + "return {"; - for (size_t i = 0; i < outputSize; i++) { - std::string tensorName = *(fOutputTensorNames.begin() + i); - bool isIntermediate = fIntermediateTensorInfos.count(tensorName) > 0; - fGC += createOutputTensor(*this, tensorName, isIntermediate); - if (i < outputSize - 1) - fGC += ","; - } - fGC += "};\n"; - fGC += "}\n"; // end of infer function scope -} - void RModel::GenerateSessionCode() { @@ -992,117 +777,6 @@ void RModel::GenerateSessionCode() } } -void RModel::GenerateSessionCode_GPU_ALPAKA() -{ - - // define the Session struct (for GNN this is generated in RModel_GNN) - if (fUseSession) { - if (!fIsSubGraph) - fGC += "struct Session {\n\n"; - else - fGC += "struct Session_" + fName + " {\n\n"; - } - - // // generate code for declaring the initialized tensors - GenerateInitializedTensorInfo_GPU_ALPAKA(); - - // // evaluate total intermediate memory and position intermediate tensor addresses - // std::string intermediate_memory_alloc_string = ""; - // intermediate_memory_alloc_string += "\n// --- Positioning intermediate tensor memory --"; - // for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { - // intermediate_memory_alloc_string += AllocateIntermediateMemory(fOperators[op_idx]->GetOpOutputTensors()); - // CheckAndFlushIntermediateMemory(fOperators[op_idx]->GetOpInputTensors(), op_idx); - // } - - // // to check remaining unused fragments after memory allocation (lesser the better) - // // for (const auto &it: fIntermediateMemoryInfo.available_stack){ - // // std::cout<<"chunk_idx: "<fName + " fSession_" + graph->fName + ";\n"; - } - - // Generate code for Session constructor - if (fUseSession) { - std::string sessionName = "Session"; - if (fIsSubGraph) - sessionName += "_" + fName; - // add here specific operator code that needs to define session data members - // fGC += "\n"; - // for (size_t id = 0; id < fOperators.size(); id++) { - // std::string opName = std::to_string(id); - // fGC += fOperators[id]->GenerateSessionMembersCode(opName); - // } - fGC += "\n"; - // here add initialization and reading of weight tensors - if (fUseWeightFile) { - std::string fileName = fName; - if (fWeightFile == WeightFileType::Text) { - fileName += ".dat"; - } - if (fWeightFile == WeightFileType::RootBinary) { - fileName += ".root"; - } - fGC += sessionName + "(std::string filename =\"" + fileName + "\""; - } else { - // no need to pass weight file since it is not used - // keep passing a string for compatibility - fGC += sessionName + "(std::string = \"\""; - } - // add initialization of shape parameters - // assume all parameters are of type size_t - if (!fShapeParams.empty()) { - for (auto &p : fShapeParams) { - fGC += ",\n"; - fGC += " size_t " + p.first + " = " + p.second; - } - } - fGC += ") {\n"; - - if (fUseWeightFile) { - fGC += "\n//--- reading weights from file\n"; - ReadInitializedTensorsFromFile(0); - fGC += "\n"; - // fUseWeightFile = fUseWeightFile; - } - - MoveInitializedTensorsToBuffers_ALPAKA(); - - // now we have passed the parameters we can allocate the dynamic tensors - GenerateDynamicTensorInfo_GPU_ALPAKA(); - - // add here initialization code for operator - for (size_t id = 0; id < fOperators.size(); id++) { - fGC += fOperators[id]->GenerateInitCode_GPU_ALPAKA(); - } - - fGC += "}\n\n"; - } - // generate the inference code - GenerateOutput_GPU_ALPAKA(); - - // end of session - if (fUseSession && !fIsGNNComponent) { - fGC += "}; // end of Session\n"; - } -} - void RModel::Generate(std::underlying_type_t options, int batchSize, long pos, bool verbose) { fVerbose = verbose; @@ -1161,62 +835,6 @@ void RModel::Generate(std::underlying_type_t options, int batchSize, lo } } -void RModel::GenerateGPU_ALPAKA(std::underlying_type_t options, int batchSize, bool verbose) -{ - fVerbose = verbose; - fBatchSize = batchSize; - - // session flag is used in operator initialize - if (static_cast>(Options::kNoSession) & options) { - fUseSession = false; - fWeightFile = WeightFileType::None; - } - if (static_cast>(Options::kNoWeightFile) & options) { - fUseWeightFile = false; - fWeightFile = WeightFileType::None; - } - if (static_cast>(Options::kRootBinaryWeightFile) & options) { - fUseWeightFile = true; - fWeightFile = WeightFileType::RootBinary; - } - if (fUseWeightFile && !fUseSession) { - throw std::runtime_error( - "TMVA-SOFIE: RModel::Generate: cannot use a separate weight file without generating a Session class"); - } - - if (static_cast>(Options::kGNN) & options || static_cast>(Options::kGNNComponent) & options) - throw std::runtime_error("SOFIE GPU does not yet supports GNN Inference."); - - // initialize the model including all operators and sub-graphs - Initialize(batchSize, verbose); - - std::string hgname; - if (!fIsSubGraph) { - fGC.clear(); - GenerateHeaderInfo_GPU_ALPAKA(hgname); - } - - // generate first code for the subgraphs - // for (auto &graph : fSubGraphs) { - // if (fVerbose) - // std::cout << "generate session code for subgraph " << graph->fName << std::endl; - // graph->GenerateSessionCode(); - // fGC += graph->fGC; - // } - - if (fVerbose) - std::cout << "generate Main session code - model " << fName << std::endl; - - // generate main session code - GenerateSessionCode_GPU_ALPAKA(); - - if (!fIsSubGraph) { - fGC += ("} //SOFIE_" + fName + "\n"); - fGC += "\n#endif // " + hgname + "\n"; - } -} - - void RModel::ReadInitializedTensorsFromFile(long pos) { // generate the code to read initialized tensors from a text data file if (fWeightFile == WeightFileType::Text) { @@ -1302,31 +920,6 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { } } - void RModel::MoveInitializedTensorsToBuffers_ALPAKA(){ - for (auto &i : fInitializedTensors) { - // skip Constant and shape tensors - if (!i.second.IsWeightTensor()) continue; - std::string tensor_name = "tensor_" + i.first; - auto length = ConvertShapeToLength(i.second.shape()); - std::string slength = std::to_string(length); - if (i.second.type() == ETensorType::FLOAT) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(float));\n"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength+");\n"; - } else if (i.second.type() == ETensorType::DOUBLE) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(doub;e));"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength+");\n"; - } else if (i.second.type() == ETensorType::INT64) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(int64_t));"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf"+i.first+", "+slength+");\n"; - } else { - std::runtime_error("tmva-sofie tensor " + tensor_name + " with type " + ConvertTypeToString(i.second.type()) + " cannot be read from a ROOT file"); - } - } - } - long RModel::WriteInitializedTensorsToFile(std::string filename) { // Determine the file extension based on the weight file type std::string fileExtension; diff --git a/src/SOFIE_core/src/RModel_ALPAKA.cxx b/src/SOFIE_core/src/RModel_ALPAKA.cxx new file mode 100644 index 0000000..d0047c0 --- /dev/null +++ b/src/SOFIE_core/src/RModel_ALPAKA.cxx @@ -0,0 +1,366 @@ +#include +#include +#include +#include +#include + +#include "TFile.h" +#include "SOFIE/RModel.hxx" +#include "SOFIE/SOFIE_common.hxx" + +namespace SOFIE { + +//==================================================================== +// RModel - GPU Alpaka Codegen +//==================================================================== + +void RModel::GenerateInitializedTensorInfo_GPU_ALPAKA() { + if (!fInitializedTensors.empty()) + fGC += "\n// temporary initialized tensors for loading weights\n"; + + for (auto &i : fInitializedTensors) { + if (!fUseWeightFile || i.second.IsConstantTensor()) { + if (i.second.type() == ETensorType::FLOAT) + fGC += GenerateConstantTensorCode(i); + else if (i.second.type() == ETensorType::INT64) + fGC += GenerateConstantTensorCode(i); + + } else { + // case of tensors which are read from a file + size_t length = ConvertShapeToLength(i.second.shape()); + if (i.second.type() == ETensorType::FLOAT) { + fGC += "auto deviceBuf_" + i.first + + " = alpaka::allocBuf(devAcc, " + + std::to_string(length) + ");\n"; + } + } + } +} + +void RModel::GenerateTemporaryInitializedTensorContainers_GPU_ALPAKA() +{ + if (!fInitializedTensors.empty()) + fGC += "// initialized tensors\n"; + + for (auto &i : fInitializedTensors) { + if (!fUseWeightFile || i.second.IsConstantTensor()) { + if (i.second.type() == ETensorType::FLOAT) + fGC += GenerateConstantTensorCode(i); + else if (i.second.type() == ETensorType::INT64) + fGC += GenerateConstantTensorCode(i); + + } else { + // case of tensors which are read from a file + size_t length = ConvertShapeToLength(i.second.shape()); + if (i.second.type() == ETensorType::FLOAT) { + fGC += "float tensor_" + i.first + "[" + std::to_string(length) + "];\n"; + } + } + } +} + +void RModel::GenerateGPU_ALPAKA_Buffers() { + if (!fIntermediateTensorInfos.empty()) { + std::string tensor_declaration_block = ""; + + for (auto &i : fIntermediateTensorInfos) { + if (i.second.type == ETensorType::BOOL) { + tensor_declaration_block += "std::vector fTensor_" + i.first + + " = std::vector(" + + std::to_string(ConvertShapeToLength(i.second.shape)) + + ");\n"; + // No pointer allocation needed for BOOL + } + if (std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == + fOutputTensorNames.end()) { + size_t length = ConvertShapeToLength(i.second.shape); + + if (i.second.type == ETensorType::FLOAT) { + tensor_declaration_block += "auto bufDev_" + i.first + + " = alpaka::allocBuf(devAcc," + + std::to_string(length) + ");\n"; + } else if (i.second.type == ETensorType::DOUBLE) { + tensor_declaration_block += "auto bufDev_" + i.first + + " = alpaka::allocBuf(devAcc," + + std::to_string(length) + ");\n"; + } else if (i.second.type == ETensorType::INT64) { + tensor_declaration_block += "auto bufDev_" + i.first + + " = alpaka::allocBuf(devAcc," + + std::to_string(length) + ");\n"; + } + } + } + + if (tensor_declaration_block.length()) { + fGC += "\n//--- declare and allocate the intermediate tensors\n" + tensor_declaration_block; + } + } + + // add also the dynamic tensors (only declarations, allocation will be done later) + if (!fDynamicTensorInfos.empty()) { + fGC += "//--- declare the dynamic tensors\n"; + fGC += "using bufDev_float = alpaka::Buf, size_t>;\n"; + fGC += "using bufDev_double = alpaka::Buf, size_t>;\n"; + fGC += "using bufDev_int64 = alpaka::Buf, size_t>;\n"; + + for (auto &i : fDynamicTensorInfos) { + if (i.second.type == ETensorType::FLOAT) { + fGC += "bufDev_float bufDev_" + i.first + ";\n"; + } else if (i.second.type == ETensorType::DOUBLE) { + fGC += "bufDev_double bufDev_" + i.first + ";\n"; + } else if (i.second.type == ETensorType::INT64) { + fGC += "bufDev_int64 bufDev_" + i.first + ";\n"; + } + } + } +} + +void RModel::GenerateDynamicTensorInfo_GPU_ALPAKA() { + fGC += "//---- allocate the intermediate dynamic tensors\n"; + std::stringstream out; + + for (auto &i : fDynamicTensorInfos) { + auto length = ConvertDynamicShapeToLength(i.second.shape); + out << SP << "if (" << length << " > 0) {\n"; + out << "auto bufDev_" + i.first + + " = alpaka::allocBuf(devAcc," << length << ");\n"; + out << SP << "}\n"; + } + fGC += out.str(); +} + +namespace { + +std::string createOutputTensor(RModel const &rmodel, std::string const &name, bool isIntermediateTensor) +{ + if(name.empty()) return "{}"; + ETensorType eOutputType = rmodel.GetTensorType(name); + std::string outputType = ConvertTypeToString(eOutputType); + if (isIntermediateTensor) { + + if (eOutputType == ETensorType::BOOL) { + return "fTensor_" + name; + } else { + // need to check is size is the same(don't want to return a vector with larger size) + // in that case better to copy + return "std::vector<" + ConvertTypeToString(eOutputType) + ">(tensor_" + name + ", tensor_" + name + " + " + + std::to_string(ConvertShapeToLength(rmodel.GetTensorShape(name))) + ")"; + } + } + // include also dynamic tensors since the vectors can be allocated with a size larger than their output + // we need a special handling for bool type allocated as vector + auto outputLength = ConvertDynamicShapeToLength(rmodel.GetDynamicTensorShape(name)); + if (rmodel.IsDynamicTensor(name) && eOutputType == ETensorType::BOOL) { + return "std::vector(fTensor_" + name + ".begin(), fTensor_" + name + ".begin() + " + outputLength + ")"; + } + return "std::vector<" + outputType + ">(tensor_" + name + ", tensor_" + name + " + " + outputLength + ")"; +} + +} // namespace + +void RModel::GenerateOutput_GPU_ALPAKA() { + if (fVerbose) + std::cout << "Generating main inference code for " << fName << std::endl; + + size_t outputSize = fOutputTensorNames.size(); + if (outputSize == 0) + throw std::runtime_error("TMVA-SOFIE: output size=0 are not supported"); + + bool sameOutputTypes = true; + std::string inferReturnType; + ETensorType eOutputType = GetTensorType(*fOutputTensorNames.begin()); + std::string outputType = ConvertTypeToString(eOutputType); + + fGC += "\n\n"; + if (outputSize == 1) { + fGC += "std::vector<" + outputType + ">"; + } else { + for (size_t i = 1; i < outputSize; i++) { + if (GetTensorType(fOutputTensorNames[i]) != eOutputType) + sameOutputTypes = false; + } + if (sameOutputTypes) { + fGC += "std::vector>"; + } else { + inferReturnType = "std::tuple<"; + for (size_t i = 0; i < outputSize; i++) { + inferReturnType += "std::vector<" + + ConvertTypeToString(GetTensorType(fOutputTensorNames[i])) + + ">"; + if (i < outputSize - 1) + inferReturnType += ","; + } + inferReturnType += ">"; + fGC += inferReturnType; + } + } + + fGC += " infer("; + fGC += GenerateInferSignature(); + fGC += "){\n"; + + for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { + if (fVerbose) + std::cout << "Generating code for operator .... " << op_idx << std::endl; + fGC += (fOperators[op_idx]->Generate_GPU_ALPAKA(std::to_string(op_idx))); + } + + fGC += SP + "return {"; + for (size_t i = 0; i < outputSize; i++) { + std::string tensorName = *(fOutputTensorNames.begin() + i); + bool isIntermediate = fIntermediateTensorInfos.count(tensorName) > 0; + fGC += createOutputTensor(*this, tensorName, isIntermediate); + if (i < outputSize - 1) + fGC += ","; + } + fGC += "};\n"; + fGC += "}\n"; // end of infer function scope +} + +void RModel::GenerateSessionCode_GPU_ALPAKA() { + // define the Session struct (for GNN this is generated in RModel_GNN) + fGC += "template \n;"; + if (fUseSession) { + if (!fIsSubGraph) + fGC += "struct Session {\n\n"; + else + fGC += "struct Session_" + fName + " {\n\n"; + } + + // define host and device accelerators + fGC += "using Idx = alpaka::Idx;\n"; + fGC += "using devAcc = alpaka::AccGpuCudaRt, Idx, tagAcc>;\n"; + fGC += "using hostAcc = alpaka::AccCpuSerial, Idx>;\n\n"; + + + GenerateInitializedTensorInfo_GPU_ALPAKA(); + GenerateGPU_ALPAKA_Buffers(); + GenerateOperatorDeclarations(); + + // add subgraph session + if (!fSubGraphs.empty()) + fGC += "// subgraph sessions\n"; + for (auto &graph : fSubGraphs) { + fGC += "Session_" + graph->fName + " fSession_" + graph->fName + ";\n"; + } + + // Session constructor + if (fUseSession) { + std::string sessionName = "\n\nSession"; + if (fIsSubGraph) + sessionName += "_" + fName; + + if (fUseWeightFile) { + std::string fileName = fName; + if (fWeightFile == WeightFileType::Text) + fileName += ".dat"; + if (fWeightFile == WeightFileType::RootBinary) + fileName += ".root"; + + fGC += sessionName + "(std::string filename =\"" + fileName + "\""; + } else { + fGC += sessionName + "(std::string = \"\""; + } + + if (!fShapeParams.empty()) { + for (auto &p : fShapeParams) { + fGC += ",\n"; + fGC += " size_t " + p.first + " = " + p.second; + } + } + fGC += ") {\n"; + + GenerateTemporaryInitializedTensorContainers_GPU_ALPAKA(); + if (fUseWeightFile) { + fGC += "\n//--- reading weights from file\n"; + ReadInitializedTensorsFromFile(0); + fGC += "\n"; + } + + MoveInitializedTensorsToBuffers_ALPAKA(); + GenerateDynamicTensorInfo_GPU_ALPAKA(); + + for (size_t id = 0; id < fOperators.size(); id++) { + fGC += fOperators[id]->GenerateInitCode_GPU_ALPAKA(); + } + + fGC += "}\n\n"; + } + + GenerateOutput_GPU_ALPAKA(); + + if (fUseSession && !fIsGNNComponent) { + fGC += "}; // end of Session\n"; + } +} + +void RModel::GenerateGPU_ALPAKA(std::underlying_type_t options, int batchSize, bool verbose) { + fVerbose = verbose; + fBatchSize = batchSize; + + if (static_cast>(Options::kNoSession) & options) { + fUseSession = false; + fWeightFile = WeightFileType::None; + } + if (static_cast>(Options::kNoWeightFile) & options) { + fUseWeightFile = false; + fWeightFile = WeightFileType::None; + } + if (static_cast>(Options::kRootBinaryWeightFile) & options) { + fUseWeightFile = true; + fWeightFile = WeightFileType::RootBinary; + } + if (fUseWeightFile && !fUseSession) { + throw std::runtime_error( + "TMVA-SOFIE: RModel::Generate: cannot use a separate weight file without generating a Session class"); + } + + if (static_cast>(Options::kGNN) & options || + static_cast>(Options::kGNNComponent) & options) + throw std::runtime_error("SOFIE GPU does not yet supports GNN Inference."); + + Initialize(batchSize, verbose); + + std::string hgname; + if (!fIsSubGraph) { + fGC.clear(); + GenerateHeaderInfo_GPU_ALPAKA(hgname); + } + + if (fVerbose) + std::cout << "generate Main session code - model " << fName << std::endl; + + GenerateSessionCode_GPU_ALPAKA(); + + if (!fIsSubGraph) { + fGC += ("} //SOFIE_" + fName + "\n"); + fGC += "\n#endif // " + hgname + "\n"; + } +} + +void RModel::MoveInitializedTensorsToBuffers_ALPAKA(){ + for (auto &i : fInitializedTensors) { + // skip Constant and shape tensors + if (!i.second.IsWeightTensor()) continue; + std::string tensor_name = "tensor_" + i.first; + auto length = ConvertShapeToLength(i.second.shape()); + std::string slength = std::to_string(length); + if (i.second.type() == ETensorType::FLOAT) { + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(float));\n"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+", "+slength+");\n"; + } else if (i.second.type() == ETensorType::DOUBLE) { + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(doub;e));"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+", "+slength+");\n"; + } else if (i.second.type() == ETensorType::INT64) { + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(int64_t));"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+", "+slength+");\n"; + } else { + std::runtime_error("tmva-sofie tensor " + tensor_name + " with type " + ConvertTypeToString(i.second.type()) + " cannot be read from a ROOT file"); + } + } + } + +} // namespace SOFIE diff --git a/src/SOFIE_core/src/RModel_Base.cxx b/src/SOFIE_core/src/RModel_Base.cxx index b5524d1..f212c53 100644 --- a/src/SOFIE_core/src/RModel_Base.cxx +++ b/src/SOFIE_core/src/RModel_Base.cxx @@ -75,8 +75,7 @@ void RModel_Base::GenerateHeaderInfo_GPU_ALPAKA(std::string& hgname) { fGC += "#include \"" + i + "\"\n"; } fGC += "#include \n"; - fGC += "#include \n"; - fGC += "#include \n"; + fGC += "#include \n"; // for the session we need to include SOFIE_Common functions //needed for convolution operator (need to add a flag) @@ -88,8 +87,6 @@ void RModel_Base::GenerateHeaderInfo_GPU_ALPAKA(std::string& hgname) { fGC += "#include \"TFile.h\"\n"; fGC += "\nusing Dim1D = alpaka::DimInt<1>;\n"; - fGC += "using Acc = alpaka::TagToAcc;\n"; - fGC += "using Queue = alpaka::Queue;\n"; fGC += "\nnamespace SOFIE_" + fName + "{\n"; } diff --git a/src/SOFIE_core/src/SOFIE_common.cxx b/src/SOFIE_core/src/SOFIE_common.cxx index ad74313..bb288cf 100644 --- a/src/SOFIE_core/src/SOFIE_common.cxx +++ b/src/SOFIE_core/src/SOFIE_common.cxx @@ -423,5 +423,40 @@ std::vector UTILITY::ComputeStrideFromShape(const std::vector & shape) return strides; } +template +std::string GenerateConstantTensorCode(const std::pair &t) +{ + std::stringstream strs; + std::string type = ConvertTypeToString(t.second.type()); + size_t length = ConvertShapeToLength(t.second.shape()); + // avoid using stack sizes for constant tensors to reduce compilation time + bool allocateOnStack = (length > 100) ? false : true; + + const T *data = t.second.data(); + + // and check if all values are the same + bool sameData = false; + // for non stack allocation check if data are the same + if (!allocateOnStack && length > 1) { + size_t idx = 1; + do { + sameData = (data[idx] == data[idx - 1]); + idx++; + } while (sameData && idx < length); + } + if (allocateOnStack) { + strs << type << " tensor_" << t.first << "[" << length << "] = " << ConvertValuesToString(length, data) << ";\n"; + } else { + strs << "std::vector<" << type << "> fTensor_" << t.first << " = "; + if (sameData) + strs << "std::vector<" << type << ">(" << length << ", " << ConvertValToString(data[0]) << ");\n"; + else { + strs << ConvertValuesToString(length, data) << ";\n"; + } + strs << "const " << type << " * tensor_" + t.first + " = fTensor_" + t.first + ".data();\n"; + } + return strs.str(); +} + }//SOFIE From e31303fe9a9f91ad4b904a4e21ddd755361c1ddd Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Tue, 14 Oct 2025 09:26:53 +0200 Subject: [PATCH 05/22] feat: use sofieblas efficiently and add leaky relu, sigmoid support --- .vscode/settings.json | 7 +- src/.vscode/settings.json | 61 +++ src/SOFIE_core/inc/SOFIE/RModel.hxx | 6 + src/SOFIE_core/inc/SOFIE/ROperator.hxx | 54 +- .../inc/SOFIE/ROperator_BasicBinary.hxx | 462 +++++++++++++----- src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx | 59 ++- .../inc/SOFIE/ROperator_LeakyRelu.hxx | 52 ++ src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx | 52 +- .../inc/SOFIE/ROperator_Sigmoid.hxx | 52 ++ src/SOFIE_core/src/RModel.cxx | 56 ++- src/SOFIE_core/src/RModel_ALPAKA.cxx | 185 ++++--- src/SOFIE_core/src/RModel_Base.cxx | 4 +- 12 files changed, 784 insertions(+), 266 deletions(-) create mode 100644 src/.vscode/settings.json diff --git a/.vscode/settings.json b/.vscode/settings.json index 381ce8f..fd03126 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -72,6 +72,9 @@ "thread": "cpp", "cfenv": "cpp", "variant": "cpp", - "format": "cpp" + "format": "cpp", + "any": "cpp", + "source_location": "cpp", + "run_inference_particle_net.C": "cpp" } -} \ No newline at end of file +} diff --git a/src/.vscode/settings.json b/src/.vscode/settings.json new file mode 100644 index 0000000..8bc121a --- /dev/null +++ b/src/.vscode/settings.json @@ -0,0 +1,61 @@ +{ + "files.associations": { + "*.icc": "cpp", + "iostream": "cpp", + "ostream": "cpp", + "cctype": "cpp", + "clocale": "cpp", + "cmath": "cpp", + "cstdarg": "cpp", + "cstddef": "cpp", + "cstdio": "cpp", + "cstdlib": "cpp", + "cstring": "cpp", + "ctime": "cpp", + "cwchar": "cpp", + "cwctype": "cpp", + "array": "cpp", + "atomic": "cpp", + "bit": "cpp", + "bitset": "cpp", + "compare": "cpp", + "complex": "cpp", + "concepts": "cpp", + "cstdint": "cpp", + "deque": "cpp", + "map": "cpp", + "set": "cpp", + "string": "cpp", + "unordered_map": "cpp", + "unordered_set": "cpp", + "vector": "cpp", + "exception": "cpp", + "algorithm": "cpp", + "functional": "cpp", + "iterator": "cpp", + "memory": "cpp", + "memory_resource": "cpp", + "numeric": "cpp", + "optional": "cpp", + "random": "cpp", + "regex": "cpp", + "string_view": "cpp", + "system_error": "cpp", + "tuple": "cpp", + "type_traits": "cpp", + "utility": "cpp", + "fstream": "cpp", + "initializer_list": "cpp", + "iomanip": "cpp", + "iosfwd": "cpp", + "istream": "cpp", + "limits": "cpp", + "new": "cpp", + "numbers": "cpp", + "sstream": "cpp", + "stdexcept": "cpp", + "streambuf": "cpp", + "cinttypes": "cpp", + "typeinfo": "cpp" + } +} \ No newline at end of file diff --git a/src/SOFIE_core/inc/SOFIE/RModel.hxx b/src/SOFIE_core/inc/SOFIE/RModel.hxx index dbee25b..6083e47 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel.hxx @@ -150,6 +150,10 @@ public: // used to infer the sub-graphs std::string GenerateInferSignature(bool isdecl = true); + void RemoveIntermediateTensor(const std::string& tensor_name){ + fIntermediateTensorInfos.erase(tensor_name); + } + // calculate total intermediate memory and position intermediate tensor addresses std::string AllocateIntermediateMemory(std::span op_output_tensors); void CheckAndFlushIntermediateMemory(std::span op_output_tensors, const size_t& op_idx); @@ -185,6 +189,8 @@ protected: void GenerateSessionCode_GPU_ALPAKA(); void GenerateGPU_ALPAKA_Buffers(); + void CheckAndFuseOperators(); + public: const std::vector &GetInputTensorNames() const { return fInputTensorNames; } const std::vector &GetOutputTensorNames() const { return fOutputTensorNames; } diff --git a/src/SOFIE_core/inc/SOFIE/ROperator.hxx b/src/SOFIE_core/inc/SOFIE/ROperator.hxx index f7db548..0ad57b3 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator.hxx @@ -2,6 +2,7 @@ #define SOFIE_ROPERATOR #include +#include #include #include "SOFIE/SOFIE_common.hxx" @@ -14,6 +15,33 @@ namespace SOFIE{ class RModel; +enum class OperatorKind { + GEMM = 0, + LAYERNORM = 1, + RELU = 2, + CONSTANT = 3, + CONSTANTOFSHAPE = 4, + UNDEFINED = 5, + CONV=6, + BATCHNORM=7 +}; + +inline const char* toString(OperatorKind kind) { + switch (kind) { + case OperatorKind::GEMM: return "GEMM"; + case OperatorKind::LAYERNORM: return "LAYERNORM"; + case OperatorKind::RELU: return "RELU"; + case OperatorKind::CONSTANT: return "CONSTANT"; + case OperatorKind::CONSTANTOFSHAPE: return "CONSTANTOFSHAPE"; + case OperatorKind::BATCHNORM: return "batchnorm"; + case OperatorKind::CONV: return "conv"; + case OperatorKind::UNDEFINED: return "UNDEFINED"; + default: return "UNKNOWN"; + } +} + +inline std::set FusableKinds = { OperatorKind::RELU, OperatorKind::LAYERNORM, OperatorKind::BATCHNORM}; + class ROperator{ @@ -32,29 +60,45 @@ public: virtual std::string GenerateDeclCode() { return "";} // generate session data members specific to operator virtual std::string GenerateSessionMembersCode(std::string /*opName*/) { return ""; } + virtual std::string Generate_GPU_Kernel_ALPAKA() { return ""; } + virtual std::string Generate_GPU_Kernel_Definitions_ALPAKA() { return ""; } virtual std::string Header() { return "";} + virtual std::string GetFusableOutputTensorName() { return "";} + virtual std::string GetBlasConfig() { return ""; } + virtual void UpdateFusableTensorName(std::string, const std::function& removal_func){ return;}; + //virtual void Forward_reference() = 0; //virtual void Forward_blas() = 0; virtual ~ROperator(){} protected: - + OperatorKind fKind = OperatorKind::UNDEFINED; + size_t fOpOrder = 0; const std::string SP = " "; ///< space used to correctly indent the generated C++ code bool fUseSession = false; ///< flag to identify if using the session class bool fIsOutputConstant = false; ///< flag to identify if operator has a constant output (no need to generate code) - mutable std::vector fInputTensorNames; - mutable std::vector fOutputTensorNames; + mutable std::vector fInputTensorNames; + mutable std::vector fOutputTensorNames; public: - std::span GetOpInputTensors() const { + std::span GetOpInputTensors() const { return fInputTensorNames; } - std::span GetOpOutputTensors() const { + std::span GetOpOutputTensors() const { return fOutputTensorNames; } + + OperatorKind GetKind() const { return fKind; } + + void RegisterOperatorOrder(const size_t ord){ + fOpOrder = ord; + } + size_t GetOpOrder(){ + return fOpOrder; + } }; diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx index 127eaff..756c3b9 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx @@ -1,15 +1,23 @@ -#ifndef SOFIE_ROperator_BasicBinary -#define SOFIE_ROperator_BasicBinary +#ifndef TMVA_SOFIE_ROperator_BasicBinary +#define TMVA_SOFIE_ROperator_BasicBinary -#include "SOFIE/SOFIE_common.hxx" -#include "SOFIE/ROperator.hxx" -#include "SOFIE/RModel.hxx" +#include "TMVA/SOFIE_common.hxx" +#include "TMVA/ROperator.hxx" +#include "TMVA/RModel.hxx" #include -namespace SOFIE{ +namespace TMVA { +namespace Experimental { +namespace SOFIE { -enum EBasicBinaryOperator { Add, Sub, Mul, Div, Pow }; +enum EBasicBinaryOperator { + Add, + Sub, + Mul, + Div, + Pow +}; template struct BinaryOperatorTrait {}; @@ -17,42 +25,42 @@ struct BinaryOperatorTrait {}; template struct BinaryOperatorTrait { static const std::string Name() { return "Add"; } - static std::string Op(const std::string & t1, const std::string t2) { return t1 + " + " + t2; } - static T Func(T t1, T t2) {return t1 + t2;} + static std::string Op(const std::string &t1, const std::string t2) { return t1 + " + " + t2; } + static T Func(T t1, T t2) { return t1 + t2; } }; template struct BinaryOperatorTrait { static const std::string Name() { return "Sub"; } - static std::string Op(const std::string & t1, const std::string t2) { return t1 + " - " + t2; } - static T Func (T t1, T t2) { return t1 - t2;} + static std::string Op(const std::string &t1, const std::string t2) { return t1 + " - " + t2; } + static T Func(T t1, T t2) { return t1 - t2; } }; template struct BinaryOperatorTrait { static const std::string Name() { return "Mul"; } - static std::string Op(const std::string & t1, const std::string t2) { return t1 + " * " + t2; } - static T Func (T t1, T t2) { return t1 * t2;} + static std::string Op(const std::string &t1, const std::string t2) { return t1 + " * " + t2; } + static T Func(T t1, T t2) { return t1 * t2; } }; template struct BinaryOperatorTrait { static const std::string Name() { return "Div"; } - static std::string Op(const std::string & t1, const std::string t2) { return t1 + " / " + t2; } - static T Func (T t1, T t2) { return t1/t2;} + static std::string Op(const std::string &t1, const std::string t2) { return t1 + " / " + t2; } + static T Func(T t1, T t2) { return t1 / t2; } }; template struct BinaryOperatorTrait { static const std::string Name() { return "Pow"; } - static std::string Op(const std::string & t1, const std::string t2) { return "std::pow(" + t1 + "," + t2 + ")"; } - static T Func (T t1, T t2) { return std::pow(t1,t2);} + static std::string Op(const std::string &t1, const std::string t2) { return "std::pow(" + t1 + "," + t2 + ")"; } + static T Func(T t1, T t2) { return std::pow(t1, t2); } }; -template -class ROperator_BasicBinary final : public ROperator{ +template +class ROperator_BasicBinary final : public ROperator { private: - + int fBroadcastFlag = 0; std::string fNA; std::string fNB; std::string fNBroadcastedA; @@ -63,154 +71,364 @@ private: std::vector fShapeB; std::vector fShapeY; + std::vector fDimShapeA; + std::vector fDimShapeB; + std::vector fDimShapeY; + public: - ROperator_BasicBinary(){} - ROperator_BasicBinary(std::string nameA, std::string nameB, std::string nameY): - fNA(UTILITY::Clean_name(nameA)), fNB(UTILITY::Clean_name(nameB)), fNY(UTILITY::Clean_name(nameY)){ - fInputTensorNames = { fNA, fNB }; - fOutputTensorNames = { fNY }; - } + ROperator_BasicBinary() {} + ROperator_BasicBinary(std::string nameA, std::string nameB, std::string nameY) + : fNA(UTILITY::Clean_name(nameA)), fNB(UTILITY::Clean_name(nameB)), fNY(UTILITY::Clean_name(nameY)) + { + fInputTensorNames = {fNA, fNB}; + fOutputTensorNames = {fNY}; + } // type of output given input - std::vector TypeInference(std::vector input) override { - return input; - } + std::vector TypeInference(std::vector input) override { return input; } // shape of output tensors given input tensors - std::vector> ShapeInference(std::vector> input) override { + std::vector> ShapeInference(std::vector> input) override + { // assume now inputs have same shape (no broadcasting) auto ret = std::vector>(1, input[0]); // return vector size 1 with first input return ret; } - void Initialize(RModel& model) override { + void Initialize(RModel &model) override + { // input must be a graph input, or already initialized intermediate tensor - if (!model.CheckIfTensorAlreadyExist(fNA)){ + if (!model.CheckIfTensorAlreadyExist(fNA)) { throw std::runtime_error(std::string("TMVA SOFIE Binary Op Input Tensor ") + fNA + "is not found in model"); } if (!model.CheckIfTensorAlreadyExist(fNB)) { throw std::runtime_error(std::string("TMVA SOFIE Binary Op Input Tensor ") + fNB + "is not found in model"); } - fShapeA = model.GetTensorShape(fNA); - fShapeB = model.GetTensorShape(fNB); - bool broadcast = !UTILITY::AreSameShape(fShapeA, fShapeB); - if (broadcast) { - // Y is the common shape of A and B - fShapeY = UTILITY::UnidirectionalBroadcastShape(fShapeA, fShapeB); - bool broadcastA = !UTILITY::AreSameShape(fShapeA, fShapeY); - bool broadcastB = !UTILITY::AreSameShape(fShapeB, fShapeY); - // Broadcast A to Y - if (broadcastA) { - fNBroadcastedA = "Broadcasted" + fNA + "to" + fNY; - if (model.IsInitializedTensor(fNA)) { - auto data = model.GetInitializedTensorData(fNA); - std::shared_ptr broadcastedData( - UTILITY::UnidirectionalBroadcast(static_cast(data.get()), fShapeA, fShapeY), - std::default_delete()); - // Update the data and the shape of A - model.AddConstantTensor(fNBroadcastedA, model.GetTensorType(fNA), fShapeY, broadcastedData); - fShapeA = fShapeY; + int dynamicInputs = 0; + if (model.IsDynamicTensor(fNA)) { + fDimShapeA = model.GetDynamicTensorShape(fNA); + dynamicInputs |= 1; + } else { + fShapeA = model.GetTensorShape(fNA); + fDimShapeA = ConvertShapeToDim(fShapeA); + } + if (model.IsDynamicTensor(fNB)) { + dynamicInputs |= 2; + fDimShapeB = model.GetDynamicTensorShape(fNB); + } else { + fShapeB = model.GetTensorShape(fNB); + fDimShapeB = ConvertShapeToDim(fShapeB); + } + if (dynamicInputs & 1 && model.Verbose()) + std::cout << BinaryOperatorTrait::Name() << " : input " << fNA << " is dynamic " + << ConvertShapeToString(fDimShapeA) << " "; + if (dynamicInputs & 2 && model.Verbose()) + std::cout << BinaryOperatorTrait::Name() << " : input " << fNB << " is dynamic " + << ConvertShapeToString(fDimShapeB) << " "; + std::cout << std::endl; + // check if need to broadcast at initialization time if shapes are known and different + // (we could broadcast the tensor tensor to maximum values of dynamic shapes - to be done) + // case of known shapes + // if shapes are known find the output shape from broadcasting + if (dynamicInputs == 0) { + auto ret = UTILITY::MultidirectionalBroadcastShape(fShapeA, fShapeB); + fBroadcastFlag = ret.first; + fShapeY = ret.second; + if (model.IsConstantTensor(fNA) && model.IsConstantTensor(fNB)) { + bool broadcast = fBroadcastFlag > 0; + if (broadcast) { + // Y is the common shape of A and B + bool broadcastA = fBroadcastFlag & 2; + bool broadcastB = fBroadcastFlag & 1; + // Broadcast A to Y + if (broadcastA) { + fNBroadcastedA = "Broadcasted" + fNA + "to" + fNY; + auto data = model.GetInitializedTensorData(fNA); + std::shared_ptr broadcastedData( + UTILITY::UnidirectionalBroadcast(static_cast(data.get()), fShapeA, fShapeY), + std::default_delete()); + if (model.Verbose()) + std::cout << "broadcasted data A " << ConvertShapeToString(fShapeY) << " : " + << ConvertValuesToString(ConvertShapeToLength(fShapeY), + static_cast(broadcastedData.get())) + << std::endl; + // Update the data and the shape of A + model.AddConstantTensor(fNBroadcastedA, model.GetTensorType(fNA), fShapeY, broadcastedData); + fShapeA = fShapeY; + fDimShapeA = ConvertShapeToDim(fShapeA); + } + // Broadcast B to Y + if (broadcastB) { + fNBroadcastedB = "Broadcasted" + fNB + "to" + fNY; + auto data = model.GetInitializedTensorData(fNB); + if (model.Verbose()) + std::cout << "data B " << ConvertShapeToString(fShapeB) << " : " + << ConvertValuesToString(ConvertShapeToLength(fShapeB), static_cast(data.get())) + << std::endl; + std::shared_ptr broadcastedData( + UTILITY::UnidirectionalBroadcast(static_cast(data.get()), fShapeB, fShapeY), + std::default_delete()); + // do not update tensor B but add broadcasted one (since it can be input to some other operators) + if (model.Verbose()) + std::cout << "broadcasted data B " << ConvertShapeToString(fShapeY) << " : " + << ConvertValuesToString(ConvertShapeToLength(fShapeY), + static_cast(broadcastedData.get())) + << std::endl; + model.AddConstantTensor(fNBroadcastedB, model.GetTensorType(fNB), fShapeY, broadcastedData); + fShapeB = fShapeY; + fDimShapeB = ConvertShapeToDim(fShapeB); + } } else { - // Add an intermediate tensor for broadcasting A - model.AddIntermediateTensor(fNBroadcastedA, model.GetTensorType(fNA), fShapeY); + fShapeY = fShapeA; } - } - // Broadcast B to Y - if (broadcastB) { - fNBroadcastedB = "Broadcasted" + fNB + "to" + fNY; - if (model.IsInitializedTensor(fNB)) { - auto data = model.GetInitializedTensorData(fNB); - std::cout << "data B " << ConvertShapeToString(fShapeB) << " : " << - ConvertValuesToString(ConvertShapeToLength(fShapeB), static_cast(data.get())) << std::endl; - std::shared_ptr broadcastedData( - UTILITY::UnidirectionalBroadcast(static_cast(data.get()), fShapeB, fShapeY), - std::default_delete()); - // do not update tensor B but add broadcasted one (since it can be input to some other operators) - std::cout << "broadcasted data B " << ConvertShapeToString(fShapeY) << " : " << - ConvertValuesToString(ConvertShapeToLength(fShapeY), static_cast(broadcastedData.get())) << std::endl; - model.AddConstantTensor(fNBroadcastedB, model.GetTensorType(fNB), fShapeY, broadcastedData); - fShapeB = fShapeY; - } else { - // Add an intermediate tensor for broadcasting B - model.AddIntermediateTensor(fNBroadcastedB, model.GetTensorType(fNB), fShapeY); + // tensors are constant: perform here the binary operation + + const std::string &nameA = fNBroadcastedA.empty() ? fNA : fNBroadcastedA; + const std::string &nameB = fNBroadcastedB.empty() ? fNB : fNBroadcastedB; + auto dataA = static_cast(model.GetInitializedTensorData(nameA).get()); + auto dataB = static_cast(model.GetInitializedTensorData(nameB).get()); + std::vector dataY(ConvertShapeToLength(fShapeY)); + for (size_t i = 0; i < dataY.size(); i++) { + dataY[i] = BinaryOperatorTrait::Func(dataA[i], dataB[i]); + } + model.AddConstantTensor(fNY, fShapeY, dataY.data()); + // flag tensors to not be written in the weight file + model.SetNotWritableInitializedTensor(nameA); + model.SetNotWritableInitializedTensor(nameB); + fIsOutputConstant = true; + if (model.Verbose()) { + std::cout << BinaryOperatorTrait::Name() << " : " << fNA << " " << ConvertShapeToString(fShapeA) + << " , " << fNB << " " << ConvertShapeToString(fShapeB) << " ---> " << fNY << " " + << ConvertShapeToString(fShapeY) << " : " << ConvertValuesToString(dataY) << std::endl; } + } else { + // case of defined and non-constant tensors + model.AddIntermediateTensor(fNY, model.GetTensorType(fNA), fShapeY); + if (model.Verbose()) { + std::cout << BinaryOperatorTrait::Name() << " : " << fNA << " " << ConvertShapeToString(fShapeA) + << " , " << fNB << " " << ConvertShapeToString(fShapeB) << " ---> " << fNY << " " + << ConvertShapeToString(fShapeY) << std::endl; + } + // we convert non-dim shapes to Dim shapes + fDimShapeY = ConvertShapeToDim(fShapeY); } } else { - fShapeY = fShapeA; - } - // check case of constant output (if all inputs are defined) - if (model.IsInitializedTensor(fNA) && model.IsInitializedTensor(fNB)) { - const std::string& nameA = fNBroadcastedA.empty()? fNA : fNBroadcastedA; - const std::string& nameB = fNBroadcastedB.empty()? fNB : fNBroadcastedB; - auto dataA = static_cast(model.GetInitializedTensorData(nameA).get()); - auto dataB = static_cast(model.GetInitializedTensorData(nameB).get()); - std::vector dataY(ConvertShapeToLength(fShapeY)); - for (size_t i = 0; i < dataY.size(); i++) { - dataY[i] = BinaryOperatorTrait::Func(dataA[i], dataB[i]); + // case A or B have dynamic shapes. We need to broadcast if shape are not same + auto ret = UTILITY::MultidirectionalBroadcastShape(fDimShapeA, fDimShapeB); + fBroadcastFlag = ret.first; + fDimShapeY = ret.second; + // case of all parametric shapes and MultiDirectionalBroadcastShape return the max of the 2 + // need to do before we declare the output tensor shape and the broadcasted ones + if (ret.first & 4) { + // check if one of the parameter is an input dimension + // define function to find this + auto IsInputDimParam = [&](const std::string &p) { + auto inputNames = model.GetInputTensorNames(); + for (auto &input : inputNames) { + for (auto &i_s : model.GetDimTensorShape(input)) { + if (i_s.isParam && i_s.param == p) + return true; + } + } + return false; + }; + for (size_t i = 0; i < fDimShapeY.size(); i++) { + auto &s = fDimShapeY[i]; + if (s.isParam && s.param.find("std::max") != std::string::npos) { + if (IsInputDimParam(fDimShapeA[i].param)) { + // case dim is 1 we indicate that the input parameter is equal to 1 + if (fDimShapeA[i].dim != 1) + s = fDimShapeA[i]; + else + s = fDimShapeB[i]; + } else if (IsInputDimParam(fDimShapeB[i].param)) { + if (fDimShapeB[i].dim != 1) + s = fDimShapeB[i]; + else + s = fDimShapeA[i]; + } + } + } + } + + model.AddIntermediateTensor(fNY, model.GetTensorType(fNA), fDimShapeY); + if (model.Verbose()) { + std::cout << BinaryOperatorTrait::Name() << " : " << ConvertShapeToString(fDimShapeA) << " , " + << ConvertShapeToString(fDimShapeB) << " --> " << ConvertShapeToString(fDimShapeY) << std::endl; } - model.AddConstantTensor(fNY, fShapeY, dataY.data()); - // flag tensors to not be written in a fil - model.SetNotWritableInitializedTensor(nameA); - model.SetNotWritableInitializedTensor(nameB); - fIsOutputConstant = true; - if (model.Verbose()) - std::cout << "Binary op ---> " << fNY << " " << ConvertShapeToString(fShapeY) << " : " - << ConvertValuesToString(dataY) << std::endl; - } - else { - model.AddIntermediateTensor(fNY, model.GetTensorType(fNA), fShapeY); } } - std::string GenerateInitCode() override { + std::string GenerateInitCode() override + { std::stringstream out; return out.str(); } - std::string Generate(std::string OpName) override { + std::string Generate(std::string opName) override + { - if (fIsOutputConstant) return ""; + if (fIsOutputConstant) + return ""; - OpName = "op_" + OpName; + opName = "op_" + opName; - if (fShapeY.empty()) { + if (fDimShapeY.empty()) { throw std::runtime_error("TMVA SOFIE Binary Op called to Generate without being initialized first"); } std::stringstream out; - out << SP << "\n//------ " << BinaryOperatorTrait::Name() << "\n"; - size_t length = ConvertShapeToLength(fShapeY); + out << SP << "\n//------ " << opName << " " << BinaryOperatorTrait::Name() << " --> " + << ConvertDimShapeToString(fDimShapeY) << "\n"; + auto length = ConvertDimShapeToLength(fDimShapeY); std::string typeName = TensorType::Name(); - // Broadcast A if it's uninitialized - // use broadcasting function where we pass an already allocated tensor to minimize memory allocations - if (fShapeA != fShapeY) { - out << SP << "// Broadcasting uninitialized tensor " << fNA << "\n"; - out << SP << "SOFIE::UTILITY::UnidirectionalBroadcast<" << typeName << ">(tensor_" << fNA << ", " << ConvertShapeToString(fShapeA) << ", " << ConvertShapeToString(fShapeY) - << ", fTensor_" << fNBroadcastedA << ");\n"; + + // we need to check if we can broadcast (case flag has bit 4 set) + + if (fBroadcastFlag & 4) { + // need to check if shapes are the same + auto lengthA = ConvertDimShapeToLength(fDimShapeA); + auto lengthB = ConvertDimShapeToLength(fDimShapeB); + out << SP << "if (" << lengthA << "!=" << lengthB << ") {\n"; + // check if A->B or B->A + // bool broadcastable = true; + for (size_t i = 0; i < fDimShapeY.size(); i++) { + if (fBroadcastFlag & 5 && fDimShapeY[i] == fDimShapeA[i] && fDimShapeA[i].dim > 1 && + fDimShapeB[i].isParam) { + // B->A B[i] needs to be 1 + out << SP << SP << "if (" << fDimShapeB[i] << "!= 1)\n"; + out << SP << SP << SP << "throw std::runtime_error(\"SOFIE - Cannot broadcast B->A in operator " + << opName << "\");\n"; + } + if (fBroadcastFlag & 6 && fDimShapeY[i] == fDimShapeB[i] && fDimShapeB[i].dim > 1 && + fDimShapeA[i].isParam) { + // A-> B A[i] needs to be 1 + out << SP << SP << "if (" << fDimShapeA[i] << "!= 1)\n"; + out << SP << SP << SP << "throw std::runtime_error(\"SOFIE - Cannot broadcast A->B in operator " + << opName << "\");\n"; + } else if (fDimShapeA[i].isParam && fDimShapeB[i].isParam) { + // both shapes are parametric and we broadcast to maximum + // we allocate here output vector + out << SP << SP << "if (" << fDimShapeA[i] << " != " << fDimShapeB[i] << " && (" << fDimShapeA[i] + << " != 1 || " << fDimShapeB[i] << " != 1))\n"; + out << SP << SP << SP << "throw std::runtime_error(\"SOFIE - Cannot broadcast shapes in operator " << opName + << "\");\n"; + } + } + out << SP << "}\n"; + } + + auto stridesA = UTILITY::ComputeStrideFromShape(fDimShapeA); + auto stridesB = UTILITY::ComputeStrideFromShape(fDimShapeB); + auto stridesY = UTILITY::ComputeStrideFromShape(fDimShapeY); + + std::string compute_idx_A, compute_idx_B, compute_idx_Y; + if (fDimShapeA.empty() || + std::all_of(fDimShapeA.begin(), fDimShapeA.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { + compute_idx_A = "0"; + } else { + for (size_t i = 0; i < fDimShapeA.size(); ++i) { + if (fDimShapeA[i].dim == 1 || fDimShapeA[i].GetVal() == "1") + continue; + compute_idx_A += "idx_" + std::to_string(i + (fDimShapeY.size() - fDimShapeA.size())); + if (stridesA[i].GetVal() != "1") + compute_idx_A += " * " + stridesA[i].GetVal(); + compute_idx_A += " + "; + } + // remove last 3 character " + " + for (int j = 0; j < 3; j++) + compute_idx_A.pop_back(); + } + if (fDimShapeB.empty() || + std::all_of(fDimShapeB.begin(), fDimShapeB.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { + compute_idx_B = "0"; + } else { + for (size_t i = 0; i < fDimShapeB.size(); ++i) { + if (fDimShapeB[i].dim == 1 || fDimShapeB[i].GetVal() == "1") + continue; + compute_idx_B += "idx_" + std::to_string(i + (fDimShapeY.size() - fDimShapeB.size())); + if (stridesB[i].GetVal() != "1") + compute_idx_B += " * " + stridesB[i].GetVal(); + compute_idx_B += " + "; + } + // remove last 3 character " + " + for (int j = 0; j < 3; j++) + compute_idx_B.pop_back(); } - // Broadcast B if it's uninitialized - if (fShapeB != fShapeY) { - out << SP << "// Broadcasting uninitialized tensor " << fNB << "\n"; - out << SP << "SOFIE::UTILITY::UnidirectionalBroadcast<" << typeName << ">(tensor_" << fNB << ", " << ConvertShapeToString(fShapeB) << ", " << ConvertShapeToString(fShapeY) - << ", fTensor_" << fNBroadcastedB << ");\n"; + int nloop = 0; + if (fDimShapeY.empty() || + std::all_of(fDimShapeY.begin(), fDimShapeY.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { + compute_idx_Y = "0"; + } else { + for (size_t i = 0; i < fDimShapeY.size(); ++i) { + if (fDimShapeY[i].dim != 1 && fDimShapeY[i].GetVal() != "1") { + nloop++; + for (int j = 0; j < nloop; j++) out << SP; + out << "for (size_t idx_" << i << " = 0; idx_" << i << " < " << fDimShapeY[i] + << "; ++idx_" << i << "){\n"; + compute_idx_Y += "idx_" + std::to_string(i); + if (stridesY[i].GetVal() != "1") + compute_idx_Y += " * " + stridesY[i].GetVal(); + compute_idx_Y += " + "; + } + } + // remove last 3 characters " + " + for (int j = 0; j < 3; j++) + compute_idx_Y.pop_back(); + } + for (int j = 0; j < nloop + 1; j++) out << SP; + out << "tensor_" << fNY << "[" << compute_idx_Y << "] = " + << BinaryOperatorTrait::Op("tensor_" + fNA + "[" + compute_idx_A + "]", + "tensor_" + fNB + "[" + compute_idx_B + "]") + << " ;\n"; + + for (int i = nloop; i > 0; i--) { + for (int j = 0; j < i; j++) out << SP; + out << "}\n"; } - const std::string& nameA = fNBroadcastedA.empty()? fNA : fNBroadcastedA; - const std::string& nameB = fNBroadcastedB.empty()? fNB : fNBroadcastedB; - out << SP << "for (size_t id = 0; id < " << length << " ; id++){\n"; - out << SP << SP << "tensor_" << fNY << "[id] = " << BinaryOperatorTrait::Op( "tensor_" + nameA + "[id]" , "tensor_" + nameB + "[id]") << " ;\n"; - out << SP << "}\n"; return out.str(); } - std::vector GetStdLibs() override { + std::string Generate_GPU_Kernel_ALPAKA() override { + std::string op; + op = "\n//------ BINARY_"+BinaryOperatorTrait::Name()+"_KERNEL_ALPAKA\n"; + op += SP + "struct Binary"+BinaryOperatorTrait::Name()+"Kernel {\n"; + op += SP + SP + "template\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const * A, T const * B, T * C, const size_t * shape_A, const size_t * shape_B,\n"; + op += SP + SP + SP + "const size_t * shape_C, const size_t * strides_A, const size_t * strides_B, const size_t * strides_C, size_t ndim) const{\n"; + op += SP + SP + SP + SP + "size_t threadIdx1 = alpaka::getIdx(acc)[0];\n"; + op += SP + SP + SP + SP + "size_t blockIdx1 = alpaka::getIdx(acc)[0];\n"; + op += SP + SP + SP + SP + "size_t blockDim1 = alpaka::getWorkDiv(acc)[0];\n\n"; + op += SP + SP + SP + SP + "size_t outer_dim = blockIdx1;\n"; + op += SP + SP + SP + SP + "size_t inner_start = threadIdx1;\n"; + op += SP + SP + SP + SP + "size_t inner_stride = blockDim1;\n"; + op += SP + SP + SP + SP + "if (outer_dim >= shape_C[0]) return;\n\n"; + op += SP + SP + SP + SP + "size_t idx_A[ndim], idx_B[ndim];\n\n"; + op += SP + SP + SP + SP + "size_t flat_idx_A = 0, flat_idx_B = 0, flat_idx_C = 0;\n\n"; + op += SP + SP + SP + SP + "for(size_t inner = inner_start; inner < shape_C[1]; inner += inner_stride){\n"; + op += SP + SP + SP + SP + "for(size_t tensor_idx=0; tensor_idx GetStdLibs() override + { if (Op == EBasicBinaryOperator::Pow) { - return { std::string("cmath") }; + return {std::string("cmath")}; } else { return {}; } } -}; -}//SOFIE + +}; +} // namespace SOFIE +} // namespace Experimental +} // namespace TMVA -#endif //SOFIE_ROperator_BasicBinary +#endif // TMVA_SOFIE_ROperator_BasicBinary \ No newline at end of file diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx index 7410cf3..e3d0595 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx @@ -48,6 +48,8 @@ namespace SOFIE{ fAttrAlpha(alpha), fAttrBeta(beta), fAttrTransA(transA), fAttrTransB(transB), fNA(UTILITY::Clean_name(nameA)), fNB(UTILITY::Clean_name(nameB)), fNY(UTILITY::Clean_name(nameY)) { + + fKind = OperatorKind::GEMM; fActivation = activation; fType = "float"; static_assert(std::is_same_v, @@ -60,9 +62,11 @@ namespace SOFIE{ fAttrAlpha(alpha), fAttrBeta(beta), fAttrTransA(transA), fAttrTransB(transB), fNA(UTILITY::Clean_name(nameA)), fNB(UTILITY::Clean_name(nameB)), fNC(UTILITY::Clean_name(nameC)), fNY(UTILITY::Clean_name(nameY)), fActivation(activation) { + fKind = OperatorKind::GEMM; fActivation = activation; fType = "float"; + fInputTensorNames = { fNA, fNB, fNC }; fOutputTensorNames = { fNY }; } @@ -252,8 +256,10 @@ namespace SOFIE{ shapeY.erase(shapeY.end()-1); } - if (!fIsDynamic) + if (!fIsDynamic){ model.AddIntermediateTensor(fNY, model.GetTensorType(fNA), shapeY); + std::cout<<"currently adding: "<(tensor_" + out << " float * data = TMVA::Experimental::SOFIE::UTILITY::UnidirectionalBroadcast(tensor_" << fNC << "," << ConvertShapeToString(fShapeC) << ", " << ConvertDynamicShapeToString(fShapeY) << ");\n"; auto length = SOFIE::ConvertDynamicShapeToLength(fShapeY); // output size out << SP << SP << "std::copy(data, data + " << length << ", tensor_" << fNC2 << ");\n"; @@ -300,12 +306,13 @@ namespace SOFIE{ // include a separate scope to avoid defining unique operator temp variables out << "//--- broadcast bias tensor " << fNC << "for Gemm op\n"; out << SP << "{\n"; - out << " float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_" - << fNC << "," << ConvertShapeToString(fShapeC) << ", " << ConvertDynamicShapeToString(fShapeY) << ");\n"; + out << " float * data = TMVA::Experimental::SOFIE::UTILITY::UnidirectionalBroadcast(tensor_" + << fNC << ".data()," << ConvertShapeToString(fShapeC) << ", " << ConvertDynamicShapeToString(fShapeY) << ");\n"; auto length = SOFIE::ConvertDynamicShapeToLength(fShapeY); // output size - out << SP << SP << "auto hostBuf_"<< fNC2 << " = alpaka::allocBuf(hostAcc,"+ length +");\n"; + out << SP << SP << "auto hostBuf_"<< fNC2 << " = alpaka::allocBuf(hostAcc, Ext1D::all(Idx{" << length << "}));\n"; out << SP << SP << "std::memcpy(alpaka::getPtrNative(hostBuf_"<< fNC2 <<"), data, "<< length << " * sizeof(float));\n"; - out << SP << SP << "alpaka::memcpy(queue, deviceBuf_"<< fNC2 << ", hostBuf_"<< fNC2 << " , "<< length << ");\n"; + out << SP << SP << "alpaka::memcpy(queue, deviceBuf_"<< fNC2 << ", hostBuf_"<< fNC2 << ");\n"; + out << SP << SP << "delete [] data;\n"; out << SP << "}\n"; } return out.str(); @@ -429,8 +436,8 @@ namespace SOFIE{ throw std::runtime_error("TMVA SOFIE Gemm(MatMul) has invalid shape for inputs or output"); } auto m = (fAttrTransA ? fShapeA[dimA-1].GetVal() : fShapeA[dimA-2].GetVal()); - auto n = (fAttrTransB ? fShapeB[dimB-2].GetVal() : fShapeB[dimB-1].GetVal()); - auto k = (fAttrTransA ? fShapeA[dimA-2].GetVal() : fShapeA[dimA-1].GetVal()); + auto n = (fAttrTransA ? fShapeA[dimA-2].GetVal() : fShapeA[dimA-1].GetVal()); + auto k = (fAttrTransB ? fShapeB[dimB-2].GetVal() : fShapeB[dimB-1].GetVal()); std::vector sY = {fShapeY[dimY-2], fShapeY[dimY-1]}; // extra dimensions in case of stacked MatMul std::vector sA; @@ -445,8 +452,6 @@ namespace SOFIE{ out << SP << "int " << opName << "_k = " << k << ";\n"; out << SP << "float " << opName << "_alpha = " << std::setprecision(std::numeric_limits::max_digits10) << fAttrAlpha << ";\n"; out << SP << "float " << opName << "_beta = " << std::setprecision(std::numeric_limits::max_digits10) << fAttrBeta << ";\n"; - out << SP << "int " << opName << "_lda = " << (fAttrTransA ? m : k) << ";\n"; - out << SP << "int " << opName << "_ldb = " << (fAttrTransB ? k : n) << ";\n"; // case bias is present if (!fNC.empty()){ @@ -479,27 +484,35 @@ namespace SOFIE{ out << SP; } // in the case of bias - if (!fNC.empty()){ - out << SP << "std::copy(" << "tensor_" << fNC2 << ", " << "tensor_" << fNC2 << " + " << lengthGemm << ", " - << "tensor_" << fNY; - if (doStackMul) out << " + " << opName << "_yoffset"; - out << ");\n"; + if (!fNC.empty() && fActivation == EActivationType::RELU){ + out << SP << "blas.gemmrelu("< kokkos_dev_"< kokkos_dev_"< kokkos_dev_"< GetBlasRoutines() override { return { std::string("Gemm"), std::string("Gemv") }; } + std::string GetFusableOutputTensorName() override { + return fNY; + } + + void UpdateFusableTensorName(std::string fusable_tensor_name, const std::function& removal_func){ + removal_func(fNY); + fNY = fusable_tensor_name; + fOutputTensorNames[0] = fNY; + } + + std::string GetBlasConfig(){ + + int64_t dimA = fShapeA.size(); + int64_t dimB = fShapeB.size(); + std::string m = (fAttrTransA ? fShapeA[dimA-1].GetVal() : fShapeA[dimA-2].GetVal()); + std::string n = (fAttrTransA ? fShapeA[dimA-2].GetVal() : fShapeA[dimA-1].GetVal()); + std::string k = (fAttrTransB ? fShapeB[dimB-2].GetVal() : fShapeB[dimB-1].GetVal()); + return m+", "+n+", "+k; + } }; diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx index 8fefa6d..66b31e5 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx @@ -75,6 +75,58 @@ public: return out.str(); } + std::string Generate_GPU_Kernel_ALPAKA() override { + std::string op; + op = "\n//------ LEAKY_RELU_KERNEL_ALPAKA\n"; + op += SP + "struct LeakyReluKernel {\n"; + op += SP + SP + "template\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T* data, std::size_t numElements, T alpha = static_cast(0.01)) const {\n"; + op += SP + SP + SP + "for (auto i : alpaka::uniformElements(acc, numElements)) {\n"; + op += SP + SP + SP + SP + "data[i] = (data[i] < static_cast(0)) ? alpha * data[i] : data[i];\n"; + op += SP + SP + SP + "}\n"; + op += SP + SP + "}\n"; + op += SP + "};\n"; + return op; + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "LeakyReluKernel leakyReluKernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator LeakyRelu called to Generate without being initialized first"); + } + + std::stringstream out; + auto length = ConvertDynamicShapeToLength(fShape); + out << "\n//------ LEAKY_RELU_GPU_ALPAKA\n"; + out << SP << "alpaka::WorkDivMembers workDiv_" << fNX + << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " + << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + + out << SP << "alpaka::exec(queue, workDiv_" << fNX + << ", leakyReluKernel, alpaka::getPtrNative(deviceBuf_" << fNX + << "), static_cast(" << length << "), static_cast(0.01));\n"; + + return out.str(); + } + + + std::string GetFusableOutputTensorName() override { + return fNY; + } + + void UpdateFusableTensorName(std::string fusable_tensor_name, const std::function& removal_func){ + removal_func(fNX); + removal_func(fNY); + fNX = fusable_tensor_name; + fNY = fusable_tensor_name; + fInputTensorNames[0] = fNX; + fOutputTensorNames[0] = fNY; + } + }; }//SOFIE diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx index 12ec337..66c6f2d 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx @@ -24,6 +24,7 @@ public: ROperator_Relu(){} ROperator_Relu(std::string nameX, std::string nameY): fNX(UTILITY::Clean_name(nameX)), fNY(UTILITY::Clean_name(nameY)){ + fKind = OperatorKind::RELU; fInputTensorNames = { fNX }; fOutputTensorNames = { fNY }; } @@ -65,6 +66,23 @@ public: return out.str(); } + std::string Generate_GPU_Kernel_ALPAKA() override { + std::string op; + op = "\n//------ RELU_KERNEL_ALPAKA\n"; + op += SP + "struct ReluKernel{\n"; + op += SP + SP + "template\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T* data, std::size_t numElements) const {\n"; + op += SP + SP + SP + "for (auto i : alpaka::uniformElements(acc, numElements)) {\n"; + op += SP + SP + SP + "data[i] = (data[i] < 0) ? 0 : data[i];\n"; + op += SP + SP + "}\n"; + op += SP + "}\n};\n"; + return op; + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "ReluKernel reluKernel;\n"; + } + std::string Generate_GPU_ALPAKA(std::string OpName) override { OpName = "op_" + OpName; if (fShape.empty()) { @@ -73,23 +91,27 @@ public: std::stringstream out; auto length = ConvertDynamicShapeToLength(fShape); out << "\n//------ RELU_GPU_ALPAKA\n"; - out << SP << "{\n"; - out << SP << SP <<"Idx totalElems = "<{\n" - <<"alpaka::workdiv::getValidWorkDiv(devAcc, {totalElems}, true, alpaka::GridBlockExtent::All)\n" - <<"};\n"; - out<< SP << SP << "alpaka::exec(queue, workDiv,\n" - <<"[] ALPAKA_FN_ACC (auto const& acc, auto buf, Idx size) {\n" - <<"Idx const idx = alpaka::getIdx(acc)[0];\n" - <<" if (idx < size) {\n" - <<" auto& x = alpaka::getPtrNative(buf)[idx];\n" - <<" x = x < 0 ? 0 : x;\n" - <<" }\n" - <<"}, bufDev_"<(1);\n"; + // out << SP << "Vec elementsPerGrid_" << fNX << " = static_cast(" << length << ");\n"; + // out << SP << "alpaka::KernelCfg kernelCfg_" << fNX << " = {elementsPerGrid_" << fNX << ", elementsPerThread_" << fNX << "};\n"; + // out << SP << "auto workDiv_" << fNX << " = alpaka::getValidWorkDiv(kernelCfg_" << fNX << ", devAcc, reluKernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), static_cast(" << length << "));\n"; + out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", reluKernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), static_cast(" << length << ")); \n"; + return out.str(); + } + + std::string GetFusableOutputTensorName() override { + return fNY; } + void UpdateFusableTensorName(std::string fusable_tensor_name, const std::function& removal_func){ + removal_func(fNX); + removal_func(fNY); + fNX = fusable_tensor_name; + fNY = fusable_tensor_name; + fInputTensorNames[0] = fNX; + fOutputTensorNames[0] = fNY; + } }; }//SOFIE diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx index 68edd01..783e391 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx @@ -61,6 +61,58 @@ public: return out.str(); } + std::string Generate_GPU_Kernel_ALPAKA() override { + std::string op; + op = "\n//------ SIGMOID_KERNEL_ALPAKA\n"; + op += SP + "struct SigmoidKernel {\n"; + op += SP + SP + "template\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T* data, std::size_t numElements) const {\n"; + op += SP + SP + SP + "for (auto i : alpaka::uniformElements(acc, numElements)) {\n"; + op += SP + SP + SP + SP + "data[i] = static_cast(1) / (static_cast(1) + exp(-data[i]));\n"; + op += SP + SP + SP + "}\n"; + op += SP + SP + "}\n"; + op += SP + "};\n"; + return op; + } + + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "SigmoidKernel sigmoidKernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator Sigmoid called to Generate without being initialized first"); + } + + std::stringstream out; + auto length = ConvertDynamicShapeToLength(fShape); + out << "\n//------ SIGMOID_GPU_ALPAKA\n"; + out << SP << "alpaka::WorkDivMembers workDiv_" << fNX + << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " + << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + + out << SP << "alpaka::exec(queue, workDiv_" << fNX + << ", sigmoidKernel, alpaka::getPtrNative(deviceBuf_" << fNX + << "), static_cast(" << length << "));\n"; + + return out.str(); + } + + std::string GetFusableOutputTensorName() override { + return fNY; + } + + void UpdateFusableTensorName(std::string fusable_tensor_name, const std::function& removal_func){ + removal_func(fNX); + removal_func(fNY); + fNX = fusable_tensor_name; + fNY = fusable_tensor_name; + fInputTensorNames[0] = fNX; + fOutputTensorNames[0] = fNY; + } + std::vector GetStdLibs() override { return { std::string("cmath") };} }; diff --git a/src/SOFIE_core/src/RModel.cxx b/src/SOFIE_core/src/RModel.cxx index b78ad43..61357e8 100644 --- a/src/SOFIE_core/src/RModel.cxx +++ b/src/SOFIE_core/src/RModel.cxx @@ -386,6 +386,55 @@ void RModel::CheckAndFlushIntermediateMemory(std::span o } } +void RModel::CheckAndFuseOperators() { + size_t idx = 0; + std::vector fusable_indices; + std::string fusable_propagate_tensor_name; + while (idx < fOperators.size()) { + if (fOperators[idx]->GetKind() != OperatorKind::GEMM && fOperators[idx]->GetKind() != OperatorKind::CONV) { + ++idx; + continue; + } + + fusable_indices.clear(); + fusable_propagate_tensor_name.clear(); + + fusable_indices.push_back(idx); + size_t j = idx + 1; + for (; j < fOperators.size()-1; ++j) { + auto opKind = fOperators[j]->GetKind(); + // Only consider operators with fusable kinds + if (!FusableKinds.count(opKind)) { + break; + } + + const auto& tensorName = fOperators[j]->GetFusableOutputTensorName(); + auto freqIt = fIntermediateTensorFrequencyLookup.find(tensorName); + + // Propagate tensor name only if it's not used multiple times + fusable_indices.push_back(j); + if (freqIt != fIntermediateTensorFrequencyLookup.end() && + (freqIt->second != fOperators[j + 1]->GetOpOrder() || + FusableKinds.count(fOperators[j + 1]->GetKind()) == 0)) { + fusable_propagate_tensor_name = tensorName; + break; + } + } + if (!fusable_propagate_tensor_name.empty()) { + auto fusable_tensor_type = GetTensorType(fusable_propagate_tensor_name); + auto fusable_tensor_shape = GetDynamicTensorShape(fusable_propagate_tensor_name); + for (auto& index : fusable_indices) { + fOperators[index]->UpdateFusableTensorName(fusable_propagate_tensor_name, [this](const std::string& name) { + this->RemoveIntermediateTensor(name); + }); + } + AddIntermediateTensor(fusable_propagate_tensor_name, fusable_tensor_type, fusable_tensor_shape); + } + + idx = std::max(idx + 1, j); + } +} + void RModel::Initialize(int batchSize, bool verbose) { @@ -494,7 +543,7 @@ void RModel::Initialize(const std::map & inputParams, bool } i++; } - + CheckAndFuseOperators(); fIsInitialized = true; } @@ -653,14 +702,15 @@ std::string RModel::GenerateInferSignature(bool isdecl) { } } } + rGC += "alpaka::Buf "; } - rGC += "tensor_" + name + ","; + rGC += "deviceBuf_" + name + ","; i_input++; } diff --git a/src/SOFIE_core/src/RModel_ALPAKA.cxx b/src/SOFIE_core/src/RModel_ALPAKA.cxx index d0047c0..549a3dd 100644 --- a/src/SOFIE_core/src/RModel_ALPAKA.cxx +++ b/src/SOFIE_core/src/RModel_ALPAKA.cxx @@ -10,13 +10,14 @@ namespace SOFIE { -//==================================================================== -// RModel - GPU Alpaka Codegen -//==================================================================== - void RModel::GenerateInitializedTensorInfo_GPU_ALPAKA() { - if (!fInitializedTensors.empty()) - fGC += "\n// temporary initialized tensors for loading weights\n"; + if (!fInitializedTensors.empty()){ + fGC += "\n// initialized tensors for weights\n"; + fGC += "using BufF1D = alpaka::Buf;\n"; + fGC += "using BufD1D = alpaka::Buf;\n"; + fGC += "using BufI641D = alpaka::Buf;\n"; + + } for (auto &i : fInitializedTensors) { if (!fUseWeightFile || i.second.IsConstantTensor()) { @@ -29,9 +30,9 @@ void RModel::GenerateInitializedTensorInfo_GPU_ALPAKA() { // case of tensors which are read from a file size_t length = ConvertShapeToLength(i.second.shape()); if (i.second.type() == ETensorType::FLOAT) { - fGC += "auto deviceBuf_" + i.first + - " = alpaka::allocBuf(devAcc, " + - std::to_string(length) + ");\n"; + fGC += "BufF1D deviceBuf_" + i.first + + " = alpaka::allocBuf(devAcc, Ext1D::all(Idx{" + + std::to_string(length) + "}));\n"; } } } @@ -40,7 +41,7 @@ void RModel::GenerateInitializedTensorInfo_GPU_ALPAKA() { void RModel::GenerateTemporaryInitializedTensorContainers_GPU_ALPAKA() { if (!fInitializedTensors.empty()) - fGC += "// initialized tensors\n"; + fGC += "// temporary initialized tensors for loading weights\n"; for (auto &i : fInitializedTensors) { if (!fUseWeightFile || i.second.IsConstantTensor()) { @@ -53,7 +54,7 @@ void RModel::GenerateTemporaryInitializedTensorContainers_GPU_ALPAKA() // case of tensors which are read from a file size_t length = ConvertShapeToLength(i.second.shape()); if (i.second.type() == ETensorType::FLOAT) { - fGC += "float tensor_" + i.first + "[" + std::to_string(length) + "];\n"; + fGC += "std::vector tensor_" + i.first + "(" + std::to_string(length) + ");\n"; } } } @@ -71,23 +72,21 @@ void RModel::GenerateGPU_ALPAKA_Buffers() { ");\n"; // No pointer allocation needed for BOOL } - if (std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == - fOutputTensorNames.end()) { - size_t length = ConvertShapeToLength(i.second.shape); - - if (i.second.type == ETensorType::FLOAT) { - tensor_declaration_block += "auto bufDev_" + i.first + - " = alpaka::allocBuf(devAcc," + - std::to_string(length) + ");\n"; - } else if (i.second.type == ETensorType::DOUBLE) { - tensor_declaration_block += "auto bufDev_" + i.first + - " = alpaka::allocBuf(devAcc," + - std::to_string(length) + ");\n"; - } else if (i.second.type == ETensorType::INT64) { - tensor_declaration_block += "auto bufDev_" + i.first + - " = alpaka::allocBuf(devAcc," + - std::to_string(length) + ");\n"; - } + + size_t length = ConvertShapeToLength(i.second.shape); + + if (i.second.type == ETensorType::FLOAT) { + tensor_declaration_block += "BufF1D deviceBuf_" + i.first + + " = alpaka::allocBuf(devAcc, Ext1D::all(Idx{" + + std::to_string(length) + "}));\n"; + } else if (i.second.type == ETensorType::DOUBLE) { + tensor_declaration_block += "BufD1D deviceBuf_" + i.first + + " = alpaka::allocBuf(devAcc, Ext1D::all(Idx{" + + std::to_string(length) + "}));\n"; + } else if (i.second.type == ETensorType::INT64) { + tensor_declaration_block += "BufI641D deviceBuf_" + i.first + + " = alpaka::allocBuf(devAcc, Ext1D::all(Idx{" + + std::to_string(length) + "}));\n"; } } @@ -123,41 +122,12 @@ void RModel::GenerateDynamicTensorInfo_GPU_ALPAKA() { auto length = ConvertDynamicShapeToLength(i.second.shape); out << SP << "if (" << length << " > 0) {\n"; out << "auto bufDev_" + i.first + - " = alpaka::allocBuf(devAcc," << length << ");\n"; + " = alpaka::allocBuf(devAcc, Ext1D::all(Idx{" << length << "}));\n"; out << SP << "}\n"; } fGC += out.str(); } -namespace { - -std::string createOutputTensor(RModel const &rmodel, std::string const &name, bool isIntermediateTensor) -{ - if(name.empty()) return "{}"; - ETensorType eOutputType = rmodel.GetTensorType(name); - std::string outputType = ConvertTypeToString(eOutputType); - if (isIntermediateTensor) { - - if (eOutputType == ETensorType::BOOL) { - return "fTensor_" + name; - } else { - // need to check is size is the same(don't want to return a vector with larger size) - // in that case better to copy - return "std::vector<" + ConvertTypeToString(eOutputType) + ">(tensor_" + name + ", tensor_" + name + " + " + - std::to_string(ConvertShapeToLength(rmodel.GetTensorShape(name))) + ")"; - } - } - // include also dynamic tensors since the vectors can be allocated with a size larger than their output - // we need a special handling for bool type allocated as vector - auto outputLength = ConvertDynamicShapeToLength(rmodel.GetDynamicTensorShape(name)); - if (rmodel.IsDynamicTensor(name) && eOutputType == ETensorType::BOOL) { - return "std::vector(fTensor_" + name + ".begin(), fTensor_" + name + ".begin() + " + outputLength + ")"; - } - return "std::vector<" + outputType + ">(tensor_" + name + ", tensor_" + name + " + " + outputLength + ")"; -} - -} // namespace - void RModel::GenerateOutput_GPU_ALPAKA() { if (fVerbose) std::cout << "Generating main inference code for " << fName << std::endl; @@ -173,26 +143,9 @@ void RModel::GenerateOutput_GPU_ALPAKA() { fGC += "\n\n"; if (outputSize == 1) { - fGC += "std::vector<" + outputType + ">"; + fGC += "alpaka::Buf"; } else { - for (size_t i = 1; i < outputSize; i++) { - if (GetTensorType(fOutputTensorNames[i]) != eOutputType) - sameOutputTypes = false; - } - if (sameOutputTypes) { - fGC += "std::vector>"; - } else { - inferReturnType = "std::tuple<"; - for (size_t i = 0; i < outputSize; i++) { - inferReturnType += "std::vector<" + - ConvertTypeToString(GetTensorType(fOutputTensorNames[i])) + - ">"; - if (i < outputSize - 1) - inferReturnType += ","; - } - inferReturnType += ">"; - fGC += inferReturnType; - } + throw std::runtime_error("TMVA-SOFIE: multiple output tensors are not supported in ALPAKA code generation"); } fGC += " infer("; @@ -205,21 +158,37 @@ void RModel::GenerateOutput_GPU_ALPAKA() { fGC += (fOperators[op_idx]->Generate_GPU_ALPAKA(std::to_string(op_idx))); } - fGC += SP + "return {"; + fGC += "\n\n alpaka::wait(queue);\n"; + fGC += SP + "return "; + if (outputSize>1) fGC += " {"; for (size_t i = 0; i < outputSize; i++) { std::string tensorName = *(fOutputTensorNames.begin() + i); bool isIntermediate = fIntermediateTensorInfos.count(tensorName) > 0; - fGC += createOutputTensor(*this, tensorName, isIntermediate); + fGC += "deviceBuf_"+tensorName; if (i < outputSize - 1) fGC += ","; } - fGC += "};\n"; + if (outputSize>1) fGC += " };\n"; + else fGC += ";\n"; fGC += "}\n"; // end of infer function scope } void RModel::GenerateSessionCode_GPU_ALPAKA() { + + std::set registered_operators; + + fGC += "\n//--- ALPAKA Kernels\n"; + for (size_t id = 0; id < fOperators.size(); id++) { + std::cout<GetKind())<GetKind()) == registered_operators.end()) { + std::cout<<"Generating ALPAKA kernel for operator"<< std::endl; + fGC += fOperators[id]->Generate_GPU_Kernel_ALPAKA(); + registered_operators.insert(fOperators[id]->GetKind()); + } + } + // define the Session struct (for GNN this is generated in RModel_GNN) - fGC += "template \n;"; + fGC += "\n\ntemplate \n"; if (fUseSession) { if (!fIsSubGraph) fGC += "struct Session {\n\n"; @@ -228,11 +197,25 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() { } // define host and device accelerators - fGC += "using Idx = alpaka::Idx;\n"; - fGC += "using devAcc = alpaka::AccGpuCudaRt, Idx, tagAcc>;\n"; - fGC += "using hostAcc = alpaka::AccCpuSerial, Idx>;\n\n"; + fGC += "using Idx = std::size_t;\n"; + fGC += "using Dim = alpaka::DimInt<1>;\n"; + fGC += "using Acc = alpaka::TagToAcc;\n"; + fGC += "using DevAcc = alpaka::Dev;\n"; + fGC += "using QueueProperty = alpaka::NonBlocking;\n"; + fGC += "using QueueAcc = alpaka::Queue;\n"; + fGC += "\nalpaka::Platform const platform{};\n"; + fGC += "DevAcc devAcc = alpaka::getDevByIdx(platform, 0);\n"; + fGC += "alpaka::PlatformCpu platformHost{};\n"; + fGC += "alpaka::DevCpu hostAcc = alpaka::getDevByIdx(platformHost, 0);\n"; + fGC += "QueueAcc queue{devAcc};\n"; + fGC += "Idx threadsPerBlock = 256;\n"; + fGC += "\nusing Ext1D = alpaka::Vec;\n"; + fGC += "using Vec = alpaka::Vec;\n"; + if (registered_operators.find(SOFIE::OperatorKind::GEMM) != registered_operators.end()) { + fGC += "\n\n// BLAS declarations\n"; + fGC += "sofieBLAS blas{queue};\n"; + } - GenerateInitializedTensorInfo_GPU_ALPAKA(); GenerateGPU_ALPAKA_Buffers(); GenerateOperatorDeclarations(); @@ -282,11 +265,25 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() { for (size_t id = 0; id < fOperators.size(); id++) { fGC += fOperators[id]->GenerateInitCode_GPU_ALPAKA(); + if (fOperators[id]->GetKind() == OperatorKind::GEMM){ + fGC += "\nblas.AddLayoutConfig("+fOperators[id]->GetBlasConfig()+");"; + } } + fGC += "alpaka::wait(queue);\n"; fGC += "}\n\n"; } + registered_operators.clear(); + for (size_t id = 0; id < fOperators.size(); id++) { + std::cout<GetKind())<GetKind()) == registered_operators.end()) { + std::cout<<"Declaring ALPAKA kernel for operator"<< std::endl; + fGC += fOperators[id]->Generate_GPU_Kernel_Definitions_ALPAKA(); + registered_operators.insert(fOperators[id]->GetKind()); + } + } + GenerateOutput_GPU_ALPAKA(); if (fUseSession && !fIsGNNComponent) { @@ -346,17 +343,17 @@ void RModel::MoveInitializedTensorsToBuffers_ALPAKA(){ auto length = ConvertShapeToLength(i.second.shape()); std::string slength = std::to_string(length); if (i.second.type() == ETensorType::FLOAT) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(float));\n"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+", "+slength+");\n"; + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc, Ext1D::all(Idx{"+ slength+"}));\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+".data(), "+slength+"* sizeof(float));\n"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+");\n"; } else if (i.second.type() == ETensorType::DOUBLE) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(doub;e));"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+", "+slength+");\n"; + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc, Ext1D::all(Idx{"+ slength+"}));\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+".data(), "+slength+"* sizeof(double));\n"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+");\n"; } else if (i.second.type() == ETensorType::INT64) { - fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc,"+ slength+");\n"; - fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+", "+slength+"* sizeof(int64_t));"; - fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+", "+slength+");\n"; + fGC += " auto hostBuf_"+i.first+" = alpaka::allocBuf(hostAcc, Ext1D::all(Idx{" + slength + "}));\n"; + fGC += " std::memcpy(alpaka::getPtrNative(hostBuf_"+i.first+"), tensor_"+i.first+".data(), "+slength+"* sizeof(int64_t));"; + fGC += " alpaka::memcpy(queue, deviceBuf_"+i.first+", hostBuf_"+i.first+");\n"; } else { std::runtime_error("tmva-sofie tensor " + tensor_name + " with type " + ConvertTypeToString(i.second.type()) + " cannot be read from a ROOT file"); } diff --git a/src/SOFIE_core/src/RModel_Base.cxx b/src/SOFIE_core/src/RModel_Base.cxx index f212c53..3b1885c 100644 --- a/src/SOFIE_core/src/RModel_Base.cxx +++ b/src/SOFIE_core/src/RModel_Base.cxx @@ -29,7 +29,7 @@ void RModel_Base::GenerateHeaderInfo(std::string& hgname) { } // for the session we need to include SOFIE_Common functions //needed for convolution operator (need to add a flag) - fGC += "#include \"SOFIE/SOFIE_common.hxx\"\n"; + fGC += "#include \"TMVA/SOFIE_common.hxx\"\n"; if (fUseWeightFile) fGC += "#include \n"; // Include TFile when saving the weights in a binary ROOT file @@ -79,7 +79,7 @@ void RModel_Base::GenerateHeaderInfo_GPU_ALPAKA(std::string& hgname) { // for the session we need to include SOFIE_Common functions //needed for convolution operator (need to add a flag) - fGC += "#include \"SOFIE/SOFIE_common.hxx\"\n"; + fGC += "#include \"TMVA/SOFIE_common.hxx\"\n"; if (fUseWeightFile) fGC += "#include \n"; // Include TFile when saving the weights in a binary ROOT file From afae7c38e9c5b35b5aa04ec1e494df4fd5090bfc Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Fri, 17 Oct 2025 12:53:38 +0200 Subject: [PATCH 06/22] feat: add basic binary kernel --- .../inc/SOFIE/ROperator_BasicBinary.hxx | 116 +++++++++++++++--- 1 file changed, 96 insertions(+), 20 deletions(-) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx index 756c3b9..09f690c 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx @@ -390,29 +390,105 @@ public: std::string Generate_GPU_Kernel_ALPAKA() override { std::string op; - op = "\n//------ BINARY_"+BinaryOperatorTrait::Name()+"_KERNEL_ALPAKA\n"; + op = "\n//------ "+opName+"_"+BinaryOperatorTrait::Name()+"_KERNEL_ALPAKA\n"; op += SP + "struct Binary"+BinaryOperatorTrait::Name()+"Kernel {\n"; op += SP + SP + "template\n"; - op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const * A, T const * B, T * C, const size_t * shape_A, const size_t * shape_B,\n"; - op += SP + SP + SP + "const size_t * shape_C, const size_t * strides_A, const size_t * strides_B, const size_t * strides_C, size_t ndim) const{\n"; - op += SP + SP + SP + SP + "size_t threadIdx1 = alpaka::getIdx(acc)[0];\n"; - op += SP + SP + SP + SP + "size_t blockIdx1 = alpaka::getIdx(acc)[0];\n"; - op += SP + SP + SP + SP + "size_t blockDim1 = alpaka::getWorkDiv(acc)[0];\n\n"; - op += SP + SP + SP + SP + "size_t outer_dim = blockIdx1;\n"; - op += SP + SP + SP + SP + "size_t inner_start = threadIdx1;\n"; - op += SP + SP + SP + SP + "size_t inner_stride = blockDim1;\n"; - op += SP + SP + SP + SP + "if (outer_dim >= shape_C[0]) return;\n\n"; - op += SP + SP + SP + SP + "size_t idx_A[ndim], idx_B[ndim];\n\n"; - op += SP + SP + SP + SP + "size_t flat_idx_A = 0, flat_idx_B = 0, flat_idx_C = 0;\n\n"; - op += SP + SP + SP + SP + "for(size_t inner = inner_start; inner < shape_C[1]; inner += inner_stride){\n"; - op += SP + SP + SP + SP + "for(size_t tensor_idx=0; tensor_idx("; + for (size_t i = 0; i < fDimShapeY.size(); i++) { + op += "size_" + std::to_string(i); + } + op.pop_back(); + op += "));\n"; + op += SP + SP + SP + SP + "for (auto const& elem : elements) {\n"; + auto stridesA = UTILITY::ComputeStrideFromShape(fDimShapeA); + auto stridesB = UTILITY::ComputeStrideFromShape(fDimShapeB); + auto stridesY = UTILITY::ComputeStrideFromShape(fDimShapeY); + + std::string compute_idx_A, compute_idx_B, compute_idx_Y; + if (fDimShapeA.empty() || + std::all_of(fDimShapeA.begin(), fDimShapeA.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { + compute_idx_A = "0"; + } else { + for (size_t i = 0; i < fDimShapeA.size(); ++i) { + if (fDimShapeA[i].dim == 1 || fDimShapeA[i].GetVal() == "1") + continue; + compute_idx_A += "elem[" + std::to_string(i + (fDimShapeY.size() - fDimShapeA.size())) + "]"; + if (stridesA[i].GetVal() != "1") + compute_idx_A += " * " + stridesA[i].GetVal(); + compute_idx_A += " + "; + } + // remove last 3 character " + " + for (int j = 0; j < 3; j++) + compute_idx_A.pop_back(); + } + if (fDimShapeB.empty() || + std::all_of(fDimShapeB.begin(), fDimShapeB.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { + compute_idx_B = "0"; + } else { + for (size_t i = 0; i < fDimShapeB.size(); ++i) { + if (fDimShapeB[i].dim == 1 || fDimShapeB[i].GetVal() == "1") + continue; + compute_idx_B += "elem[" + std::to_string(i + (fDimShapeY.size() - fDimShapeB.size())) + "]"; + if (stridesB[i].GetVal() != "1") + compute_idx_B += " * " + stridesB[i].GetVal(); + compute_idx_B += " + "; + } + // remove last 3 character " + " + for (int j = 0; j < 3; j++) + compute_idx_B.pop_back(); + } + int nloop = 0; + if (fDimShapeY.empty() || + std::all_of(fDimShapeY.begin(), fDimShapeY.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { + compute_idx_Y = "0"; + } else { + for (size_t i = 0; i < fDimShapeY.size(); ++i) { + if (fDimShapeY[i].dim != 1 && fDimShapeY[i].GetVal() != "1") { + nloop++; + for (int j = 0; j < nloop; j++) out << SP; + compute_idx_Y += "elem[" + std::to_string(i) + "]"; + if (stridesY[i].GetVal() != "1") + compute_idx_Y += " * " + stridesY[i].GetVal(); + compute_idx_Y += " + "; + } + } + // remove last 3 characters " + " + for (int j = 0; j < 3; j++) + compute_idx_Y.pop_back(); + } + for (int j = 0; j < nloop + 1; j++) out << SP; + out << "C[" << compute_idx_Y << "] = " + << BinaryOperatorTrait::Op("A[" + compute_idx_A + "]", + "B[" + compute_idx_B + "]") + << " ;\n"; + + for (int i = nloop; i > 0; i--) { + for (int j = 0; j < i; j++) out << SP; + out << "}\n"; + } + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "Binary"+BinaryOperatorTrait::Name()+"Kernel " + OpName + "Kernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator Basic Binary called to Generate without being initialized first"); + } + std::stringstream out; + auto length = ConvertDynamicShapeToLength(fShape); + out << "\n//------ "+OpName+"_ALPAKA\n"; + out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", " << OpName << "Kernel, alpaka::getPtrNative(deviceBuf_" << fNA << "), alpaka::getPtrNative(deviceBuf_"< GetStdLibs() override @@ -431,4 +507,4 @@ public: } // namespace Experimental } // namespace TMVA -#endif // TMVA_SOFIE_ROperator_BasicBinary \ No newline at end of file +#endif // TMVA_SOFIE_ROperator_BasicBinary From c845fe7116e32063c9f8e9da018c9b1787a7df86 Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Fri, 17 Oct 2025 13:01:07 +0200 Subject: [PATCH 07/22] feat: add cast kernel --- src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx | 30 +++++++++++++++++++++ src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx | 4 --- 2 files changed, 30 insertions(+), 4 deletions(-) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx index 47c3d66..7532fa1 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx @@ -90,6 +90,36 @@ public: return out.str(); } + std::string Generate_GPU_Kernel_ALPAKA() override { + std::string op; + op = "\n//------ CAST_KERNEL_ALPAKA\n"; + op += SP + "struct CastKernel{\n"; + op += SP + SP + "template\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, SrcT const * src, DstT * dst, std::size_t numElements) const {\n"; + op += SP + SP + SP + "for (auto i : alpaka::uniformElements(acc, numElements)) {\n"; + op += SP + SP + SP + "dst[i] = static_cast(src[i]);\n"; + op += SP + SP + "}\n"; + op += SP + "}\n};\n"; + return op; + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "CastKernel castKernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator Cast called to Generate without being initialized first"); + } + std::stringstream out; + auto length = ConvertDynamicShapeToLength(fShape); + out << "\n//------ CAST_GPU_ALPAKA\n"; + out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", castKernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), alpaka::getPtrNative(deviceBuf_" << fNY << "), static_cast(" << length << ")); \n"; + return out.str(); + } + }; }//SOFIE diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx index 66c6f2d..351f7c4 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx @@ -91,10 +91,6 @@ public: std::stringstream out; auto length = ConvertDynamicShapeToLength(fShape); out << "\n//------ RELU_GPU_ALPAKA\n"; - // out << SP << "Vec elementsPerThread_" << fNX << " = static_cast(1);\n"; - // out << SP << "Vec elementsPerGrid_" << fNX << " = static_cast(" << length << ");\n"; - // out << SP << "alpaka::KernelCfg kernelCfg_" << fNX << " = {elementsPerGrid_" << fNX << ", elementsPerThread_" << fNX << "};\n"; - // out << SP << "auto workDiv_" << fNX << " = alpaka::getValidWorkDiv(kernelCfg_" << fNX << ", devAcc, reluKernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), static_cast(" << length << "));\n"; out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", reluKernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), static_cast(" << length << ")); \n"; return out.str(); From 3d9f8129ab12c1862831aa200f3c61f722093718 Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Wed, 22 Oct 2025 00:36:26 +0200 Subject: [PATCH 08/22] feat: add squeeze, unsqueeze, flatten and reshape --- .../inc/SOFIE/ROperator_Reshape.hxx | 29 +++++++++++++++++++ .../inc/SOFIE/ROperator_Transpose.hxx | 12 ++++++++ 2 files changed, 41 insertions(+) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx index 66a7e09..ddb373e 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx @@ -244,6 +244,35 @@ public: << ");\n"; return out.str(); } + + + std::string Generate_GPU_ALPAKA(std::string opName) override { + if (fIsOutputConstant) return ""; //no op for constant tensors + + OpName = "op_" + OpName; + + // output of reshape is same as input + size_t length = ConvertShapeToLength(fShapeOutput); + if (length != ConvertShapeToLength(fShapeInput)) { + throw std::runtime_error("TMVA SOFIE Reshape Op : wrong output shape - is " + + ConvertShapeToString(fShapeOutput) + " and input is " + + ConvertShapeToString(fShapeInput)); + } + std::stringstream out; + std::string opName = "Reshape"; + if (fOpMode == Flatten) + opName = "Flatten"; + else if (fOpMode == Squeeze) + opName = "Squeeze"; + else if (fOpMode == Unsqueeze) + opName = "Unsquueze"; + + + out << SP << "///-------" << opName << " operator\n" << std::endl; + out << SP << "alpaka::memcpy(queue, deviceBuf_" << fNOutput << ", deviceBuf_" << fNData << ");\n"; + return out.str(); + } + }; }//SOFIE diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Transpose.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Transpose.hxx index 11c40bb..5c0f70e 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Transpose.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Transpose.hxx @@ -165,6 +165,18 @@ public: return out.str(); } + std::string Generate_GPU_Kernel_ALPAKA() override { + std::string op; + op = "\n//------ TRANSPOSE_KERNEL_ALPAKA\n"; + op += SP + "struct TransposeKernel{\n"; + op += SP + SP + "template\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const * input, T const * output, std::size_t * shape, std::size_t * strides) const {\n"; + op += SP + SP + SP + "for (auto i : alpaka::uniformElementsND(acc, shape)) {\n"; + op += SP + SP + SP + SP + "size_t input_idx = 0;\n"; + + return op; + } + }; From 284405e329afef54c114c64a28701b771ba33ca5 Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Wed, 22 Oct 2025 01:19:21 +0200 Subject: [PATCH 09/22] feat: add support for basic unary --- .../inc/SOFIE/ROperator_BasicUnary.hxx | 30 +++++++++++++++++++ 1 file changed, 30 insertions(+) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_BasicUnary.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_BasicUnary.hxx index c18c17e..48d699e 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_BasicUnary.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_BasicUnary.hxx @@ -107,6 +107,36 @@ public: return out.str(); } + std::string Generate_GPU_Kernel_ALPAKA() override { + std::string op; + op = "\n//------ " + UnaryOpTraits::Name() + "_KERNEL_ALPAKA\n"; + op += SP + "struct Unary" + UnaryOpTraits::Name() + "Kernel{\n"; + op += SP + SP + "template\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T* data, std::size_t numElements) const {\n"; + op += SP + SP + SP + "for (auto i : alpaka::uniformElements(acc, numElements)) {\n"; + op += SP + SP + SP + "data[i] = " << UnaryOpTraits::Op("data[i]") << ";\n"; + op += SP + SP + "}\n"; + op += SP + "}\n};\n"; + return op; + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "Unary" + UnaryOpTraits::Name() + "Kernel " + UnaryOpTraits::Name() + "Kernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator Relu called to Generate without being initialized first"); + } + std::stringstream out; + auto length = ConvertDynamicShapeToLength(fShape); + out << "\n//------ "+OpName+"_ALPAKA\n"; + out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", " << UnaryOpTraits::Name() << "Kernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), static_cast(" << length << ")); \n"; + return out.str(); + } + std::vector GetStdLibs() override { if (Op == EBasicUnaryOperator::kSqrt || Op == EBasicUnaryOperator::kExp || Op == EBasicUnaryOperator::kLog) { return { std::string("cmath") }; From d64a40ff6261bdbd352e10ffb9da7f82b2dd6b9f Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Wed, 22 Oct 2025 01:21:16 +0200 Subject: [PATCH 10/22] feat: add support for Constant operator --- src/SOFIE_core/inc/SOFIE/ROperator_Constant.hxx | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Constant.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Constant.hxx index 0d08432..6590909 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Constant.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Constant.hxx @@ -101,6 +101,11 @@ public: // no code to generate here. Tensor are defined in Session constructor return "//---------------------------------------\n"; } + + std::string Generate_GPU_ALPAKA(std::string /* OpName */) override { + // no code to generate here. Tensor are defined in Session constructor + return "//---------------------------------------\n"; + } }; }//SOFIE From ac8d6628204b19fd691017bef0308633f86670ee Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Wed, 22 Oct 2025 17:44:08 +0200 Subject: [PATCH 11/22] feat: add support for shape operator --- src/SOFIE_core/inc/SOFIE/ROperator_Shape.hxx | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Shape.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Shape.hxx index 52bdeae..34e69eb 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Shape.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Shape.hxx @@ -101,6 +101,26 @@ public: return out.str(); } + std::string Generate_GPU_ALPAKA(std::string OpName) override { + // no need to generate code if the output is constant + if (fIsOutputConstant) return ""; + + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Shape op called to Generate without being initialized first"); + } + std::stringstream out; + + out << "\n//------ Shape\n"; + // add a dummy statement to avoid warning for unused input + out << SP << "(void) deviceBuf_" << fNX << ";\n"; + size_t length = ConvertShapeToLength(fOutput_shape); + for (size_t id = 0; id < length; id++) { + out << SP << "deviceBuf_" << fNY << "["<< id << "] = " << fShape[fStart+id] << ";\n"; + } + return out.str(); + } + }; }//SOFIE From d75eac3375f1f372036d48f3e828473614a538fe Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Thu, 20 Nov 2025 14:03:58 +0100 Subject: [PATCH 12/22] feat: add support for Basic Binary operations --- .../inc/SOFIE/ROperator_BasicBinary.hxx | 6 +- src/SOFIE_core/inc/SOFIE/ROperator_Expand.hxx | 54 ++++++++++++++ src/SOFIE_core/inc/SOFIE/ROperator_Gather.hxx | 71 +++++++++++++++++++ src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx | 66 +++++++++++++++++ src/SOFIE_core/inc/SOFIE/ROperator_Tile.hxx | 50 +++++++++++++ .../test/EmitFromONNX_GPU_ALPAKA.cxx.in | 24 +++++++ 6 files changed, 268 insertions(+), 3 deletions(-) create mode 100644 src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx index 09f690c..8af8e3a 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx @@ -484,10 +484,10 @@ public: throw std::runtime_error("TMVA SOFIE Operator Basic Binary called to Generate without being initialized first"); } std::stringstream out; - auto length = ConvertDynamicShapeToLength(fShape); + auto length = ConvertDynamicShapeToLength(fShapeY); out << "\n//------ "+OpName+"_ALPAKA\n"; - out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; - out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", " << OpName << "Kernel, alpaka::getPtrNative(deviceBuf_" << fNA << "), alpaka::getPtrNative(deviceBuf_"< workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + out << SP << "alpaka::exec(queue, workDiv_" << fNY << ", " << OpName << "Kernel, alpaka::getPtrNative(deviceBuf_" << fNA << "), alpaka::getPtrNative(deviceBuf_"<\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const * input, T * output, const size_t * input_shape, const size_t * output_shape, const size_t * input_strides, const size_t * output_strides, const size_t ndim){\n"; + op += SP + SP + SP + SP + "size_t input_idx = 0;\n"; + op += SP + SP + SP + SP + "size_t output_idx = 0;\n"; + op += SP + SP + SP + SP + "size_t coord_out;\n"; + op += SP + SP + SP + SP + "size_t coord_in;\n"; + op += SP + SP + SP + SP + "auto elements = alpaka::uniformElementsND(acc, alpaka::Vec(output_shape));\n"; + op += SP + SP + SP + SP + "for (auto const& elem : elements) {\n"; + op += SP + SP + SP + SP + "input_idx = 0;\n"; + op += SP + SP + SP + SP + "output_idx = 0;\n"; + op += SP + SP + SP + SP + "for (int i = 0; i < ndim; ++i) {\n"; + op += SP + SP + SP + SP + SP + "coord_out = elem[i];\n"; + op += SP + SP + SP + SP + SP + "coord_in = (input_shape[i] == 1) ? 0 : coord_out;\n"; + op += SP + SP + SP + SP + SP + "input_idx += coord_in * input_strides[i];\n}\n"; + op += SP + SP + SP + SP + SP + "output_idx += coord_out * output_strides[i];\n}\n"; + op += SP + SP + SP + SP + SP + "output[output_idx] = input[input_idx];\n"; + op += SP + SP + SP + SP + "}\n"; + op += SP + SP + "}\n"; + op += SP + "};\n"; + return op; + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "ExpandKernel expandKernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator Expand called to Generate without being initialized first"); + } + + std::stringstream out; + auto length = ConvertDynamicShapeToLength(fShape); + out << "\n//------ EXPAND_GPU_ALPAKA\n"; + out << SP << "alpaka::WorkDivMembers workDiv_" << fNX + << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " + << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + + out << SP << "alpaka::exec(queue, workDiv_" << fNX + << ", expandKernel, alpaka::getPtrNative(deviceBuf_" << fNX + << "), alpaka::getPtrNative(deviceBuf_" << fNY + << "), "<< UTILITY::ConvertShapeToString(fShapeX) <<", "<\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const * input, T const * indices, T * output, std::size_t const * output_shape, std::size_t const axis, std::size_t const axisDim, std::size_t const indicesNumElements, std::size_t const * output_strides, std::size_t const * input_strides, std::size_t const ndim) const {\n"; + op += SP + SP + SP + SP + "auto elements = alpaka::uniformElementsND(acc, alpaka::Vec(output_shape));\n"; + op += SP + SP + SP + SP + "for (auto const& elem : elements) {\n"; + + // find flattened index for indices tensor + op += SP + SP + SP + SP + "int64_t idxLinear = 0;\n{\n"; + op += SP + SP + SP + SP + SP + "int64_t stride = 1;\n"; + op += SP + SP + SP + SP + SP + "for (int i = ndim - 1; i >= axis; --i) {;\n"; + op += SP + SP + SP + SP + SP + "stride *= (i > axis ? output_shape[i] : 1);\n}\n"; + op += SP + SP + SP + SP + SP + "idxLinear = elem[axis];\n"; + op += SP + SP + SP + SP + SP + "if (idxLinear >= indicesNumElements) idxLinear %= indicesNumElements;\n}\n"; + + // load gather index and wrap negative if any + op += SP + SP + SP + SP + "int64_t k = indices[idxLinear];\n"; + op += SP + SP + SP + SP + "if (k < 0) k += axisDim;\n"; + op += SP + SP + SP + SP + "if (k < 0) k = 0;\n"; + op += SP + SP + SP + SP + "if (k >= axisDim) k = axisDim - 1;\n"; + + // compute input flattened index + op += SP + SP + SP + SP + "size_t input_idx = 0;\n"; + op += SP + SP + SP + SP + "size_t output_idx = 0;\n"; + op += SP + SP + SP + SP + "for (int i = 0; i < ndim; ++i) {\n"; + op += SP + SP + SP + SP + SP + "size_t coord = elem[i];\n"; + op += SP + SP + SP + SP + SP + "output_idx += coord * output_strides[i];\n}\n"; + op += SP + SP + SP + SP + SP + "if (i == axis) coord = k;\n"; + op += SP + SP + SP + SP + SP + "input_idx += coord * input_strides[i];\n}\n"; + + // write to output tensor + op += SP + SP + SP + SP + "output[output_idx] = input[input_idx];\n"; + op += SP + SP + SP + SP + "}\n"; + op += SP + SP + "}\n"; + op += SP + "};\n"; + + return op; + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "GatherKernel gatherKernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator Gather called to Generate without being initialized first"); + } + + std::stringstream out; + auto length = ConvertDynamicShapeToLength(fShapeY); + out << "\n//------ GATHER_GPU_ALPAKA\n"; + out << SP << "alpaka::WorkDivMembers workDiv_" << fNY + << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " + << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + + out << SP << "alpaka::exec(queue, workDiv_" << fNY + << ", gatherKernel, alpaka::getPtrNative(deviceBuf_" << fNX + << "), alpaka::getPtrNative(deviceBuf_" << fNIndices + << "), alpaka::getPtrNative(deviceBuf_" << fNY + << "), "<< UTILITY::ConvertShapeToString(fShapeY) <<", "<< fAttrAxis <<", "<< fShapeX[fAttrAxis] <<", " + << fShapeIndices.size() <<", " + << UTILITY::ConvertShapeToString(ComputeStrideFromShape(fShapeY)) <<", " + << UTILITY::ConvertShapeToString(ComputeStrideFromShape(fShapeX)) <<", "<< fShapeY.size() + << ",static_cast(" << length << "));\n"; + + return out.str(); + } + }; }//SOFIE diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx index 63fbcb3..0fd8f5a 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx @@ -153,6 +153,72 @@ public: return out.str(); } + std::string Generate_GPU_Kernel_ALPAKA() override { + std::string op; + op = "\n//------ SPLIT_KERNEL_ALPAKA\n"; + op += SP + "struct SplitKernel {\n"; + op += SP + SP + "template\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const * input, T * output,"; + op += "std::size_t const * input_strides, std::size_t const * output_strides, std::size_t const split_axis, "; + op += "std::size_t const axis_offset, std::size_t const ndim) const {\n"; + op += SP + SP + SP + SP + "auto elements = alpaka::uniformElementsND(acc, alpaka::Vec(output_shape));\n"; + op += SP + SP + SP + SP + "for (auto const& elem : elements) {\n"; + op += SP + SP + SP + SP + SP + "size_t input_idx = 0;\n"; + op += SP + SP + SP + SP + SP + "size_t output_idx = 0;\n"; + op += SP + SP + SP + SP + SP + "for (int i = 0; i < ndim; ++i) {\n"; + op += SP + SP + SP + SP + SP + SP + "size_t output_coord = elem[i];\n"; + op += SP + SP + SP + SP + SP + SP + "size_t input_coord = (i == split_axis) ? (output_coord + axis_offset) : output_coord;\n"; + op += SP + SP + SP + SP + SP + SP + "input_idx += input_coord * input_strides[i];\n"; + op += SP + SP + SP + SP + SP + SP + "output_idx += output_coord * output_strides[i];\n}\n"; + op += SP + SP + SP + SP + SP + "output[output_idx] = input[input_idx];\n"; + op += SP + SP + SP + SP + "}\n"; + op += SP + SP + "}\n"; + op += SP + "};\n"; + + return op; + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "SplitKernel splitKernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator Split called to Generate without being initialized first"); + } + + std::stringstream out; + out << "\n//------ SPLIT_GPU_ALPAKA\n"; + + bool axis_is_innermost = (axis == static_cast(fInputShape.size()) - 1) + && (UTILITY::ComputeStridesFromShape(fInputShape)[fInputShape.size()-1] == 1); + out << SP <<"size_t "<(" << length << ") * sizeof(float);\n"; + out << SP << SP << SP << "alpaka::memcpy(queue, "< workDiv_" << fNYs[i] + << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " + << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + + out << SP << "alpaka::exec(queue, workDiv_" << fNYs[i] + << ", splitKernel, alpaka::getPtrNative(deviceBuf_" << fNX + << "), alpaka::getPtrNative(deviceBuf_" << fNY + << "), "<< UTILITY::ConvertShapeToString(UTILITY::ComputeStrideFromShape(fInputShape)) <<", "<\n"; + op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const * __restrict__ tensor_X,"; + op += SP + SP + SP + "T * __restrict__ tensor_Y, const int64_t * __restrict__ shape_X,"; + op += SP + SP + SP + "const int64_t * __restrict__ stride_X, const int64_t * __restrict__ shape_Y,"; + op += SP + SP + SP + "const int64_t * __restrict__ stride_Y, std::size_t const ndim) const {\n"; + op += SP + SP + SP + SP + "auto elements = alpaka::uniformElementsND(acc, alpaka::Vec(shape_Y));\n"; + op += SP + SP + SP + SP + "for (auto const& elem: elements) {\n"; + op += SP + SP + SP + SP + SP + "size_t input_idx = 0;\n"; + op += SP + SP + SP + SP + SP + "size_t output_idx = 0;\n"; + op += SP + SP + SP + SP + SP + "for (int i = 0; i < ndim; ++i) {\n"; + op += SP + SP + SP + SP + SP + SP + "size_t input_coord = elem[i] % shape_X[i];\n"; + op += SP + SP + SP + SP + SP + SP + "input_idx += input_coord * stride_X[i];\n"; + op += SP + SP + SP + SP + SP + "output_idx += elem[i] * stride_Y[i];\n}\n"; + op += SP + SP + SP + SP + SP + "tensor_Y[output_idx] = tensor_X[input_idx];\n"; + op += SP + SP + SP + SP + "}\n"; + op += SP + SP + "}\n"; + op += SP + "};\n"; + return op; + } + + std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + return SP + "TileKernel tileKernel;\n"; + } + + std::string Generate_GPU_ALPAKA(std::string OpName) override { + OpName = "op_" + OpName; + if (fShape.empty()) { + throw std::runtime_error("TMVA SOFIE Operator Tile called to Generate without being initialized first"); + } + std::stringstream out; + auto length = ConvertDynamicShapeToLength(fShapeY); + out << "\n//------ TILE_GPU_ALPAKA\n"; + out << SP << "alpaka::WorkDivMembers workDiv_" << fNY + << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " + << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + + out << SP << "alpaka::exec(queue, workDiv_" << fNY + << ", tileKernel, alpaka::getPtrNative(deviceBuf_" << fNInput + << "), alpaka::getPtrNative(deviceBuf_" << fNY + << "), "<< UTILITY::ConvertShapeToString(fShapeInput)<<", "<< UTILITY::ConvertShapeToString(UTILITY::ComputeStrideFromShape(fShapeInput)) <<", " + < Date: Sun, 23 Nov 2025 14:41:09 +0100 Subject: [PATCH 13/22] fix: compilation issues due to faulty rebase --- src/SOFIE_core/inc/SOFIE/RModel.hxx | 40 ++- src/SOFIE_core/inc/SOFIE/RModel_Base.hxx | 46 +++ src/SOFIE_core/inc/SOFIE/ROperator.hxx | 34 +- .../inc/SOFIE/ROperator_BasicBinary.hxx | 44 ++- .../inc/SOFIE/ROperator_BasicUnary.hxx | 11 +- .../SOFIE/ROperator_BatchNormalization.hxx | 6 +- src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx | 8 +- src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx | 120 +++++-- .../inc/SOFIE/ROperator_ConvTranspose.hxx | 6 +- src/SOFIE_core/inc/SOFIE/ROperator_Expand.hxx | 10 +- src/SOFIE_core/inc/SOFIE/ROperator_Gather.hxx | 18 +- src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx | 20 +- .../SOFIE/ROperator_LayerNormalization.hxx | 15 +- .../inc/SOFIE/ROperator_LeakyRelu.hxx | 6 +- src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx | 15 +- src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx | 8 +- .../inc/SOFIE/ROperator_Reshape.hxx | 333 +++++++++++------ .../inc/SOFIE/ROperator_Sigmoid.hxx | 6 +- src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx | 18 +- src/SOFIE_core/inc/SOFIE/ROperator_Tile.hxx | 12 +- .../inc/SOFIE/ROperator_Transpose.hxx | 2 +- src/SOFIE_core/inc/SOFIE/ROperator_Where.hxx | 110 ++++-- src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx | 161 +++++++-- src/SOFIE_core/src/RModel.cxx | 338 ++++++++++++------ src/SOFIE_core/src/RModel_ALPAKA.cxx | 4 +- src/SOFIE_core/src/RModel_Base.cxx | 4 +- src/SOFIE_core/src/RModel_GNN.cxx | 4 +- .../src/RModel_GraphIndependent.cxx | 6 +- src/SOFIE_core/src/SOFIE_common.cxx | 230 ++++++++---- src/SOFIE_core/test/CMakeLists.txt | 15 + .../test/TestCustomModelsFromONNX.cxx | 10 +- 31 files changed, 1134 insertions(+), 526 deletions(-) diff --git a/src/SOFIE_core/inc/SOFIE/RModel.hxx b/src/SOFIE_core/inc/SOFIE/RModel.hxx index 6083e47..09feb17 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel.hxx @@ -16,14 +16,21 @@ private: int fVerbose = 0; int fBatchSize = -1; long fReadPos = 0; // reading file position + size_t fConstantTensorSize = 0; // size (in Bytes) of the allocated constant tensors + size_t fWeightsTensorSize = 0; // size (in Bytes) of the allocated weight tensors + size_t fOtherTensorSize = 0; // size (in Bytes) of intermediate tensors which are not managed by the memory pool + + OptimizationLevel fOptimizationLevel = OptimizationLevel::kExtended; std::unordered_map fInputTensorInfos; // input tensors where shape may not fully defined or other graph inputs? std::unordered_map fReadyInputTensorInfos; // input tensors where shape is full defined std::unordered_map fInitializedTensors; std::unordered_map fIntermediateTensorInfos; std::unordered_map fDynamicTensorInfos; + std::unordered_map, bool>> fShapeTensors; // constant tensors describing a shape std::unordered_map fShapeParams; // parameters defining the dynamic shape (e.g. batch size), store also its default value + std::vector fDimShapeNames; // parameter names used to define the shapes std::vector fOutputTensorNames; std::vector fInputTensorNames; // input tensor names using ONNX order @@ -59,8 +66,13 @@ public: int Verbose() const { return fVerbose;} const std::vector &GetTensorShape(std::string name) const; - std::vector GetDynamicTensorShape(std::string name) const; + std::vector GetDimTensorShape(const std::string & name) const; const ETensorType &GetTensorType(std::string name) const; + std::vector GetDynamicTensorShape(const std::string & name) const ; + + // get the values for the tensor representing a shape + const std::vector & GetShapeTensorValues(const std::string & tensor_name) const; + bool CheckIfTensorAlreadyExist(std::string tensor_name); void AddInputTensorInfo(std::string input_name, ETensorType type, std::vector shape); @@ -102,6 +114,8 @@ public: AddInitializedTensor(tensor_name, GetTemplatedType(T()), shape, data); } + void AddShapeTensor(const std::string & name, const std::vector & shapeValues, bool scalar = false); + // add and initialize subgraph to the model void InitializeSubGraph(std::shared_ptr graph); @@ -118,6 +132,8 @@ public: bool IsDimInputTensor(const std::string &name) const; // check if tensor is a fully specified input tensor bool IsReadyInputTensor(const std::string &name) const; + /// check if a tensor is a shape tensor + bool IsShapeTensor(const std::string & name) const; // Add intermediate tensor void AddIntermediateTensor(std::string tensor_name, ETensorType type, std::vector dim_shape); @@ -132,6 +148,8 @@ public: void UpdateInitializedTensor(std::string tensor_name, ETensorType type, std::vector shape, std::shared_ptr data); std::shared_ptr GetInitializedTensorData(std::string tensor_name); + template + std::vector GetTensorData(const std::string & name); void Initialize(int batchSize = -1, bool verbose = false); void Initialize(const std::map & inputParams, bool verbose = false); @@ -155,8 +173,8 @@ public: } // calculate total intermediate memory and position intermediate tensor addresses - std::string AllocateIntermediateMemory(std::span op_output_tensors); - void CheckAndFlushIntermediateMemory(std::span op_output_tensors, const size_t& op_idx); + std::string AllocateIntermediateMemory(std::span op_output_tensors); + void CheckAndFlushIntermediateMemory(std::span op_output_tensors, const size_t& op_idx); protected: // internal functions @@ -194,6 +212,7 @@ protected: public: const std::vector &GetInputTensorNames() const { return fInputTensorNames; } const std::vector &GetOutputTensorNames() const { return fOutputTensorNames; } + const std::vector & GetDimShapeNames() const { return fDimShapeNames; } void ReadInitializedTensorsFromFile(long); long WriteInitializedTensorsToFile(std::string filename = ""); @@ -228,6 +247,21 @@ public: ClassDefNV(RModel, 3); }; +template +inline std::vector RModel::GetTensorData(const std::string & name) { + if (!IsInitializedTensor(name)) return std::vector{}; + T * data = static_cast(GetInitializedTensorData(name).get()); + size_t size = ConvertShapeToLength(GetTensorShape(name)); + return std::vector(data, data+size); +} + +template<> +inline std::vector RModel::GetTensorData(const std::string & name) { + if (!IsShapeTensor(name)) return std::vector{}; + return GetShapeTensorValues(name); +} + + } // namespace SOFIE #endif // SOFIE_RMODEL diff --git a/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx b/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx index 073c6bc..460372a 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx @@ -27,6 +27,15 @@ enum class Options { kGNNComponent = 0x10, }; +// Optimization levels inspired by ONNXRuntime. +// We only get Operator Fusion with the Basic, and +// memory reuse with Extended. kExtended is enabled +// by default +enum class OptimizationLevel { + kBasic = 0x0, + kExtended = 0x1, +}; + enum class WeightFileType { None, RootBinary, Text }; @@ -60,6 +69,43 @@ protected: bool fIsGNN = false; bool fIsGNNComponent = false; + // Function to generate the code for declaring and initializing constant tensors + // This is for tensors which are not part of weight files and can be created from the Constant operator + template + std::string GenerateConstantTensorCode(const std::pair &t) + { + std::stringstream strs; + std::string type = ConvertTypeToString(t.second.type()); + size_t length = ConvertShapeToLength(t.second.shape()); + // avoid using stack sizes for constant tensors to reduce compilation time + bool allocateOnStack = (length > 100) ? false : true; + + const T *data = t.second.data(); + + // and check if all values are the same + bool sameData = false; + // for non stack allocation check if data are the same + if (!allocateOnStack && length > 1) { + size_t idx = 1; + do { + sameData = (data[idx] == data[idx - 1]); + idx++; + } while (sameData && idx < length); + } + if (allocateOnStack) { + strs << type << " tensor_" << t.first << "[" << length << "] = " << ConvertValuesToString(length, data) << ";\n"; + } else { + strs << "std::vector<" << type << "> fTensor_" << t.first << " = "; + if (sameData) + strs << "std::vector<" << type << ">(" << length << ", " << ConvertValToString(data[0]) << ");\n"; + else { + strs << ConvertValuesToString(length, data) << ";\n"; + } + strs << "const " << type << " * tensor_" + t.first + " = fTensor_" + t.first + ".data();\n"; + } + return strs.str(); + } + public: /** Default constructor. Needed to allow serialization of ROOT objects. See diff --git a/src/SOFIE_core/inc/SOFIE/ROperator.hxx b/src/SOFIE_core/inc/SOFIE/ROperator.hxx index 0ad57b3..9bccc5b 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator.hxx @@ -1,14 +1,11 @@ -#ifndef SOFIE_ROPERATOR -#define SOFIE_ROPERATOR +#ifndef TMVA_SOFIE_ROPERATOR +#define TMVA_SOFIE_ROPERATOR #include #include #include #include "SOFIE/SOFIE_common.hxx" -//#include "RModel.hxx" - - namespace SOFIE{ @@ -31,10 +28,10 @@ inline const char* toString(OperatorKind kind) { case OperatorKind::GEMM: return "GEMM"; case OperatorKind::LAYERNORM: return "LAYERNORM"; case OperatorKind::RELU: return "RELU"; - case OperatorKind::CONSTANT: return "CONSTANT"; - case OperatorKind::CONSTANTOFSHAPE: return "CONSTANTOFSHAPE"; - case OperatorKind::BATCHNORM: return "batchnorm"; - case OperatorKind::CONV: return "conv"; + case OperatorKind::CONSTANT: return "CONSTANT"; + case OperatorKind::CONSTANTOFSHAPE: return "CONSTANTOFSHAPE"; + case OperatorKind::BATCHNORM: return "BATCHNORM"; + case OperatorKind::CONV: return "CONV"; case OperatorKind::UNDEFINED: return "UNDEFINED"; default: return "UNKNOWN"; } @@ -48,11 +45,11 @@ class ROperator{ public: virtual std::vector GetBlasRoutines() { return {}; } virtual std::vector GetStdLibs() { return {}; } - virtual std::vector> ShapeInference(std::vector>) = 0; - virtual std::vector TypeInference(std::vector) = 0; + virtual std::vector> ShapeInference(std::vector>) { return {}; }; + virtual std::vector TypeInference(std::vector) { return {}; }; virtual void Initialize(RModel&) = 0; virtual std::string Generate(std::string OpName) = 0; //expect unique opName for each operator within the same RModel - virtual std::string Generate_GPU_ALPAKA(std::string OpName){ return "";} //expect unique opName for each operator within the same RModel + virtual std::string Generate_GPU_ALPAKA(std::string OpName){ return "";} //expect unique opName for each operator within the same RModel // generate initialization code for session constructor virtual std::string GenerateInitCode() { return "";} virtual std::string GenerateInitCode_GPU_ALPAKA() { return "";}; @@ -60,10 +57,9 @@ public: virtual std::string GenerateDeclCode() { return "";} // generate session data members specific to operator virtual std::string GenerateSessionMembersCode(std::string /*opName*/) { return ""; } - virtual std::string Generate_GPU_Kernel_ALPAKA() { return ""; } - virtual std::string Generate_GPU_Kernel_Definitions_ALPAKA() { return ""; } + virtual std::string Generate_GPU_Kernel_ALPAKA(std::string /*opName*/) { return ""; } + virtual std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) { return ""; } virtual std::string Header() { return "";} - virtual std::string GetFusableOutputTensorName() { return "";} virtual std::string GetBlasConfig() { return ""; } virtual void UpdateFusableTensorName(std::string, const std::function& removal_func){ return;}; @@ -78,7 +74,8 @@ protected: const std::string SP = " "; ///< space used to correctly indent the generated C++ code bool fUseSession = false; ///< flag to identify if using the session class bool fIsOutputConstant = false; ///< flag to identify if operator has a constant output (no need to generate code) - + bool fIsOutputParamShape = false; ///< flag to identify of the output represents a parametric shape (can be knwon at compile time) + mutable std::vector fInputTensorNames; mutable std::vector fOutputTensorNames; @@ -99,12 +96,11 @@ public: size_t GetOpOrder(){ return fOpOrder; } - + }; }//SOFIE - -#endif //SOFIE_OPERATOR +#endif //TMVA_SOFIE_OPERATOR diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx index 8af8e3a..da7cf63 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx @@ -1,14 +1,12 @@ #ifndef TMVA_SOFIE_ROperator_BasicBinary #define TMVA_SOFIE_ROperator_BasicBinary -#include "TMVA/SOFIE_common.hxx" -#include "TMVA/ROperator.hxx" -#include "TMVA/RModel.hxx" +#include "SOFIE/SOFIE_common.hxx" +#include "SOFIE/ROperator.hxx" +#include "SOFIE/RModel.hxx" #include -namespace TMVA { -namespace Experimental { namespace SOFIE { enum EBasicBinaryOperator { @@ -106,7 +104,7 @@ public: } int dynamicInputs = 0; if (model.IsDynamicTensor(fNA)) { - fDimShapeA = model.GetDynamicTensorShape(fNA); + fDimShapeA = model.GetDimTensorShape(fNA); dynamicInputs |= 1; } else { fShapeA = model.GetTensorShape(fNA); @@ -114,17 +112,17 @@ public: } if (model.IsDynamicTensor(fNB)) { dynamicInputs |= 2; - fDimShapeB = model.GetDynamicTensorShape(fNB); + fDimShapeB = model.GetDimTensorShape(fNB); } else { fShapeB = model.GetTensorShape(fNB); fDimShapeB = ConvertShapeToDim(fShapeB); } if (dynamicInputs & 1 && model.Verbose()) std::cout << BinaryOperatorTrait::Name() << " : input " << fNA << " is dynamic " - << ConvertShapeToString(fDimShapeA) << " "; + << ConvertDimShapeToString(fDimShapeA) << " "; if (dynamicInputs & 2 && model.Verbose()) std::cout << BinaryOperatorTrait::Name() << " : input " << fNB << " is dynamic " - << ConvertShapeToString(fDimShapeB) << " "; + << ConvertDimShapeToString(fDimShapeB) << " "; std::cout << std::endl; // check if need to broadcast at initialization time if shapes are known and different // (we could broadcast the tensor tensor to maximum values of dynamic shapes - to be done) @@ -388,7 +386,7 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() override { + std::string Generate_GPU_Kernel_ALPAKA(std::string opName) { std::string op; op = "\n//------ "+opName+"_"+BinaryOperatorTrait::Name()+"_KERNEL_ALPAKA\n"; op += SP + "struct Binary"+BinaryOperatorTrait::Name()+"Kernel {\n"; @@ -452,7 +450,7 @@ public: for (size_t i = 0; i < fDimShapeY.size(); ++i) { if (fDimShapeY[i].dim != 1 && fDimShapeY[i].GetVal() != "1") { nloop++; - for (int j = 0; j < nloop; j++) out << SP; + for (int j = 0; j < nloop; j++) op += SP; compute_idx_Y += "elem[" + std::to_string(i) + "]"; if (stridesY[i].GetVal() != "1") compute_idx_Y += " * " + stridesY[i].GetVal(); @@ -463,31 +461,31 @@ public: for (int j = 0; j < 3; j++) compute_idx_Y.pop_back(); } - for (int j = 0; j < nloop + 1; j++) out << SP; - out << "C[" << compute_idx_Y << "] = " - << BinaryOperatorTrait::Op("A[" + compute_idx_A + "]", + for (int j = 0; j < nloop + 1; j++) op += SP; + op += "C[" + compute_idx_Y + "] = " + + BinaryOperatorTrait::Op("A[" + compute_idx_A + "]", "B[" + compute_idx_B + "]") - << " ;\n"; + + " ;\n"; for (int i = nloop; i > 0; i--) { - for (int j = 0; j < i; j++) out << SP; - out << "}\n"; + for (int j = 0; j < i; j++) op += SP; + op += "}\n"; } } - std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string OpName) { return SP + "Binary"+BinaryOperatorTrait::Name()+"Kernel " + OpName + "Kernel;\n"; } - std::string Generate_GPU_ALPAKA(std::string OpName) override { - if (fShape.empty()) { + std::string Generate_GPU_ALPAKA(std::string OpName) { + if (fDimShapeY.empty()) { throw std::runtime_error("TMVA SOFIE Operator Basic Binary called to Generate without being initialized first"); } std::stringstream out; - auto length = ConvertDynamicShapeToLength(fShapeY); + auto length = ConvertDimShapeToLength(fDimShapeY); out << "\n//------ "+OpName+"_ALPAKA\n"; out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; - out << SP << "alpaka::exec(queue, workDiv_" << fNY << ", " << OpName << "Kernel, alpaka::getPtrNative(deviceBuf_" << fNA << "), alpaka::getPtrNative(deviceBuf_"<(queue, workDiv_" << fNY << ", " << OpName << "Kernel, alpaka::getPtrNative(deviceBuf_" << fNA << "), alpaka::getPtrNative(deviceBuf_"<::Name() + "_KERNEL_ALPAKA\n"; op += SP + "struct Unary" + UnaryOpTraits::Name() + "Kernel{\n"; @@ -120,19 +120,16 @@ public: return op; } - std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) override { return SP + "Unary" + UnaryOpTraits::Name() + "Kernel " + UnaryOpTraits::Name() + "Kernel;\n"; } std::string Generate_GPU_ALPAKA(std::string OpName) override { OpName = "op_" + OpName; - if (fShape.empty()) { - throw std::runtime_error("TMVA SOFIE Operator Relu called to Generate without being initialized first"); - } std::stringstream out; - auto length = ConvertDynamicShapeToLength(fShape); + auto length = ConvertShapeToLength(fShapeX); out << "\n//------ "+OpName+"_ALPAKA\n"; - out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(length+255)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", " << UnaryOpTraits::Name() << "Kernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), static_cast(" << length << ")); \n"; return out.str(); } diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_BatchNormalization.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_BatchNormalization.hxx index a27cea4..1a6098d 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_BatchNormalization.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_BatchNormalization.hxx @@ -1,9 +1,9 @@ #ifndef SOFIE_ROPERATOR_BatchNormalization #define SOFIE_ROPERATOR_BatchNormalization -#include "SOFIE_common.hxx" -#include "ROperator.hxx" -#include "RModel.hxx" +#include "SOFIE/SOFIE_common.hxx" +#include "SOFIE/ROperator.hxx" +#include "SOFIE/RModel.hxx" #include diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx index 7532fa1..c813f7c 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Cast.hxx @@ -90,7 +90,7 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() override { + std::string Generate_GPU_Kernel_ALPAKA() { std::string op; op = "\n//------ CAST_KERNEL_ALPAKA\n"; op += SP + "struct CastKernel{\n"; @@ -103,7 +103,7 @@ public: return op; } - std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) { return SP + "CastKernel castKernel;\n"; } @@ -113,9 +113,9 @@ public: throw std::runtime_error("TMVA SOFIE Operator Cast called to Generate without being initialized first"); } std::stringstream out; - auto length = ConvertDynamicShapeToLength(fShape); + auto length = ConvertShapeToLength(fShape); out << "\n//------ CAST_GPU_ALPAKA\n"; - out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<<(stoi(length)+256-1)/256<<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; + out << SP << "alpaka::WorkDivMembers workDiv_"<::all("<< (length+255)/256 <<"), alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", castKernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), alpaka::getPtrNative(deviceBuf_" << fNY << "), static_cast(" << length << ")); \n"; return out.str(); } diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx index 0d5e574..3ef0ee4 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx @@ -1,5 +1,5 @@ -#ifndef SOFIE_ROPERATOR_Concat - #define SOFIE_ROPERATOR_Concat +#ifndef TMVA_SOFIE_ROPERATOR_Concat + #define TMVA_SOFIE_ROPERATOR_Concat #include "SOFIE/SOFIE_common.hxx" @@ -25,6 +25,7 @@ std::vector> fInputShapes; public: + ROperator_Concat(){} ROperator_Concat(std::vector inputs, int axis, int newAxis, std::string output): fAxis(axis), fnewAxis(newAxis), fOutput(UTILITY::Clean_name(output)) { @@ -53,6 +54,7 @@ throw std::runtime_error("TMVA SOFIE Concat Op - invalid axis value "); int concat_dim=0; + // case of Concat (fNewAxis = 0) and not ConcatFromSequence if(fnewAxis == 0){ for (size_t i = 0; i < inputs.size(); i++) { if (i > 0 && inputs[i].size() != inputs[i - 1].size()) @@ -73,6 +75,7 @@ ret[0][fAxis] = concat_dim; } std::vector stack; + // case ConCatFromSequence if(fnewAxis == 1){ for(size_t i = 0; i < inputs.size(); i++) { if (i > 0 && inputs[i].size() != inputs[i-1].size() ) @@ -96,8 +99,8 @@ } // get shape of output given inputs. It is going to be called after initialized - std::vector> ShapeInference(const std::vector> & inputs) { - std::vector> ret(1); + std::vector ShapeInference(const std::vector> & inputs, const RModel & model) { + std::vector ret(inputs[0].size()); // treat negative axis case if (fAxis<0) { fAxis = inputs[0].size()+fAxis; @@ -105,31 +108,54 @@ if (fAxis < 0 || fAxis >= (int) inputs[0].size()) throw std::runtime_error("TMVA SOFIE Concat Op - invalid axis value "); - int concat_dim=0; + Dim concat_dim; if(fnewAxis == 0){ for (size_t i = 0; i < inputs.size(); i++) { if (i > 0 && inputs[i].size() != inputs[i - 1].size()) throw std::runtime_error("TMVA SOFIE Concat Op - input tensors have different shapes " + fInputs[i] + " : " + - ConvertDynamicShapeToString(inputs[i]) + " and " + fInputs[i-1] + " : " + ConvertDynamicShapeToString(inputs[i - 1])); + ConvertShapeToString(inputs[i]) + " and " + fInputs[i-1] + " : " + ConvertShapeToString(inputs[i - 1])); for (size_t iaxis = 0; iaxis < inputs[i].size(); iaxis++) { if ((int)iaxis == fAxis) { - // support only non-params shape for the concatenation axis - if (inputs[i][iaxis].isParam) - throw std::runtime_error("TMVA SOFIE Concat Op - not supporting input param dimensions for concatenation axis. Input shape is " + - ConvertDynamicShapeToString(inputs[i])); - concat_dim += inputs[i][iaxis].dim; + // support both integer and params shape for the concatenation axis + if (concat_dim.param.empty() && concat_dim.dim == 0) + concat_dim = inputs[i][iaxis]; + else if (inputs[i][iaxis].isParam || concat_dim.isParam) { + concat_dim = + Dim{ concat_dim.GetVal() + std::string("+ ") + inputs[i][iaxis].GetVal(), + static_cast(-1)}; + } else { + concat_dim = Dim { concat_dim.dim + inputs[i][iaxis].dim }; + } + } + else if (i == 0) { + ret[iaxis] = inputs[i][iaxis]; } - // other dimensions must be the same - else if (i > 0 && inputs[i][iaxis].GetVal() != inputs[i - 1][iaxis].GetVal()) + else if ((!inputs[i][iaxis].isParam && !ret[iaxis].isParam) && (inputs[i][iaxis].dim != ret[iaxis].dim)) { throw std::runtime_error("TMVA SOFIE Concat Op - input tensors have wrong shapes " + - ConvertDynamicShapeToString(inputs[i]) + " and " + - ConvertDynamicShapeToString(inputs[i - 1])); + ConvertShapeToString(inputs[i]) + " and " + + ConvertShapeToString(inputs[i - 1])); + } + else if (!inputs[i][iaxis].isParam && ret[iaxis].isParam){ + // if shape is not parametric use it + ret[iaxis] = inputs[i][iaxis]; + } + else if (inputs[i][iaxis].isParam && ret[iaxis].isParam) { + // check which parameter is first in RModel list + auto & dimNames = model.GetDimShapeNames(); + auto p1 = std::find(dimNames.begin(), dimNames.end(), inputs[i][iaxis].param); + auto p2 = std::find(dimNames.begin(), dimNames.end(), ret[iaxis].param); + if (p1 < p2) ret[iaxis] = inputs[i][iaxis]; + } + } + // add parenthesis in case is an expression + if (concat_dim.isParam && concat_dim.dim == static_cast(-1)) + concat_dim = Dim{ std::string("(") + concat_dim.GetVal() + std::string(")"), concat_dim.dim }; } - // output shape - ret[0] = inputs[0]; - ret[0][fAxis].dim = concat_dim; + // output shape for concatenated axis + ret[fAxis] = Dim{concat_dim}; + } // case of stacking (not supported yet) // here we need to check that input shapes are the same @@ -141,24 +167,30 @@ return ret; } - void Initialize(RModel& model) override { + void Initialize(RModel& model) override { for (auto &it : fInputs) { if (model.CheckIfTensorAlreadyExist(it) == false) { throw std::runtime_error("TMVA SOFIE Concat Op Input Tensor " + it + " is not found in model"); } - fInputShapes.push_back(model.GetDynamicTensorShape(it)); + fInputShapes.push_back(model.GetDimTensorShape(it)); } - fOutputShape = ShapeInference(fInputShapes)[0]; + fOutputShape = ShapeInference(fInputShapes, model); if (model.Verbose()) - std::cout << "Output of concat operator has shape " << ConvertDynamicShapeToString(fOutputShape) << std::endl; + std::cout << "Output of concat operator has shape " << ConvertDimShapeToString(fOutputShape) << std::endl; // check if concat has constant inputs , axis 0(concat contigous memory and type is integer) + bool isOutputShape = false; if (model.GetTensorType(fInputs[0]) == ETensorType::INT64 && fAxis == 0) { fIsOutputConstant = true; + isOutputShape = true; + for ( auto & input : fInputs) { if (!model.IsInitializedTensor(input)) { fIsOutputConstant = false; - break; + if (!model.IsShapeTensor(input)) { + isOutputShape = false; + break; + } } } if (fIsOutputConstant) { @@ -177,26 +209,53 @@ model.AddConstantTensor(fOutput, outputShape, outputData.data()); if (model.Verbose()) { std::cout << "output of Concat is a constant tensor " << ConvertShapeToString(outputShape) << " : " - << ConvertValuesToString(outputData) << std::endl; + << ConvertValuesToString(outputData) << " (constant)" << std::endl; } + } else if (isOutputShape) { + auto outputShape = ConvertShapeToInt(fOutputShape); // conversion must be possible + std::vector outputData(ConvertShapeToLength(outputShape)); + size_t offset = 0; + for ( auto & input : fInputs) { + std::vector inputData; + auto inputShape = model.GetTensorShape(input); // shape is not dynamic + size_t inputLength = ConvertShapeToLength(inputShape); // shape can be a scalar + if (model.IsShapeTensor(input)) + inputData = model.GetShapeTensorValues(input); + else if (model.IsConstantTensor(input)) { + inputData.resize(inputLength); + auto intData = static_cast(model.GetInitializedTensorData(input).get()); + for (size_t i = 0; i < inputData.size(); i++) + inputData[i] = Dim{ static_cast(intData[i])}; + } + std::cout << "concatenating input data " << inputLength << " " << inputData[0] << std::endl; + std::copy(inputData.begin(), inputData.end(), outputData.begin() + offset ); + offset += inputLength; + } + // add output tensor + model.AddShapeTensor(fOutput,outputData, false); // cannot be a scalar + if (model.Verbose()) { + std::cout << "output of Concat is a shape tensor " << ConvertShapeToString(outputShape) << " : " + << ConvertShapeToString(outputData) << " (shape)" << std::endl; + } + fIsOutputConstant = true; } } if (!fIsOutputConstant) { model.AddIntermediateTensor(fOutput, model.GetTensorType(fInputs[0]), fOutputShape); if (model.Verbose()) { - std::cout << "Concat ---> " << fOutput << " " << ConvertDynamicShapeToString(fOutputShape) << std::endl; + std::cout << "Concat ---> " << fOutput << " " << ConvertDimShapeToString(fOutputShape) << std::endl; } } } - std::string Generate(std::string OpName) override { + std::string Generate(std::string opName) override { if (fIsOutputConstant) return ""; - OpName = "op_"+OpName; + opName = "op_" + opName; if(fOutputShape.empty()){ throw std::runtime_error("TMVA SOFIE Concat called to Generate without being initialized first"); } std::stringstream out; - out<<"\n//--------- Concat\n"; + out<<"\n//--------- Concat " << opName << " --> " << ConvertShapeToString(fOutputShape) << "\n"; // special case when memory is contiguous bool hasShapeOnes = true; for(int i = 0; i 0) out << offset; offset += " + " + length; @@ -260,4 +319,5 @@ }; }//SOFIE - #endif //SOFIE_ROPERATOR_CONCAT + + #endif //TMVA_SOFIE_ROPERATOR_CONCAT \ No newline at end of file diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_ConvTranspose.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_ConvTranspose.hxx index 0467385..b9d917b 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_ConvTranspose.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_ConvTranspose.hxx @@ -1,9 +1,9 @@ #ifndef SOFIE_SOFIE_ROPERATOR_CONVTRANSPOSE_HXX #define SOFIE_SOFIE_ROPERATOR_CONVTRANSPOSE_HXX -#include -#include -#include +#include "SOFIE/SOFIE_common.hxx" +#include "SOFIE/ROperator.hxx" +#include "SOFIE/RModel.hxx" #include #include diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Expand.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Expand.hxx index 9da8f76..bf163b7 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Expand.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Expand.hxx @@ -122,7 +122,7 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() override { + std::string Generate_GPU_Kernel_ALPAKA() { std::string op; op = "\n//------ Expand_KERNEL_ALPAKA\n"; op += SP + "struct ExpandKernel {\n"; @@ -148,7 +148,7 @@ public: return op; } - std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) override { return SP + "ExpandKernel expandKernel;\n"; } @@ -159,7 +159,7 @@ public: } std::stringstream out; - auto length = ConvertDynamicShapeToLength(fShape); + auto length = ConvertShapeToLength(fShape); out << "\n//------ EXPAND_GPU_ALPAKA\n"; out << SP << "alpaka::WorkDivMembers workDiv_" << fNX << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " @@ -168,8 +168,8 @@ public: out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", expandKernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), alpaka::getPtrNative(deviceBuf_" << fNY - << "), "<< UTILITY::ConvertShapeToString(fShapeX) <<", "< workDiv_" << fNY << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; @@ -274,10 +270,10 @@ public: << ", gatherKernel, alpaka::getPtrNative(deviceBuf_" << fNX << "), alpaka::getPtrNative(deviceBuf_" << fNIndices << "), alpaka::getPtrNative(deviceBuf_" << fNY - << "), "<< UTILITY::ConvertShapeToString(fShapeY) <<", "<< fAttrAxis <<", "<< fShapeX[fAttrAxis] <<", " + << "), "<< ConvertShapeToString(fShapeY) <<", "<< fAttrAxis <<", "<< fShapeX[fAttrAxis] <<", " << fShapeIndices.size() <<", " - << UTILITY::ConvertShapeToString(ComputeStrideFromShape(fShapeY)) <<", " - << UTILITY::ConvertShapeToString(ComputeStrideFromShape(fShapeX)) <<", "<< fShapeY.size() + << ConvertShapeToString(UTILITY::ComputeStrideFromShape(fShapeY)) <<", " + << ConvertShapeToString(UTILITY::ComputeStrideFromShape(fShapeX)) <<", "<< fShapeY.size() << ",static_cast(" << length << "));\n"; return out.str(); diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx index e3d0595..7756f9a 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx @@ -152,7 +152,7 @@ namespace SOFIE{ } } if (model.IsDynamicTensor(fNA) || model.IsDimInputTensor(fNA) ) { - fShapeA = model.GetDynamicTensorShape(fNA); + fShapeA = model.GetDimTensorShape(fNA); fIsDynamic = true; } else { auto shapeA_int = model.GetTensorShape(fNA); @@ -166,7 +166,7 @@ namespace SOFIE{ } if (model.IsDynamicTensor(fNB) || model.IsDimInputTensor(fNB)) { - fShapeB = model.GetDynamicTensorShape(fNB); + fShapeB = model.GetDimTensorShape(fNB); fIsDynamic = true; } else { @@ -195,7 +195,7 @@ namespace SOFIE{ if (!fIsDynamic) { shapeY = ConvertShapeToInt(fShapeY); if (shapeY.empty()) { - throw std::runtime_error("TMVA SOFIE Gemm Op " + fNY + " has invalid shape" + ConvertDynamicShapeToString(fShapeY)); + throw std::runtime_error("TMVA SOFIE Gemm Op " + fNY + " has invalid shape" + ConvertDimShapeToString(fShapeY)); } } @@ -266,7 +266,7 @@ namespace SOFIE{ if (model.Verbose()){ std::cout << "Gemm (or MatMul) " << " ---> " << fNY << " shape "; if (fIsDynamic) - std::cout << ConvertDynamicShapeToString(fShapeY) << std::endl; + std::cout << ConvertDimShapeToString(fShapeY) << std::endl; else std::cout << ConvertShapeToString(shapeY) << std::endl; } @@ -285,9 +285,9 @@ namespace SOFIE{ // include a separate scope to avoid defining unique operator temp variables out << "//--- broadcast bias tensor " << fNC << "for Gemm op\n"; out << SP << "{\n"; - out << " float * data = TMVA::Experimental::SOFIE::UTILITY::UnidirectionalBroadcast(tensor_" - << fNC << "," << ConvertShapeToString(fShapeC) << ", " << ConvertDynamicShapeToString(fShapeY) << ");\n"; - auto length = SOFIE::ConvertDynamicShapeToLength(fShapeY); // output size + out << " float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_" + << fNC << "," << ConvertShapeToString(fShapeC) << ", " << ConvertDimShapeToString(fShapeY) << ");\n"; + auto length = SOFIE::ConvertDimShapeToLength(fShapeY); // output size out << SP << SP << "std::copy(data, data + " << length << ", tensor_" << fNC2 << ");\n"; out << SP << SP << "delete [] data;\n"; out << SP << "}\n"; @@ -306,9 +306,9 @@ namespace SOFIE{ // include a separate scope to avoid defining unique operator temp variables out << "//--- broadcast bias tensor " << fNC << "for Gemm op\n"; out << SP << "{\n"; - out << " float * data = TMVA::Experimental::SOFIE::UTILITY::UnidirectionalBroadcast(tensor_" - << fNC << ".data()," << ConvertShapeToString(fShapeC) << ", " << ConvertDynamicShapeToString(fShapeY) << ");\n"; - auto length = SOFIE::ConvertDynamicShapeToLength(fShapeY); // output size + out << " float * data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_" + << fNC << ".data()," << ConvertShapeToString(fShapeC) << ", " << ConvertDimShapeToString(fShapeY) << ");\n"; + auto length = SOFIE::ConvertDimShapeToLength(fShapeY); // output size out << SP << SP << "auto hostBuf_"<< fNC2 << " = alpaka::allocBuf(hostAcc, Ext1D::all(Idx{" << length << "}));\n"; out << SP << SP << "std::memcpy(alpaka::getPtrNative(hostBuf_"<< fNC2 <<"), data, "<< length << " * sizeof(float));\n"; out << SP << SP << "alpaka::memcpy(queue, deviceBuf_"<< fNC2 << ", hostBuf_"<< fNC2 << ");\n"; diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_LayerNormalization.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_LayerNormalization.hxx index 17b77b3..e6c4c99 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_LayerNormalization.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_LayerNormalization.hxx @@ -1,5 +1,5 @@ -#ifndef SOFIE_ROPERATOR_LAYERNORMALIZATION -#define SOFIE_ROPERATOR_LAYERNORMALIZATION +#ifndef TMVA_SOFIE_ROPERATOR_LAYERNORMALIZATION +#define TMVA_SOFIE_ROPERATOR_LAYERNORMALIZATION #include "SOFIE/RModel.hxx" #include "SOFIE/SOFIE_common.hxx" @@ -81,7 +81,7 @@ public: throw std::runtime_error("TMVA::SOFIE - Tensor " + fNX + " not found."); } bool isDynamic = model.IsDynamicTensor(fNX); - fShapeX = model.GetDynamicTensorShape(fNX); + fShapeX = model.GetDimTensorShape(fNX); fShapeY = fShapeX; model.AddIntermediateTensor(fNY, model.GetTensorType(fNX), fShapeY); // Type of the output @@ -93,13 +93,13 @@ public: // Shape of fShapeX[0, ..., fAxis) fAxesShape = std::vector(fShapeX.begin(), fShapeX.begin() + fAxis); // Length of the axes - fAxesLength = ConvertDynamicShapeToLength(fAxesShape); + fAxesLength = ConvertDimShapeToLength(fAxesShape); // Shape of fShapeX[fAxis, ..., fSize) fNormalizedShape = std::vector(fShapeX.begin() + fAxis, fShapeX.end()); // Length of the normalized axis - fNormalizedLength = ConvertDynamicShapeToLength(fNormalizedShape); + fNormalizedLength = ConvertDimShapeToLength(fNormalizedShape); // length of the input - fLength = ConvertDynamicShapeToLength(fShapeX); + fLength = ConvertDimShapeToLength(fShapeX); // Type of mean and std ETensorType type = (fAttrStashType == 1) ? ETensorType::FLOAT : model.GetTensorType(fNX); // Mean @@ -146,7 +146,7 @@ public: out << SP << "// Broadcasting the bias of LayerNormalization op\n"; out << SP << "{\n"; out << SP << SP << "float* data = SOFIE::UTILITY::UnidirectionalBroadcast(tensor_"; - out << fNB << ", " << ConvertShapeToString(fShapeB) << ", " << ConvertDynamicShapeToString(fShapeX) << ");\n"; + out << fNB << ", " << ConvertShapeToString(fShapeB) << ", " << ConvertShapeToString(fShapeX) << ");\n"; out << SP << "std::copy(data, data + " << fLength << ", tensor_" << fNBroadcastedB << ");\n"; out << SP << "delete[] data;\n"; out << SP << "}\n"; @@ -339,5 +339,4 @@ public: } // namespace SOFIE - #endif diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx index 66b31e5..02eca17 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx @@ -75,7 +75,7 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() override { + std::string Generate_GPU_Kernel_ALPAKA() { std::string op; op = "\n//------ LEAKY_RELU_KERNEL_ALPAKA\n"; op += SP + "struct LeakyReluKernel {\n"; @@ -89,7 +89,7 @@ public: return op; } - std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) override { return SP + "LeakyReluKernel leakyReluKernel;\n"; } @@ -100,7 +100,7 @@ public: } std::stringstream out; - auto length = ConvertDynamicShapeToLength(fShape); + auto length = ConvertShapeToLength(fShape); out << "\n//------ LEAKY_RELU_GPU_ALPAKA\n"; out << SP << "alpaka::WorkDivMembers workDiv_" << fNX << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx index 8af272d..0930a0b 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx @@ -1,5 +1,5 @@ -#ifndef SOFIE_ROPERATOR_RANGE -#define SOFIE_ROPERATOR_RANGE +#ifndef TMVA_SOFIE_ROPERATOR_RANGE +#define TMVA_SOFIE_ROPERATOR_RANGE #include "SOFIE/SOFIE_common.hxx" #include "SOFIE/ROperator.hxx" @@ -8,7 +8,6 @@ #include #include - namespace SOFIE{ template @@ -89,9 +88,9 @@ public: model.AddDynamicTensor(fNOutput, type, fShape); } if (model.Verbose()) { - std::cout << "Range -> output is " << fNOutput << " "; - if (fIsOutputConstant) std::cout << ConvertDynamicShapeToString(fShape) << std::endl; - else std::cout << ConvertDynamicShapeToString(model.GetDynamicTensorShape(fNOutput)) << std::endl; + std::cout << "Range -> output is " << fNOutput << " : " << ConvertShapeToString(fShape); + if (fIsOutputConstant) std::cout << " : " << ConvertValuesToString(model.GetTensorData(fNOutput)); + std::cout << std::endl; } } @@ -121,5 +120,5 @@ public: }; }//SOFIE - -#endif //SOFIE_ROPERATOR_RANGE + +#endif //TMVA_SOFIE_ROPERATOR_RANGE diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx index 351f7c4..5fb2f04 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Relu.hxx @@ -43,11 +43,11 @@ public: throw std::runtime_error("TMVA SOFIE Relu Op Input Tensor " + fNX + " is not found in model"); } - fShape = model.GetDynamicTensorShape(fNX); + fShape = model.GetDimTensorShape(fNX); model.AddIntermediateTensor(fNY, model.GetTensorType(fNX), fShape); if (model.Verbose()) { - std::cout << "Relu : " << fNX << " -> " << fNY << " " << ConvertDynamicShapeToString(fShape) << std::endl; + std::cout << "Relu : " << fNX << " -> " << fNY << " " << ConvertDimShapeToString(fShape) << std::endl; } } @@ -66,7 +66,7 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() override { + std::string Generate_GPU_Kernel_ALPAKA() { std::string op; op = "\n//------ RELU_KERNEL_ALPAKA\n"; op += SP + "struct ReluKernel{\n"; @@ -79,7 +79,7 @@ public: return op; } - std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) override { return SP + "ReluKernel reluKernel;\n"; } diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx index ddb373e..a014547 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx @@ -1,12 +1,15 @@ -#ifndef SOFIE_ROPERATOR_RESHAPE -#define SOFIE_ROPERATOR_RESHAPE +#ifndef TMVA_SOFIE_ROPERATOR_RESHAPE +#define TMVA_SOFIE_ROPERATOR_RESHAPE #include "SOFIE/SOFIE_common.hxx" #include "SOFIE/ROperator.hxx" #include "SOFIE/RModel.hxx" #include +#include #include +#include + namespace SOFIE{ @@ -19,17 +22,20 @@ class ROperator_Reshape final : public ROperator private: bool fVerbose = false; + bool fDimInput = false; + bool fDynamicShape = false; ReshapeOpMode fOpMode = Reshape; // type of Reshape operator int fAllowZero = 0; // (for Reshape) zero in tensor shape makes output shape equal to input tensor shape int fAxis = 1; // (for Flatten) std::string fNData; // input data tensor name - std::string fNShape; // reshape tensor name + std::string fNInput2; // reshape or axes tensor name depending on operator std::string fNOutput; // output tensor name - std::vector fShapeInput; // input shape data - std::vector fShapeOutput; // output shape data + std::vector fShapeInput; // input shape data + std::vector fShapeOutput; // output shape data std::vector fAttrAxes; // axes attributes (provided for all version of Squeeze/Unsqueeze) + std::vector fShape; // shape tensor values provided for Reshape public: @@ -42,16 +48,16 @@ public: } ROperator_Reshape(){} - ROperator_Reshape(ReshapeOpMode opMode, int attr_value, std::string nameData, std::string nameShape, std::string nameOutput) - : fOpMode(opMode), fNData(UTILITY::Clean_name(nameData)), fNShape(UTILITY::Clean_name(nameShape)), - fNOutput(UTILITY::Clean_name(nameOutput)) + ROperator_Reshape(ReshapeOpMode opMode, int attr_value, std::string nameData, std::string nameInput2, std::string nameOutput) + : fOpMode(opMode), fNData(UTILITY::Clean_name(nameData)), fNInput2(UTILITY::Clean_name(nameInput2)), + fNOutput(UTILITY::Clean_name(nameOutput)) { if (opMode == Reshape) fAllowZero = attr_value; if (opMode == Flatten) fAxis = attr_value; fInputTensorNames = { fNData }; - if(!fNShape.empty()){ - fInputTensorNames.emplace_back(fNShape); + if(!fNInput2.empty()){ + fInputTensorNames.emplace_back(fNInput2); } fOutputTensorNames = { fNOutput }; } @@ -70,94 +76,153 @@ public: auto ret = std::vector(1, input[0]); return ret; } + std::vector> ShapeInference(std::vector> input) override { + return input; + } // output shape - std::vector> ShapeInference(std::vector> input) override { - std::vector> ret; + std::vector> ShapeInference(const std::vector> & input) { + std::vector> ret; auto & input_shape = input[0]; - if (fOpMode == Reshape) { - if (input.size() != 2) throw std::runtime_error("TMVA SOFIE Reshape Op needs 2 input tensors"); - auto output_shape = input[1]; // the provided shape - size_t input_length = ConvertShapeToLength(input_shape); - size_t output_length = ConvertShapeToLength(output_shape); - // (input_length == output_length) is the easy case : (2,3,4) -> (2,12) - if (input_length != output_length) { - if ((output_length == 0 && fAllowZero == 0) || static_cast(output_length) < 0) { - // in this case value 0 or -1 in shape are automatically corrected - bool replacementDone = false; - for (size_t i = 0; i < output_shape.size(); i++) { - if (output_shape[i] == 0 || output_shape[i] == static_cast(-1)) { - if (replacementDone) { - throw std::runtime_error("TMVA Reshape Op : output shape has multiple negative or zero values"); + // correct the provided shape (here we have the value) for 0 or -1 + std::vector output_shape(fShape.size()); + assert(!fShape.empty() && !fDynamicShape); + for (size_t i = 0; i < output_shape.size(); i++) { + if (fShape[i] > 0 || (fAllowZero && fShape[i] >= 0)) + output_shape[i] = Dim{ static_cast(fShape[i]) }; + else if (!fAllowZero && fShape[i] == 0) + output_shape[i] = input_shape[i]; + } + // now case of -1 in shape + for (size_t i = 0; i < output_shape.size(); i++) { + if (fShape[i] == -1) { + auto tmp = output_shape; + tmp.erase(tmp.begin() + i); + auto tmp_length = ConvertDimShapeToLength(tmp); + auto input_length = ConvertDimShapeToLength(input_shape); + if (fVerbose) + std::cout << "reshape- try simplifying " << ConvertDimShapeToString(input_shape) << " with length " + << input_length << " to " << tmp_length << std::endl; + + if (IsInteger(tmp_length) && IsInteger(input_length)) + output_shape[i] = Dim{static_cast(std::stoi(input_length) / std::stoi(tmp_length))}; + else { + //we can try simplifying expression if tmp_length is integer and part of input_length + // contains tmp_length + bool canSimplify = false; + std::vector reduced_input; + if (IsInteger(tmp_length)) { + + // try to tokenize with * the input length + + std::stringstream ss(input_length); + + std::string token; + + // Tokenizing w.r.t. space '*' + while(getline(ss, token, '*')) + { + // remove any whitespace + token.erase(std::remove_if(token.begin(), token.end(), + [](unsigned char x) { return std::isspace(x); }), token.end()); + if (token != tmp_length) { + if (IsInteger(token)) { + size_t il = static_cast(std::stoi(input_length)); + size_t tl = static_cast(std::stoi(tmp_length)); + if ((il % tl) == 0) { + canSimplify = true; + reduced_input.push_back(Dim{il / tl}); + } + } else { + reduced_input.push_back(Dim{token}); + } + } else { + // token is equal to tmp_length, can be not considered and is simplified + canSimplify = true; + } } - auto tmp = output_shape; - tmp.erase(tmp.begin() + i); - auto tmp_length = ConvertShapeToLength(tmp); - output_shape[i] = input_length / tmp_length; - replacementDone = true; } + if (canSimplify) { + // if length contains * we need to add some brackets + std::string res_shape = ConvertDimShapeToLength(reduced_input); + if (res_shape.find('*') != std::string::npos) + output_shape[i] = Dim{std::string("(") + res_shape + ")", static_cast(-1)}; + else + output_shape[i] = Dim{res_shape}; + } + if (!canSimplify) + output_shape[i] = Dim{std::string("(") + input_length + " / (" + tmp_length + "))", static_cast(-1)}; } - if (fVerbose) - std::cout << "Reshape: correct output shape from " << ConvertShapeToString(input[1]) - << " to " << ConvertShapeToString(output_shape) << std::endl; - } - if (ConvertShapeToLength(output_shape) != input_length) { - throw std::runtime_error("TMVA Reshape Op : Invalid shapes : " + ConvertShapeToString(input_shape) + - ConvertShapeToString(output_shape)); + + break; // cannot have more than -1 } + // throw std::runtime_error( + // "TMVA Reshape Op : output shape has multiple negative or zero values"); + } + + if (fVerbose) + std::cout << "Reshape: correct output shape to " << ConvertShapeToString(output_shape) << std::endl; + + if (!fDimInput && ConvertDimShapeToLength(output_shape) != ConvertDimShapeToLength(input_shape)) { + throw std::runtime_error("TMVA Reshape Op : Invalid shapes : " + ConvertShapeToString(input_shape) + + ConvertShapeToString(output_shape)); } ret.push_back(output_shape); } else if (fOpMode == Flatten) { - // flattenig case - size_t inputSize = ConvertShapeToLength(input_shape); - size_t b = input[0][0]; - std::vector newShape = {b, inputSize / b}; + // flatten case + if (fAxis < 0) + fAxis += input_shape.size(); + auto s1 = std::vector(input_shape.begin(), input_shape.begin() + fAxis); + auto s2 = std::vector(input_shape.begin() + fAxis, input_shape.end()); + auto l1 = ConvertDimShapeToLength(s1); + auto l2 = ConvertDimShapeToLength(s2); + std::vector newShape = {Dim{l1}, Dim{l2}}; ret.push_back(newShape); - } else if (fOpMode == Squeeze) { // squeeze // assume no axis is provided - remove all axes with value equal to 1 - auto output_shape = input[0]; - if (input.size() == 1) { + auto output_shape = input_shape; + if (fAttrAxes.empty()) { size_t i = 0; while (i < output_shape.size()) { - if (output_shape[i] == 1 ) { + if (output_shape[i] == Dim{1}) { output_shape.erase(output_shape.begin() + i); - } - else { + } else { i++; } } - } else if (input.size() == 2) { - auto & axes = input[1]; - for (size_t i = 0; i < axes.size(); i++){ - if (output_shape[axes[i]] != 1) - throw std::runtime_error("TMVA Squeeze Op : Invalid axes : " + ConvertShapeToString(axes) + - ConvertShapeToString(output_shape)); + } else { + auto &axes = fAttrAxes; + for (size_t i = 0; i < axes.size(); i++) { + if (axes[i] < 0) + axes[i] += input_shape.size(); + if (!(output_shape[axes[i]] == Dim{1})) + throw std::runtime_error("TMVA Squeeze Op : Invalid axis value " + std::to_string(axes[i]) + + " for " + ConvertShapeToString(output_shape)); output_shape.erase(output_shape.begin() + axes[i]); } } ret.push_back(output_shape); } - else if (fOpMode == Unsqueeze) { // unsqueeze - assert(input.size() == 2); - auto output_shape = input[0]; - auto &axes = input[1]; + std::cout << "doing unsqueeze....\n"; + assert(!fAttrAxes.empty()); + auto output_shape = input_shape; + auto &axes = fAttrAxes; // output rank int64_t r = input[0].size() + axes.size(); - for (auto & a : axes) { + for (auto &a : axes) { int64_t i = static_cast(a); - if ( i < -r || i > r - 1 ) + if (i < -r || i > r - 1) throw std::runtime_error("TMVA Unsqueeze Op - axes input is not in correct range"); if (i >= 0) - output_shape.insert(output_shape.begin() + i, 1); + output_shape.insert(output_shape.begin() + i, Dim{1}); else - //negative axes - output_shape.insert(output_shape.end() + i + 1, 1); + // negative axes + output_shape.insert(output_shape.end() + i + 1, Dim{1}); } ret.push_back(output_shape); } @@ -166,34 +231,51 @@ public: void Initialize(RModel& model) override { + std::cout << "initialize reshape op type " << fOpMode << " - " << fNInput2 << " " << fNData << std::endl; fVerbose = model.Verbose(); if (model.CheckIfTensorAlreadyExist(fNData) == false) { // input must be a graph input, or already initialized intermediate tensor throw std::runtime_error("TMVA Reshape Op Input Tensor " + fNData + " is not found in model"); } - fShapeInput = model.GetTensorShape(fNData); - // check if optional shape tensor exist - if (!fNShape.empty()) { - if (model.CheckIfTensorAlreadyExist(fNShape)) { - auto dptr = model.GetInitializedTensorData(fNShape); - auto input_shape = static_cast(dptr.get()); - auto vec = model.GetTensorShape(fNShape); - assert(vec.size() == 1); - size_t n = vec[0]; // size of shape input tensor - - std::vector descShape(n); - std::copy(input_shape, input_shape + n, descShape.begin()); - fShapeOutput = ShapeInference({fShapeInput, descShape})[0]; - // set flag to not write tensor in weight file. Its data will be hard-coded in way model is constructed - model.SetNotWritableInitializedTensor(fNShape); + fShapeInput = model.GetDimTensorShape(fNData); + fDimInput = model.IsDynamicTensor(fNData); + // check if optional tensor exists defining shape or axes + if (!fNInput2.empty()) { + if (model.CheckIfTensorAlreadyExist(fNInput2)) { + if (model.IsConstantTensor(fNInput2) || model.IsInitializedTensor(fNInput2)) { + // assume input shape is an initialized tensor + auto dptr = model.GetInitializedTensorData(fNInput2); + auto values = static_cast(dptr.get()); + auto vec = model.GetTensorShape(fNInput2); + size_t n = 1; + if (vec.size() > 0) + n = vec[0]; // size of shape input tensor + // copy values in fShape vector or fAttrAxes + if (fOpMode == Reshape) + fShape = std::vector(values, values + n); + else + fAttrAxes = std::vector(values, values + n); + + fShapeOutput = ShapeInference({fShapeInput})[0]; + // set flag to not write tensor in weight file. Its data will be hard-coded in way model is constructed + model.SetNotWritableInitializedTensor(fNInput2); + } else { + // we cannot get shape at initialization time but at run-time + fDynamicShape = true; + // size of shape output us given by size of shape input tensor + auto shapeInput2 = model.GetTensorShape(fNInput2); + fShapeOutput.resize(shapeInput2[0]); + for (size_t i = 0; i < fShapeOutput.size(); i++) { + fShapeOutput[i] = Dim{ std::string("s_") + fNOutput + "_" + std::to_string(i)}; + } + } } else { - throw std::runtime_error("TMVA Reshape Op Shape Tensor " + fNShape + " is not found in model"); + throw std::runtime_error("TMVA Reshape Op 2nd input Tensor " + fNInput2 + " is not found in model"); } } else if (!fAttrAxes.empty()) { - // case fNShape is empty and axes are provided as attributes - std::vector descShape(fAttrAxes.size()); - std::copy(fAttrAxes.begin(), fAttrAxes.end(), descShape.begin()); - fShapeOutput = ShapeInference({fShapeInput, descShape})[0]; + // case fNShape is empty and axes are provided as attributes (e.g. for Unsqueeze) + std::cout << "attribute axes exists\n"; + fShapeOutput = ShapeInference({fShapeInput})[0]; } else if (fOpMode == Flatten || fOpMode == Squeeze) { fShapeOutput = ShapeInference({fShapeInput})[0]; } else { @@ -203,14 +285,26 @@ public: if (model.IsInitializedTensor(fNData) && model.GetTensorType(fNData) == ETensorType::INT64) { fIsOutputConstant = true; auto inputData = static_cast(model.GetInitializedTensorData(fNData).get()); - if (ConvertShapeToLength(fShapeInput) != ConvertShapeToLength(fShapeOutput)) + auto o_shape = ConvertShapeToInt(fShapeOutput); + if (ConvertShapeToLength(ConvertShapeToInt(fShapeInput)) != ConvertShapeToLength(o_shape) ) throw std::runtime_error("TMVA Reshape Op : Invalid Input/Output lengths"); - model.AddConstantTensor(fNOutput, fShapeOutput, inputData); + model.AddConstantTensor(fNOutput, o_shape, inputData); if (model.Verbose()) { std::cout << Name() << " : " << fNData << " " << ConvertShapeToString(fShapeInput) << " --> " << fNOutput << " (constant) " << ConvertShapeToString(fShapeOutput) << " : " << - ConvertValuesToString(ConvertShapeToLength(fShapeOutput), inputData) << std::endl; + ConvertValuesToString(ConvertShapeToLength(o_shape), inputData) << std::endl; } - } else { + } + // for shape tensors we can have it if output shape is size==1 or a scalar + else if (model.IsShapeTensor(fNData) && fShapeOutput.size() <=1) { + fIsOutputConstant = true; + auto inputData = model.GetShapeTensorValues(fNData); + model.AddShapeTensor(fNOutput, inputData); + if (model.Verbose()) { + std::cout << Name() << " : " << fNData << " " << ConvertShapeToString(fShapeInput) << " --> " << fNOutput << " (shape) " << ConvertShapeToString(fShapeOutput) << " : " << + ConvertShapeToString(inputData) << std::endl; + } + } + else { // non-constant case model.AddIntermediateTensor(fNOutput, model.GetTensorType(fNData), fShapeOutput); if (model.Verbose()) @@ -218,54 +312,67 @@ public: } } - std::string Generate(std::string OpName) override { + std::string Generate(std::string opName) override { if (fIsOutputConstant) return ""; //no op for constant tensors - OpName = "op_" + OpName; - - // output of reshape is same as input - size_t length = ConvertShapeToLength(fShapeOutput); - if (length != ConvertShapeToLength(fShapeInput)) { - throw std::runtime_error("TMVA SOFIE Reshape Op : wrong output shape - is " + - ConvertShapeToString(fShapeOutput) + " and input is " + - ConvertShapeToString(fShapeInput)); - } std::stringstream out; - std::string opName = "Reshape"; + std::string opType = "Reshape"; if (fOpMode == Flatten) - opName = "Flatten"; + opType = "Flatten"; else if (fOpMode == Squeeze) - opName = "Squeeze"; + opType = "Squeeze"; else if (fOpMode == Unsqueeze) - opName = "Unsquueze"; + opType = "Unsquueze"; + + out << SP << "///--------" << opType << " operator " << opName << " --> " << ConvertShapeToString(fShapeOutput) << "\n"; + + // in case of dynamic output shape we need to set the shape value from input shape tensor + // and take case of the zero values + if (fDynamicShape) { + for (size_t i = 0; i < fShapeOutput.size(); i++) { + // since fNInput2 values are int64_t, should we check if they are negative? + out << SP << "size_t " << fShapeOutput[i].param << " = " << "tensor_" << fNInput2 << "[" << i << "];\n"; + if (!fAllowZero) + out << SP << "if (tensor_" << fNInput2 << "[" << i << "] <= 0 ) " + << fShapeOutput[i].param << " = " << fShapeInput[i] << ";\n"; + } + } - out << SP << "///--------" << opName << " operator\n" << std::endl; - out << SP << "std::copy( tensor_" << fNData << ", tensor_" << fNData << " + " << length << ", " << "tensor_" << fNOutput + // output of reshape is same as input + auto lengthOut = ConvertDimShapeToLength(fShapeOutput); + auto lengthIn = ConvertDimShapeToLength(fShapeInput); + if (lengthOut != lengthIn) { + // check needs to be done at run-time + out << SP << "if (" << lengthOut << "!=" << lengthIn << ")\n"; + out << "throw std::runtime_error(\"TMVA SOFIE Reshape Op : output lengths is different than input one\");\n"; + } + + + out << SP << "std::copy( tensor_" << fNData << ", tensor_" << fNData << " + " << lengthIn << ", " << "tensor_" << fNOutput << ");\n"; return out.str(); } - std::string Generate_GPU_ALPAKA(std::string opName) override { if (fIsOutputConstant) return ""; //no op for constant tensors - OpName = "op_" + OpName; + opName = "op_" + opName; // output of reshape is same as input - size_t length = ConvertShapeToLength(fShapeOutput); - if (length != ConvertShapeToLength(fShapeInput)) { + auto length = ConvertDimShapeToLength(fShapeOutput); + if (length != ConvertDimShapeToLength(fShapeInput)) { throw std::runtime_error("TMVA SOFIE Reshape Op : wrong output shape - is " + - ConvertShapeToString(fShapeOutput) + " and input is " + - ConvertShapeToString(fShapeInput)); + ConvertDimShapeToString(fShapeOutput) + " and input is " + + ConvertDimShapeToString(fShapeInput)); } std::stringstream out; - std::string opName = "Reshape"; + opName += "_Reshape"; if (fOpMode == Flatten) - opName = "Flatten"; + opName += "_Flatten"; else if (fOpMode == Squeeze) - opName = "Squeeze"; + opName += "_Squeeze"; else if (fOpMode == Unsqueeze) - opName = "Unsquueze"; + opName += "_Unsquueze"; out << SP << "///-------" << opName << " operator\n" << std::endl; diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx index 783e391..f2e2e25 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx @@ -61,7 +61,7 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() override { + std::string Generate_GPU_Kernel_ALPAKA() { std::string op; op = "\n//------ SIGMOID_KERNEL_ALPAKA\n"; op += SP + "struct SigmoidKernel {\n"; @@ -76,7 +76,7 @@ public: } - std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) override { return SP + "SigmoidKernel sigmoidKernel;\n"; } @@ -87,7 +87,7 @@ public: } std::stringstream out; - auto length = ConvertDynamicShapeToLength(fShape); + auto length = ConvertShapeToLength(fShape); out << "\n//------ SIGMOID_GPU_ALPAKA\n"; out << SP << "alpaka::WorkDivMembers workDiv_" << fNX << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx index 0fd8f5a..6335db3 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Split.hxx @@ -153,7 +153,7 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() override { + std::string Generate_GPU_Kernel_ALPAKA(std::string /*opName*/) { std::string op; op = "\n//------ SPLIT_KERNEL_ALPAKA\n"; op += SP + "struct SplitKernel {\n"; @@ -178,24 +178,24 @@ public: return op; } - std::string Generate_GPU_Kernel_Definitions_ALPAKA() override { + std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) override { return SP + "SplitKernel splitKernel;\n"; } std::string Generate_GPU_ALPAKA(std::string OpName) override { OpName = "op_" + OpName; - if (fShape.empty()) { + if (fOutputShapes.empty()){ throw std::runtime_error("TMVA SOFIE Operator Split called to Generate without being initialized first"); } std::stringstream out; out << "\n//------ SPLIT_GPU_ALPAKA\n"; - bool axis_is_innermost = (axis == static_cast(fInputShape.size()) - 1) - && (UTILITY::ComputeStridesFromShape(fInputShape)[fInputShape.size()-1] == 1); + bool axis_is_innermost = (fAxis == static_cast(fInputShape.size()) - 1) + && (UTILITY::ComputeStrideFromShape(fInputShape)[fInputShape.size()-1] == 1); out << SP <<"size_t "<(queue, workDiv_" << fNYs[i] << ", splitKernel, alpaka::getPtrNative(deviceBuf_" << fNX - << "), alpaka::getPtrNative(deviceBuf_" << fNY - << "), "<< UTILITY::ConvertShapeToString(UTILITY::ComputeStrideFromShape(fInputShape)) <<", "< workDiv_" << fNY << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " @@ -185,8 +185,8 @@ public: out << SP << "alpaka::exec(queue, workDiv_" << fNY << ", tileKernel, alpaka::getPtrNative(deviceBuf_" << fNInput << "), alpaka::getPtrNative(deviceBuf_" << fNY - << "), "<< UTILITY::ConvertShapeToString(fShapeInput)<<", "<< UTILITY::ConvertShapeToString(UTILITY::ComputeStrideFromShape(fShapeInput)) <<", " - < - namespace SOFIE{ @@ -150,32 +149,86 @@ public: fShapeY = fShapeA; } // check case of constant output (if all inputs are defined) - if (model.IsInitializedTensor(fNA) && model.IsInitializedTensor(fNB) && model.IsInitializedTensor(fNC)) { - std::string nameA = fNBroadcastedA.empty()? fNA : fNBroadcastedA; - std::string nameB = fNBroadcastedB.empty()? fNB : fNBroadcastedB; + if (model.IsInitializedTensor(fNC)) { + std::string nameC = fNBroadcastedC.empty()? fNC : fNBroadcastedC; - auto dataA = static_cast(model.GetInitializedTensorData(nameA).get()); - auto dataB = static_cast(model.GetInitializedTensorData(nameB).get()); auto dataC = static_cast(model.GetInitializedTensorData(nameC).get()); - std::vector dataY(ConvertShapeToLength(fShapeY)); - for (size_t i = 0; i < dataY.size(); i++) - dataY[i] = (dataC[i]) ? dataA[i] : dataB[i]; - model.AddConstantTensor(fNY, fShapeY, dataY.data()); - // flag tensors to not be written in a file - model.SetNotWritableInitializedTensor(nameA); - model.SetNotWritableInitializedTensor(nameB); model.SetNotWritableInitializedTensor(nameC); + T * dataA = nullptr; + T * dataB = nullptr; + std::vector shapeDataA; + std::vector shapeDataB; + if (model.IsInitializedTensor(fNA)) { + std::string nameA = fNBroadcastedA.empty()? fNA : fNBroadcastedA; + dataA = static_cast(model.GetInitializedTensorData(nameA).get()); + // flag tensors to not be written in a file + model.SetNotWritableInitializedTensor(nameA); + } else if (model.IsShapeTensor(fNA)) + shapeDataA = model.GetShapeTensorValues(fNA); + if (model.IsInitializedTensor(fNB)) { + std::string nameB = fNBroadcastedB.empty()? fNB : fNBroadcastedB; + dataB = static_cast(model.GetInitializedTensorData(nameB).get()); + model.SetNotWritableInitializedTensor(nameB); + } else if (model.IsShapeTensor(fNB)) + shapeDataB = model.GetShapeTensorValues(fNB); - fIsOutputConstant = true; - if (model.Verbose()) + std::vector dataY; + std::vector shapeDataY; + + bool isOutputConstantTensor = true; + if (dataA && dataB) { + dataY.resize(ConvertShapeToLength(fShapeY)); + for (size_t i = 0; i < dataY.size(); i++) + dataY[i] = (dataC[i]) ? dataA[i] : dataB[i]; + } + else if (dataA && shapeDataB.size()>0 ) { + shapeDataY.resize(ConvertShapeToLength(fShapeY)); + for (size_t i = 0; i < shapeDataY.size(); i++) { + shapeDataY[i] = (dataC[i]) ? Dim{size_t(dataA[i])} : shapeDataB[i]; + isOutputConstantTensor &= !shapeDataY[i].isParam; + } + } + else if (dataB && shapeDataA.size()>0 ) { + shapeDataY.resize(ConvertShapeToLength(fShapeY)); + for (size_t i = 0; i < shapeDataY.size(); i++) { + shapeDataY[i] = (dataC[i]) ? shapeDataB[i] : Dim{size_t(dataB[i])}; + isOutputConstantTensor &= !shapeDataY[i].isParam; + } + } + else if (shapeDataB.size() > 0 && shapeDataA.size()>0 ) { + shapeDataY.resize(ConvertShapeToLength(fShapeY)); + for (size_t i = 0; i < shapeDataY.size(); i++) { + shapeDataY[i] = (dataC[i]) ? shapeDataA[i] : shapeDataB[i]; + isOutputConstantTensor &= !shapeDataY[i].isParam; + } + } + fIsOutputConstant = true; // this contains both case constant tensor output ans shape tensor output + if (isOutputConstantTensor && dataY.empty()) { + dataY.resize(shapeDataY.size()); + for (size_t i = 0; i < shapeDataY.size(); i++) + dataY[i] = static_cast(shapeDataY[i].dim); + } + if (dataY.size() > 0) + model.AddConstantTensor(fNY, fShapeY, dataY.data()); + else if (shapeDataY.size() > 0 ) + model.AddShapeTensor(fNY, shapeDataY, fShapeY.size() == 0); + else { + fIsOutputConstant = false; + } + if (fIsOutputConstant && model.Verbose()) std::cout << "Where op ---> " << fNY << " " << ConvertShapeToString(fShapeY) << " : " - << ConvertValuesToString(dataY) << std::endl; - + << ((dataY.size() > 0) ? ConvertValuesToString(dataY) : ConvertShapeToString(shapeDataY) ) + << ((dataY.size() > 0) ? " (constant)" : " (shape)") << std::endl; + // output is a constant tensor - fOutputTensorNames.pop_back(); + if (fIsOutputConstant) fOutputTensorNames.pop_back(); } - else { + if (!fIsOutputConstant) { model.AddIntermediateTensor(fNY, model.GetTensorType(fNA), fShapeY); + if (model.Verbose()) + std::cout << "Where op " << " condition : " << fNC << " " << ConvertShapeToString(fShapeC) << + " X " << fNA << " " << ConvertShapeToString(fShapeA) << " Y " << fNB << " " << ConvertShapeToString(fShapeB) + << " ---> " << fNY << " " << ConvertShapeToString(fShapeY) << std::endl; } } @@ -184,17 +237,17 @@ public: return out.str(); } - std::string Generate(std::string OpName) override { + std::string Generate(std::string opName) override { if (fIsOutputConstant) return ""; - OpName = "op_" + OpName; + opName = "op_" + opName; if (fShapeY.empty()) { throw std::runtime_error("TMVA SOFIE Where Op called to Generate without being initialized first"); } std::stringstream out; - out << SP << "\n//-------- Where \n"; + out << SP << "\n//-------- Where " << opName << " --> " << ConvertShapeToString(fShapeY) << "\n"; size_t length = ConvertShapeToLength(fShapeY); std::string typeName = TensorType::Name(); // Broadcast A if it's uninitialized @@ -216,19 +269,18 @@ public: // special case if C is an input tensor if (fIsInputBoolTensor) { size_t inputLength = ConvertShapeToLength(fShapeC); - out << SP << "std::vector fTensor_" << fNC << "(tensor_" << fNC << ", tensor_" << fNC << " + " << inputLength << ");\n"; + out << SP << "std::vector fTensor_" << fNC << "(tensor_" << fNC << ", tensor_" << fNC << " + " << inputLength << ");\n"; } out << SP << "// Broadcasting uninitialized tensor " << fNC << "\n"; //out << SP << "{\n"; - // for boolean we need to pass vector and use the non-template version of the function - out << SP << "SOFIE::UTILITY::UnidirectionalBroadcast(fTensor_" << fNC << ", " << ConvertShapeToString(fShapeC) << ", " << ConvertShapeToString(fShapeY) + out << SP << "SOFIE::UTILITY::UnidirectionalBroadcast(fTensor_" << fNC << ".data(), " << ConvertShapeToString(fShapeC) << ", " << ConvertShapeToString(fShapeY) << ", fTensor_" << fNBroadcastedC << ");\n"; } std::string nameA = fNBroadcastedA.empty()? fNA : fNBroadcastedA; std::string nameB = fNBroadcastedB.empty()? fNB : fNBroadcastedB; std::string nameC = fNBroadcastedC.empty()? fNC : fNBroadcastedC; out << SP << "for (size_t id = 0; id < " << length << " ; id++){\n"; - // get output tensor applying condition (note we need to use directly the vector since v.data(), i.e the data pointer, does not exist) + // get output tensor applying condition out << SP << SP << "tensor_" << fNY << "[id] = " << "(fTensor_" << nameC << "[id]) ? tensor_" << nameA << "[id] : tensor_" + nameB + "[id];\n"; out << SP << "}\n"; @@ -240,4 +292,4 @@ public: }//SOFIE -#endif //SOFIE_ROperator_Where +#endif //TMVA_SOFIE_ROperator_Where diff --git a/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx b/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx index f25b66b..17ac714 100644 --- a/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx +++ b/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx @@ -1,5 +1,5 @@ -#ifndef SOFIE_SOFIE_COMMON -#define SOFIE_SOFIE_COMMON +#ifndef TMVA_SOFIE_SOFIE_COMMON +#define TMVA_SOFIE_SOFIE_COMMON #include "TMVA/RTensor.hxx" @@ -21,13 +21,10 @@ #include #include - -namespace SOFIE{ - -//typedef RTensor tensor_t; +namespace SOFIE { enum class ETensorType{ - UNDEFINED = 0, FLOAT = 1, UNINT8 = 2, INT8 = 3, UINT16 = 4, INT16 = 5, INT32 = 6, INT64 = 7, STRING = 8, BOOL = 9, //order sensitive + UNDEFINED = 0, FLOAT = 1, UINT8 = 2, INT8 = 3, UINT16 = 4, INT16 = 5, INT32 = 6, INT64 = 7, STRING = 8, BOOL = 9, //order sensitive FLOAT16 = 10, DOUBLE = 11, UINT32 = 12, UINT64 = 13, COMPLEX64 = 14, COMPLEX28 = 15, BFLOAT16 = 16 }; @@ -39,7 +36,7 @@ constexpr size_t GetTypeSize(ETensorType type) { switch (type) { case ETensorType::FLOAT: return sizeof(float); case ETensorType::DOUBLE: return sizeof(double); - case ETensorType::UNINT8: return sizeof(uint8_t); + case ETensorType::UINT8: return sizeof(uint8_t); case ETensorType::INT8: return sizeof(int8_t); case ETensorType::UINT16: return sizeof(uint16_t); case ETensorType::INT16: return sizeof(int16_t); @@ -58,6 +55,9 @@ typedef std::int64_t int_t; std::string ConvertTypeToString(ETensorType type); ETensorType ConvertStringToType(std::string type); +// find if a string represents a number +bool IsInteger(const std::string & s); + struct Dim{ bool isParam = false; size_t dim = 0; @@ -67,16 +67,42 @@ struct Dim{ Dim() {} // constructor for a parametric dimension with the option to pass a default dim value - Dim(const std::string & p, size_t d = 0) : isParam(true), dim(d), param(p) {} + // We use -1 for dim to indicate that the param dimension is an expression (e.g. "d1+d2") + // in case the string represents a number make Dim not parametric + Dim(const std::string & p, size_t d = 0) : isParam(true), dim(d), param(p) + { + if (IsInteger(p)) { + isParam = false; + dim = std::stoi(p); + } + } // constructor for a non-parametric dimension Dim(size_t d) : dim(d) {} std::string GetVal() const { - return (isParam) ? param : std::to_string(dim); + // cast to int64_t for negative shape values + return (isParam) ? param : std::to_string(static_cast(dim)); + } + + std::ostream& operator<< (std::ostream& os) const { + os << GetVal(); + return os; + } + + bool operator==(const Dim& rhs) const { + return (isParam && rhs.isParam) ? param == rhs.param : dim == rhs.dim; + } + bool operator!=(const Dim& rhs) const { + return !(*this == rhs); } }; +//bool operator==(const Dim& lhs, const Dim& rhs); +inline std::ostream & operator<< (std::ostream &os, const Dim &d) { + os << d.GetVal(); + return os; +} struct InputTensorInfo{ ETensorType type; @@ -93,6 +119,18 @@ struct DynamicTensorInfo{ std::vector shape; }; +// template traits for Tensor Shape +template +struct TensorShape {}; +template<> +struct TensorShape { + static bool IsDim() { return true; } +}; +template<> +struct TensorShape { + static bool IsDim() { return false; } +}; + // template traits for Tensor type template struct TensorType {}; @@ -120,6 +158,10 @@ template<> struct TensorType { static const std::string Name() { return "uint64_t"; } }; +template<> +struct TensorType { + static const std::string Name() { return "bool"; } +}; struct TensorMemoryInfo { std::string_view tensor_name; @@ -148,19 +190,21 @@ struct MemoryPoolInfo { std::map available_stack; }; -std::vector ConvertShapeToDim(std::vector shape); +std::vector ConvertShapeToDim(const std::vector & shape); + +std::vector ConvertShapeToInt(const std::vector & shape); -std::vector ConvertShapeToInt(std::vector shape); +std::size_t ConvertShapeToLength(const std::vector & shape); -std::size_t ConvertShapeToLength(std::vector shape); +std::string ConvertShapeToString(const std::vector & shape); +std::string ConvertDimShapeToString(const std::vector & shape); +std::string ConvertShapeToString(const std::vector & shape); -std::string ConvertShapeToString(std::vector shape); -std::string ConvertDynamicShapeToString(std::vector shape); -// std::string ConvertShapeToString(std::vector shape) { -// return ConvertDynamicShapeToString(shape); -// } -std::string ConvertDynamicShapeToLength(std::vector shape); + +std::string ConvertDimShapeToLength(const std::vector & shape); +std::string ConvertDynamicShapeToLength(const std::vector & shape); + template std::string ConvertValToString(T value) { @@ -271,7 +315,7 @@ private: template ETensorType GetTemplatedType(T /*obj*/ ){ if (std::is_same::value) return ETensorType::FLOAT; - if (std::is_same::value) return ETensorType::UNINT8; + if (std::is_same::value) return ETensorType::UINT8; if (std::is_same::value) return ETensorType::INT8; if (std::is_same::value) return ETensorType::UINT16; if (std::is_same::value) return ETensorType::INT16; @@ -287,6 +331,12 @@ ETensorType GetTemplatedType(T /*obj*/ ){ } namespace UTILITY{ + + + +// clean operator and tensor names +std::string Clean_name(std::string input_tensor_name); + // Check if two shapes are equal bool AreSameShape(const std::vector&, const std::vector&); bool AreSameShape(const std::vector&, const std::vector&); @@ -296,10 +346,14 @@ bool AreSameShape(const std::vector&, const std::vector&); // Multidirectional broadcast a list of tensors to the same shape std::vector MultidirectionalBroadcastShape(std::vector>); -// Unidirectional broadcast two shapes to the same shape -std::vector UnidirectionalBroadcastShape(std::vector, std::vector); +// Multidirectional broadcast two shapes to the same shape + +std::pair> MultidirectionalBroadcastShape(std::vector &, std::vector &); +std::vector UnidirectionalBroadcastShape(std::vector &, std::vector &); + +std::pair> MultidirectionalBroadcastShape(std::vector &, std::vector &); + -std::string Clean_name(std::string input_tensor_name); template T* BroadcastConvBias(const T* data, const size_t channel, const std::vector& targetShape) { @@ -352,7 +406,7 @@ void BroadcastTensor(ConstContT data, const std::vector& shape, const st size_t targetLength = broadcastedData.size(); assert(ConvertShapeToLength(targetShape) == targetLength); // special case when broadcasting last dimensions (initial shapes must be the same) - if (shape.front() == targetShape.front() && shape.back() == 1 && size > 1) { + if (size > 1 && shape.front() == targetShape.front() && shape.back() == 1) { size_t bsize = targetShape.back(); // compute the size of the data to broadcast for (int k = int(size)-2; k >=0; k--) { @@ -419,6 +473,7 @@ T* CreateBroadcastTensor(const T* data, const std::vector& shape, const BroadcastTensor, std::span>(inData, shape, targetShape, bData); return broadcastedData; } + // Unidirectional broadcasting shape to targetShape// In unidirectional broadcast - only tensor B can have the shape changed not // tensor A - otherwise is a multidirectional broadcast template @@ -449,8 +504,6 @@ void UnidirectionalBroadcast(const T* data, const std::vector& shape, co } BroadcastTensor>(inData, shape, targetShape, broadcastedData); } -// specialization for vector of boolean -void UnidirectionalBroadcast(const std::vector & data, const std::vector& shape, const std::vector& targetShape, std::vector & broadcastedData); /// compute stride of a tensor given its shape (assume layout is row-major) std::vector ComputeStrideFromShape(const std::vector & shape); @@ -619,7 +672,15 @@ void col2im(const Dtype* data_col, const int channels, //std::cout << "finishing col2imp" << std::endl; } - +// Used at the end of infer() to fill the return object. +template +void FillOutput(T const *arr, std::vector &out, std::size_t n) +{ + out.resize(n); + for (std::size_t i = 0; i < n; ++i) { + out[i] = arr[i]; + } +} } // end namespace UTILITY @@ -704,11 +765,45 @@ inline GNN_Data Copy(const GNN_Data & data) { return out; } -// Function to generate the code for declaring and initializing constant tensors -// This is for tensors which are not part of weight files and can be created from the Constant operator -template -std::string GenerateConstantTensorCode(const std::pair &t); +inline void Gemm_Call(float *output, bool transa, bool transb, int m, int n, int k, float alpha, const float *A, + const float *B, float beta, const float *C) +{ + char ct = 't'; + char cn = 'n'; + const int *lda = transa ? &k : &m; + const int *ldb = transb ? &n : &k; + const int *ldc = &m; + if (C != nullptr) { + std::copy(C, C + m * n, output); + } + SOFIE::BLAS::sgemm_(transa ? &ct : &cn, transb ? &ct : &cn, &m, &n, &k, &alpha, A, lda, B, ldb, + &beta, output, ldc); +} + +template +void ReadTensorFromStream(std::istream &is, T &target, std::string const &expectedName, std::size_t expectedLength) +{ + std::string name; + std::size_t length; + is >> name >> length; + if (name != expectedName) { + std::string err_msg = + "TMVA-SOFIE failed to read the correct tensor name; expected name is " + expectedName + " , read " + name; + throw std::runtime_error(err_msg); + } + if (length != expectedLength) { + std::string err_msg = "TMVA-SOFIE failed to read the correct tensor size; expected size is " + + std::to_string(expectedLength) + " , read " + std::to_string(length); + throw std::runtime_error(err_msg); + } + for (size_t i = 0; i < length; ++i) { + is >> target[i]; + } + if (is.fail()) { + throw std::runtime_error("TMVA-SOFIE failed to read the values for tensor " + expectedName); + } +} -}//SOFIE +} // namespace SOFIE -#endif //TMVA_SOFIE_RMODEL +#endif //TMVA_SOFIE_COMMON diff --git a/src/SOFIE_core/src/RModel.cxx b/src/SOFIE_core/src/RModel.cxx index 61357e8..8b87749 100644 --- a/src/SOFIE_core/src/RModel.cxx +++ b/src/SOFIE_core/src/RModel.cxx @@ -71,7 +71,19 @@ const std::vector& RModel::GetTensorShape(std::string name) const { throw std::runtime_error("TMVA SOFIE tensor [" + name + "] for which the shape is requested is not found"); } -std::vector RModel::GetDynamicTensorShape(std::string name) const { +std::vector RModel::GetDimTensorShape(const std::string & name) const { + if (auto f = fDynamicTensorInfos.find(name); f != fDynamicTensorInfos.end()) { + return f->second.shape; + } + if (auto f = fInputTensorInfos.find(name); f != fInputTensorInfos.end()) { + return f->second.shape; + } + // in case is not a dynamic tensor convert normal shape to Dim one + // for this we need to return the vector by value + return ConvertShapeToDim(GetTensorShape(name)); +} + +std::vector RModel::GetDynamicTensorShape(const std::string & name) const { if (auto f = fDynamicTensorInfos.find(name); f != fDynamicTensorInfos.end()) { return f->second.shape; } @@ -191,6 +203,23 @@ void RModel::AddConstantTensor(std::string tensor_name, ETensorType type, std::v fInitializedTensors[tensor_name] = new_tensor; } +void RModel::AddShapeTensor(const std::string & name, const std::vector & shape_values, bool scalar){ + auto tensor_name = UTILITY::Clean_name(name); + if (fShapeTensors.count(tensor_name) != 0) { + throw std::runtime_error("TMVA-SOFIE: shape tensor with name " + tensor_name + " already exists \n"); + } + fShapeTensors[tensor_name] = std::make_pair(shape_values, scalar); +} + +bool RModel::IsShapeTensor(const std::string & tensor_name) const { + return fShapeTensors.count(tensor_name) != 0; +} + +const std::vector & RModel::GetShapeTensorValues(const std::string & tensor_name) const { + //if (!IsShapeTensor(tensor_name) ) return std::vector{}; + return fShapeTensors.at(tensor_name).first; +} + bool RModel::IsInitializedTensor(const std::string& tensorName) const { std::string name = UTILITY::Clean_name(tensorName); return fInitializedTensors.find(name) != fInitializedTensors.end(); @@ -294,7 +323,7 @@ void RModel::SetNotWritableInitializedTensor(const std::string & tensor_name) { t->second.SetNotWritable(); } -std::string RModel:: AllocateIntermediateMemory(std::span op_output_tensors) { +std::string RModel:: AllocateIntermediateMemory(std::span op_output_tensors) { std::string memory_allocation_string = ""; bool allocated; @@ -349,7 +378,7 @@ std::string RModel:: AllocateIntermediateMemory(std::span op_input_tensors, const size_t& op_idx){ +void RModel::CheckAndFlushIntermediateMemory(std::span op_input_tensors, const size_t& op_idx){ for (auto &it : op_input_tensors){ // last occurence of the tensor is reached => flush it from memory if (fIntermediateTensorFrequencyLookup[it] == op_idx) { @@ -386,55 +415,6 @@ void RModel::CheckAndFlushIntermediateMemory(std::span o } } -void RModel::CheckAndFuseOperators() { - size_t idx = 0; - std::vector fusable_indices; - std::string fusable_propagate_tensor_name; - while (idx < fOperators.size()) { - if (fOperators[idx]->GetKind() != OperatorKind::GEMM && fOperators[idx]->GetKind() != OperatorKind::CONV) { - ++idx; - continue; - } - - fusable_indices.clear(); - fusable_propagate_tensor_name.clear(); - - fusable_indices.push_back(idx); - size_t j = idx + 1; - for (; j < fOperators.size()-1; ++j) { - auto opKind = fOperators[j]->GetKind(); - // Only consider operators with fusable kinds - if (!FusableKinds.count(opKind)) { - break; - } - - const auto& tensorName = fOperators[j]->GetFusableOutputTensorName(); - auto freqIt = fIntermediateTensorFrequencyLookup.find(tensorName); - - // Propagate tensor name only if it's not used multiple times - fusable_indices.push_back(j); - if (freqIt != fIntermediateTensorFrequencyLookup.end() && - (freqIt->second != fOperators[j + 1]->GetOpOrder() || - FusableKinds.count(fOperators[j + 1]->GetKind()) == 0)) { - fusable_propagate_tensor_name = tensorName; - break; - } - } - if (!fusable_propagate_tensor_name.empty()) { - auto fusable_tensor_type = GetTensorType(fusable_propagate_tensor_name); - auto fusable_tensor_shape = GetDynamicTensorShape(fusable_propagate_tensor_name); - for (auto& index : fusable_indices) { - fOperators[index]->UpdateFusableTensorName(fusable_propagate_tensor_name, [this](const std::string& name) { - this->RemoveIntermediateTensor(name); - }); - } - AddIntermediateTensor(fusable_propagate_tensor_name, fusable_tensor_type, fusable_tensor_shape); - } - - idx = std::max(idx + 1, j); - } -} - void RModel::Initialize(int batchSize, bool verbose) { @@ -484,7 +464,7 @@ void RModel::Initialize(const std::map & inputParams, bool auto shape = ConvertShapeToInt(input.second.shape); if (verbose) std::cout << "converting input shape for " << input.first << " " << ConvertShapeToString(shape) << " from " - << ConvertDynamicShapeToString(input.second.shape) << std::endl; + << ConvertDimShapeToString(input.second.shape) << std::endl; if (!shape.empty()) { // case shape is defined (not parametric) we add the tensor in the fReadyInputTensorInfos map and // we remove the tensor from the fInputTensorInfo where th eold parametric shape was stored @@ -543,7 +523,7 @@ void RModel::Initialize(const std::map & inputParams, bool } i++; } - CheckAndFuseOperators(); + fIsInitialized = true; } @@ -576,6 +556,43 @@ void RModel::InitializeSubGraph(std::shared_ptr graph) { } +// Function to generate the code for declaring and initializing constant tensors +// This is for tensors which are not part of weight files and can be created from the Constant operator +template +std::string GenerateConstantTensorCode(const std::pair &t) +{ + std::stringstream strs; + std::string type = ConvertTypeToString(t.second.type()); + size_t length = ConvertShapeToLength(t.second.shape()); + // avoid using stack sizes for constant tensors to reduce compilation time + bool allocateOnStack = (length > 100) ? false : true; + + const T *data = t.second.data(); + + // and check if all values are the same + bool sameData = false; + // for non stack allocation check if data are the same + if (!allocateOnStack && length > 1) { + size_t idx = 1; + do { + sameData = (data[idx] == data[idx - 1]); + idx++; + } while (sameData && idx < length); + } + if (allocateOnStack) { + strs << type << " tensor_" << t.first << "[" << length << "] = " << ConvertValuesToString(length, data) << ";\n"; + } else { + strs << "std::vector<" << type << "> fTensor_" << t.first << " = "; + if (sameData) + strs << "std::vector<" << type << ">(" << length << ", " << ConvertValToString(data[0]) << ");\n"; + else { + strs << ConvertValuesToString(length, data) << ";\n"; + } + strs << "const " << type << " * tensor_" + t.first + " = fTensor_" + t.first + ".data();\n"; + } + return strs.str(); +} + void RModel::GenerateInitializedTensorInfo() { if (!fInitializedTensors.empty()) @@ -614,10 +631,10 @@ void RModel::GenerateIntermediateTensorInfo() { for (auto &i : fIntermediateTensorInfos) { if (i.second.type == ETensorType::BOOL) { - tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(ConvertShapeToLength(i.second.shape)) + ");\n"; - // No pointer allocation needed for BOOL + tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(ConvertShapeToLength(i.second.shape)) + ");\n"; + tensor_declaration_block += "std::uint8_t * tensor_" + i.first + " = fTensor_" + i.first + ".data();\n"; } - if (std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == fOutputTensorNames.end()) { + if (fIntermediateTensorFrequencyLookup.find(i.first) == fIntermediateTensorFrequencyLookup.end() && std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == fOutputTensorNames.end()) { size_t length = ConvertShapeToLength(i.second.shape); if (i.second.type == ETensorType::FLOAT) { @@ -670,16 +687,16 @@ void RModel::GenerateOperatorDeclarations() { } void RModel::GenerateDynamicTensorInfo() { - fGC += "//---- allocate the intermediate dynamic tensors\n"; - std::stringstream out; - for (auto & i: fDynamicTensorInfos) { - auto length = ConvertDynamicShapeToLength(i.second.shape); - out << SP << "if (" << length << " > 0) {\n"; - out << SP << SP << "fTensor_" << i.first << ".resize(" << length << ");\n"; - out << SP << SP << "tensor_" << i.first << " = fTensor_" << i.first << ".data();\n"; - out << SP << "}\n"; - } - fGC += out.str(); + fGC += "//---- allocate the intermediate dynamic tensors\n"; + std::stringstream out; + for (auto & i: fDynamicTensorInfos) { + auto length = ConvertDynamicShapeToLength(i.second.shape); + out << SP << "if (" << length << " > 0) {\n"; + out << SP << SP << "fTensor_" << i.first << ".resize(" << length << ");\n"; + out << SP << SP << "tensor_" << i.first << " = fTensor_" << i.first << ".data();\n"; + out << SP << "}\n"; + } + fGC += out.str(); } std::string RModel::GenerateInferSignature(bool isdecl) { @@ -702,15 +719,14 @@ std::string RModel::GenerateInferSignature(bool isdecl) { } } } - rGC += "alpaka::Buf "; + rGC += type + "* "; } - rGC += "deviceBuf_" + name + ","; + rGC += "tensor_" + name + ","; i_input++; } @@ -718,46 +734,129 @@ std::string RModel::GenerateInferSignature(bool isdecl) { return rGC; } +namespace { + +std::string typeForOutput(ETensorType t) { + // The std::vector is a special type that is not wrapping continuous memory. + // We don't want to use it as a return type. + if (t == ETensorType::BOOL) t = ETensorType::UINT8; + return ConvertTypeToString(t); +} + +} + +void RModel::GenerateOutput() +{ + size_t outputSize = fOutputTensorNames.size(); + // assume output types are all the same + + bool sameOutputTypes = true; + std::string inferReturnType; // type return by infer function + ETensorType eFirstOutputType = GetTensorType(*fOutputTensorNames.begin()); + fGC += "\n\n"; + if (outputSize == 1) { + fGC += "std::vector<" + typeForOutput(eFirstOutputType) + ">"; + } else { + // if all output types are the same we return an std::vector - otherwise a tuple + for (std::string const &name : fOutputTensorNames) { + if (GetTensorType(name) != eFirstOutputType) + sameOutputTypes = false; + } + if (sameOutputTypes) + fGC += "std::vector>"; + else { + inferReturnType = "std::tuple<"; + for (size_t i = 0; i < outputSize; i++) { + inferReturnType += "std::vector<" + typeForOutput(GetTensorType(fOutputTensorNames[i])) + ">"; + if (i < outputSize - 1) + inferReturnType += ","; + } + inferReturnType += ">"; + fGC += inferReturnType; + } + } + + fGC += " infer(" + GenerateInferSignature() + "){\n"; + + std::string doInferArgs = GenerateInferSignature(false); + if (!doInferArgs.empty()) + doInferArgs += ","; + for (std::string const &name : fOutputTensorNames) { + fGC += SP + "std::vector<" + typeForOutput(GetTensorType(name)) + " > output_tensor_" + name + ";\n"; + doInferArgs += " output_tensor_" + name + ","; + } + if (!doInferArgs.empty()) + doInferArgs.back() = ' '; + + fGC += SP + "doInfer(" + doInferArgs + ");\n"; + + fGC += SP + "return {"; + for (size_t i = 0; i < fOutputTensorNames.size(); i++) { + fGC += "output_tensor_" + fOutputTensorNames[i]; + if (i < fOutputTensorNames.size() - 1) + fGC += ","; + } + fGC += "};\n"; + fGC += "}\n"; // end of infer function scope +} + void RModel::GenerateSessionCode() { + // Determine the signature of the actual inference function + std::string doInferSignature = GenerateInferSignature(); + if (!doInferSignature.empty()) + doInferSignature += ", "; + for (auto const &name : fOutputTensorNames) { + doInferSignature += " std::vector<" + typeForOutput(GetTensorType(name)) + "> &output_tensor_" + name + ","; + } + doInferSignature.back() = ' '; + + doInferSignature = "void doInfer(" + doInferSignature + ")"; // define the Session struct (for GNN this is generated in RModel_GNN) - if (fUseSession) { + if (fUseSession && !fIsGNNComponent) { if (!fIsSubGraph) - fGC += "struct Session {\n\n"; + fGC += "struct Session {\n"; else - fGC += "struct Session_" + fName + " {\n\n"; + fGC += "struct Session_" + fName + " {\n"; } // generate code for declaring the initialized tensors GenerateInitializedTensorInfo(); - // // evaluate total intermediate memory and position intermediate tensor addresses - // std::string intermediate_memory_alloc_string = ""; - // intermediate_memory_alloc_string += "\n// --- Positioning intermediate tensor memory --"; - // for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { - // intermediate_memory_alloc_string += AllocateIntermediateMemory(fOperators[op_idx]->GetOpOutputTensors()); - // CheckAndFlushIntermediateMemory(fOperators[op_idx]->GetOpInputTensors(), op_idx); - // } + if (fOptimizationLevel == OptimizationLevel::kExtended) { + // evaluate total intermediate memory and position intermediate tensor addresses + std::string intermediate_memory_alloc_string = ""; + intermediate_memory_alloc_string += "\n// --- Positioning intermediate tensor memory --"; + for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { + if (fVerbose) { + auto op = fOperators[op_idx].get(); + std::cout << "\n******************\n analyzing input/output operator " << op_idx << " " + << typeid(*op).name() << std::endl; + } + intermediate_memory_alloc_string += AllocateIntermediateMemory(fOperators[op_idx]->GetOpOutputTensors()); + CheckAndFlushIntermediateMemory(fOperators[op_idx]->GetOpInputTensors(), op_idx); + } - // // to check remaining unused fragments after memory allocation (lesser the better) - // // for (const auto &it: fIntermediateMemoryInfo.available_stack){ - // // std::cout<<"chunk_idx: "<GenerateSessionMembersCode(opName); - // } - // fGC += "\n"; + for (size_t id = 0; id < fOperators.size(); id++) { + std::string opName = std::to_string(id); + fGC += fOperators[id]->GenerateSessionMembersCode(opName); + } + fGC += "\n"; // here add initialization and reading of weight tensors if (fUseWeightFile) { std::string fileName = fName; @@ -793,10 +892,10 @@ void RModel::GenerateSessionCode() } // add initialization of shape parameters // assume all parameters are of type size_t - if (!fShapeParams.empty()) { - for (auto &p : fShapeParams) { + if (!fDimShapeNames.empty()) { + for (auto &p : fDimShapeNames) { fGC += ",\n"; - fGC += " size_t " + p.first + " = " + p.second; + fGC += " size_t " + p + " = " + fShapeParams[p]; } } fGC += ") {\n"; @@ -818,12 +917,42 @@ void RModel::GenerateSessionCode() fGC += "}\n\n"; } + + fGC += doInferSignature + "{\n"; + fGC += "\n"; + // generate the inference code + if (fVerbose) + std::cout << "Generating main inference code for " << fName << std::endl; + + if (fOutputTensorNames.size() == 0) + throw std::runtime_error("TMVA-SOFIE: output size=0 are not supported"); + + for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { + if (fVerbose) + std::cout << "Generating code for operator .... " << op_idx << std::endl; + fGC += (fOperators[op_idx]->Generate(std::to_string(op_idx))); + } + + fGC += SP + "using SOFIE::UTILITY::FillOutput;\n\n"; + + for (std::string const &name : fOutputTensorNames) { + // need to check is size is the same (don't want to return a vector with + // larger size) in that case better to copy + bool isIntermediate = fIntermediateTensorInfos.count(name) > 0; + std::string n = isIntermediate ? std::to_string(ConvertShapeToLength(GetTensorShape(name))) + : ConvertDimShapeToLength(GetDimTensorShape(name)); + fGC += SP + "FillOutput(tensor_" + name + ", output_tensor_" + name + ", " + n + ");\n"; + } + + fGC += "}\n\n"; + + // generate the inference overload that returns an output struct GenerateOutput(); // end of session if (fUseSession && !fIsGNNComponent) { - fGC += "}; // end of Session\n"; + fGC += "}; // end of Session\n\n"; } } @@ -896,9 +1025,9 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { fGC += " throw std::runtime_error(\"tmva-sofie failed to open file \" + filename + \" for input weights\");\n"; fGC += " }\n"; - // if(fIsGNNComponent) { - // fGC += " f.seekg(" + std::to_string(pos) + ");\n"; - // } + if(fIsGNNComponent) { + fGC += " f.seekg(" + std::to_string(pos) + ");\n"; + } fGC += " std::string tensor_name;\n"; fGC += " size_t length;\n"; @@ -966,9 +1095,10 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { std::runtime_error("tmva-sofie tensor " + tensor_name + " with type " + ConvertTypeToString(i.second.type()) + " cannot be read from a ROOT file"); } fGC += " }\n"; - } + } + fGC += " }\n"; } - } +} long RModel::WriteInitializedTensorsToFile(std::string filename) { // Determine the file extension based on the weight file type @@ -1163,7 +1293,7 @@ void RModel::PrintOutputTensors() { if (!IsDynamicTensor(it)) std::cout << "shape: " << ConvertShapeToString(GetTensorShape(it)) << std::endl; else - std::cout << "shape: " << ConvertDynamicShapeToString(GetDynamicTensorShape(it)) << std::endl; + std::cout << "shape: " << ConvertDimShapeToString(GetDynamicTensorShape(it)) << std::endl; } std::cout << "\n"; } diff --git a/src/SOFIE_core/src/RModel_ALPAKA.cxx b/src/SOFIE_core/src/RModel_ALPAKA.cxx index 549a3dd..066d4e8 100644 --- a/src/SOFIE_core/src/RModel_ALPAKA.cxx +++ b/src/SOFIE_core/src/RModel_ALPAKA.cxx @@ -182,7 +182,7 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() { std::cout<GetKind())<GetKind()) == registered_operators.end()) { std::cout<<"Generating ALPAKA kernel for operator"<< std::endl; - fGC += fOperators[id]->Generate_GPU_Kernel_ALPAKA(); + fGC += fOperators[id]->Generate_GPU_Kernel_ALPAKA(std::to_string(id)); registered_operators.insert(fOperators[id]->GetKind()); } } @@ -279,7 +279,7 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() { std::cout<GetKind())<GetKind()) == registered_operators.end()) { std::cout<<"Declaring ALPAKA kernel for operator"<< std::endl; - fGC += fOperators[id]->Generate_GPU_Kernel_Definitions_ALPAKA(); + fGC += fOperators[id]->Generate_GPU_Kernel_Definitions_ALPAKA(std::to_string(id)); registered_operators.insert(fOperators[id]->GetKind()); } } diff --git a/src/SOFIE_core/src/RModel_Base.cxx b/src/SOFIE_core/src/RModel_Base.cxx index 3b1885c..f212c53 100644 --- a/src/SOFIE_core/src/RModel_Base.cxx +++ b/src/SOFIE_core/src/RModel_Base.cxx @@ -29,7 +29,7 @@ void RModel_Base::GenerateHeaderInfo(std::string& hgname) { } // for the session we need to include SOFIE_Common functions //needed for convolution operator (need to add a flag) - fGC += "#include \"TMVA/SOFIE_common.hxx\"\n"; + fGC += "#include \"SOFIE/SOFIE_common.hxx\"\n"; if (fUseWeightFile) fGC += "#include \n"; // Include TFile when saving the weights in a binary ROOT file @@ -79,7 +79,7 @@ void RModel_Base::GenerateHeaderInfo_GPU_ALPAKA(std::string& hgname) { // for the session we need to include SOFIE_Common functions //needed for convolution operator (need to add a flag) - fGC += "#include \"TMVA/SOFIE_common.hxx\"\n"; + fGC += "#include \"SOFIE/SOFIE_common.hxx\"\n"; if (fUseWeightFile) fGC += "#include \n"; // Include TFile when saving the weights in a binary ROOT file diff --git a/src/SOFIE_core/src/RModel_GNN.cxx b/src/SOFIE_core/src/RModel_GNN.cxx index a1dfe06..3dae254 100644 --- a/src/SOFIE_core/src/RModel_GNN.cxx +++ b/src/SOFIE_core/src/RModel_GNN.cxx @@ -94,7 +94,7 @@ void RModel_GNN::Generate() { // the number of output edges features can be smaller, so we need to correct here auto num_edge_features_input = num_edge_features; - auto edges_update_output_shape = edges_update_block->GetFunctionBlock()->GetDynamicTensorShape(edges_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); + auto edges_update_output_shape = edges_update_block->GetFunctionBlock()->GetDimTensorShape(edges_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); if(!edges_update_output_shape[1].isParam && edges_update_output_shape[1].dim != num_edge_features_input) { num_edge_features = edges_update_output_shape[1].dim; } @@ -117,7 +117,7 @@ void RModel_GNN::Generate() { // we need to correct the output number of node features auto num_node_features_input = num_node_features; - auto nodes_update_output_shape = nodes_update_block->GetFunctionBlock()->GetDynamicTensorShape(nodes_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); + auto nodes_update_output_shape = nodes_update_block->GetFunctionBlock()->GetDimTensorShape(nodes_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); if(!nodes_update_output_shape[1].isParam && nodes_update_output_shape[1].dim != num_node_features_input) { num_node_features = nodes_update_output_shape[1].dim; } diff --git a/src/SOFIE_core/src/RModel_GraphIndependent.cxx b/src/SOFIE_core/src/RModel_GraphIndependent.cxx index bab06b3..cd62d0c 100644 --- a/src/SOFIE_core/src/RModel_GraphIndependent.cxx +++ b/src/SOFIE_core/src/RModel_GraphIndependent.cxx @@ -81,7 +81,7 @@ void RModel_GraphIndependent::Generate() { // the number of output edges features can be smaller, so we need to correct here // assume num_edge_features is not a parametric shape - auto edges_update_output_shape = edges_update_block->GetFunctionBlock()->GetDynamicTensorShape(edges_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); + auto edges_update_output_shape = edges_update_block->GetFunctionBlock()->GetDimTensorShape(edges_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); if(!edges_update_output_shape[1].isParam && edges_update_output_shape[1].dim != num_edge_features_input) { num_edge_features = edges_update_output_shape[1].dim; } @@ -100,7 +100,7 @@ void RModel_GraphIndependent::Generate() { fGC+="};\n}\n"; // we need to correct the output number of node features - auto nodes_update_output_shape = nodes_update_block->GetFunctionBlock()->GetDynamicTensorShape(nodes_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); + auto nodes_update_output_shape = nodes_update_block->GetFunctionBlock()->GetDimTensorShape(nodes_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); if(!nodes_update_output_shape[1].isParam && nodes_update_output_shape[1].dim != num_node_features_input) { num_node_features = nodes_update_output_shape[1].dim; } @@ -119,7 +119,7 @@ void RModel_GraphIndependent::Generate() { // we need to correct the output number of global features // global features are in shape[1] #if 0 - auto globals_update_output_shape = globals_update_block->GetFunctionBlock()->GetDynamicTensorShape(globals_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); + auto globals_update_output_shape = globals_update_block->GetFunctionBlock()->GetDimTensorShape(globals_update_block->GetFunctionBlock()->GetOutputTensorNames()[0]); if(!globals_update_output_shape[1].isParam && globals_update_output_shape[1].dim != num_global_features_input) { num_global_features = globals_update_output_shape[1].dim; } diff --git a/src/SOFIE_core/src/SOFIE_common.cxx b/src/SOFIE_core/src/SOFIE_common.cxx index bb288cf..6136f72 100644 --- a/src/SOFIE_core/src/SOFIE_common.cxx +++ b/src/SOFIE_core/src/SOFIE_common.cxx @@ -1,15 +1,16 @@ #include "SOFIE/SOFIE_common.hxx" -#include + +#include #include #include +#include - -namespace SOFIE{ +namespace SOFIE { /// @brief Convert shape from integer format to dynamic one (based on Dim) /// @param shape /// @return shape based on Dim -std::vector ConvertShapeToDim(std::vector shape){ +std::vector ConvertShapeToDim(const std::vector & shape){ std::vector ret_shape(shape.size()); for (size_t i =0; i < shape.size(); i++){ ret_shape[i].dim = shape[i]; @@ -20,7 +21,7 @@ std::vector ConvertShapeToDim(std::vector shape){ /// @brief Convert shape based on Dim to integer format /// @param shape /// @return shape based on integer. Return an empty shape in case shape is dynamic (has a parameter) -std::vector ConvertShapeToInt(std::vector shape){ +std::vector ConvertShapeToInt(const std::vector & shape){ std::vector ret_shape(shape.size()); for (size_t i =0; i < shape.size(); i++){ if (shape[i].isParam) { @@ -46,7 +47,7 @@ std::vector ConvertShapeToInt(std::vector shape){ } -std::size_t ConvertShapeToLength(std::vector shape){ +std::size_t ConvertShapeToLength(const std::vector & shape){ // Empty shape represent scalar values, so we return a length=1 std::size_t fLength = 1; for (auto& dim: shape) fLength *= dim; @@ -58,6 +59,9 @@ std::string ConvertTypeToString(ETensorType type){ case ETensorType::FLOAT : { return "float"; } + case ETensorType::INT8 : { + return "int8_t"; + } case ETensorType::INT16 : { return "int16_t"; } @@ -67,6 +71,9 @@ std::string ConvertTypeToString(ETensorType type){ case ETensorType::INT64 : { return "int64_t"; } + case ETensorType::UINT8 : { + return "uint8_t"; + } case ETensorType::UINT16 : { return "uint16_t"; } @@ -106,7 +113,7 @@ ETensorType ConvertStringToType(std::string type){ } } -std::string ConvertShapeToString(std::vector shape) { +std::string ConvertShapeToString(const std::vector & shape) { std::stringstream out; out << "{ "; for (size_t i = 0; i < shape.size(); i++) { @@ -117,7 +124,7 @@ std::string ConvertShapeToString(std::vector shape) { return out.str(); } -std::string ConvertDynamicShapeToString(std::vector shape) { +std::string ConvertDimShapeToString(const std::vector & shape) { std::stringstream out; out << "{ "; for (size_t i = 0; i < shape.size(); i++) { @@ -128,10 +135,12 @@ std::string ConvertDynamicShapeToString(std::vector shape) { return out.str(); } -std::string ConvertDynamicShapeToLength(std::vector shape) { +std::string ConvertDimShapeToLength(const std::vector & shape) { // convert generic shape to a string // multiply all the integer specified dimensions of the shape std::string length; + // case of empty vectors return 1 + if (shape.empty()) return "1"; size_t int_length = 0; for (size_t i = 0; i < shape.size(); i++) { if (shape[i].isParam) { @@ -145,12 +154,24 @@ std::string ConvertDynamicShapeToLength(std::vector shape) { } } // multiply the integer components to the parametric one + // if larger than 1 if (int_length > 0) { - if (!length.empty()) length += " * "; - length += std::to_string(int_length); + if (!length.empty() && int_length > 1) { + length += " * "; + length += std::to_string(int_length); + } else if (length.empty()) { // case is full known shape + length = std::to_string(int_length); + } } return length; } +std::string ConvertShapeToString(const std::vector & shape) { + return ConvertDimShapeToString(shape); +} +std::string ConvertDynamicShapeToLength(const std::vector & shape) { + return ConvertDimShapeToLength(shape); +} + namespace{ template @@ -169,6 +190,12 @@ static inline void copy_vector_data(int_t no_of_copies, int_t input_size, T* inp } } +bool IsInteger(const std::string & s) { + int value; + auto [ptr, ec] = std::from_chars(s.data(), s.data() + s.size(), value); + return ec == std::errc() && ptr == s.data() + s.size(); +} + bool UTILITY::AreSameShape(const std::vector& shapeA, const std::vector& shapeB) { if (shapeA.size() != shapeB.size()) { return false; @@ -330,17 +357,24 @@ std::vector UTILITY::MultidirectionalBroadcastShape(std::vector UTILITY::UnidirectionalBroadcastShape(std::vector shapeA, std::vector shapeB) +// check multi-directional broadcasting of two shapes (need to pass inputs by non const ref. since we might prepends with one's +// return a pair of integer flag and new broadcasted shape +// if flag = 0: shape are identical +// flag = 1: return shape is equal to A, we broadcast B +// flag = 2: return shape is equal to B we broadcast A +// flag = 3: return shape is common of two we broadcast A and B to output +std::pair> UTILITY::MultidirectionalBroadcastShape(std::vector & shapeA, std::vector & shapeB) { size_t sizeA = shapeA.size(); size_t sizeB = shapeB.size(); // Check if A and B have the same shape if (UTILITY::AreSameShape(shapeA, shapeB)){ - return shapeA; + return std::make_pair(0, shapeA); } // Find the common shape of A and B size_t size = std::max(sizeA, sizeB); if (sizeA < size) { + // prepend 1's in A to make of same shape as B std::vector newShapeA(size, 1); size_t offset = size - sizeA; std::copy(shapeA.begin(), shapeA.end(), newShapeA.begin() + offset); @@ -359,36 +393,116 @@ std::vector UTILITY::UnidirectionalBroadcastShape(std::vector s break; } } + int broadcastFlag = 0; if (broadcastable) { // The output shape is max(outShape, targetShape) std::vector targetShape(size, 1); for (size_t i = 0; i < size; i++) { targetShape[i] = std::max(shapeA[i], shapeB[i]); + if (shapeB[i] < targetShape[i]) broadcastFlag |= 1; + if (shapeA[i] < targetShape[i]) broadcastFlag |= 2; } - return targetShape; + return std::make_pair(broadcastFlag, targetShape); } else { throw - std::runtime_error("TMVA::SOFIE - Error unidirectional broadcasting tensors of shape " + std::runtime_error("TMVA::SOFIE - Error multidirectional broadcasting tensors of shape " + ConvertShapeToString(shapeA) + " and " + ConvertShapeToString(shapeB) + " to a common shape."); } } +// unidirectional broadcast- only B changes +std::vector UTILITY::UnidirectionalBroadcastShape(std::vector & shapeA, std::vector & shapeB) +{ + auto ret = UTILITY::MultidirectionalBroadcastShape(shapeA, shapeB); + if (ret.first > 1) { + std::runtime_error("TMVA::SOFIE - Error unidirectional broadcasting tensors of shape " + + ConvertShapeToString(shapeA) + " and " + ConvertShapeToString(shapeB) + + " to a common shape."); + } + return ret.second; +} -// UNidirectional boradcast specializaiton for vector +// for broadcasting Dim shapes +// flag indicates also which vector needs to be broadcasted +// flag & 1 == 1 : broadcast B -> A +// flag & 2 == 2 : broadcast A -> B +// flag & 4 == 4 a run time check is needed on shapes with values +std::pair> UTILITY::MultidirectionalBroadcastShape(std::vector & shapeA, std::vector & shapeB) { + size_t sizeA = shapeA.size(); + size_t sizeB = shapeB.size(); + // Check if A and B have the same shape + if (UTILITY::AreSameShape(shapeA, shapeB)){ + return std::make_pair(0, shapeA); + } + // Find the common shape of A and B + size_t size = std::max(sizeA, sizeB); + if (sizeA < size) { + // prepend 1's in A to make of same shape as B + std::vector newShapeA(size, Dim{1}); + size_t offset = size - sizeA; + std::copy(shapeA.begin(), shapeA.end(), newShapeA.begin() + offset); + shapeA = std::move(newShapeA); + } + if (sizeB < size) { + std::vector newShapeB(size, Dim{1}); + size_t offset = size - sizeB; + std::copy(shapeB.begin(), shapeB.end(), newShapeB.begin() + offset); + shapeB = std::move(newShapeB); + } + + int broadcastFlag = 0; + // The output shape is targetShape + std::vector targetShape(size); + for (size_t i = 0; i < size; i++) { + // assume we broadcast to the parametric value + if (shapeA[i] == shapeB[i]) { + targetShape[i] = shapeA[i]; + } else if (shapeA[i].isParam && shapeB[i].GetVal() == "1" ) { + // broadcast B to A (case A is parametric with ) + targetShape[i] = shapeA[i]; + broadcastFlag |= 1; + } else if (shapeA[i].GetVal() == "1" && shapeB[i].isParam) { + // broadcast A to B + targetShape[i] = shapeB[i]; + broadcastFlag |= 2; + } else if (!shapeA[i].isParam && !shapeB[i].isParam) { + if (shapeB[i].dim == 1) { + targetShape[i] = shapeA[i]; + broadcastFlag |= 1; + } else if (shapeA[i].dim == 1) { + targetShape[i] = shapeB[i]; + broadcastFlag |= 2; + } else { + // non broadcastable case cannot have A and B two different defined shapes different than one + broadcastFlag = -1; + } + } else if (shapeA[i].isParam && shapeB[i].isParam) { + // full dynamic case - we will decided at run time + std::stringstream s; + s << "std::max(" << shapeA[i] << "," << shapeB[i] << ")"; + // use -1 for dim to indicate is an expression + targetShape[i] = Dim { s.str() , static_cast(-1)}; + broadcastFlag |= 4; + } else if (shapeA[i].isParam && !shapeB[i].isParam) { + // A -> B need to check at run time if consistent + targetShape[i] = shapeB[i]; + broadcastFlag |= 6; + } else if (!shapeA[i].isParam && shapeB[i].isParam) { + // B -> A need to check at run time if consistent + targetShape[i] = shapeA[i]; + broadcastFlag |= 5; + } else { + // all cases should be covered + throw std::runtime_error("TMVA::SOFIE - Fatal error in MultiDirectionalBroadCastDimShape"); + } + } + if (broadcastFlag == -1) { + throw std::runtime_error("TMVA::SOFIE - Error multidirectional broadcasting tensors of shape " + + ConvertDimShapeToString(shapeA) + " and " + ConvertDimShapeToString(shapeB) + + " to a common shape."); + } -// specialization for vector of boolean -void UTILITY::UnidirectionalBroadcast(const std::vector & data, const std::vector& shape, const std::vector& targetShape, std::vector & broadcastedData) - { - // Prepend shape with ones - auto ncdata = const_cast &>(data); - if (shape.size() < targetShape.size()) { - size_t targetSize = targetShape.size(); - std::vector newShape(targetSize, 1); - size_t offset = targetSize - shape.size(); - std::copy(shape.begin(), shape.end(), newShape.begin() + offset); - UTILITY::BroadcastTensor &, std::vector &>(ncdata, newShape, targetShape, broadcastedData); - } - UTILITY::BroadcastTensor &, std::vector &>(ncdata, shape, targetShape, broadcastedData); + return std::make_pair(broadcastFlag, targetShape); } std::string UTILITY::Clean_name(std::string input_tensor_name){ @@ -413,50 +527,22 @@ std::vector UTILITY::ComputeStrideFromShape(const std::vector & shape) // assume row major layout const auto size = shape.size(); std::vector strides(size); - strides[size-1] = Dim{1}; - for (std::size_t i = 1; i < size; i++) { - if (!shape[size-i].isParam && !strides[size-i].isParam) - strides[size - 1 - i] = Dim{strides[size-i].dim * shape[size-i].dim}; - else - strides[size - 1 - i] = Dim{std::string(strides[size-i].GetVal() + "*" + shape[size-i].GetVal())}; - } - return strides; -} - -template -std::string GenerateConstantTensorCode(const std::pair &t) -{ - std::stringstream strs; - std::string type = ConvertTypeToString(t.second.type()); - size_t length = ConvertShapeToLength(t.second.shape()); - // avoid using stack sizes for constant tensors to reduce compilation time - bool allocateOnStack = (length > 100) ? false : true; - - const T *data = t.second.data(); - - // and check if all values are the same - bool sameData = false; - // for non stack allocation check if data are the same - if (!allocateOnStack && length > 1) { - size_t idx = 1; - do { - sameData = (data[idx] == data[idx - 1]); - idx++; - } while (sameData && idx < length); - } - if (allocateOnStack) { - strs << type << " tensor_" << t.first << "[" << length << "] = " << ConvertValuesToString(length, data) << ";\n"; - } else { - strs << "std::vector<" << type << "> fTensor_" << t.first << " = "; - if (sameData) - strs << "std::vector<" << type << ">(" << length << ", " << ConvertValToString(data[0]) << ");\n"; - else { - strs << ConvertValuesToString(length, data) << ";\n"; + if (size > 0) { + strides[size-1] = Dim{1}; + for (std::size_t i = 1; i < size; i++) { + if (!shape[size-i].isParam && !strides[size-i].isParam) + strides[size - 1 - i] = Dim{strides[size-i].dim * shape[size-i].dim}; + else { + if (strides[size-i].GetVal() == "1") + strides[size - 1 - i] = shape[size-i]; + else if (shape[size-i].GetVal() == "1") + strides[size - 1 - i] = strides[size-i]; + else + strides[size - 1 - i] = Dim{std::string(strides[size-i].GetVal() + "*" + shape[size-i].GetVal())}; + } } - strs << "const " << type << " * tensor_" + t.first + " = fTensor_" + t.first + ".data();\n"; } - return strs.str(); + return strides; } - -}//SOFIE +} // namespace SOFIE \ No newline at end of file diff --git a/src/SOFIE_core/test/CMakeLists.txt b/src/SOFIE_core/test/CMakeLists.txt index 34bb49f..e4713b6 100644 --- a/src/SOFIE_core/test/CMakeLists.txt +++ b/src/SOFIE_core/test/CMakeLists.txt @@ -33,6 +33,7 @@ endforeach() string(REPLACE ";" ";\n" EMIT_CAPTURES "${ALL_CAPTURES}") configure_file(EmitFromONNX.cxx.in EmitFromONNX_all.cxx @ONLY) configure_file(EmitFromRoot.cxx.in EmitFromRoot_all.cxx @ONLY) +configure_file(EmitFromONNX_GPU_ALPAKA.cxx.in EmitFromONNX_GPU_ALPAKA_all.cxx @ONLY) ROOTTEST_GENERATE_EXECUTABLE(emitFromONNX EmitFromONNX_all.cxx LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers @@ -129,3 +130,17 @@ ROOT_ADD_TEST(tmva-sofie-EmitGNN COMMAND emitGNN) ROOT_EXECUTABLE(EmitGraphIndependent GNN/EmitGraphIndependent.cxx LIBRARIES SOFIE_core) ROOT_ADD_TEST(tmva-sofie-EmitGraphIndependent COMMAND EmitGraphIndependent) + +# Generating inference code for heterogeneous testing using ALPAKA +# ROOTTEST_GENERATE_EXECUTABLE(emitFromONNXAlpaka EmitFromONNX_GPU_ALPAKA_all.cxx +# LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers +# FIXTURES_SETUP sofie-compile-models-onnx-alpaka-build) + +# # silence protobuf warnings seen in version 3.0 and 3.6. Not needed from protobuf version 3.17 +# target_compile_options(emitFromONNXAlpaka PRIVATE -Wno-unused-parameter -Wno-array-bounds) + +# ROOTTEST_ADD_TEST(SofieCompileModels_ONNX_Alpaka +# COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNXAlpaka ${onnx_file} ${CMAKE_CURRENT_BINARY_DIR}/${fname} +# FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka-build +# FIXTURES_SETUP sofie-compile-models-onnx-alpaka +# ) diff --git a/src/SOFIE_core/test/TestCustomModelsFromONNX.cxx b/src/SOFIE_core/test/TestCustomModelsFromONNX.cxx index d02dc5e..ba9a42a 100644 --- a/src/SOFIE_core/test/TestCustomModelsFromONNX.cxx +++ b/src/SOFIE_core/test/TestCustomModelsFromONNX.cxx @@ -2515,7 +2515,7 @@ TEST(ONNX, Equal){ }); SOFIE_Equal::Session s("Equal_FromONNX.dat"); - std::vector output = s.infer(input1.data(),input2.data()); + std::vector output = s.infer(input1.data(),input2.data()); // Checking output size EXPECT_EQ(output.size(), sizeof(Equal_ExpectedOutput::outputs) / sizeof(bool)); @@ -2540,7 +2540,7 @@ TEST(ONNX, LessOrEqual){ }); SOFIE_LessOrEqual::Session s("LessOrEqual_FromONNX.dat"); - std::vector output = s.infer(input1.data(),input2.data()); + std::vector output = s.infer(input1.data(),input2.data()); // Checking output size EXPECT_EQ(output.size(), sizeof(LessOrEqual_ExpectedOutput::outputs) / sizeof(bool)); @@ -2565,7 +2565,7 @@ TEST(ONNX, GreaterOrEqual){ }); SOFIE_GreaterOrEqual::Session s("GreaterOrEqual_FromONNX.dat"); - std::vector output = s.infer(input1.data(),input2.data()); + std::vector output = s.infer(input1.data(),input2.data()); // Checking output size EXPECT_EQ(output.size(), sizeof(GreaterOrEqual_ExpectedOutput::outputs) / sizeof(bool)); @@ -2590,7 +2590,7 @@ TEST(ONNX, Greater){ }); SOFIE_Greater::Session s("Greater_FromONNX.dat"); - std::vector output = s.infer(input1.data(),input2.data()); + std::vector output = s.infer(input1.data(),input2.data()); // Checking output size EXPECT_EQ(output.size(), sizeof(Greater_ExpectedOutput::outputs) / sizeof(bool)); @@ -2615,7 +2615,7 @@ TEST(ONNX, Less){ }); SOFIE_Less::Session s("Less_FromONNX.dat"); - std::vector output = s.infer(input1.data(),input2.data()); + std::vector output = s.infer(input1.data(),input2.data()); // Checking output size EXPECT_EQ(output.size(), sizeof(Less_ExpectedOutput::outputs) / sizeof(bool)); From b4cd917b56b150b989070420b80fb0efce0aac52 Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Sun, 23 Nov 2025 20:44:02 +0100 Subject: [PATCH 14/22] fix: parameteric inputs for range operator --- src/SOFIE_core/inc/SOFIE/RModel.hxx | 6 +- src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx | 24 +- src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc | 75 ++-- src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc | 7 +- src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc | 7 +- src/SOFIE_core/src/RModel.cxx | 417 ++++++++++++-------- 6 files changed, 304 insertions(+), 232 deletions(-) diff --git a/src/SOFIE_core/inc/SOFIE/RModel.hxx b/src/SOFIE_core/inc/SOFIE/RModel.hxx index 09feb17..4ecdaec 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel.hxx @@ -65,9 +65,9 @@ public: int Verbose() const { return fVerbose;} - const std::vector &GetTensorShape(std::string name) const; + const std::vector &GetTensorShape(const std::string & name) const; std::vector GetDimTensorShape(const std::string & name) const; - const ETensorType &GetTensorType(std::string name) const; + const ETensorType &GetTensorType(const std::string & name) const; std::vector GetDynamicTensorShape(const std::string & name) const ; // get the values for the tensor representing a shape @@ -140,7 +140,7 @@ public: void AddIntermediateTensor(std::string tensor_name, ETensorType type, std::vector shape); // Add an intermediate dynamic tensor void AddDynamicTensor(std::string tensor_name, ETensorType type, std::vector shape); - + void AddShapeParam(const std::string & name, size_t def_value = 0); void AddInputTensorName(std::string name); void AddOutputTensorNameList(std::vector output_tensor_names); void diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx index bb1a74e..f9998e1 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx @@ -1,5 +1,5 @@ -#ifndef SOFIE_ROPERATOR_GRU -#define SOFIE_ROPERATOR_GRU +#ifndef TMVA_SOFIE_ROPERATOR_GRU +#define TMVA_SOFIE_ROPERATOR_GRU #include "SOFIE/RModel.hxx" #include "SOFIE/ROperator.hxx" @@ -11,7 +11,6 @@ #include #include - namespace SOFIE { /*! \brief Gated Recurrent Unit operator @@ -91,7 +90,7 @@ template class ROperator_GRU final : public ROperator { fNSequence_lens(UTILITY::Clean_name(nameSequence_lens)), fNInitial_h(UTILITY::Clean_name(nameInitial_h)), fNY(UTILITY::Clean_name(nameY)), fNY_h(UTILITY::Clean_name(nameY_h)) { - + fInputTensorNames = { fNX, fNW, fNR }; if (!fNB.empty()){ fInputTensorNames.emplace_back(fNB); @@ -123,39 +122,34 @@ template class ROperator_GRU final : public ROperator { * * \param input type of the input tensors */ - std::vector TypeInference(std::vector /*input*/); + std::vector TypeInference(std::vector /*input*/) override; /*! \brief Infers the shape of the output tensors * * \param input shape of the input tensors */ - std::vector> ShapeInference(std::vector> /*input*/); + std::vector> ShapeInference(std::vector> /*input*/) override; /*! \brief Initialize the model * * \param model Model */ - void Initialize(RModel &); + void Initialize(RModel &) override; /*! \brief Generate the inference code * * \param OpName name of the operator */ - std::string Generate(std::string /*OpName*/); - - /*! \brief Generate the code for the Session internal data vectors - * - * \param opName name of the operator - */ - std::string GenerateSessionMembersCode(std::string opName); + std::string Generate(std::string /*OpName*/) override; /*! \brief Returns the blas routines needed to compile the generated code */ - std::vector GetBlasRoutines() { return { std::string("Gemm"), std::string("Axpy") }; } + std::vector GetBlasRoutines() override { return { std::string("Gemm"), std::string("Axpy") }; } }; } // namespace SOFIE + // Implementation of the ROperator_GRU class #include "SOFIE/ROperator_GRU.icc" diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc b/src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc index f3813c2..d011617 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc +++ b/src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc @@ -1,5 +1,5 @@ -#ifndef SOFIE_ROPERATOR_GRU_I -#define SOFIE_ROPERATOR_GRU_I +#ifndef TMVA_SOFIE_ROPERATOR_GRU_I +#define TMVA_SOFIE_ROPERATOR_GRU_I namespace SOFIE { @@ -175,51 +175,45 @@ void ROperator_GRU::Initialize(RModel& model){ fAttrActivations = {"Sigmoid", "Tanh"}; } } -} -// generate code for Session data members (e.g. internal vectors) -template -std::string ROperator_GRU::GenerateSessionMembersCode(std::string opName) -{ - opName = "op_" + opName; - std::stringstream out; + // To get unique intermediate tensor names, we add the name of the input + // tensor. One might also consider using the index of the operator in the + // RMode, but this information is not available in the current scope. + std::string opName = "op_gru_" + fNX; size_t num_directions = fShapeW[0]; size_t seq_length = (fAttrLayout == 0) ? fShapeX[0] : fShapeX[1]; size_t batch_size = (fAttrLayout == 0) ? fShapeX[1] : fShapeX[0]; size_t input_size = fShapeX[2]; + auto declareVector = [&](std::string const &name, std::size_t n){ + std::string fullName = opName + "_" + name; + model.AddIntermediateTensor(fullName, ConvertStringToType(fType), std::vector{n}); + }; + if (fAttrLayout != 0) { - out << "std::vector<" << fType << "> fVec_" << opName << "_input = std::vector<" << fType << ">(" - << seq_length * batch_size * input_size << ");\n"; - out << "std::vector<" << fType << "> fVec_" << opName << "_initial_hidden_state = std::vector<" << fType << ">(" - << num_directions * batch_size * fAttrHiddenSize << ");\n"; - out << "std::vector<" << fType << "> fVec_" << opName << "_initial_cell_state = std::vector<" << fType << ">(" - << num_directions * batch_size * fAttrHiddenSize << ");\n"; + declareVector("input", seq_length * batch_size * input_size); + declareVector("initial_hidden_state", num_directions * batch_size * fAttrHiddenSize); + declareVector("initial_cell_state", num_directions * batch_size * fAttrHiddenSize); } // Set the feedforward size_t ff_size = seq_length * batch_size * fAttrHiddenSize; - out << "std::vector<" << fType << "> fVec_" << opName << "_f_update_gate = std::vector<" << fType << ">(" << ff_size << ");\n"; - out << "std::vector<" << fType << "> fVec_" << opName << "_f_reset_gate = std::vector<" << fType << ">(" << ff_size << ");\n"; - out << "std::vector<" << fType << "> fVec_" << opName << "_f_hidden_gate = std::vector<" << fType << ">(" << ff_size << ");\n"; + declareVector("f_update_gate", ff_size); + declareVector("f_reset_gate", ff_size); + declareVector("f_hidden_gate", ff_size); // gate results size_t hs_size = seq_length * num_directions * batch_size * fAttrHiddenSize; - out << "std::vector<" << fType << "> fVec_" << opName << "_update_gate = std::vector<" << fType << ">(" << hs_size << ");\n"; - out << "std::vector<" << fType << "> fVec_" << opName << "_reset_gate = std::vector<" << fType << ">(" << hs_size << ");\n"; - out << "std::vector<" << fType << "> fVec_" << opName << "_hidden_gate = std::vector<" << fType << ">(" << hs_size << ");\n"; + declareVector("update_gate", hs_size); + declareVector("reset_gate", hs_size); + declareVector("hidden_gate", hs_size); // feedback - out << "std::vector<" << fType << "> fVec_" << opName << "_feedback = std::vector<" << fType << ">(" - << batch_size * fAttrHiddenSize << ");\n"; + declareVector("feedback", batch_size * fAttrHiddenSize); // hiddden state if (fAttrLayout != 0 || fNY.empty()) { - out << "std::vector<" << fType << "> fVec_" << opName << "_hidden_state = std::vector<" << fType << ">(" << hs_size << ");\n"; + declareVector("hidden_state", hs_size); } - - out << "\n"; - - return out.str(); } @@ -234,12 +228,14 @@ auto ROperator_GRU::Generate(std::string OpName) size_t input_size = fShapeX[2]; size_t num_directions = fShapeW[0]; + auto getVec = [&](std::string const &name) { return "tensor_op_gru_" + fNX + "_" + name; }; + // set the input if (fAttrLayout == 0) { - out << SP << fType << " *" << OpName << "_input = tensor_" << fNX << ";\n"; + out << SP << fType << " const* " << OpName << "_input = tensor_" << fNX << ";\n"; } else { if (fUseSession) { - out << SP << fType << " * " << OpName << "_input = fVec_" << OpName << "_input.data();\n"; + out << SP << fType << " * " << OpName << "_input = " << getVec("input") << ";\n"; } else { out << SP << fType << " " << OpName << "_input[" << seq_length * batch_size * input_size << "];\n"; } @@ -261,8 +257,7 @@ auto ROperator_GRU::Generate(std::string OpName) << fNInitial_h << ";\n"; } else { if (fUseSession) { - out << SP << fType << " * " << OpName << "_initial_hidden_state = fVec_" << OpName - << "_initial_hidden_state.data();\n"; + out << SP << fType << " * " << OpName << "_initial_hidden_state = " << getVec("initial_hidden_state") << ";\n"; } else { out << SP << fType << " " << OpName << "_initial_hidden_state[" << num_directions * batch_size * fAttrHiddenSize << "];\n"; @@ -283,9 +278,9 @@ auto ROperator_GRU::Generate(std::string OpName) // Set the feedforward size_t feedforward_size = seq_length * batch_size * fAttrHiddenSize; if (fUseSession) { - out << SP << fType << " * " << OpName << "_f_update_gate = fVec_" << OpName << "_f_update_gate.data();\n"; - out << SP << fType << " * " << OpName << "_f_reset_gate = fVec_" << OpName << "_f_reset_gate.data();\n"; - out << SP << fType << " * " << OpName << "_f_hidden_gate = fVec_" << OpName << "_f_hidden_gate.data();\n"; + out << SP << fType << " * " << OpName << "_f_update_gate = " << getVec("f_update_gate") << ";\n"; + out << SP << fType << " * " << OpName << "_f_reset_gate = " << getVec("f_reset_gate") << ";\n"; + out << SP << fType << " * " << OpName << "_f_hidden_gate = " << getVec("f_hidden_gate") << ";\n"; } else { out << SP << fType << " " << OpName << "_f_update_gate[" << feedforward_size << "] = {0};\n"; out << SP << fType << " " << OpName << "_f_reset_gate[" << feedforward_size << "] = {0};\n"; @@ -294,9 +289,9 @@ auto ROperator_GRU::Generate(std::string OpName) // Set the gates size_t hidden_state_size = seq_length * num_directions * batch_size * fAttrHiddenSize; if (fUseSession) { - out << SP << fType << " * " << OpName << "_update_gate = fVec_" << OpName << "_update_gate.data();\n"; - out << SP << fType << " * " << OpName << "_reset_gate = fVec_" << OpName << "_reset_gate.data();\n"; - out << SP << fType << " * " << OpName << "_hidden_gate = fVec_" << OpName << "_hidden_gate.data();\n"; + out << SP << fType << " * " << OpName << "_update_gate = " << getVec("update_gate") << ";\n"; + out << SP << fType << " * " << OpName << "_reset_gate = " << getVec("reset_gate") << ";\n"; + out << SP << fType << " * " << OpName << "_hidden_gate = " << getVec("hidden_gate") << ";\n"; } else { out << SP << fType << " " << OpName << "_update_gate[" << hidden_state_size << "] = {0};\n"; out << SP << fType << " " << OpName << "_reset_gate[" << hidden_state_size << "] = {0};\n"; @@ -307,14 +302,14 @@ auto ROperator_GRU::Generate(std::string OpName) out << SP << fType << " *" << OpName << "_hidden_state = tensor_" << fNY << ";\n"; } else { if (fUseSession) { - out << SP << fType << " * " << OpName << "_hidden_state = fVec_" << OpName << "_hidden_state.data();\n"; + out << SP << fType << " * " << OpName << "_hidden_state = " << getVec("hidden_state") << ";\n"; } else { out << SP << fType << " " << OpName << "_hidden_state[" << hidden_state_size << "] = {0};\n"; } } if (fUseSession) { - out << SP << fType << " * " << OpName << "_feedback = fVec_" << OpName << "_feedback.data();\n"; + out << SP << fType << " * " << OpName << "_feedback = " << getVec("feedback") << ";\n"; } else { out << SP << fType << " " << OpName << "_feedback[" << batch_size * fAttrHiddenSize << "] = {0};\n"; } diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc b/src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc index bec7760..9d31b7f 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc +++ b/src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc @@ -1,6 +1,5 @@ -#ifndef SOFIE_ROPERATOR_LSTM_I -#define SOFIE_ROPERATOR_LSTM_I - +#ifndef TMVA_SOFIE_ROPERATOR_LSTM_I +#define TMVA_SOFIE_ROPERATOR_LSTM_I namespace SOFIE { @@ -291,7 +290,7 @@ auto ROperator_LSTM::Generate(std::string OpName) // set the input if (fAttrLayout == 0) { - out << SP << fType << " *" << OpName << "_input = tensor_" << fNX << ";\n"; + out << SP << fType << " const *" << OpName << "_input = tensor_" << fNX << ";\n"; } else { if (fUseSession) out << SP << fType << " * " << OpName << "_input = fVec_" << OpName << "_input.data();\n"; diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc b/src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc index c03c1c2..08dc3dc 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc +++ b/src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc @@ -1,6 +1,5 @@ -#ifndef SOFIE_ROPERATOR_RNN_I -#define SOFIE_ROPERATOR_RNN_I - +#ifndef TMVA_SOFIE_ROPERATOR_RNN_I +#define TMVA_SOFIE_ROPERATOR_RNN_I namespace SOFIE { @@ -230,7 +229,7 @@ auto ROperator_RNN::Generate(std::string OpName) // set the input if (fAttrLayout == 0) { if (fType == "float") { - out << SP << "float *" << OpName << "_input = tensor_" << fNX << ";\n"; + out << SP << "float const*" << OpName << "_input = tensor_" << fNX << ";\n"; } } else { if (fUseSession) diff --git a/src/SOFIE_core/src/RModel.cxx b/src/SOFIE_core/src/RModel.cxx index 8b87749..b2d8625 100644 --- a/src/SOFIE_core/src/RModel.cxx +++ b/src/SOFIE_core/src/RModel.cxx @@ -4,48 +4,21 @@ #include #include +#ifdef SOFIE_SUPPORT_ROOT_BINARY #include "TFile.h" +#endif #include "SOFIE/RModel.hxx" #include "SOFIE/SOFIE_common.hxx" - namespace SOFIE { -RModel::RModel(RModel&& other) { - fInputTensorInfos = std::move(other.fInputTensorInfos); - fReadyInputTensorInfos = std::move(other.fReadyInputTensorInfos); - fOutputTensorNames = other.fOutputTensorNames; - fInputTensorNames = other.fInputTensorNames; - fOperators = std::move(other.fOperators); - fInitializedTensors = std::move(other.fInitializedTensors); - fIntermediateTensorInfos = std::move(other.fIntermediateTensorInfos); - fName = other.fName; - fFileName = other.fFileName; - fParseTime = other.fParseTime; - fGC = other.fGC; - fNeededBlasRoutines = other.fNeededBlasRoutines; - fNeededStdLib = other.fNeededStdLib; +namespace { +const std::string SP = " "; } -RModel& RModel::operator=(RModel&& other) { - fInputTensorInfos = std::move(other.fInputTensorInfos); - fReadyInputTensorInfos = std::move(other.fReadyInputTensorInfos); - fOutputTensorNames = other.fOutputTensorNames; - fInputTensorNames = other.fInputTensorNames; - fOperators = std::move(other.fOperators); - fInitializedTensors = std::move(other.fInitializedTensors); - fIntermediateTensorInfos = std::move(other.fIntermediateTensorInfos); - fName = other.fName; - fFileName = other.fFileName; - fParseTime = other.fParseTime; - fGC = other.fGC; - fNeededBlasRoutines = other.fNeededBlasRoutines; - fNeededStdLib = other.fNeededStdLib; - return *this; -} -const std::vector& RModel::GetTensorShape(std::string name) const { +const std::vector& RModel::GetTensorShape(const std::string & name) const { auto f = fReadyInputTensorInfos.find(name); if (f != fReadyInputTensorInfos.end()) { return f->second.shape; @@ -62,6 +35,16 @@ const std::vector& RModel::GetTensorShape(std::string name) const { if (f4 != fIntermediateTensorInfos.end()) { return f4->second.shape; } + // case of shape tensors + auto f5 = fShapeTensors.find(name); + if (f5 != fShapeTensors.end()) { + // shape is vector of size 1 with size of shape values or just a scalar + if (f5->second.second) // check scalar flag + return std::vector{}; + else + return std::vector{f5->second.first.size()}; + } + if (fDynamicTensorInfos.find(name) != fDynamicTensorInfos.end()) throw std::runtime_error("TMVA SOFIE tensor [" + name + "] is a dynamic tensor. Use GetDynamicTensorShape instead of GetTensorShape"); @@ -82,7 +65,6 @@ std::vector RModel::GetDimTensorShape(const std::string & name) const { // for this we need to return the vector by value return ConvertShapeToDim(GetTensorShape(name)); } - std::vector RModel::GetDynamicTensorShape(const std::string & name) const { if (auto f = fDynamicTensorInfos.find(name); f != fDynamicTensorInfos.end()) { return f->second.shape; @@ -90,12 +72,14 @@ std::vector RModel::GetDynamicTensorShape(const std::string & name) const { if (auto f = fInputTensorInfos.find(name); f != fInputTensorInfos.end()) { return f->second.shape; } - // in case is not a dynamic tensor convert normal shape to Dim one - // for this we need to return the vector by value - return ConvertShapeToDim(GetTensorShape(name)); + // throw error if shape is not dynamic + if (!IsDynamicTensor(name)) + throw std::runtime_error("TMVA SOFIE tensor [" + name + "] for which the shape is requested is not dynamic"); + + throw std::runtime_error("TMVA SOFIE tensor [" + name + "] for which the shape is requested is not found"); } -const ETensorType& RModel::GetTensorType(std::string name) const { +const ETensorType& RModel::GetTensorType(const std::string & name) const { auto f = fReadyInputTensorInfos.find(name); if (f != fReadyInputTensorInfos.end()) { return f->second.type; @@ -116,6 +100,10 @@ const ETensorType& RModel::GetTensorType(std::string name) const { if (f5 != fDynamicTensorInfos.end()){ return f5->second.type; } + // case of shape tensor type is INT64 + if (fShapeTensors.find(name) != fShapeTensors.end()){ + return ETensorType::INT64; + } if (fIsSubGraph && fParentGraph) return fParentGraph->GetTensorType(name); @@ -129,6 +117,7 @@ bool RModel::CheckIfTensorAlreadyExist(std::string tensor_name) { if (fInitializedTensors.find(tensor_name) != fInitializedTensors.end()) return true; if (fIntermediateTensorInfos.find(tensor_name) != fIntermediateTensorInfos.end()) return true; if (fDynamicTensorInfos.find(tensor_name) != fDynamicTensorInfos.end()) return true; + if (fShapeTensors.find(tensor_name) != fShapeTensors.end()) return true; if (fIsSubGraph && fParentGraph) return fParentGraph->CheckIfTensorAlreadyExist(tensor_name); return false; } @@ -197,7 +186,7 @@ void RModel::AddConstantTensor(std::string tensor_name, ETensorType type, std::v tensor_name = UTILITY::Clean_name(tensor_name); //NB: own data if (CheckIfTensorAlreadyExist(tensor_name)) { - throw std::runtime_error("TMVA-SOFIE: initialized tensor with name " + tensor_name + " already exists \n"); + throw std::runtime_error("TMVA-SOFIE: constant tensor with name " + tensor_name + " already exists \n"); } InitializedTensor new_tensor {type, shape, data, true}; // add here flag to specify is a constant tensor fInitializedTensors[tensor_name] = new_tensor; @@ -231,9 +220,11 @@ bool RModel::IsConstantTensor(const std::string& tensorName) const { return itr->second.IsConstantTensor(); } +// dynamic tensors include also Dim input tensors bool RModel::IsDynamicTensor(const std::string& tensorName) const { std::string name = UTILITY::Clean_name(tensorName); - return fDynamicTensorInfos.find(name) != fDynamicTensorInfos.end(); + bool ret = fDynamicTensorInfos.find(name) != fDynamicTensorInfos.end(); + return (ret) ? true : IsDimInputTensor(tensorName); } bool RModel::IsDimInputTensor(const std::string& tensorName) const { std::string name = UTILITY::Clean_name(tensorName); @@ -272,17 +263,21 @@ void RModel::AddDynamicTensor(std::string tensor_name, ETensorType type, std::ve // store shape parameter if not existing for (auto &d : shape) { if (d.isParam) { - if (fShapeParams.count(d.param) == 0) { - // case parameter is an expression of some other existing parameter, no need to - // register it - if (d.dim != size_t(-1)) { - fShapeParams[d.param] = std::to_string(d.dim); - } + if (d.dim != size_t(-1)) { + AddShapeParam(d.param, d.dim); } } } } +void RModel::AddShapeParam(const std::string & param, size_t default_value) { + if (fShapeParams.count(param) == 0) { + fShapeParams[param] = std::to_string(default_value); + // add also in the vector list (used to keep the order) + fDimShapeNames.push_back(param); + } +} + void RModel::AddOutputTensorNameList(std::vector outputtensornames) { fOutputTensorNames.clear(); for(auto& it : outputtensornames) { @@ -323,100 +318,180 @@ void RModel::SetNotWritableInitializedTensor(const std::string & tensor_name) { t->second.SetNotWritable(); } -std::string RModel:: AllocateIntermediateMemory(std::span op_output_tensors) { +std::string RModel::AllocateIntermediateMemory(std::span op_output_tensors) +{ + std::stringstream code; - std::string memory_allocation_string = ""; - bool allocated; + if (fVerbose) { + std::cout << "Total chunks allocated\n"; + for (auto chunk = fIntermediateMemoryInfo.total_stack.begin(); chunk != fIntermediateMemoryInfo.total_stack.end(); ++chunk) { + std::cout << "..... chunk " << chunk->first << " size " << chunk->second.tensor_size << " " << chunk->second.tensor_name << std::endl; + } + } - for (auto& it : op_output_tensors) { - allocated = false; - if (GetTensorType(std::string(it)) == ETensorType::BOOL || - fInitializedTensors.find(std::string(it)) != fInitializedTensors.end() || - fDynamicTensorInfos.find(std::string(it)) != fDynamicTensorInfos.end()) continue; + auto declareIntermediateTensor = [this, &code](std::string const &name, size_t size, size_t location) { + std::string typeName = ConvertTypeToString(GetTensorType(name)); + code << "\n // Allocating memory for intermediate tensor " << name << " with size " << size << " bytes"; + code << "\n" + << typeName << "* tensor_" << name << " = reinterpret_cast<" << typeName + << "*>(fIntermediateMemoryPool.data() + " << location << ");\n"; + }; + + if (fVerbose) std::cout << "*** AllocateIntermediateMemory: Loop on op output tensors\n"; + // order output tensors by size + std::vector ordered_output_tensors; + + for (auto &it : op_output_tensors) { + auto name = std::string(it); + if (GetTensorType(name) == ETensorType::BOOL || fInitializedTensors.find(name) != fInitializedTensors.end() || + fDynamicTensorInfos.find(name) != fDynamicTensorInfos.end()) + continue; + + auto tensor_size = GetTypeSize(GetTensorType(name)) * ConvertShapeToLength(GetTensorShape(name)); + // important fill the pair in the ordered output tensors with the string view and not the string + TensorMemoryInfo tmi = {it, tensor_size}; + ordered_output_tensors.push_back(tmi); + } + std::sort(ordered_output_tensors.begin(), ordered_output_tensors.end(), + [](const TensorMemoryInfo &a, const TensorMemoryInfo &b) { return a.tensor_size > b.tensor_size; }); - auto tensor_size = GetTypeSize(GetTensorType(std::string(it))) * ConvertShapeToLength(GetTensorShape(std::string(it))); - memory_allocation_string += "\n // Allocating memory for intermediate tensor " + std::string(it) + " with size " + std::to_string(tensor_size) + " bytes"; + for (auto &it : ordered_output_tensors) { + bool allocated = false; + std::string name = std::string{it.tensor_name}; + size_t tensor_size = it.tensor_size; + if (fVerbose) + std::cout << "output tensor " << name << " size " << tensor_size << std::endl; - for (auto chunk = fIntermediateMemoryInfo.available_stack.begin(); chunk != fIntermediateMemoryInfo.available_stack.end(); ) { + for (auto chunk = fIntermediateMemoryInfo.available_stack.begin(); + chunk != fIntermediateMemoryInfo.available_stack.end();) { - // check if available memory chunks can accommodate the tensor - if (chunk->second >= tensor_size) { - auto new_chunk = fIntermediateMemoryInfo.total_stack[chunk->first].split(it, tensor_size); - auto new_chunk_location = chunk->first+chunk->second-tensor_size; - fIntermediateMemoryInfo.total_stack[new_chunk_location] = new_chunk; + if (fVerbose) std::cout << ".. available chunk " << chunk->first << " with size = " << chunk->second; + // check if available memory chunks can accommodate the tensor + if (chunk->second >= tensor_size) { + // need to use here string_view (i.e it.tensor_name) + // split returns the new chunk with size of new tensor. The free chunk is before the used one + auto new_chunk = fIntermediateMemoryInfo.total_stack[chunk->first].split(it.tensor_name, tensor_size); + auto new_chunk_location = chunk->first + chunk->second - tensor_size; + fIntermediateMemoryInfo.total_stack[new_chunk_location] = new_chunk; - memory_allocation_string += "\n" + ConvertTypeToString(GetTensorType(std::string(it))) + - "* tensor_" + std::string(it) + - " = reinterpret_cast<"+ConvertTypeToString(GetTensorType(std::string(it)))+"*>(fIntermediateMemoryPool + " + std::to_string(new_chunk_location) + ");\n"; - chunk->second -= tensor_size; + declareIntermediateTensor(name, tensor_size, new_chunk_location); + chunk->second -= tensor_size; - allocated = true; + allocated = true; - if (chunk->second == 0) { - chunk = fIntermediateMemoryInfo.available_stack.erase(chunk); - } + if (fVerbose) std::cout << " is re-used and split in a new of size " << new_chunk.tensor_size << " at " << new_chunk_location; - break; - } - ++chunk; + if (chunk->second == 0) { + if (fVerbose) std::cout << " and deleted since size matches"; + fIntermediateMemoryInfo.available_stack.erase(chunk); } + if (fVerbose) std::cout << std::endl; + break; + } else if (chunk->first == fIntermediateMemoryInfo.available_stack.rbegin()->first && + fIntermediateMemoryInfo.total_stack.rbegin()->first == chunk->first) { + // case last available chunk is the last in the memory, we can increase that one + fIntermediateMemoryInfo.total_stack[chunk->first] = {it.tensor_name, tensor_size}; + declareIntermediateTensor(name, tensor_size, chunk->first); + fIntermediateMemoryInfo.available_stack.erase(chunk); + allocated = true; + if (fVerbose) std::cout << " is extended with a bigger one of size " << tensor_size << std::endl; + break; + } + ++chunk; + if (fVerbose) std::cout << std::endl; + } - if (!allocated) { - size_t chunk_idx = fIntermediateMemoryInfo.total_stack.empty() - ? 0 - : fIntermediateMemoryInfo.total_stack.rbegin()->first + fIntermediateMemoryInfo.total_stack.rbegin()->second.tensor_size; + if (!allocated) { + size_t chunk_idx = fIntermediateMemoryInfo.total_stack.empty() + ? 0 + : fIntermediateMemoryInfo.total_stack.rbegin()->first + + fIntermediateMemoryInfo.total_stack.rbegin()->second.tensor_size; - fIntermediateMemoryInfo.total_stack[chunk_idx] = - { - it, - tensor_size - }; + fIntermediateMemoryInfo.total_stack[chunk_idx] = it; - memory_allocation_string += "\n"+ConvertTypeToString(GetTensorType(std::string(it)))+"* tensor_"+ std::string(it) + "= reinterpret_cast<"+ConvertTypeToString(GetTensorType(std::string(it)))+"*>(fIntermediateMemoryPool + " + std::to_string(chunk_idx) + ");\n"; - } + declareIntermediateTensor(name, tensor_size, chunk_idx); + + if (fVerbose) std::cout << "no chunk available - add in total stack a new chunk with size of tensor and idx : " << chunk_idx + << std::endl; + } } - return memory_allocation_string; + return code.str(); } void RModel::CheckAndFlushIntermediateMemory(std::span op_input_tensors, const size_t& op_idx){ - for (auto &it : op_input_tensors){ + if (fVerbose) std::cout << "*** CheckAndFlushIntermediateMemory: Loop on input tensors for op " << op_idx << "\n"; + //print available chunks + if (fVerbose) std::cout << "available chunks before freeing them : \n"; + for (auto chunk = fIntermediateMemoryInfo.available_stack.begin(); + chunk != fIntermediateMemoryInfo.available_stack.end(); chunk++) { + if (fVerbose) std::cout << "-- free chunk " << chunk->first << " size = " << chunk->second << std::endl; + } + for (auto &it : op_input_tensors) { // last occurence of the tensor is reached => flush it from memory + if (fVerbose) std::cout << ".. input tensors : " << it; if (fIntermediateTensorFrequencyLookup[it] == op_idx) { + if (fVerbose) std::cout << " flash condition is met - looping on chunks to find matching one \n"; for (auto chunk = fIntermediateMemoryInfo.total_stack.begin(); - chunk != fIntermediateMemoryInfo.total_stack.end(); ++chunk ) { - if (chunk->second.tensor_name == it) { - - // check if nearby chunks in available memory can coalesce - auto first_greater = fIntermediateMemoryInfo.available_stack.upper_bound(chunk->first); // smallest element greater than the flushed chunk idx - auto last_smaller = (first_greater == fIntermediateMemoryInfo.available_stack.begin()) ? fIntermediateMemoryInfo.available_stack.end() : std::prev(first_greater); // largest element smaller than the flushed chunk idx - - // check if the next stack entry is actually adjacent in memory - if (last_smaller->first+last_smaller->second + 1 == chunk->first){ - last_smaller->second += chunk->second.tensor_size; - fIntermediateMemoryInfo.total_stack[last_smaller->first].merge(chunk->second); - - if (last_smaller->first + last_smaller->second + 1 == first_greater->first){ - fIntermediateMemoryInfo.total_stack[last_smaller->first].merge(fIntermediateMemoryInfo.total_stack[first_greater->first]); - first_greater = fIntermediateMemoryInfo.available_stack.erase(first_greater); - } - } else{ - if (chunk->first + chunk->second.tensor_size + 1 == first_greater->first){ - fIntermediateMemoryInfo.total_stack[chunk->first].merge(fIntermediateMemoryInfo.total_stack[first_greater->first]); - first_greater = fIntermediateMemoryInfo.available_stack.erase(first_greater); - } - fIntermediateMemoryInfo.available_stack.insert({ - chunk->first, - chunk->second.tensor_size - }); - } + chunk != fIntermediateMemoryInfo.total_stack.end(); ++chunk) { + if (fVerbose) std::cout << "--- chunk " << chunk->first << " , " << chunk->second.tensor_name << " size " << chunk->second.tensor_size; + if (chunk->second.tensor_name == it) { + if (fVerbose) std::cout << " -- Found chunk corresponding to input tensor: " << chunk->first; + // check if nearby chunks in available memory can coalesce + auto first_greater = fIntermediateMemoryInfo.available_stack.upper_bound( + chunk->first); // smallest element greater than the flushed chunk idx + auto last_smaller = (first_greater == fIntermediateMemoryInfo.available_stack.begin()) + ? fIntermediateMemoryInfo.available_stack.end() + : std::prev(first_greater); // largest element smaller than the flushed chunk idx + + // check if the next stack entry is actually adjacent in memory + + if (last_smaller != fIntermediateMemoryInfo.available_stack.end() && + last_smaller->first + last_smaller->second == chunk->first) { + // merge chunk with previous one + last_smaller->second += chunk->second.tensor_size; + fIntermediateMemoryInfo.total_stack[last_smaller->first].merge(chunk->second); + if (fVerbose) std::cout << " is adjacent in memory with previous one - merge "; + if (first_greater != fIntermediateMemoryInfo.available_stack.end() && + last_smaller->first + last_smaller->second == first_greater->first) { + // merge also with following one + last_smaller->second += first_greater->second; + fIntermediateMemoryInfo.total_stack[last_smaller->first].merge( + fIntermediateMemoryInfo.total_stack[first_greater->first]); + // delete merged one in available stack and in total stack + fIntermediateMemoryInfo.total_stack.erase(first_greater->first); + fIntermediateMemoryInfo.available_stack.erase(first_greater); + if (fVerbose) std::cout << " merge also with following that is free "; + } + fIntermediateMemoryInfo.total_stack.erase(chunk->first); + if (fVerbose) std::cout << std::endl; + break; + } else if (first_greater != fIntermediateMemoryInfo.available_stack.end() && + chunk->first + chunk->second.tensor_size == first_greater->first) { + // merge with first greater + if (fVerbose) std::cout << " is adjacent in memory with following one - merge \n"; + // cannot modify idx of first_greter. Insert a new one and delete previous one + size_t new_size = chunk->second.tensor_size + first_greater->second; + size_t first_greater_idx = first_greater->first; + fIntermediateMemoryInfo.available_stack.erase(first_greater); + // cannot use anymore first_greater + fIntermediateMemoryInfo.available_stack.insert({chunk->first, new_size}); + fIntermediateMemoryInfo.total_stack[chunk->first].merge( + fIntermediateMemoryInfo.total_stack[first_greater_idx]); + fIntermediateMemoryInfo.total_stack.erase(first_greater_idx); + } else { + fIntermediateMemoryInfo.available_stack.insert({chunk->first, chunk->second.tensor_size}); + if (fVerbose) std::cout << " insert in the available stack the chunk with size " << chunk->second.tensor_size << std::endl; } + chunk->second.tensor_name = "free"; + break; + } } + } else { + if (fVerbose) std::cout << std::endl; } } } - - void RModel::Initialize(int batchSize, bool verbose) { std::map inputParams; if (batchSize > 0) { @@ -464,7 +539,7 @@ void RModel::Initialize(const std::map & inputParams, bool auto shape = ConvertShapeToInt(input.second.shape); if (verbose) std::cout << "converting input shape for " << input.first << " " << ConvertShapeToString(shape) << " from " - << ConvertDimShapeToString(input.second.shape) << std::endl; + << ConvertShapeToString(input.second.shape) << std::endl; if (!shape.empty()) { // case shape is defined (not parametric) we add the tensor in the fReadyInputTensorInfos map and // we remove the tensor from the fInputTensorInfo where th eold parametric shape was stored @@ -478,8 +553,12 @@ void RModel::Initialize(const std::map & inputParams, bool else { // store the found parametric shape parameters for (auto &d : input.second.shape) { - if (d.isParam) - fShapeParams[d.param] = std::to_string(d.dim); + if (d.isParam) { + if (fShapeParams.count(d.param) == 0) { + fDimShapeNames.push_back(d.param); + fShapeParams[d.param] = std::to_string(d.dim); + } + } } } } @@ -514,10 +593,11 @@ void RModel::Initialize(const std::map & inputParams, bool } fOperators[op_idx]->Initialize(*this); for(auto &it:fOperators[op_idx]->GetOpOutputTensors()){ + std::string name = std::string{it}; if (fIntermediateTensorFrequencyLookup.find(it) == fIntermediateTensorFrequencyLookup.end() && - std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), std::string(it)) == fOutputTensorNames.end() && - fInitializedTensors.find(std::string(it)) == fInitializedTensors.end() && - fDynamicTensorInfos.find(std::string(it)) == fDynamicTensorInfos.end()){ + std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), name) == fOutputTensorNames.end() && + fInitializedTensors.find(name) == fInitializedTensors.end() && + fDynamicTensorInfos.find(name) == fDynamicTensorInfos.end()){ fIntermediateTensorFrequencyLookup[it] = op_idx; } } @@ -600,10 +680,13 @@ void RModel::GenerateInitializedTensorInfo() for (auto &i : fInitializedTensors) { if (!fUseWeightFile || i.second.IsConstantTensor()) { - if (i.second.type() == ETensorType::FLOAT) + if (i.second.type() == ETensorType::FLOAT) { fGC += GenerateConstantTensorCode(i); - else if (i.second.type() == ETensorType::INT64) + fConstantTensorSize += ConvertShapeToLength(i.second.shape()) * 4; + } else if (i.second.type() == ETensorType::INT64) { fGC += GenerateConstantTensorCode(i); + fConstantTensorSize += ConvertShapeToLength(i.second.shape()) * 8; + } } else { // case of tensors which are read from a file @@ -611,43 +694,55 @@ void RModel::GenerateInitializedTensorInfo() if (i.second.type() == ETensorType::FLOAT) { fGC += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(length) + ");\n"; fGC += "float * tensor_" + i.first + " = fTensor_" + i.first + ".data();\n"; + fWeightsTensorSize += ConvertShapeToLength(i.second.shape()) * 4; } } } } void RModel::GenerateIntermediateMemoryPool() { - if (fIntermediateMemoryInfo.total_stack.size() == 0) return; + if (fIntermediateMemoryInfo.total_stack.empty()) return; fGC += "\n//--- Allocating session memory pool to be used for allocating intermediate tensors\n"; // char memory block is allocated since char takes 1 byte, thus easier to allocate tensors // of other data types - fGC += "char* fIntermediateMemoryPool = new char[" + std::to_string(fIntermediateMemoryInfo.total_stack.rbegin()->first + fIntermediateMemoryInfo.total_stack.rbegin()->second.tensor_size)+ "];\n\n"; + auto const &totalStack = fIntermediateMemoryInfo.total_stack; + const size_t memPoolSize = totalStack.rbegin()->first + totalStack.rbegin()->second.tensor_size; + fGC += "std::vector fIntermediateMemoryPool = std::vector(" + std::to_string(memPoolSize) + ");\n\n"; } void RModel::GenerateIntermediateTensorInfo() { if (!fIntermediateTensorInfos.empty()) { std::string tensor_declaration_block = ""; - for (auto &i : fIntermediateTensorInfos) { if (i.second.type == ETensorType::BOOL) { tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(ConvertShapeToLength(i.second.shape)) + ");\n"; tensor_declaration_block += "std::uint8_t * tensor_" + i.first + " = fTensor_" + i.first + ".data();\n"; + continue; } - if (fIntermediateTensorFrequencyLookup.find(i.first) == fIntermediateTensorFrequencyLookup.end() && std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == fOutputTensorNames.end()) { + bool is_extended = (fOptimizationLevel == OptimizationLevel::kExtended); + bool not_in_freq_map = + (fIntermediateTensorFrequencyLookup.find(i.first) == fIntermediateTensorFrequencyLookup.end()); + bool not_in_output_names = + (std::find(fOutputTensorNames.begin(), fOutputTensorNames.end(), i.first) == fOutputTensorNames.end()); + + if ((not_in_freq_map && not_in_output_names) || (!not_in_freq_map && !is_extended && not_in_output_names)) { size_t length = ConvertShapeToLength(i.second.shape); if (i.second.type == ETensorType::FLOAT) { tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(length) + ");\n"; tensor_declaration_block += "float * tensor_" + i.first + " = fTensor_" + i.first + ".data();\n"; + fOtherTensorSize += 4 * length; } else if (i.second.type == ETensorType::DOUBLE) { tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(length) + ");\n"; tensor_declaration_block += "double * tensor_" + i.first + " = fTensor_" + i.first + ".data();\n"; + fOtherTensorSize += 8 * length; } else if (i.second.type == ETensorType::INT64) { tensor_declaration_block += "std::vector fTensor_" + i.first + " = std::vector(" + std::to_string(length) + ");\n"; tensor_declaration_block += "int64_t * tensor_" + i.first + " = fTensor_" + i.first + ".data();\n"; + fOtherTensorSize += 8 * length; } } } @@ -686,17 +781,17 @@ void RModel::GenerateOperatorDeclarations() { fGC += "\n"; } -void RModel::GenerateDynamicTensorInfo() { - fGC += "//---- allocate the intermediate dynamic tensors\n"; - std::stringstream out; - for (auto & i: fDynamicTensorInfos) { - auto length = ConvertDynamicShapeToLength(i.second.shape); - out << SP << "if (" << length << " > 0) {\n"; - out << SP << SP << "fTensor_" << i.first << ".resize(" << length << ");\n"; - out << SP << SP << "tensor_" << i.first << " = fTensor_" << i.first << ".data();\n"; - out << SP << "}\n"; - } - fGC += out.str(); +void RModel::GenerateDynamicTensorInfo() +{ + std::stringstream out; + for (auto &i : fDynamicTensorInfos) { + auto length = ConvertDynamicShapeToLength(i.second.shape); + out << SP << "if (" << length << " > 0) {\n"; + out << SP << SP << "fTensor_" << i.first << ".resize(" << length << ");\n"; + out << SP << SP << "tensor_" << i.first << " = fTensor_" << i.first << ".data();\n"; + out << SP << "}\n"; + } + fGC += out.str(); } std::string RModel::GenerateInferSignature(bool isdecl) { @@ -724,7 +819,7 @@ std::string RModel::GenerateInferSignature(bool isdecl) { if (type == "other") throw std::runtime_error("TMVA-SOFIE: input tensor " + name + " is of a data type which is not yet supported."); - rGC += type + "* "; + rGC += type + " const* "; } rGC += "tensor_" + name + ","; i_input++; @@ -935,7 +1030,7 @@ void RModel::GenerateSessionCode() } fGC += SP + "using SOFIE::UTILITY::FillOutput;\n\n"; - + for (std::string const &name : fOutputTensorNames) { // need to check is size is the same (don't want to return a vector with // larger size) in that case better to copy @@ -1029,8 +1124,7 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { fGC += " f.seekg(" + std::to_string(pos) + ");\n"; } - fGC += " std::string tensor_name;\n"; - fGC += " size_t length;\n"; + fGC += " using SOFIE::ReadTensorFromStream;\n"; // loop on tensors and parse the file for (auto& i: fInitializedTensors) { @@ -1038,25 +1132,8 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { if (!i.second.IsWeightTensor()) continue; std::string tensor_name = "tensor_" + i.first; if (i.second.type() == ETensorType::FLOAT) { - size_t length = 1; - length = ConvertShapeToLength(i.second.shape()); - std::string slength = std::to_string(length); - fGC += " f >> tensor_name >> length;\n"; - fGC += " if (tensor_name != \"" + tensor_name + "\" ) {\n"; - fGC += " std::string err_msg = \"TMVA-SOFIE failed to read the correct tensor name; expected name is " + - tensor_name + " , read \" + tensor_name;\n"; - fGC += " throw std::runtime_error(err_msg);\n"; - fGC += " }\n"; - fGC += " if (length != " + slength + ") {\n"; - fGC += " std::string err_msg = \"TMVA-SOFIE failed to read the correct tensor size; expected size is " + - slength + " , read \" + std::to_string(length) ;\n"; - fGC += " throw std::runtime_error(err_msg);\n"; - fGC += " }\n"; - fGC += " for (size_t i = 0; i < length; ++i)\n"; - fGC += " f >> " + tensor_name + "[i];\n"; - fGC += " if (f.fail()) {\n"; - fGC += " throw std::runtime_error(\"TMVA-SOFIE failed to read the values for tensor " + tensor_name + "\");\n"; - fGC += " }\n"; + std::string length = std::to_string(ConvertShapeToLength(i.second.shape())); + fGC += " ReadTensorFromStream(f, " + tensor_name + ", \"" + tensor_name + "\", " + length + ");\n"; } else { std::runtime_error("tmva-sofie tensor " + tensor_name + " with type " + ConvertTypeToString(i.second.type()) + " cannot be read from a file"); } @@ -1066,6 +1143,7 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { // generate the code to read initialized tensors from a ROOT data file if(fWeightFile == WeightFileType::RootBinary) { +#ifdef SOFIE_SUPPORT_ROOT_BINARY fGC += " {\n"; fGC += " std::unique_ptr rootFile(TFile::Open(filename.c_str(), \"READ\"));\n"; fGC += " if (!rootFile->IsOpen()) {\n"; @@ -1097,6 +1175,9 @@ void RModel::ReadInitializedTensorsFromFile(long pos) { fGC += " }\n"; } fGC += " }\n"; +#else + throw std::runtime_error("SOFIE was not built with ROOT file support."); +#endif // SOFIE_SUPPORT_ROOT_BINARY } } @@ -1122,6 +1203,7 @@ long RModel::WriteInitializedTensorsToFile(std::string filename) { // Write the initialized tensors to the file if (fWeightFile == WeightFileType::RootBinary) { +#ifdef SOFIE_SUPPORT_ROOT_BINARY if(fIsGNNComponent || fIsGNN) { throw std::runtime_error("SOFIE-GNN yet not supports writing to a ROOT file."); } @@ -1165,6 +1247,9 @@ long RModel::WriteInitializedTensorsToFile(std::string filename) { // this needs to be changed, similar to the text file return -1; +#else + throw std::runtime_error("SOFIE was not built with ROOT file support."); +#endif // SOFIE_SUPPORT_ROOT_BINARY } else if (fWeightFile == WeightFileType::Text) { std::ofstream f; if(fIsGNNComponent) { @@ -1291,9 +1376,9 @@ void RModel::PrintOutputTensors() { for (auto& it: fOutputTensorNames) { std::cout << "Tensor name: \"" << it << "\"\t"; if (!IsDynamicTensor(it)) - std::cout << "shape: " << ConvertShapeToString(GetTensorShape(it)) << std::endl; - else - std::cout << "shape: " << ConvertDimShapeToString(GetDynamicTensorShape(it)) << std::endl; + std::cout << "shape: " << ConvertShapeToString(GetTensorShape(it)) << std::endl; + else + std::cout << "shape: " << ConvertShapeToString(GetDynamicTensorShape(it)) << std::endl; } std::cout << "\n"; } @@ -1359,13 +1444,13 @@ void RModel::OutputGenerated(std::string filename, bool append) { void RModel::Streamer(TBuffer &R__b) { if (R__b.IsReading()) { RModel::Class()->ReadBuffer(R__b, this); - for(auto i=RModel::fInitializedTensors.begin(); i!=RModel::fInitializedTensors.end(); ++i) { - i->second.CastPersistentToShared(); + for (auto & i : fInitializedTensors) { + i.second.CastPersistentToShared(); } } else { - for(auto i=RModel::fInitializedTensors.begin(); i!=RModel::fInitializedTensors.end(); ++i) { - i->second.CastSharedToPersistent(); + for (auto & i : fInitializedTensors) { + i.second.CastSharedToPersistent(); } RModel::Class()->WriteBuffer(R__b, this); } From 20167943d59a3bf7b138fb8391c8eb65b704133e Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Mon, 24 Nov 2025 16:26:31 +0100 Subject: [PATCH 15/22] fix: linking issue because of incorrect symbols --- src/SOFIE_core/CMakeLists.txt | 7 +- src/SOFIE_core/inc/SOFIE/RFunction.hxx | 1 + src/SOFIE_core/inc/SOFIE/RModel_Base.hxx | 1 - src/SOFIE_core/inc/SOFIE/ROperator.hxx | 6 +- .../inc/SOFIE/ROperator_BasicBinary.hxx | 14 +-- src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx | 6 +- src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx | 4 +- src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc | 4 +- src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc | 4 +- .../SOFIE/ROperator_LayerNormalization.hxx | 4 +- src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc | 4 +- src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx | 6 +- .../inc/SOFIE/ROperator_Reshape.hxx | 4 +- src/SOFIE_core/inc/SOFIE/ROperator_Where.hxx | 6 +- src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx | 6 +- src/SOFIE_core/src/RModel.cxx | 37 -------- src/SOFIE_core/test/CMakeLists.txt | 88 ++++++++++++++----- src/SOFIE_core/test/EmitFromONNX.cxx.in | 2 +- .../test/EmitFromONNX_GPU_ALPAKA.cxx.in | 11 ++- src/SOFIE_parsers/CMakeLists.txt | 9 ++ 20 files changed, 126 insertions(+), 98 deletions(-) diff --git a/src/SOFIE_core/CMakeLists.txt b/src/SOFIE_core/CMakeLists.txt index de13b58..ac7499d 100644 --- a/src/SOFIE_core/CMakeLists.txt +++ b/src/SOFIE_core/CMakeLists.txt @@ -94,12 +94,17 @@ target_link_libraries(SOFIE_core PUBLIC RIO ) -ROOT_GENERATE_DICTIONARY(G__SOFIE ${sources_headers} +ROOT_GENERATE_DICTIONARY(G__SOFIE_core ${sources_headers} LINKDEF inc/LinkDef.h MODULE SOFIE_core OPTIONS --deep ) +# Install the dictionaries. +install(FILES ${CMAKE_CURRENT_BINARY_DIR}/libSOFIE_core_rdict.pcm + ${CMAKE_CURRENT_BINARY_DIR}/libSOFIE_core.rootmap + DESTINATION lib) + install(TARGETS SOFIE_core LIBRARY DESTINATION lib ) diff --git a/src/SOFIE_core/inc/SOFIE/RFunction.hxx b/src/SOFIE_core/inc/SOFIE/RFunction.hxx index 53c30e3..f79691a 100644 --- a/src/SOFIE_core/inc/SOFIE/RFunction.hxx +++ b/src/SOFIE_core/inc/SOFIE/RFunction.hxx @@ -3,6 +3,7 @@ #include "SOFIE/RModel_Base.hxx" #include "SOFIE/SOFIE_common.hxx" +#include "SOFIE/ROperator.hxx" #include #include diff --git a/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx b/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx index 460372a..deac58b 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel_Base.hxx @@ -12,7 +12,6 @@ #include #include #include "SOFIE/SOFIE_common.hxx" -#include "SOFIE/ROperator.hxx" #include "TBuffer.h" diff --git a/src/SOFIE_core/inc/SOFIE/ROperator.hxx b/src/SOFIE_core/inc/SOFIE/ROperator.hxx index 9bccc5b..17b62f6 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR -#define TMVA_SOFIE_ROPERATOR +#ifndef SOFIE_ROPERATOR +#define SOFIE_ROPERATOR #include #include @@ -103,4 +103,4 @@ public: }//SOFIE -#endif //TMVA_SOFIE_OPERATOR +#endif //SOFIE_OPERATOR diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx index da7cf63..80f35be 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROperator_BasicBinary -#define TMVA_SOFIE_ROperator_BasicBinary +#ifndef SOFIE_SOFIE_ROperator_BasicBinary +#define SOFIE_SOFIE_ROperator_BasicBinary #include "SOFIE/SOFIE_common.hxx" #include "SOFIE/ROperator.hxx" @@ -402,7 +402,7 @@ public: } op.pop_back(); op += "));\n"; - + std::cout<<"okay till here 1\n"; op += SP + SP + SP + SP + "for (auto const& elem : elements) {\n"; auto stridesA = UTILITY::ComputeStrideFromShape(fDimShapeA); @@ -426,6 +426,7 @@ public: for (int j = 0; j < 3; j++) compute_idx_A.pop_back(); } + std::cout<<"okay till here 2\n"; if (fDimShapeB.empty() || std::all_of(fDimShapeB.begin(), fDimShapeB.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { compute_idx_B = "0"; @@ -442,6 +443,7 @@ public: for (int j = 0; j < 3; j++) compute_idx_B.pop_back(); } + std::cout<<"okay till here 3\n"; int nloop = 0; if (fDimShapeY.empty() || std::all_of(fDimShapeY.begin(), fDimShapeY.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { @@ -461,16 +463,18 @@ public: for (int j = 0; j < 3; j++) compute_idx_Y.pop_back(); } + std::cout<<"okay till here 4\n"; for (int j = 0; j < nloop + 1; j++) op += SP; op += "C[" + compute_idx_Y + "] = " + BinaryOperatorTrait::Op("A[" + compute_idx_A + "]", "B[" + compute_idx_B + "]") + " ;\n"; - + std::cout<<"okay till here 5\n"; for (int i = nloop; i > 0; i--) { for (int j = 0; j < i; j++) op += SP; op += "}\n"; } + return op; } std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string OpName) { @@ -503,4 +507,4 @@ public: } // namespace SOFIE -#endif // TMVA_SOFIE_ROperator_BasicBinary +#endif // SOFIE_ROperator_BasicBinary diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx index 3ef0ee4..c828668 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Concat.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR_Concat - #define TMVA_SOFIE_ROPERATOR_Concat +#ifndef SOFIE_ROPERATOR_Concat +#define SOFIE_ROPERATOR_Concat #include "SOFIE/SOFIE_common.hxx" @@ -320,4 +320,4 @@ }//SOFIE - #endif //TMVA_SOFIE_ROPERATOR_CONCAT \ No newline at end of file + #endif //SOFIE_ROPERATOR_CONCAT \ No newline at end of file diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx index f9998e1..5b553ff 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_GRU.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR_GRU -#define TMVA_SOFIE_ROPERATOR_GRU +#ifndef SOFIE_ROPERATOR_GRU +#define SOFIE_ROPERATOR_GRU #include "SOFIE/RModel.hxx" #include "SOFIE/ROperator.hxx" diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc b/src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc index d011617..38030d1 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc +++ b/src/SOFIE_core/inc/SOFIE/ROperator_GRU.icc @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR_GRU_I -#define TMVA_SOFIE_ROPERATOR_GRU_I +#ifndef SOFIE_ROPERATOR_GRU_I +#define SOFIE_ROPERATOR_GRU_I namespace SOFIE { diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc b/src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc index 9d31b7f..ebf4daf 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc +++ b/src/SOFIE_core/inc/SOFIE/ROperator_LSTM.icc @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR_LSTM_I -#define TMVA_SOFIE_ROPERATOR_LSTM_I +#ifndef SOFIE_ROPERATOR_LSTM_I +#define SOFIE_ROPERATOR_LSTM_I namespace SOFIE { diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_LayerNormalization.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_LayerNormalization.hxx index e6c4c99..4a328de 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_LayerNormalization.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_LayerNormalization.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR_LAYERNORMALIZATION -#define TMVA_SOFIE_ROPERATOR_LAYERNORMALIZATION +#ifndef SOFIE_ROPERATOR_LAYERNORMALIZATION +#define SOFIE_ROPERATOR_LAYERNORMALIZATION #include "SOFIE/RModel.hxx" #include "SOFIE/SOFIE_common.hxx" diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc b/src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc index 08dc3dc..c10c2a5 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc +++ b/src/SOFIE_core/inc/SOFIE/ROperator_RNN.icc @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR_RNN_I -#define TMVA_SOFIE_ROPERATOR_RNN_I +#ifndef SOFIE_ROPERATOR_RNN_I +#define SOFIE_ROPERATOR_RNN_I namespace SOFIE { diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx index 0930a0b..3e8605e 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Range.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR_RANGE -#define TMVA_SOFIE_ROPERATOR_RANGE +#ifndef SOFIE_ROPERATOR_RANGE +#define SOFIE_ROPERATOR_RANGE #include "SOFIE/SOFIE_common.hxx" #include "SOFIE/ROperator.hxx" @@ -121,4 +121,4 @@ public: }//SOFIE -#endif //TMVA_SOFIE_ROPERATOR_RANGE +#endif //SOFIE_ROPERATOR_RANGE diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx index a014547..0a21709 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Reshape.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROPERATOR_RESHAPE -#define TMVA_SOFIE_ROPERATOR_RESHAPE +#ifndef SOFIE_ROPERATOR_RESHAPE +#define SOFIE_ROPERATOR_RESHAPE #include "SOFIE/SOFIE_common.hxx" #include "SOFIE/ROperator.hxx" diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Where.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Where.hxx index 15906e5..19d217d 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Where.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Where.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_ROperator_Where -#define TMVA_SOFIE_ROperator_Where +#ifndef SOFIE_ROperator_Where +#define SOFIE_ROperator_Where #include "SOFIE/SOFIE_common.hxx" #include "SOFIE/ROperator.hxx" @@ -292,4 +292,4 @@ public: }//SOFIE -#endif //TMVA_SOFIE_ROperator_Where +#endif // SOFIE_ROperator_Where diff --git a/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx b/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx index 17ac714..c120570 100644 --- a/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx +++ b/src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx @@ -1,5 +1,5 @@ -#ifndef TMVA_SOFIE_SOFIE_COMMON -#define TMVA_SOFIE_SOFIE_COMMON +#ifndef SOFIE_SOFIE_COMMON +#define SOFIE_SOFIE_COMMON #include "TMVA/RTensor.hxx" @@ -806,4 +806,4 @@ void ReadTensorFromStream(std::istream &is, T &target, std::string const &expect } // namespace SOFIE -#endif //TMVA_SOFIE_COMMON +#endif //SOFIE_COMMON diff --git a/src/SOFIE_core/src/RModel.cxx b/src/SOFIE_core/src/RModel.cxx index b2d8625..fb23c11 100644 --- a/src/SOFIE_core/src/RModel.cxx +++ b/src/SOFIE_core/src/RModel.cxx @@ -636,43 +636,6 @@ void RModel::InitializeSubGraph(std::shared_ptr graph) { } -// Function to generate the code for declaring and initializing constant tensors -// This is for tensors which are not part of weight files and can be created from the Constant operator -template -std::string GenerateConstantTensorCode(const std::pair &t) -{ - std::stringstream strs; - std::string type = ConvertTypeToString(t.second.type()); - size_t length = ConvertShapeToLength(t.second.shape()); - // avoid using stack sizes for constant tensors to reduce compilation time - bool allocateOnStack = (length > 100) ? false : true; - - const T *data = t.second.data(); - - // and check if all values are the same - bool sameData = false; - // for non stack allocation check if data are the same - if (!allocateOnStack && length > 1) { - size_t idx = 1; - do { - sameData = (data[idx] == data[idx - 1]); - idx++; - } while (sameData && idx < length); - } - if (allocateOnStack) { - strs << type << " tensor_" << t.first << "[" << length << "] = " << ConvertValuesToString(length, data) << ";\n"; - } else { - strs << "std::vector<" << type << "> fTensor_" << t.first << " = "; - if (sameData) - strs << "std::vector<" << type << ">(" << length << ", " << ConvertValToString(data[0]) << ");\n"; - else { - strs << ConvertValuesToString(length, data) << ";\n"; - } - strs << "const " << type << " * tensor_" + t.first + " = fTensor_" + t.first + ".data();\n"; - } - return strs.str(); -} - void RModel::GenerateInitializedTensorInfo() { if (!fInitializedTensors.empty()) diff --git a/src/SOFIE_core/test/CMakeLists.txt b/src/SOFIE_core/test/CMakeLists.txt index e4713b6..1ba5dfd 100644 --- a/src/SOFIE_core/test/CMakeLists.txt +++ b/src/SOFIE_core/test/CMakeLists.txt @@ -16,32 +16,54 @@ if (NOT ONNX_MODELS_DIR) set(ONNX_MODELS_DIR input_models) endif() -# Finding .onnx files to be parsed and creating the appropriate code to -# parse all file. It is much faster to combine all parsing in a single executable -# which will avoid initialization time (especially when using ROOT) -set(CAPTURE_STR "EmitModel( \"@1\", \"@2\");") +# String template used to produce calls to EmitModel(...) per file. +set(CAPTURE_STR +"try {\n\ + EmitModel(\"@1\", \"@2\");\n\ +} catch (const std::exception& e) {\n\ + std::cerr << \"[ERROR] Failed processing @1: \" << e.what() << std::endl;\n\ + failures++;\n\ +} catch (...) {\n\ + std::cerr << \"[ERROR] Unknown failure processing @1\" << std::endl;\n\ + failures++;\n\ +}\n\ +") +# --- Collect ONNX files and build ALL_CAPTURES BEFORE any configure_file() --- set(ALL_CAPTURES "") -# Finding .onnx files to be parsed and creating the appropriate command file(GLOB ONNX_FILES "${ONNX_MODELS_DIR}/*.onnx") + +# If there are no models, ONNX_FILES will be empty and ALL_CAPTURES stays empty. foreach(onnx_file ${ONNX_FILES}) get_filename_component(fname ${onnx_file} NAME_WE) get_filename_component(fdir ${onnx_file} DIRECTORY) - string(REPLACE "@1" ${onnx_file} cap ${CAPTURE_STR}) - string(REPLACE "@2" ${fname} cap ${cap}) + + string(REPLACE "@1" "${onnx_file}" cap "${CAPTURE_STR}") + string(REPLACE "@2" "${fname}" cap "${cap}") list(APPEND ALL_CAPTURES ${cap}) endforeach() -string(REPLACE ";" ";\n" EMIT_CAPTURES "${ALL_CAPTURES}") + +# Now generate the combined source files for CPU, ROOT and ALPAKA +# They will have @EMIT_CAPTURES@ substituted with the contents of ALL_CAPTURES +# (CMake configure_file uses variables via @VAR@ when @ONLY is provided). +# To make ALL_CAPTURES visible to configure_file we set a temporary variable +# that configure_file can reference directly. +set(EMIT_CAPTURES "${ALL_CAPTURES}") + +# Note: the .in templates must use @EMIT_CAPTURES@ placeholder. configure_file(EmitFromONNX.cxx.in EmitFromONNX_all.cxx @ONLY) configure_file(EmitFromRoot.cxx.in EmitFromRoot_all.cxx @ONLY) configure_file(EmitFromONNX_GPU_ALPAKA.cxx.in EmitFromONNX_GPU_ALPAKA_all.cxx @ONLY) +# --- CPU emitter executable and test (unchanged) --- ROOTTEST_GENERATE_EXECUTABLE(emitFromONNX EmitFromONNX_all.cxx LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers - FIXTURES_SETUP sofie-compile-models-onnx-build) + FIXTURES_SETUP sofie-compile-models-onnx-build) # silence protobuf warnings seen in version 3.0 and 3.6. Not needed from protobuf version 3.17 target_compile_options(emitFromONNX PRIVATE -Wno-unused-parameter -Wno-array-bounds) +# IMPORTANT: ROOTTEST_ADD_TEST below used to expand ${onnx_file}/${fname} which are loop vars. +# We keep it as a single-test wrapper; per-file test invocation is added later for ALPAKA. ROOTTEST_ADD_TEST(SofieCompileModels_ONNX COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNX ${onnx_file} ${CMAKE_CURRENT_BINARY_DIR}/${fname} FIXTURES_REQUIRED sofie-compile-models-onnx-build @@ -69,7 +91,6 @@ if (BLAS_FOUND) # we need BLAS for compiling the models endif() # For testing serialisation of RModel object - ROOTTEST_GENERATE_EXECUTABLE(emitFromROOT EmitFromRoot_all.cxx LIBRARIES protobuf::libprotobuf RIO SOFIE_core SOFIE_parsers FIXTURES_SETUP sofie-compile-models-onnx-root @@ -132,15 +153,38 @@ ROOT_EXECUTABLE(EmitGraphIndependent GNN/EmitGraphIndependent.cxx LIBRARIES SOFI ROOT_ADD_TEST(tmva-sofie-EmitGraphIndependent COMMAND EmitGraphIndependent) # Generating inference code for heterogeneous testing using ALPAKA -# ROOTTEST_GENERATE_EXECUTABLE(emitFromONNXAlpaka EmitFromONNX_GPU_ALPAKA_all.cxx -# LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers -# FIXTURES_SETUP sofie-compile-models-onnx-alpaka-build) - -# # silence protobuf warnings seen in version 3.0 and 3.6. Not needed from protobuf version 3.17 -# target_compile_options(emitFromONNXAlpaka PRIVATE -Wno-unused-parameter -Wno-array-bounds) - -# ROOTTEST_ADD_TEST(SofieCompileModels_ONNX_Alpaka -# COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNXAlpaka ${onnx_file} ${CMAKE_CURRENT_BINARY_DIR}/${fname} -# FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka-build -# FIXTURES_SETUP sofie-compile-models-onnx-alpaka -# ) +ROOTTEST_GENERATE_EXECUTABLE(emitFromONNXAlpaka EmitFromONNX_GPU_ALPAKA_all.cxx + LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers + FIXTURES_SETUP sofie-compile-models-onnx-alpaka-build) + +# silence protobuf warnings seen in version 3.0 and 3.6. Not needed from protobuf version 3.17 +target_compile_options(emitFromONNXAlpaka PRIVATE -Wno-unused-parameter -Wno-array-bounds) + +# Add explicit per-file post-build runs of the alpaka emitter executable so that +# EmitFromONNXAlpaka is invoked for each detected .onnx file during the build. +# This avoids relying on a single ${onnx_file}/${fname} value that would +# otherwise expand only to the last entry when used outside the loop. + +if (ONNX_FILES) + foreach(onnx_file ${ONNX_FILES}) + get_filename_component(fname ${onnx_file} NAME_WE) + + # Create a post-build command attached to the emitFromONNXAlpaka target that + # will run the built binary with the current onnx file and the chosen outname. + add_custom_command(TARGET emitFromONNXAlpaka + POST_BUILD + COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 $ "${onnx_file}" "${CMAKE_CURRENT_BINARY_DIR}/${fname}" + COMMENT "Running ALPAKA emitter on ${onnx_file}" + ) + endforeach() +endif() + +# Also add a ROOTTEST wrapper so that ctest can run one of the invocations if desired. +# (This remains mostly for CI / test harness compatibility.) +ROOTTEST_ADD_TEST(SofieCompileModels_ONNX_Alpaka + COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNXAlpaka ${ONNX_FILES} + FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka-build + FIXTURES_SETUP sofie-compile-models-onnx-alpaka +) + +# End of CMakeLists.txt diff --git a/src/SOFIE_core/test/EmitFromONNX.cxx.in b/src/SOFIE_core/test/EmitFromONNX.cxx.in index f7a56e2..a45af49 100644 --- a/src/SOFIE_core/test/EmitFromONNX.cxx.in +++ b/src/SOFIE_core/test/EmitFromONNX.cxx.in @@ -23,7 +23,7 @@ int EmitModel(std::string filename, std::string outname) { int main(int argc, char *argv[]){ -@EMIT_CAPTURES@ ; +@EMIT_CAPTURES@ } diff --git a/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in b/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in index e2250e6..10619a5 100644 --- a/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in +++ b/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in @@ -11,14 +11,17 @@ int EmitModel(std::string filename, std::string outname) { RModelParser_ONNX parser; RModel model = parser.Parse(filename); model.GenerateGPU_ALPAKA(); - model.OutputGenerated(outname+"_FromONNX.hxx"); + model.OutputGenerated(outname+"_FromONNX_GPU_ALPAKA.hxx"); return 0; } -int main(int argc, char *argv[]){ +int main(int argc, char *argv[]) { -@EMIT_CAPTURES@ ; + int failures = 0; -} +@EMIT_CAPTURES@ + std::cout << "[SUMMARY] Completed with " << failures << " failures" << std::endl; + return failures == 0 ? 0 : 1; +} diff --git a/src/SOFIE_parsers/CMakeLists.txt b/src/SOFIE_parsers/CMakeLists.txt index 379b7d7..d77d1e6 100644 --- a/src/SOFIE_parsers/CMakeLists.txt +++ b/src/SOFIE_parsers/CMakeLists.txt @@ -102,6 +102,15 @@ target_include_directories(SOFIE_parsers PUBLIC set_target_properties(SOFIE_parsers PROPERTIES POSITION_INDEPENDENT_CODE TRUE) + ROOT_GENERATE_DICTIONARY(G__SOFIE_parsers ${sources_headers} + LINKDEF inc/LinkDef.h + MODULE SOFIE_parsers + OPTIONS --deep +) +install(FILES ${CMAKE_CURRENT_BINARY_DIR}/libSOFIE_parsers_rdict.pcm + ${CMAKE_CURRENT_BINARY_DIR}/libSOFIE_parsers.rootmap + DESTINATION lib) + install(TARGETS SOFIE_parsers LIBRARY DESTINATION lib ) From f35d9d94cc14872488ec6f40625c1f0e35ea70ab Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Thu, 27 Nov 2025 11:48:25 +0100 Subject: [PATCH 16/22] fix: cmake script for tests --- .vscode/settings.json | 3 ++- src/SOFIE_core/src/RModel_ALPAKA.cxx | 4 ++-- src/SOFIE_core/test/CMakeLists.txt | 2 +- src/SOFIE_core/test/EmitFromONNX.cxx.in | 8 +++++++- src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in | 2 +- 5 files changed, 13 insertions(+), 6 deletions(-) diff --git a/.vscode/settings.json b/.vscode/settings.json index fd03126..182ccd4 100644 --- a/.vscode/settings.json +++ b/.vscode/settings.json @@ -75,6 +75,7 @@ "format": "cpp", "any": "cpp", "source_location": "cpp", - "run_inference_particle_net.C": "cpp" + "run_inference_particle_net.C": "cpp", + "test.C": "cpp" } } diff --git a/src/SOFIE_core/src/RModel_ALPAKA.cxx b/src/SOFIE_core/src/RModel_ALPAKA.cxx index 066d4e8..2da0e1f 100644 --- a/src/SOFIE_core/src/RModel_ALPAKA.cxx +++ b/src/SOFIE_core/src/RModel_ALPAKA.cxx @@ -266,11 +266,11 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() { for (size_t id = 0; id < fOperators.size(); id++) { fGC += fOperators[id]->GenerateInitCode_GPU_ALPAKA(); if (fOperators[id]->GetKind() == OperatorKind::GEMM){ - fGC += "\nblas.AddLayoutConfig("+fOperators[id]->GetBlasConfig()+");"; + fGC += "\nblas.AddLayoutConfig("+fOperators[id]->GetBlasConfig()+");\n"; } } - fGC += "alpaka::wait(queue);\n"; + fGC += "\nalpaka::wait(queue);\n"; fGC += "}\n\n"; } diff --git a/src/SOFIE_core/test/CMakeLists.txt b/src/SOFIE_core/test/CMakeLists.txt index 1ba5dfd..c5e0d8f 100644 --- a/src/SOFIE_core/test/CMakeLists.txt +++ b/src/SOFIE_core/test/CMakeLists.txt @@ -39,7 +39,7 @@ foreach(onnx_file ${ONNX_FILES}) string(REPLACE "@1" "${onnx_file}" cap "${CAPTURE_STR}") string(REPLACE "@2" "${fname}" cap "${cap}") - list(APPEND ALL_CAPTURES ${cap}) + string(APPEND ALL_CAPTURES "${cap}") endforeach() # Now generate the combined source files for CPU, ROOT and ALPAKA diff --git a/src/SOFIE_core/test/EmitFromONNX.cxx.in b/src/SOFIE_core/test/EmitFromONNX.cxx.in index a45af49..1433ba6 100644 --- a/src/SOFIE_core/test/EmitFromONNX.cxx.in +++ b/src/SOFIE_core/test/EmitFromONNX.cxx.in @@ -23,7 +23,13 @@ int EmitModel(std::string filename, std::string outname) { int main(int argc, char *argv[]){ -@EMIT_CAPTURES@ + + int failures = 0; + + @EMIT_CAPTURES@ + + std::cout << "[SUMMARY] Completed with " << failures << " failures" << std::endl; + return failures == 0 ? 0 : 1; } diff --git a/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in b/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in index 10619a5..0d51e92 100644 --- a/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in +++ b/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in @@ -20,7 +20,7 @@ int main(int argc, char *argv[]) { int failures = 0; -@EMIT_CAPTURES@ + @EMIT_CAPTURES@ std::cout << "[SUMMARY] Completed with " << failures << " failures" << std::endl; return failures == 0 ? 0 : 1; From cdc6a9f8865b4eb7d43bff80dda8277391de33ad Mon Sep 17 00:00:00 2001 From: Saransh Chopra Date: Tue, 2 Dec 2025 16:11:53 +0100 Subject: [PATCH 17/22] fix: define failures in EmitFromRoot.cxx.in (#6) --- src/SOFIE_core/test/EmitFromRoot.cxx.in | 6 +++++- 1 file changed, 5 insertions(+), 1 deletion(-) diff --git a/src/SOFIE_core/test/EmitFromRoot.cxx.in b/src/SOFIE_core/test/EmitFromRoot.cxx.in index 4a630c7..b08d17b 100644 --- a/src/SOFIE_core/test/EmitFromRoot.cxx.in +++ b/src/SOFIE_core/test/EmitFromRoot.cxx.in @@ -43,6 +43,10 @@ int EmitModel(std::string inputfile, std::string outname){ int main(int argc, char *argv[]){ -@EMIT_CAPTURES@ ; + int failures = 0; + @EMIT_CAPTURES@; + + std::cout << "[SUMMARY] Completed with " << failures << " failures" << std::endl; + return failures == 0 ? 0 : 1; } From 3ffbe4605c46397e353505f1b3027e53908ee09f Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Sun, 14 Dec 2025 23:41:56 +0100 Subject: [PATCH 18/22] fix: layout inconsistencies in alpaka code generation --- src/CMakeLists.txt | 1 + src/SOFIE_core/CMakeLists.txt | 1 + src/SOFIE_core/README.md | 3 - src/SOFIE_core/inc/SOFIE/RModel.hxx | 4 + src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx | 28 +- src/SOFIE_core/inc/SOFIE/SOFIE_common.hxx | 33 +- src/SOFIE_core/src/RModel_ALPAKA.cxx | 38 +- src/SOFIE_core/src/SOFIE_common.cxx | 10 +- src/SOFIE_core/test/EmitFromONNX.cxx.in | 2 +- .../test/EmitFromONNX_GPU_ALPAKA.cxx.in | 2 +- src/SOFIE_core/test/EmitFromRoot.cxx.in | 7 +- src/utils/CMakeLists.txt | 11 + src/utils/SOFIE/RTensor.hxx | 628 ++++++++++++++++++ 13 files changed, 726 insertions(+), 42 deletions(-) create mode 100644 src/utils/CMakeLists.txt create mode 100644 src/utils/SOFIE/RTensor.hxx diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index c48e8d1..102ca3b 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -8,3 +8,4 @@ set(sofie_legacy_eval_backend ON CACHE BOOL "" FORCE) add_subdirectory(SOFIE_core) add_subdirectory(SOFIE_parsers) +add_subdirectory(utils) diff --git a/src/SOFIE_core/CMakeLists.txt b/src/SOFIE_core/CMakeLists.txt index ac7499d..4cab8e0 100644 --- a/src/SOFIE_core/CMakeLists.txt +++ b/src/SOFIE_core/CMakeLists.txt @@ -88,6 +88,7 @@ set(sources_cxx target_sources(SOFIE_core PRIVATE ${sources_headers} ${sources_cxx}) target_include_directories(SOFIE_core PUBLIC ${CMAKE_CURRENT_SOURCE_DIR}/inc) +target_link_libraries(SOFIE_core PUBLIC utils) target_link_libraries(SOFIE_core PUBLIC Tree Core diff --git a/src/SOFIE_core/README.md b/src/SOFIE_core/README.md index 033cad4..2259d7a 100644 --- a/src/SOFIE_core/README.md +++ b/src/SOFIE_core/README.md @@ -25,7 +25,6 @@ SOFIE works in a parser-generator working architecture. With SOFIE, the user get From ROOT command line, or in a ROOT macro, we can proceed with an ONNX model: ```c++ -using namespace TMVA::Experimental; SOFIE::RModelParser_ONNX parser; SOFIE::RModel model = parser.Parse(“./example_model.onnx”); model.Generate(); @@ -73,7 +72,6 @@ SOFIE also supports generating inference code with RDataFrame as inputs, refer t Here is the updated list of supported ONNX operators. You can obtain this list by doing ```cpp -using namespace TMVA::Experimental; SOFIE::RModelParser_ONNX parser; std::vector supportedOperators = parser.GetRegisteredOperators(); ``` @@ -164,7 +162,6 @@ The above operators are supported for tensors of the following types: You can also check your model whether all operators are implemented by doing the following: ```c++ -using namespace TMVA::Experimental; SOFIE::RModelParser_ONNX parser; parser.CheckModel("example_model.ONNX"); ``` diff --git a/src/SOFIE_core/inc/SOFIE/RModel.hxx b/src/SOFIE_core/inc/SOFIE/RModel.hxx index 4ecdaec..ed28b48 100644 --- a/src/SOFIE_core/inc/SOFIE/RModel.hxx +++ b/src/SOFIE_core/inc/SOFIE/RModel.hxx @@ -168,6 +168,10 @@ public: // used to infer the sub-graphs std::string GenerateInferSignature(bool isdecl = true); + // generate the infer function signature for inference on ALPAKA. If isdecl= false generate the calling infer function + // used to infer the sub-graphs + std::string GenerateInferSignature_GPU_ALPAKA(bool isdecl = true); + void RemoveIntermediateTensor(const std::string& tensor_name){ fIntermediateTensorInfos.erase(tensor_name); } diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx index 7756f9a..f417acf 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx @@ -436,8 +436,8 @@ namespace SOFIE{ throw std::runtime_error("TMVA SOFIE Gemm(MatMul) has invalid shape for inputs or output"); } auto m = (fAttrTransA ? fShapeA[dimA-1].GetVal() : fShapeA[dimA-2].GetVal()); - auto n = (fAttrTransA ? fShapeA[dimA-2].GetVal() : fShapeA[dimA-1].GetVal()); - auto k = (fAttrTransB ? fShapeB[dimB-2].GetVal() : fShapeB[dimB-1].GetVal()); + auto n = (fAttrTransB ? fShapeB[dimB-2].GetVal() : fShapeB[dimB-1].GetVal()); + auto k = (fAttrTransA ? fShapeA[dimA-2].GetVal() : fShapeA[dimA-1].GetVal()); std::vector sY = {fShapeY[dimY-2], fShapeY[dimY-1]}; // extra dimensions in case of stacked MatMul std::vector sA; @@ -451,7 +451,9 @@ namespace SOFIE{ out << SP << "int " << opName << "_n = " << n << ";\n"; out << SP << "int " << opName << "_k = " << k << ";\n"; out << SP << "float " << opName << "_alpha = " << std::setprecision(std::numeric_limits::max_digits10) << fAttrAlpha << ";\n"; - out << SP << "float " << opName << "_beta = " << std::setprecision(std::numeric_limits::max_digits10) << fAttrBeta << ";\n"; + + // restricting to a 0 beta since BIAS is configured separately through sofieBLAS interface + out << SP << "float " << opName << "_beta = 0;\n"; // case bias is present if (!fNC.empty()){ @@ -484,10 +486,14 @@ namespace SOFIE{ out << SP; } // in the case of bias - if (!fNC.empty() && fActivation == EActivationType::RELU){ - out << SP << "blas.gemmrelu("< ConvertShapeToDim(const std::vector & shape); std::vector ConvertShapeToInt(const std::vector & shape); -std::size_t ConvertShapeToLength(const std::vector & shape); +inline std::size_t ConvertShapeToLength(const std::vector & shape){ + // Empty shape represent scalar values, so we return a length=1 + std::size_t fLength = 1; + for (auto& dim: shape) fLength *= dim; + return fLength; +} std::string ConvertShapeToString(const std::vector & shape); std::string ConvertDimShapeToString(const std::vector & shape); @@ -692,20 +697,20 @@ extern "C" void sgemm_(const char * transa, const char * transb, const int * m, struct GNN_Data { - TMVA::Experimental::RTensor node_data; // the node feature data, tensor with shape (num_nodes, num_node_features) - TMVA::Experimental::RTensor edge_data; // the edge feature data, tensor with shape (num_edges, num_edge_features) - TMVA::Experimental::RTensor global_data; // the global features, tensor with shape (1, num_global_features) - TMVA::Experimental::RTensor edge_index; // the edge index (receivers and senders for each edge), tensor with shape (2, num_edges) + SOFIE::RTensor node_data; // the node feature data, tensor with shape (num_nodes, num_node_features) + SOFIE::RTensor edge_data; // the edge feature data, tensor with shape (num_edges, num_edge_features) + SOFIE::RTensor global_data; // the global features, tensor with shape (1, num_global_features) + SOFIE::RTensor edge_index; // the edge index (receivers and senders for each edge), tensor with shape (2, num_edges) // edge_index[0,:] are the receivers and edge_index[1,:] are the senders // need to have default constructor since RTensor has not one - GNN_Data(): node_data(TMVA::Experimental::RTensor({})), edge_data(TMVA::Experimental::RTensor({})), global_data(TMVA::Experimental::RTensor({})), edge_index(TMVA::Experimental::RTensor({})) {} + GNN_Data(): node_data(SOFIE::RTensor({})), edge_data(SOFIE::RTensor({})), global_data(SOFIE::RTensor({})), edge_index(SOFIE::RTensor({})) {} }; template -TMVA::Experimental::RTensor Concatenate( TMVA::Experimental::RTensor & t1, TMVA::Experimental::RTensor & t2, int axis = 0) +SOFIE::RTensor Concatenate( SOFIE::RTensor & t1, SOFIE::RTensor & t2, int axis = 0) { // concatenate tensor along axis. Shape must be the same except in the dimension of the concatenated axis if (t1.GetMemoryLayout() != t2.GetMemoryLayout()) @@ -720,8 +725,8 @@ TMVA::Experimental::RTensor Concatenate( TMVA::Experimental::RTensor & t1, } std::vector outShape = shape1; outShape[axis] = shape1[axis] + shape2[axis]; - TMVA::Experimental::RTensor tout(outShape, t1.GetMemoryLayout()); - if (t1.GetMemoryLayout() == TMVA::Experimental::MemoryLayout::ColumnMajor) { + SOFIE::RTensor tout(outShape, t1.GetMemoryLayout()); + if (t1.GetMemoryLayout() == SOFIE::MemoryLayout::ColumnMajor) { throw std::runtime_error("TMVA RTensor Concatenate is not yet supported for column major tensors"); } @@ -754,10 +759,10 @@ inline GNN_Data Concatenate(GNN_Data & data1, GNN_Data & data2, int axis = 0) { inline GNN_Data Copy(const GNN_Data & data) { GNN_Data out; - out.node_data = TMVA::Experimental::RTensor(data.node_data.GetShape()); - out.edge_data = TMVA::Experimental::RTensor(data.edge_data.GetShape()); - out.global_data = TMVA::Experimental::RTensor(data.global_data.GetShape()); - out.edge_index = TMVA::Experimental::RTensor(data.edge_index.GetShape()); + out.node_data = SOFIE::RTensor(data.node_data.GetShape()); + out.edge_data = SOFIE::RTensor(data.edge_data.GetShape()); + out.global_data = SOFIE::RTensor(data.global_data.GetShape()); + out.edge_index = SOFIE::RTensor(data.edge_index.GetShape()); std::copy(data.node_data.GetData(), data.node_data.GetData()+ data.node_data.GetSize(), out.node_data.GetData()); std::copy(data.edge_data.GetData(), data.edge_data.GetData()+ data.edge_data.GetSize(), out.edge_data.GetData()); std::copy(data.global_data.GetData(), data.global_data.GetData()+ data.global_data.GetSize(), out.global_data.GetData()); diff --git a/src/SOFIE_core/src/RModel_ALPAKA.cxx b/src/SOFIE_core/src/RModel_ALPAKA.cxx index 2da0e1f..03eb5e7 100644 --- a/src/SOFIE_core/src/RModel_ALPAKA.cxx +++ b/src/SOFIE_core/src/RModel_ALPAKA.cxx @@ -128,6 +128,42 @@ void RModel::GenerateDynamicTensorInfo_GPU_ALPAKA() { fGC += out.str(); } +// only supports BufF1D buffer data types for now +std::string RModel::GenerateInferSignature_GPU_ALPAKA(bool isdecl) { + // generate the infer signature given the inputs: eg. "BufF1D const deviceBuf_A, BufF1D const deviceBuf_B" + // if (decl = false) generate only calling signature (deviceBuf_A, deviceBuf_B, ....) + std::string rGC; + std::unordered_map inputParams; + int i_input = 0; + for (auto &name : fInputTensorNames) { + // if is a dynamic tensor pass initial parameters + if (IsDimInputTensor(name)) { + auto shape = GetDynamicTensorShape(name); + for (auto &d : shape) { + std::string pName = d.param; + // need to check if the input parameters is already existing in another input tensor + if (d.isParam && inputParams.count(pName) == 0) { + if (isdecl) rGC += "size_t "; + rGC += d.param + ","; + inputParams[pName] = i_input; + } + } + } + if (isdecl) { + std::string type = "BufF1D"; + if (type == "other") + throw std::runtime_error("TMVA-SOFIE: input tensor " + name + + " is of a data type which is not yet supported."); + rGC += type + " const "; + } + rGC += "deviceBuf_" + name + ","; + i_input++; + } + + if (fInputTensorNames.size() > 0) rGC.pop_back();// remove last "," + return rGC; +} + void RModel::GenerateOutput_GPU_ALPAKA() { if (fVerbose) std::cout << "Generating main inference code for " << fName << std::endl; @@ -149,7 +185,7 @@ void RModel::GenerateOutput_GPU_ALPAKA() { } fGC += " infer("; - fGC += GenerateInferSignature(); + fGC += GenerateInferSignature_GPU_ALPAKA(); fGC += "){\n"; for (size_t op_idx = 0; op_idx < fOperators.size(); ++op_idx) { diff --git a/src/SOFIE_core/src/SOFIE_common.cxx b/src/SOFIE_core/src/SOFIE_common.cxx index 6136f72..05f873b 100644 --- a/src/SOFIE_core/src/SOFIE_common.cxx +++ b/src/SOFIE_core/src/SOFIE_common.cxx @@ -46,14 +46,6 @@ std::vector ConvertShapeToInt(const std::vector & shape){ return ret_shape; } - -std::size_t ConvertShapeToLength(const std::vector & shape){ - // Empty shape represent scalar values, so we return a length=1 - std::size_t fLength = 1; - for (auto& dim: shape) fLength *= dim; - return fLength; -} - std::string ConvertTypeToString(ETensorType type){ switch(type){ case ETensorType::FLOAT : { @@ -545,4 +537,4 @@ std::vector UTILITY::ComputeStrideFromShape(const std::vector & shape) return strides; } -} // namespace SOFIE \ No newline at end of file +} // namespace SOFIE diff --git a/src/SOFIE_core/test/EmitFromONNX.cxx.in b/src/SOFIE_core/test/EmitFromONNX.cxx.in index 1433ba6..c464f4d 100644 --- a/src/SOFIE_core/test/EmitFromONNX.cxx.in +++ b/src/SOFIE_core/test/EmitFromONNX.cxx.in @@ -28,7 +28,7 @@ int main(int argc, char *argv[]){ @EMIT_CAPTURES@ - std::cout << "[SUMMARY] Completed with " << failures << " failures" << std::endl; + std::cout << "[SUMMARY for generation from ONNX] Completed with " << failures << " failures" << std::endl; return failures == 0 ? 0 : 1; } diff --git a/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in b/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in index 0d51e92..58198c1 100644 --- a/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in +++ b/src/SOFIE_core/test/EmitFromONNX_GPU_ALPAKA.cxx.in @@ -22,6 +22,6 @@ int main(int argc, char *argv[]) { @EMIT_CAPTURES@ - std::cout << "[SUMMARY] Completed with " << failures << " failures" << std::endl; + std::cout << "[SUMMARY for generation from ONNX with ALPAKA] Completed with " << failures << " failures" << std::endl; return failures == 0 ? 0 : 1; } diff --git a/src/SOFIE_core/test/EmitFromRoot.cxx.in b/src/SOFIE_core/test/EmitFromRoot.cxx.in index b08d17b..88c0789 100644 --- a/src/SOFIE_core/test/EmitFromRoot.cxx.in +++ b/src/SOFIE_core/test/EmitFromRoot.cxx.in @@ -43,7 +43,12 @@ int EmitModel(std::string inputfile, std::string outname){ int main(int argc, char *argv[]){ - int failures = 0; + int failures = 0; + + @EMIT_CAPTURES@ + + std::cout << "[SUMMARY for generation from ROOT] Completed with " << failures << " failures" << std::endl; + return failures == 0 ? 0 : 1; @EMIT_CAPTURES@; diff --git a/src/utils/CMakeLists.txt b/src/utils/CMakeLists.txt new file mode 100644 index 0000000..2ede060 --- /dev/null +++ b/src/utils/CMakeLists.txt @@ -0,0 +1,11 @@ +add_library(utils INTERFACE) + +target_include_directories(utils INTERFACE + $ + $ +) + +install( + DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR}/SOFIE + DESTINATION include +) diff --git a/src/utils/SOFIE/RTensor.hxx b/src/utils/SOFIE/RTensor.hxx new file mode 100644 index 0000000..db82dc9 --- /dev/null +++ b/src/utils/SOFIE/RTensor.hxx @@ -0,0 +1,628 @@ +#ifndef SOFIE_RTENSOR +#define SOFIE_RTENSOR + +#include +#include // std::size_t +#include +#include // std::runtime_error +#include // std::stringstream +#include // std::shared_ptr +#include // std::is_convertible +#include // std::reverse +#include // std::random_access_iterator_tag + +namespace SOFIE { + +/// Memory layout type +enum class MemoryLayout : uint8_t { + RowMajor = 0x01, + ColumnMajor = 0x02 +}; + +namespace Internal { + +/// \brief Get size of tensor from shape vector +/// \param[in] shape Shape vector +/// \return Size of contiguous memory +template +inline std::size_t GetSizeFromShape(const T &shape) +{ + if (shape.size() == 0) + return 0; + std::size_t size = 1; + for (auto &s : shape) + size *= s; + return size; +} + +/// \brief Compute strides from shape vector. +/// \param[in] shape Shape vector +/// \param[in] layout Memory layout +/// \return Size of contiguous memory +/// +/// This information is needed for the multi-dimensional indexing. See here: +/// https://en.wikipedia.org/wiki/Row-_and_column-major_order +/// https://docs.scipy.org/doc/numpy/reference/generated/numpy.ndarray.strides.html +template +inline std::vector ComputeStridesFromShape(const T &shape, MemoryLayout layout) +{ + const auto size = shape.size(); + T strides(size); + if (layout == MemoryLayout::RowMajor) { + for (std::size_t i = 0; i < size; i++) { + if (i == 0) { + strides[size - 1 - i] = 1; + } else { + strides[size - 1 - i] = strides[size - 1 - i + 1] * shape[size - 1 - i + 1]; + } + } + } else if (layout == MemoryLayout::ColumnMajor) { + for (std::size_t i = 0; i < size; i++) { + if (i == 0) { + strides[i] = 1; + } else { + strides[i] = strides[i - 1] * shape[i - 1]; + } + } + } else { + std::stringstream ss; + ss << "Memory layout type is not valid for calculating strides."; + throw std::runtime_error(ss.str()); + } + return strides; +} + +/// \brief Compute indices from global index +/// \param[in] shape Shape vector +/// \param[in] idx Global index +/// \param[in] layout Memory layout +/// \return Indice vector +template +inline T ComputeIndicesFromGlobalIndex(const T& shape, MemoryLayout layout, const typename T::value_type idx) +{ + const auto size = shape.size(); + auto strides = ComputeStridesFromShape(shape, layout); + T indices(size); + auto r = idx; + for (std::size_t i = 0; i < size; i++) { + indices[i] = int(r / strides[i]); + r = r % strides[i]; + } + return indices; +} + +/// \brief Compute global index from indices +/// \param[in] strides Strides vector +/// \param[in] idx Indice vector +/// \return Global index +template +inline std::size_t ComputeGlobalIndex(const U& strides, const V& idx) +{ + std::size_t globalIndex = 0; + const auto size = idx.size(); + for (std::size_t i = 0; i < size; i++) { + globalIndex += strides[size - 1 - i] * idx[size - 1 - i]; + } + return globalIndex; +} + +/// \brief Type checking for all types of a parameter pack, e.g., used in combination with std::is_convertible +template +struct and_types : std::true_type { +}; + +template +struct and_types : std::integral_constant()> { +}; + +/// \brief Copy slice of a tensor recursively from here to there +/// \param[in] here Source tensor +/// \param[in] there Target tensor (slice of source tensor) +/// \param[in] mins Minimum of indices for each dimension +/// \param[in] maxs Maximum of indices for each dimension +/// \param[in] idx Current indices +/// \param[in] active Active index needed to stop the recursion +/// +/// Copy the content of a slice of a tensor from source to target. This is done +/// by recursively iterating over the ranges of the slice for each dimension. +template +void RecursiveCopy(const T &here, T &there, + const std::vector &mins, const std::vector &maxs, + std::vector idx, std::size_t active) +{ + const auto size = idx.size(); + for (std::size_t i = mins[active]; i < maxs[active]; i++) { + idx[active] = i; + if (active == size - 1) { + auto idxThere = idx; + for (std::size_t j = 0; j < size; j++) { + idxThere[j] -= mins[j]; + } + there(idxThere) = here(idx); + } else { + Internal::RecursiveCopy(here, there, mins, maxs, idx, active + 1); + } + } +} + +} // namespace SOFIE::Internal + +/// \class SOFIE::RTensor +/// \brief RTensor is a container with contiguous memory and shape information. +/// \tparam T Data-type of the tensor +/// +/// An RTensor is a vector-like container, which has additional shape information. +/// The elements of the multi-dimensional container can be accessed by their +/// indices in a coherent way without taking care about the one-dimensional memory +/// layout of the contiguous storage. This also allows to manipulate the shape +/// of the container without moving the actual elements in memory. Another feature +/// is that an RTensor can own the underlying contiguous memory but can also represent +/// only a view on existing data without owning it. +template > +class RTensor { +public: + // Typedefs + using Value_t = V; + using Shape_t = std::vector; + using Index_t = Shape_t; + using Slice_t = std::vector; + using Container_t = C; + +private: + Shape_t fShape; + Shape_t fStrides; + std::size_t fSize; + MemoryLayout fLayout; + Value_t *fData; + std::shared_ptr fContainer; + +protected: + void ReshapeInplace(const Shape_t &shape); + +public: + // Constructors + + /// \brief Construct a tensor as view on data + /// \param[in] data Pointer to data contiguous in memory + /// \param[in] shape Shape vector + /// \param[in] layout Memory layout + RTensor(Value_t *data, Shape_t shape, MemoryLayout layout = MemoryLayout::RowMajor) + : fShape(shape), fLayout(layout), fData(data), fContainer(nullptr) + { + fSize = Internal::GetSizeFromShape(shape); + fStrides = Internal::ComputeStridesFromShape(shape, layout); + } + + /// \brief Construct a tensor as view on data + /// \param[in] data Pointer to data contiguous in memory + /// \param[in] shape Shape vector + /// \param[in] strides Strides vector + /// \param[in] layout Memory layout + RTensor(Value_t *data, Shape_t shape, Shape_t strides, MemoryLayout layout = MemoryLayout::RowMajor) + : fShape(shape), fStrides(strides), fLayout(layout), fData(data), fContainer(nullptr) + { + fSize = Internal::GetSizeFromShape(shape); + } + + /// \brief Construct a tensor owning externally provided data + /// \param[in] container Shared pointer to data container + /// \param[in] shape Shape vector + /// \param[in] layout Memory layout + RTensor(std::shared_ptr container, Shape_t shape, + MemoryLayout layout = MemoryLayout::RowMajor) + : fShape(shape), fLayout(layout), fContainer(container) + { + fSize = Internal::GetSizeFromShape(shape); + fStrides = Internal::ComputeStridesFromShape(shape, layout); + fData = std::data(*fContainer); + } + + /// \brief Construct a tensor owning data initialized with new container + /// \param[in] shape Shape vector + /// \param[in] layout Memory layout + RTensor(Shape_t shape, MemoryLayout layout = MemoryLayout::RowMajor) + : fShape(shape), fLayout(layout) + { + // TODO: Document how data pointer is determined using STL iterator interface. + // TODO: Sanitize given container type with type traits + fSize = Internal::GetSizeFromShape(shape); + fStrides = Internal::ComputeStridesFromShape(shape, layout); + fContainer = std::make_shared(fSize); + fData = std::data(*fContainer); + } + + // Access elements + Value_t &operator()(const Index_t &idx); + const Value_t &operator() (const Index_t &idx) const; + template Value_t &operator()(Idx... idx); + template const Value_t &operator() (Idx... idx) const; + + // Access properties + std::size_t GetSize() const { return fSize; } + const Shape_t &GetShape() const { return fShape; } + const Shape_t &GetStrides() const { return fStrides; } + Value_t *GetData() { return fData; } + const Value_t *GetData() const { return fData; } + std::shared_ptr GetContainer() { return fContainer; } + const std::shared_ptr GetContainer() const { return fContainer; } + MemoryLayout GetMemoryLayout() const { return fLayout; } + bool IsView() const { return fContainer == nullptr; } + bool IsOwner() const { return !IsView(); } + + // Copy + RTensor Copy(MemoryLayout layout = MemoryLayout::RowMajor) const; + + // Transformations + RTensor Transpose() const; + RTensor Squeeze() const; + RTensor ExpandDims(int idx) const; + RTensor Reshape(const Shape_t &shape) const; + RTensor Resize(const Shape_t &shape); + RTensor Slice(const Slice_t &slice); + + // Iterator class + class Iterator { + private: + RTensor& fTensor; + Index_t::value_type fGlobalIndex; + public: + using iterator_category = std::random_access_iterator_tag; + using value_type = Value_t; + using difference_type = std::ptrdiff_t; + using pointer = Value_t *; + using reference = Value_t &; + + Iterator(RTensor& x, typename Index_t::value_type idx) : fTensor(x), fGlobalIndex(idx) {} + Iterator& operator++() { fGlobalIndex++; return *this; } + Iterator operator++(int) { auto tmp = *this; operator++(); return tmp; } + Iterator& operator--() { fGlobalIndex--; return *this; } + Iterator operator--(int) { auto tmp = *this; operator--(); return tmp; } + Iterator operator+(difference_type rhs) const { return Iterator(fTensor, fGlobalIndex + rhs); } + Iterator operator-(difference_type rhs) const { return Iterator(fTensor, fGlobalIndex - rhs); } + difference_type operator-(const Iterator& rhs) { return fGlobalIndex - rhs.GetGlobalIndex(); } + Iterator& operator+=(difference_type rhs) { fGlobalIndex += rhs; return *this; } + Iterator& operator-=(difference_type rhs) { fGlobalIndex -= rhs; return *this; } + Value_t& operator*() + { + auto idx = Internal::ComputeIndicesFromGlobalIndex(fTensor.GetShape(), fTensor.GetMemoryLayout(), fGlobalIndex); + return fTensor(idx); + } + bool operator==(const Iterator& rhs) const + { + if (fGlobalIndex == rhs.GetGlobalIndex()) return true; + return false; + } + bool operator!=(const Iterator& rhs) const { return !operator==(rhs); }; + bool operator>(const Iterator& rhs) const { return fGlobalIndex > rhs.GetGlobalIndex(); } + bool operator<(const Iterator& rhs) const { return fGlobalIndex < rhs.GetGlobalIndex(); } + bool operator>=(const Iterator& rhs) const { return fGlobalIndex >= rhs.GetGlobalIndex(); } + bool operator<=(const Iterator& rhs) const { return fGlobalIndex <= rhs.GetGlobalIndex(); } + typename Index_t::value_type GetGlobalIndex() const { return fGlobalIndex; }; + }; + + // Iterator interface + // TODO: Document that the iterator always iterates following the physical memory layout. + Iterator begin() noexcept { + return Iterator(*this, 0); + } + Iterator end() noexcept { + return Iterator(*this, fSize); + } +}; + +/// \brief Reshape tensor in place +/// \param[in] shape Shape vector +/// Reshape tensor without changing the overall size +template +inline void RTensor::ReshapeInplace(const Shape_t &shape) +{ + const auto size = Internal::GetSizeFromShape(shape); + if (size != fSize) { + std::stringstream ss; + ss << "Cannot reshape tensor with size " << fSize << " into shape { "; + for (std::size_t i = 0; i < shape.size(); i++) { + if (i != shape.size() - 1) { + ss << shape[i] << ", "; + } else { + ss << shape[i] << " }."; + } + } + throw std::runtime_error(ss.str()); + } + + // Compute new strides from shape + auto strides = Internal::ComputeStridesFromShape(shape, fLayout); + fShape = shape; + fStrides = strides; +} + + +/// \brief Access elements +/// \param[in] idx Index vector +/// \return Reference to element +template +inline Value_t &RTensor::operator()(const Index_t &idx) +{ + const auto globalIndex = Internal::ComputeGlobalIndex(fStrides, idx); + return fData[globalIndex]; +} + +/// \brief Access elements +/// \param[in] idx Index vector +/// \return Reference to element +template +inline const Value_t &RTensor::operator() (const Index_t &idx) const +{ + const auto globalIndex = Internal::ComputeGlobalIndex(fStrides, idx); + return fData[globalIndex]; +} + +/// \brief Access elements +/// \param[in] idx Indices +/// \return Reference to element +template +template +Value_t &RTensor::operator()(Idx... idx) +{ + static_assert(Internal::and_types...>{}, + "Indices are not convertible to std::size_t."); + return operator()({static_cast(idx)...}); +} + +/// \brief Access elements +/// \param[in] idx Indices +/// \return Reference to element +template +template +const Value_t &RTensor::operator() (Idx... idx) const +{ + static_assert(Internal::and_types...>{}, + "Indices are not convertible to std::size_t."); + return operator()({static_cast(idx)...}); +} + +/// \brief Transpose +/// \returns New RTensor +/// The tensor is transposed by inverting the associated memory layout from row- +/// major to column-major and vice versa. Therefore, the underlying data is not +/// touched. +template +inline RTensor RTensor::Transpose() const +{ + MemoryLayout layout; + // Transpose by inverting memory layout + if (fLayout == MemoryLayout::RowMajor) { + layout = MemoryLayout::ColumnMajor; + } else if (fLayout == MemoryLayout::ColumnMajor) { + layout = MemoryLayout::RowMajor; + } else { + throw std::runtime_error("Memory layout is not known."); + } + + // Create copy of container + RTensor x(fData, fShape, fStrides, layout); + + // Reverse shape + std::reverse(x.fShape.begin(), x.fShape.end()); + + // Reverse strides + std::reverse(x.fStrides.begin(), x.fStrides.end()); + + return x; +} + +/// \brief Squeeze dimensions +/// \returns New RTensor +/// Squeeze removes the dimensions of size one from the shape. +template +inline RTensor RTensor::Squeeze() const +{ + // Remove dimensions of one and associated strides + Shape_t shape; + Shape_t strides; + for (std::size_t i = 0; i < fShape.size(); i++) { + if (fShape[i] != 1) { + shape.emplace_back(fShape[i]); + strides.emplace_back(fStrides[i]); + } + } + + // If all dimensions are 1, we need to keep one. + // This does not apply if the inital shape is already empty. Then, return + // the empty shape. + if (shape.size() == 0 && fShape.size() != 0) { + shape.emplace_back(1); + strides.emplace_back(1); + } + + // Create copy, attach new shape and strides and return + RTensor x(*this); + x.fShape = shape; + x.fStrides = strides; + return x; +} + +/// \brief Expand dimensions +/// \param[in] idx Index in shape vector where dimension is added +/// \returns New RTensor +/// Inserts a dimension of one into the shape. +template +inline RTensor RTensor::ExpandDims(int idx) const +{ + // Compose shape vector with additional dimensions and adjust strides + const int len = fShape.size(); + auto shape = fShape; + auto strides = fStrides; + if (idx < 0) { + idx = len + 1 + idx; + } + if (idx < 0) { + throw std::runtime_error("Given negative index is invalid."); + } + else if (idx > len) { + throw std::runtime_error("Given index is invalid."); + } + shape.insert(shape.begin() + idx, 1); + strides = Internal::ComputeStridesFromShape(shape, fLayout); + + // Create view copy, attach new shape and strides and return + RTensor x(*this); + x.fShape = shape; + x.fStrides = strides; + return x; +} + +/// \brief Reshape tensor +/// \param[in] shape Shape vector +/// \returns New RTensor +/// Reshape tensor without changing the overall size +template +inline RTensor RTensor::Reshape(const Shape_t &shape) const +{ + // Create copy, replace and return + RTensor x(*this); + x.ReshapeInplace(shape); + return x; +} + +/// \brief Resize tensor +/// \param[in] shape Shape vector +/// \returns New RTensor +/// Resize tensor into new shape +template +inline RTensor RTensor::Resize(const Shape_t &shape) +{ + // Create new tensor with the specified shape + RTensor x(shape, fLayout); + + // Copying contents from previous tensor + size_t n = (x.GetSize()>fSize) ? fSize : x.GetSize(); + std::copy(this->GetData(), this->GetData() + n, x.GetData() ); + + return x; +} + +/// \brief Create a slice of the tensor +/// \param[in] slice Slice vector +/// \returns New RTensor +/// A slice is a subset of the tensor defined by a vector of pairs of indices. +template +inline RTensor RTensor::Slice(const Slice_t &slice) +{ + // Sanitize size of slice + const auto sliceSize = slice.size(); + const auto shapeSize = fShape.size(); + if (sliceSize != shapeSize) { + std::stringstream ss; + ss << "Size of slice (" << sliceSize << ") is unequal number of dimensions (" << shapeSize << ")."; + throw std::runtime_error(ss.str()); + } + + // Sanitize slice indices + // TODO: Sanitize slice indices + /* + for (std::size_t i = 0; i < sliceSize; i++) { + } + */ + + // Convert -1 in slice to proper pair of indices + // TODO + + // Recompute shape and size + Shape_t shape(sliceSize); + for (std::size_t i = 0; i < sliceSize; i++) { + shape[i] = slice[i][1] - slice[i][0]; + } + auto size = Internal::GetSizeFromShape(shape); + + // Determine first element contributing to the slice and get the data pointer + Value_t *data; + Shape_t idx(sliceSize); + for (std::size_t i = 0; i < sliceSize; i++) { + idx[i] = slice[i][0]; + } + data = &operator()(idx); + + // Create copy and modify properties + RTensor x(*this); + x.fData = data; + x.fShape = shape; + x.fSize = size; + + // Squeeze tensor and return + return x.Squeeze(); +} + +/// Copy RTensor to new object +/// \param[in] layout Memory layout of the new RTensor +/// \returns New RTensor +/// The operation copies all elements of the current RTensor to a new RTensor +/// with the given layout contiguous in memory. Note that this copies by default +/// to a row major memory layout. +template +inline RTensor RTensor::Copy(MemoryLayout layout) const +{ + // Create new tensor with zeros owning the memory + RTensor r(fShape, layout); + + // Copy over the elements from this tensor + const auto mins = Shape_t(fShape.size()); + const auto maxs = fShape; + auto idx = mins; + Internal::RecursiveCopy(*this, r, mins, maxs, idx, 0); + + return r; +} + +/// \brief Pretty printing +/// \param[in] os Output stream +/// \param[in] x RTensor +/// \return Modified output stream +template +std::ostream &operator<<(std::ostream &os, RTensor &x) +{ + const auto shapeSize = x.GetShape().size(); + if (shapeSize == 1) { + os << "{ "; + const auto size = x.GetSize(); + for (std::size_t i = 0; i < size; i++) { + os << x({i}); + if (i != size - 1) + os << ", "; + } + os << " }"; + } else if (shapeSize == 2) { + os << "{"; + const auto shape = x.GetShape(); + for (std::size_t i = 0; i < shape[0]; i++) { + os << " { "; + for (std::size_t j = 0; j < shape[1]; j++) { + os << x({i, j}); + if (j < shape[1] - 1) { + os << ", "; + } else { + os << " "; + } + } + os << "}"; + } + os << " }"; + } else { + os << "{ printing not yet implemented for this rank }"; + } + return os; +} + +} // namespace SOFIE + +namespace cling { +template +std::string printValue(SOFIE::RTensor *x) +{ + std::stringstream ss; + ss << *x; + return ss.str(); +} +} // namespace cling + +#endif // SOFIE_RTENSOR From 1979a11e0642f00174e211b36c0495244f645d45 Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Mon, 15 Dec 2025 01:36:14 +0100 Subject: [PATCH 19/22] feat: turn off emitting from ROOT files and skip tests with multiple output errors for now --- .../inc/SOFIE/ROperator_BasicBinary.hxx | 14 ++-- src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx | 1 - src/SOFIE_core/src/RModel.cxx | 1 + src/SOFIE_core/src/RModel_ALPAKA.cxx | 12 ++-- src/SOFIE_core/test/CMakeLists.txt | 72 ++++++++++--------- 5 files changed, 58 insertions(+), 42 deletions(-) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx index 80f35be..2d0e6cb 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_BasicBinary.hxx @@ -387,6 +387,9 @@ public: } std::string Generate_GPU_Kernel_ALPAKA(std::string opName) { + if (fIsOutputConstant) + return ""; + std::string op; op = "\n//------ "+opName+"_"+BinaryOperatorTrait::Name()+"_KERNEL_ALPAKA\n"; op += SP + "struct Binary"+BinaryOperatorTrait::Name()+"Kernel {\n"; @@ -402,7 +405,6 @@ public: } op.pop_back(); op += "));\n"; - std::cout<<"okay till here 1\n"; op += SP + SP + SP + SP + "for (auto const& elem : elements) {\n"; auto stridesA = UTILITY::ComputeStrideFromShape(fDimShapeA); @@ -426,7 +428,6 @@ public: for (int j = 0; j < 3; j++) compute_idx_A.pop_back(); } - std::cout<<"okay till here 2\n"; if (fDimShapeB.empty() || std::all_of(fDimShapeB.begin(), fDimShapeB.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { compute_idx_B = "0"; @@ -443,7 +444,6 @@ public: for (int j = 0; j < 3; j++) compute_idx_B.pop_back(); } - std::cout<<"okay till here 3\n"; int nloop = 0; if (fDimShapeY.empty() || std::all_of(fDimShapeY.begin(), fDimShapeY.end(), [](Dim d) { return d.dim == 1 || d.GetVal() == "1"; })) { @@ -463,13 +463,11 @@ public: for (int j = 0; j < 3; j++) compute_idx_Y.pop_back(); } - std::cout<<"okay till here 4\n"; for (int j = 0; j < nloop + 1; j++) op += SP; op += "C[" + compute_idx_Y + "] = " + BinaryOperatorTrait::Op("A[" + compute_idx_A + "]", "B[" + compute_idx_B + "]") + " ;\n"; - std::cout<<"okay till here 5\n"; for (int i = nloop; i > 0; i--) { for (int j = 0; j < i; j++) op += SP; op += "}\n"; @@ -478,10 +476,16 @@ public: } std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string OpName) { + if (fIsOutputConstant) + return ""; + return SP + "Binary"+BinaryOperatorTrait::Name()+"Kernel " + OpName + "Kernel;\n"; } std::string Generate_GPU_ALPAKA(std::string OpName) { + if (fIsOutputConstant) + return ""; + if (fDimShapeY.empty()) { throw std::runtime_error("TMVA SOFIE Operator Basic Binary called to Generate without being initialized first"); } diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx index f417acf..1c43724 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Gemm.hxx @@ -258,7 +258,6 @@ namespace SOFIE{ if (!fIsDynamic){ model.AddIntermediateTensor(fNY, model.GetTensorType(fNA), shapeY); - std::cout<<"currently adding: "<GetKind())<GetKind()) == registered_operators.end()) { - std::cout<<"Generating ALPAKA kernel for operator"<< std::endl; + + if (fVerbose) + std::cout<<"Generating ALPAKA kernel for operator"<< toString(fOperators[id]->GetKind()) << std::endl; + fGC += fOperators[id]->Generate_GPU_Kernel_ALPAKA(std::to_string(id)); registered_operators.insert(fOperators[id]->GetKind()); } @@ -312,9 +314,11 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() { registered_operators.clear(); for (size_t id = 0; id < fOperators.size(); id++) { - std::cout<GetKind())<GetKind()) == registered_operators.end()) { - std::cout<<"Declaring ALPAKA kernel for operator"<< std::endl; + + if (fVerbose) + std::cout<<"Declaring ALPAKA kernel for operator"<< toString(fOperators[id]->GetKind())<Generate_GPU_Kernel_Definitions_ALPAKA(std::to_string(id)); registered_operators.insert(fOperators[id]->GetKind()); } diff --git a/src/SOFIE_core/test/CMakeLists.txt b/src/SOFIE_core/test/CMakeLists.txt index c5e0d8f..c792e3d 100644 --- a/src/SOFIE_core/test/CMakeLists.txt +++ b/src/SOFIE_core/test/CMakeLists.txt @@ -21,13 +21,19 @@ set(CAPTURE_STR "try {\n\ EmitModel(\"@1\", \"@2\");\n\ } catch (const std::exception& e) {\n\ - std::cerr << \"[ERROR] Failed processing @1: \" << e.what() << std::endl;\n\ - failures++;\n\ + std::string msg = e.what();\n\ + if (msg.find(\"multiple output tensors are not supported\") != std::string::npos) {\n\ + std::cerr << \"[SKIP] Multiple outputs are not supported for @1\" << std::endl;\n\ + } else {\n\ + std::cerr << \"[ERROR] Failed processing @1: \" << msg << std::endl;\n\ + failures++;\n\ + }\n\ } catch (...) {\n\ std::cerr << \"[ERROR] Unknown failure processing @1\" << std::endl;\n\ failures++;\n\ }\n\ ") + # --- Collect ONNX files and build ALL_CAPTURES BEFORE any configure_file() --- set(ALL_CAPTURES "") file(GLOB ONNX_FILES "${ONNX_MODELS_DIR}/*.onnx") @@ -51,7 +57,7 @@ set(EMIT_CAPTURES "${ALL_CAPTURES}") # Note: the .in templates must use @EMIT_CAPTURES@ placeholder. configure_file(EmitFromONNX.cxx.in EmitFromONNX_all.cxx @ONLY) -configure_file(EmitFromRoot.cxx.in EmitFromRoot_all.cxx @ONLY) +# configure_file(EmitFromRoot.cxx.in EmitFromRoot_all.cxx @ONLY) configure_file(EmitFromONNX_GPU_ALPAKA.cxx.in EmitFromONNX_GPU_ALPAKA_all.cxx @ONLY) # --- CPU emitter executable and test (unchanged) --- @@ -90,39 +96,41 @@ if (BLAS_FOUND) # we need BLAS for compiling the models FIXTURES_REQUIRED sofie-test-models-onnx-build) endif() +# Skipping emitting from ROOT and further tests for now. # For testing serialisation of RModel object -ROOTTEST_GENERATE_EXECUTABLE(emitFromROOT EmitFromRoot_all.cxx - LIBRARIES protobuf::libprotobuf RIO SOFIE_core SOFIE_parsers - FIXTURES_SETUP sofie-compile-models-onnx-root -) +# ROOTTEST_GENERATE_EXECUTABLE(emitFromROOT EmitFromRoot_all.cxx +# LIBRARIES protobuf::libprotobuf RIO SOFIE_core SOFIE_parsers +# FIXTURES_SETUP sofie-compile-models-onnx-root +# ) + # silence protobuf warnings seen in version 3.0 and 3.6. Not needed from protobuf version 3.17 -target_compile_options(emitFromROOT PRIVATE -Wno-unused-parameter -Wno-array-bounds) +# target_compile_options(emitFromROOT PRIVATE -Wno-unused-parameter -Wno-array-bounds) # Automatic compilation of headers from root files -ROOTTEST_ADD_TEST(SofieCompileModels_ROOT - COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromROOT - FIXTURES_REQUIRED sofie-compile-models-onnx-root - FIXTURES_SETUP sofie-compile-models-root -) - -if (BLAS_FOUND) - # Creating a Google Test for Serialisation of RModel - ROOTTEST_GENERATE_EXECUTABLE(TestCustomModelsFromROOT TestCustomModelsFromROOT.cxx - LIBRARIES - SOFIE_core - BLAS::BLAS - GTest::gtest - GTest::gtest_main - FIXTURES_REQUIRED - sofie-compile-models-root - FIXTURES_SETUP - sofie-test-models-root-build - ) - target_include_directories(TestCustomModelsFromROOT PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) - ROOTTEST_ADD_TEST(TestCustomModelsFromROOT - EXEC ./TestCustomModelsFromROOT - FIXTURES_REQUIRED sofie-test-models-root-build) -endif() +# ROOTTEST_ADD_TEST(SofieCompileModels_ROOT +# COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromROOT +# FIXTURES_REQUIRED sofie-compile-models-onnx-root +# FIXTURES_SETUP sofie-compile-models-root +# ) + +# if (BLAS_FOUND) +# # Creating a Google Test for Serialisation of RModel +# ROOTTEST_GENERATE_EXECUTABLE(TestCustomModelsFromROOT TestCustomModelsFromROOT.cxx +# LIBRARIES +# SOFIE_core +# BLAS::BLAS +# GTest::gtest +# GTest::gtest_main +# FIXTURES_REQUIRED +# sofie-compile-models-root +# FIXTURES_SETUP +# sofie-test-models-root-build +# ) +# target_include_directories(TestCustomModelsFromROOT PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) +# ROOTTEST_ADD_TEST(TestCustomModelsFromROOT +# EXEC ./TestCustomModelsFromROOT +# FIXTURES_REQUIRED sofie-test-models-root-build) +# endif() # Look for needed Python modules ROOT_FIND_PYTHON_MODULE(torch) From 59aeac458ee2559b8162bf092ae97538b7ecfeb0 Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Mon, 15 Dec 2025 17:40:51 +0100 Subject: [PATCH 20/22] feat: support for google tests for inference code with alpaka implementations --- README.md | 6 +- src/SOFIE_core/test/CMakeLists.txt | 259 +++++++++++------- .../TestCustomModelsFromONNXForAlpakaCuda.cxx | 156 +++++++++++ 3 files changed, 316 insertions(+), 105 deletions(-) create mode 100644 src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx diff --git a/README.md b/README.md index 97902f8..597cb56 100644 --- a/README.md +++ b/README.md @@ -25,7 +25,11 @@ source setup.sh ``` Now ROOT should also access the SOFIE libraries while it runs. This helps to accelerate development. Submit your developments here and we will proceed with the developments in ROOT carefull. - +3. To enable testing generated code with alpaka implementations, build using the following command: +```bash +cmake -Dtesting=ON -DENABLE_ALPAKA_TESTS=ON -DCMAKE_INSTALL_PREFIX=../install -DCMAKE_BUILD_TYPE=RelWithDebInfo .. +``` +The default architecture is CUDA, but can be configured using an additional`-DALPAKA_BACKEND=hip` cmake option. ## Inspiration The standalone version of SOFIE is developed with inspiration from the standalone version of RooFit developed by Jonas Rembser that can be found [here](https://github.com/guitargeek/roofit). diff --git a/src/SOFIE_core/test/CMakeLists.txt b/src/SOFIE_core/test/CMakeLists.txt index c792e3d..5d5667a 100644 --- a/src/SOFIE_core/test/CMakeLists.txt +++ b/src/SOFIE_core/test/CMakeLists.txt @@ -9,13 +9,24 @@ # @author Federico Sossai, Sanjiban Sengupta ############################################################################ +cmake_minimum_required(VERSION 3.14) +include(FetchContent) + include_directories(${CMAKE_SOURCE_DIR}/src/SOFIE_core/inc) include_directories(${CMAKE_SOURCE_DIR}/src/SOFIE_parsers/inc) +set(CMAKE_CXX_STANDARD 20) +set(CMAKE_CXX_STANDARD_REQUIRED ON) + if (NOT ONNX_MODELS_DIR) set(ONNX_MODELS_DIR input_models) endif() +option(ENABLE_ALPAKA_TESTS "Enable Alpaka-based SOFIE tests" OFF) +set(ALPAKA_BACKEND "cuda" + CACHE STRING "Alpaka backend to test (cuda, cpu, hip, sycl)") +set_property(CACHE ALPAKA_BACKEND PROPERTY STRINGS cuda cpu hip sycl) + # String template used to produce calls to EmitModel(...) per file. set(CAPTURE_STR "try {\n\ @@ -34,50 +45,37 @@ set(CAPTURE_STR }\n\ ") -# --- Collect ONNX files and build ALL_CAPTURES BEFORE any configure_file() --- +# --- Collect ONNX files --- set(ALL_CAPTURES "") file(GLOB ONNX_FILES "${ONNX_MODELS_DIR}/*.onnx") -# If there are no models, ONNX_FILES will be empty and ALL_CAPTURES stays empty. foreach(onnx_file ${ONNX_FILES}) get_filename_component(fname ${onnx_file} NAME_WE) - get_filename_component(fdir ${onnx_file} DIRECTORY) - string(REPLACE "@1" "${onnx_file}" cap "${CAPTURE_STR}") string(REPLACE "@2" "${fname}" cap "${cap}") string(APPEND ALL_CAPTURES "${cap}") endforeach() -# Now generate the combined source files for CPU, ROOT and ALPAKA -# They will have @EMIT_CAPTURES@ substituted with the contents of ALL_CAPTURES -# (CMake configure_file uses variables via @VAR@ when @ONLY is provided). -# To make ALL_CAPTURES visible to configure_file we set a temporary variable -# that configure_file can reference directly. set(EMIT_CAPTURES "${ALL_CAPTURES}") -# Note: the .in templates must use @EMIT_CAPTURES@ placeholder. configure_file(EmitFromONNX.cxx.in EmitFromONNX_all.cxx @ONLY) -# configure_file(EmitFromRoot.cxx.in EmitFromRoot_all.cxx @ONLY) configure_file(EmitFromONNX_GPU_ALPAKA.cxx.in EmitFromONNX_GPU_ALPAKA_all.cxx @ONLY) -# --- CPU emitter executable and test (unchanged) --- +# --- CPU emitter --- ROOTTEST_GENERATE_EXECUTABLE(emitFromONNX EmitFromONNX_all.cxx - LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers - FIXTURES_SETUP sofie-compile-models-onnx-build) + LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers + FIXTURES_SETUP sofie-compile-models-onnx-build) -# silence protobuf warnings seen in version 3.0 and 3.6. Not needed from protobuf version 3.17 target_compile_options(emitFromONNX PRIVATE -Wno-unused-parameter -Wno-array-bounds) -# IMPORTANT: ROOTTEST_ADD_TEST below used to expand ${onnx_file}/${fname} which are loop vars. -# We keep it as a single-test wrapper; per-file test invocation is added later for ALPAKA. ROOTTEST_ADD_TEST(SofieCompileModels_ONNX - COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNX ${onnx_file} ${CMAKE_CURRENT_BINARY_DIR}/${fname} + COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNX FIXTURES_REQUIRED sofie-compile-models-onnx-build FIXTURES_SETUP sofie-compile-models-onnx ) -# Creating a Google Test -if (BLAS_FOUND) # we need BLAS for compiling the models +# --- Custom model tests --- +if (BLAS_FOUND) ROOTTEST_GENERATE_EXECUTABLE(TestCustomModelsFromONNX TestCustomModelsFromONNX.cxx LIBRARIES MathCore @@ -85,71 +83,32 @@ if (BLAS_FOUND) # we need BLAS for compiling the models BLAS::BLAS GTest::gtest GTest::gtest_main - FIXTURES_REQUIRED - sofie-compile-models-onnx - FIXTURES_SETUP - sofie-test-models-onnx-build + FIXTURES_REQUIRED sofie-compile-models-onnx + FIXTURES_SETUP sofie-test-models-onnx-build ) + target_include_directories(TestCustomModelsFromONNX PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) + ROOTTEST_ADD_TEST(TestCustomModelsFromONNX - EXEC ./TestCustomModelsFromONNX - FIXTURES_REQUIRED sofie-test-models-onnx-build) + EXEC ./TestCustomModelsFromONNX + FIXTURES_REQUIRED sofie-test-models-onnx-build + ) endif() -# Skipping emitting from ROOT and further tests for now. -# For testing serialisation of RModel object -# ROOTTEST_GENERATE_EXECUTABLE(emitFromROOT EmitFromRoot_all.cxx -# LIBRARIES protobuf::libprotobuf RIO SOFIE_core SOFIE_parsers -# FIXTURES_SETUP sofie-compile-models-onnx-root -# ) - -# silence protobuf warnings seen in version 3.0 and 3.6. Not needed from protobuf version 3.17 -# target_compile_options(emitFromROOT PRIVATE -Wno-unused-parameter -Wno-array-bounds) - -# Automatic compilation of headers from root files -# ROOTTEST_ADD_TEST(SofieCompileModels_ROOT -# COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromROOT -# FIXTURES_REQUIRED sofie-compile-models-onnx-root -# FIXTURES_SETUP sofie-compile-models-root -# ) - -# if (BLAS_FOUND) -# # Creating a Google Test for Serialisation of RModel -# ROOTTEST_GENERATE_EXECUTABLE(TestCustomModelsFromROOT TestCustomModelsFromROOT.cxx -# LIBRARIES -# SOFIE_core -# BLAS::BLAS -# GTest::gtest -# GTest::gtest_main -# FIXTURES_REQUIRED -# sofie-compile-models-root -# FIXTURES_SETUP -# sofie-test-models-root-build -# ) -# target_include_directories(TestCustomModelsFromROOT PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) -# ROOTTEST_ADD_TEST(TestCustomModelsFromROOT -# EXEC ./TestCustomModelsFromROOT -# FIXTURES_REQUIRED sofie-test-models-root-build) -# endif() - -# Look for needed Python modules +# --- Python-based generators --- ROOT_FIND_PYTHON_MODULE(torch) if (ROOT_TORCH_FOUND) configure_file(Conv1dModelGenerator.py Conv1dModelGenerator.py COPYONLY) configure_file(Conv2dModelGenerator.py Conv2dModelGenerator.py COPYONLY) configure_file(Conv3dModelGenerator.py Conv3dModelGenerator.py COPYONLY) - configure_file(ConvTrans2dModelGenerator.py ConvTrans2dModelGenerator.py COPYONLY) - configure_file(LinearModelGenerator.py LinearModelGenerator.py COPYONLY) - configure_file(RecurrentModelGenerator.py RecurrentModelGenerator.py COPYONLY) + configure_file(ConvTrans2dModelGenerator.py ConvTrans2dModelGenerator.py COPYONLY) + configure_file(LinearModelGenerator.py LinearModelGenerator.py COPYONLY) + configure_file(RecurrentModelGenerator.py RecurrentModelGenerator.py COPYONLY) if (BLAS_FOUND) ROOT_ADD_GTEST(TestSofieModels TestSofieModels.cxx - LIBRARIES - SOFIE_core - SOFIE_parsers - BLAS::BLAS - INCLUDE_DIRS - ${CMAKE_CURRENT_BINARY_DIR} + LIBRARIES SOFIE_core SOFIE_parsers BLAS::BLAS + INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR} ) endif() endif() @@ -160,39 +119,131 @@ ROOT_ADD_TEST(tmva-sofie-EmitGNN COMMAND emitGNN) ROOT_EXECUTABLE(EmitGraphIndependent GNN/EmitGraphIndependent.cxx LIBRARIES SOFIE_core) ROOT_ADD_TEST(tmva-sofie-EmitGraphIndependent COMMAND EmitGraphIndependent) -# Generating inference code for heterogeneous testing using ALPAKA -ROOTTEST_GENERATE_EXECUTABLE(emitFromONNXAlpaka EmitFromONNX_GPU_ALPAKA_all.cxx +# ========================= +# ALPAKA TESTS +# ========================= +if (ENABLE_ALPAKA_TESTS) + + string(TOLOWER "${ALPAKA_BACKEND}" _alpaka_backend) + + if (NOT _alpaka_backend IN_LIST ALPAKA_BACKEND) + message(FATAL_ERROR + "Unsupported ALPAKA_BACKEND='${ALPAKA_BACKEND}'. " + "Valid values: cuda, cpu, hip, sycl") + endif() + + FetchContent_Declare( + sofieBLAS + GIT_REPOSITORY https://github.com/ML4EP/sofieBLAS + GIT_TAG edf2259876e9f4fb5a8f72db20b2dfb5dc26b517 + ) + FetchContent_MakeAvailable(sofieBLAS) + + FetchContent_Declare( + alpaka + GIT_REPOSITORY https://github.com/alpaka-group/alpaka + GIT_TAG 2fa91a34ed11b2076e474c5507d920e85cf9b79d + ) + FetchContent_MakeAvailable(alpaka) + + # --- ALPAKA emitter --- + ROOTTEST_GENERATE_EXECUTABLE(emitFromONNXAlpaka EmitFromONNX_GPU_ALPAKA_all.cxx LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers - FIXTURES_SETUP sofie-compile-models-onnx-alpaka-build) - -# silence protobuf warnings seen in version 3.0 and 3.6. Not needed from protobuf version 3.17 -target_compile_options(emitFromONNXAlpaka PRIVATE -Wno-unused-parameter -Wno-array-bounds) - -# Add explicit per-file post-build runs of the alpaka emitter executable so that -# EmitFromONNXAlpaka is invoked for each detected .onnx file during the build. -# This avoids relying on a single ${onnx_file}/${fname} value that would -# otherwise expand only to the last entry when used outside the loop. - -if (ONNX_FILES) - foreach(onnx_file ${ONNX_FILES}) - get_filename_component(fname ${onnx_file} NAME_WE) - - # Create a post-build command attached to the emitFromONNXAlpaka target that - # will run the built binary with the current onnx file and the chosen outname. - add_custom_command(TARGET emitFromONNXAlpaka - POST_BUILD - COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 $ "${onnx_file}" "${CMAKE_CURRENT_BINARY_DIR}/${fname}" - COMMENT "Running ALPAKA emitter on ${onnx_file}" + FIXTURES_SETUP sofie-compile-models-onnx-alpaka-build + ) + + target_compile_options(emitFromONNXAlpaka PRIVATE -Wno-unused-parameter -Wno-array-bounds) + + if (ONNX_FILES) + foreach(onnx_file ${ONNX_FILES}) + get_filename_component(fname ${onnx_file} NAME_WE) + add_custom_command(TARGET emitFromONNXAlpaka POST_BUILD + COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 + $ + "${onnx_file}" "${CMAKE_CURRENT_BINARY_DIR}/${fname}" + COMMENT "Running ALPAKA emitter on ${onnx_file}") + endforeach() + endif() + + ROOTTEST_ADD_TEST(SofieCompileModels_ONNX_Alpaka + COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNXAlpaka + FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka-build + FIXTURES_SETUP sofie-compile-models-onnx-alpaka + ) + + set(CXXFLAGS -O2 -g -DALPAKA_HAS_STD_ATOMIC_REF) + set(CXX_HOST_FLAGS -fPIC -pthread) + + # ---- Backend selection ---- + if (_alpaka_backend STREQUAL "cuda") + message(STATUS "Enabling Alpaka CUDA tests") + enable_language(CUDA) + find_package(CUDAToolkit REQUIRED) + + set(CUDA_ARCH "sm_86") + set(CXX_CUDA_FLAGS + -arch=${CUDA_ARCH} + -Wno-deprecated-gpu-targets + --extended-lambda + --expt-relaxed-constexpr) + + set_source_files_properties( + TestCustomModelsFromONNXForAlpakaCuda.cxx + PROPERTIES LANGUAGE CUDA ) - endforeach() -endif() -# Also add a ROOTTEST wrapper so that ctest can run one of the invocations if desired. -# (This remains mostly for CI / test harness compatibility.) -ROOTTEST_ADD_TEST(SofieCompileModels_ONNX_Alpaka - COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNXAlpaka ${ONNX_FILES} - FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka-build - FIXTURES_SETUP sofie-compile-models-onnx-alpaka -) + ROOTTEST_GENERATE_EXECUTABLE( + TestCustomModelsFromONNXForAlpakaCuda + TestCustomModelsFromONNXForAlpakaCuda.cxx + LIBRARIES MathCore SOFIE_core GTest::gtest GTest::gtest_main + FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka + FIXTURES_SETUP sofie-test-models-onnx-alpaka-build + ) + + target_include_directories( + TestCustomModelsFromONNXForAlpakaCuda PRIVATE + ${CMAKE_CURRENT_BINARY_DIR} + ${alpaka_SOURCE_DIR}/include + ${SOFIE_INCLUDE} + ${sofieblas_SOURCE_DIR}/include + ${ROOT_INCLUDE_DIRS} + ${CUDA_BASE}/include + ${CMAKE_CURRENT_SOURCE_DIR} + ) + + set_target_properties( + TestCustomModelsFromONNXForAlpakaCuda + PROPERTIES CUDA_SEPARABLE_COMPILATION ON + ) + + target_compile_definitions( + TestCustomModelsFromONNXForAlpakaCuda PRIVATE + ALPAKA_ACC_GPU_CUDA_ENABLED + ) + + target_link_directories( + TestCustomModelsFromONNXForAlpakaCuda PRIVATE + ${CUDA_BASE}/lib64 + ) + + target_link_libraries(TestCustomModelsFromONNXForAlpakaCuda + CUDA::cublas + CUDA::cublasLt + CUDA::cudart + ${ROOT_LIBRARIES} + ) + + ROOTTEST_ADD_TEST(TestCustomModelsFromONNXForAlpakaCuda + EXEC ./TestCustomModelsFromONNXForAlpakaCuda + FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka + ) + + elseif (_alpaka_backend STREQUAL "cpu") + message(STATUS "Alpaka CPU backend selected (not yet implemented)") + elseif (_alpaka_backend STREQUAL "hip") + message(STATUS "Alpaka HIP backend selected (not yet implemented)") + elseif (_alpaka_backend STREQUAL "sycl") + message(STATUS "Alpaka SYCL backend selected (not yet implemented)") + endif() # backend -# End of CMakeLists.txt +endif() # ENABLE_ALPAKA_TESTS diff --git a/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx b/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx new file mode 100644 index 0000000..017a19f --- /dev/null +++ b/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx @@ -0,0 +1,156 @@ +#include +#include + +#include "Linear_16_FromONNX_GPU_ALPAKA.hxx" +#include "input_models/references/Linear_16.ref.hxx" + +#include "Linear_32_FromONNX_GPU_ALPAKA.hxx" +#include "input_models/references/Linear_32.ref.hxx" + +#include "Linear_64_FromONNX_GPU_ALPAKA.hxx" +#include "input_models/references/Linear_64.ref.hxx" + +#include +#include +#include +#include "gtest/gtest.h" + +constexpr float DEFAULT_TOLERANCE = 1e-3f; + +using Idx = std::size_t; +using Dim = alpaka::DimInt<1>; +using Ext1D = alpaka::Vec; + +class SofieAlpakaTest : public ::testing::Test { +protected: + // Shared devices and platforms + alpaka::PlatformCpu hostPlatform; + alpaka::DevCpu host; + alpaka::PlatformCudaRt platform; + alpaka::DevCudaRt device; + alpaka::Queue queue; + + SofieAlpakaTest() + : hostPlatform{} + , host(alpaka::getDevByIdx(hostPlatform, 0u)) + , platform{} + , device(alpaka::getDevByIdx(platform, 0u)) + , queue(device) + { + } + + void SetUp() override { + cudaDeviceSynchronize(); + } + + void TearDown() override { + alpaka::wait(queue); + cudaDeviceSynchronize(); + } + + ~SofieAlpakaTest() override { + cudaDeviceSynchronize(); + } +}; + +TEST_F(SofieAlpakaTest, Linear16) +{ + constexpr float TOLERANCE = DEFAULT_TOLERANCE; + + auto A = alpaka::allocBuf(host, Ext1D::all(Idx{1600})); + float *A_ptr = reinterpret_cast(alpaka::getPtrNative(A)); + + for (Idx i = 0; i < 1600; ++i) { + A_ptr[i] = 1.0; + } + + auto A_d = alpaka::allocBuf(device, Ext1D::all(Idx{1600})); + alpaka::memcpy(queue, A_d, A); + alpaka::wait(queue); + + auto result_h = alpaka::allocBuf(host, Ext1D::all(Idx{160})); + + { + SOFIE_Linear_16::Session session("Linear_16_FromONNX_GPU_ALPAKA.dat"); + auto result = session.infer(A_d); + + alpaka::memcpy(queue, result_h, result); + alpaka::wait(queue); + cudaDeviceSynchronize(); + } + + float* res_ptr = reinterpret_cast(alpaka::getPtrNative(result_h)); + float *correct = Linear_16_ExpectedOutput::all_ones; + + for (size_t i = 0; i < 160; ++i) { + EXPECT_LE(std::abs(res_ptr[i] - correct[i]), TOLERANCE); + } +} + +TEST_F(SofieAlpakaTest, Linear32) +{ + constexpr float TOLERANCE = DEFAULT_TOLERANCE; + + auto A = alpaka::allocBuf(host, Ext1D::all(Idx{1600})); + float *A_ptr = reinterpret_cast(alpaka::getPtrNative(A)); + + for (Idx i = 0; i < 1600; ++i) { + A_ptr[i] = 1.0; + } + + auto A_d = alpaka::allocBuf(device, Ext1D::all(Idx{1600})); + alpaka::memcpy(queue, A_d, A); + alpaka::wait(queue); + + auto result_h = alpaka::allocBuf(host, Ext1D::all(Idx{160})); + + { + SOFIE_Linear_32::Session session("Linear_32_FromONNX_GPU_ALPAKA.dat"); + auto result = session.infer(A_d); + + alpaka::memcpy(queue, result_h, result); + alpaka::wait(queue); + cudaDeviceSynchronize(); + } + + float* res_ptr = reinterpret_cast(alpaka::getPtrNative(result_h)); + float *correct = Linear_32_ExpectedOutput::all_ones; + + for (size_t i = 0; i < 160; ++i) { + EXPECT_LE(std::abs(res_ptr[i] - correct[i]), TOLERANCE); + } +} + +TEST_F(SofieAlpakaTest, Linear64) +{ + constexpr float TOLERANCE = DEFAULT_TOLERANCE; + + auto A = alpaka::allocBuf(host, Ext1D::all(Idx{1600})); + float *A_ptr = reinterpret_cast(alpaka::getPtrNative(A)); + + for (Idx i = 0; i < 1600; ++i) { + A_ptr[i] = 1.0; + } + + auto A_d = alpaka::allocBuf(device, Ext1D::all(Idx{1600})); + alpaka::memcpy(queue, A_d, A); + alpaka::wait(queue); + + auto result_h = alpaka::allocBuf(host, Ext1D::all(Idx{160})); + + { + SOFIE_Linear_64::Session session("Linear_64_FromONNX_GPU_ALPAKA.dat"); + auto result = session.infer(A_d); + + alpaka::memcpy(queue, result_h, result); + alpaka::wait(queue); + cudaDeviceSynchronize(); + } + + float* res_ptr = reinterpret_cast(alpaka::getPtrNative(result_h)); + float *correct = Linear_64_ExpectedOutput::all_ones; + + for (size_t i = 0; i < 160; ++i) { + EXPECT_LE(std::abs(res_ptr[i] - correct[i]), TOLERANCE); + } +} From 815a80c4e96f72d86fa348232bb4b4ee3eae6f2c Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Mon, 26 Jan 2026 12:52:36 +0100 Subject: [PATCH 21/22] feat: test cases for leaky relu operator --- .../inc/SOFIE/ROperator_LeakyRelu.hxx | 34 +++-- src/SOFIE_core/src/RModel_ALPAKA.cxx | 12 +- src/SOFIE_core/test/CMakeLists.txt | 128 +++++++++--------- .../test/TestCustomModelsFromONNX.cxx | 2 +- .../TestCustomModelsFromONNXForAlpakaCuda.cxx | 51 +++++++ 5 files changed, 142 insertions(+), 85 deletions(-) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx index 02eca17..7d12228 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx @@ -75,22 +75,23 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() { + std::string Generate_GPU_Kernel_ALPAKA(std::string /*opName*/) override { std::string op; op = "\n//------ LEAKY_RELU_KERNEL_ALPAKA\n"; - op += SP + "struct LeakyReluKernel {\n"; - op += SP + SP + "template\n"; - op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T* data, std::size_t numElements, T alpha = static_cast(0.01)) const {\n"; - op += SP + SP + SP + "for (auto i : alpaka::uniformElements(acc, numElements)) {\n"; - op += SP + SP + SP + SP + "data[i] = (data[i] < static_cast(0)) ? alpha * data[i] : data[i];\n"; - op += SP + SP + SP + "}\n"; + op += "struct LeakyReluKernel {\n"; + op += SP + "template\n"; + op += SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const* __restrict__ data, T* __restrict__ out, std::size_t numElements, T alpha) const {\n"; + op += SP + SP + "const auto idx = alpaka::getIdx(acc)[0];\n"; + op += SP + "if(idx < numElements) {\n"; + op += SP + SP + "out[idx] = data[idx] >= 0 ? data[idx] : alpha * data[idx];\n"; op += SP + SP + "}\n"; - op += SP + "};\n"; + op += SP + "}\n"; + op += "};\n"; return op; } std::string Generate_GPU_Kernel_Definitions_ALPAKA(std::string /*opName*/) override { - return SP + "LeakyReluKernel leakyReluKernel;\n"; + return "LeakyReluKernel leakyReluKernel;\n"; } std::string Generate_GPU_ALPAKA(std::string OpName) override { @@ -99,17 +100,20 @@ public: throw std::runtime_error("TMVA SOFIE Operator LeakyRelu called to Generate without being initialized first"); } + + std::stringstream out; auto length = ConvertShapeToLength(fShape); out << "\n//------ LEAKY_RELU_GPU_ALPAKA\n"; - out << SP << "alpaka::WorkDivMembers workDiv_" << fNX - << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " - << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; - + out << SP << "constexpr float " << OpName << "_alpha = " << std::setprecision(std::numeric_limits::max_digits10) << falpha << ";\n"; + out << SP << "auto const elementsPerThread_"<(1));\n"; + out << SP << "auto const elementsPerGrid_"< const kernelCfg_" << fNX << " = {elementsPerGrid_" << fNX << ", elementsPerThread_" << fNX << "};\n"; + out << SP << "auto const workDiv_" << fNX << " = alpaka::getValidWorkDiv(kernelCfg_" << fNX << ", devAcc, leakyReluKernel, alpaka::getPtrNative(deviceBuf_" << fNX + << "), alpaka::getPtrNative(deviceBuf_" << fNY << "), static_cast(" << length << "), " << OpName << "_alpha);\n"; out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", leakyReluKernel, alpaka::getPtrNative(deviceBuf_" << fNX - << "), static_cast(" << length << "), static_cast(0.01));\n"; - + << "), alpaka::getPtrNative(deviceBuf_" << fNY << "), static_cast(" << length << "), " << OpName << "_alpha);\n"; return out.str(); } diff --git a/src/SOFIE_core/src/RModel_ALPAKA.cxx b/src/SOFIE_core/src/RModel_ALPAKA.cxx index 3e0be79..9ff300a 100644 --- a/src/SOFIE_core/src/RModel_ALPAKA.cxx +++ b/src/SOFIE_core/src/RModel_ALPAKA.cxx @@ -13,10 +13,6 @@ namespace SOFIE { void RModel::GenerateInitializedTensorInfo_GPU_ALPAKA() { if (!fInitializedTensors.empty()){ fGC += "\n// initialized tensors for weights\n"; - fGC += "using BufF1D = alpaka::Buf;\n"; - fGC += "using BufD1D = alpaka::Buf;\n"; - fGC += "using BufI641D = alpaka::Buf;\n"; - } for (auto &i : fInitializedTensors) { @@ -238,9 +234,13 @@ void RModel::GenerateSessionCode_GPU_ALPAKA() { fGC += "using Idx = std::size_t;\n"; fGC += "using Dim = alpaka::DimInt<1>;\n"; fGC += "using Acc = alpaka::TagToAcc;\n"; - fGC += "using DevAcc = alpaka::Dev;\n"; + fGC += "using DevAcc = alpaka::Dev;\n\n"; fGC += "using QueueProperty = alpaka::NonBlocking;\n"; - fGC += "using QueueAcc = alpaka::Queue;\n"; + fGC += "using QueueAcc = alpaka::Queue;\n\n"; + fGC += "using BufF1D = alpaka::Buf;\n"; + fGC += "using BufD1D = alpaka::Buf;\n"; + fGC += "using BufI641D = alpaka::Buf;\n\n"; + fGC += "\nalpaka::Platform const platform{};\n"; fGC += "DevAcc devAcc = alpaka::getDevByIdx(platform, 0);\n"; fGC += "alpaka::PlatformCpu platformHost{};\n"; diff --git a/src/SOFIE_core/test/CMakeLists.txt b/src/SOFIE_core/test/CMakeLists.txt index 5d5667a..76e5e29 100644 --- a/src/SOFIE_core/test/CMakeLists.txt +++ b/src/SOFIE_core/test/CMakeLists.txt @@ -62,62 +62,62 @@ configure_file(EmitFromONNX.cxx.in EmitFromONNX_all.cxx @ONLY) configure_file(EmitFromONNX_GPU_ALPAKA.cxx.in EmitFromONNX_GPU_ALPAKA_all.cxx @ONLY) # --- CPU emitter --- -ROOTTEST_GENERATE_EXECUTABLE(emitFromONNX EmitFromONNX_all.cxx - LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers - FIXTURES_SETUP sofie-compile-models-onnx-build) - -target_compile_options(emitFromONNX PRIVATE -Wno-unused-parameter -Wno-array-bounds) - -ROOTTEST_ADD_TEST(SofieCompileModels_ONNX - COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNX - FIXTURES_REQUIRED sofie-compile-models-onnx-build - FIXTURES_SETUP sofie-compile-models-onnx -) - -# --- Custom model tests --- -if (BLAS_FOUND) - ROOTTEST_GENERATE_EXECUTABLE(TestCustomModelsFromONNX TestCustomModelsFromONNX.cxx - LIBRARIES - MathCore - SOFIE_core - BLAS::BLAS - GTest::gtest - GTest::gtest_main - FIXTURES_REQUIRED sofie-compile-models-onnx - FIXTURES_SETUP sofie-test-models-onnx-build - ) - - target_include_directories(TestCustomModelsFromONNX PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) - - ROOTTEST_ADD_TEST(TestCustomModelsFromONNX - EXEC ./TestCustomModelsFromONNX - FIXTURES_REQUIRED sofie-test-models-onnx-build - ) -endif() - -# --- Python-based generators --- -ROOT_FIND_PYTHON_MODULE(torch) -if (ROOT_TORCH_FOUND) - configure_file(Conv1dModelGenerator.py Conv1dModelGenerator.py COPYONLY) - configure_file(Conv2dModelGenerator.py Conv2dModelGenerator.py COPYONLY) - configure_file(Conv3dModelGenerator.py Conv3dModelGenerator.py COPYONLY) - configure_file(ConvTrans2dModelGenerator.py ConvTrans2dModelGenerator.py COPYONLY) - configure_file(LinearModelGenerator.py LinearModelGenerator.py COPYONLY) - configure_file(RecurrentModelGenerator.py RecurrentModelGenerator.py COPYONLY) - - if (BLAS_FOUND) - ROOT_ADD_GTEST(TestSofieModels TestSofieModels.cxx - LIBRARIES SOFIE_core SOFIE_parsers BLAS::BLAS - INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR} - ) - endif() -endif() - -ROOT_EXECUTABLE(emitGNN GNN/EmitGNN.cxx LIBRARIES SOFIE_core) -ROOT_ADD_TEST(tmva-sofie-EmitGNN COMMAND emitGNN) - -ROOT_EXECUTABLE(EmitGraphIndependent GNN/EmitGraphIndependent.cxx LIBRARIES SOFIE_core) -ROOT_ADD_TEST(tmva-sofie-EmitGraphIndependent COMMAND EmitGraphIndependent) +# ROOTTEST_GENERATE_EXECUTABLE(emitFromONNX EmitFromONNX_all.cxx +# LIBRARIES protobuf::libprotobuf SOFIE_core SOFIE_parsers +# FIXTURES_SETUP sofie-compile-models-onnx-build) + +# target_compile_options(emitFromONNX PRIVATE -Wno-unused-parameter -Wno-array-bounds) + +# ROOTTEST_ADD_TEST(SofieCompileModels_ONNX +# COMMAND ${CMAKE_COMMAND} -E env ROOTIGNOREPREFIX=1 ./emitFromONNX +# FIXTURES_REQUIRED sofie-compile-models-onnx-build +# FIXTURES_SETUP sofie-compile-models-onnx +# ) + +# # --- Custom model tests --- +# if (BLAS_FOUND) +# ROOTTEST_GENERATE_EXECUTABLE(TestCustomModelsFromONNX TestCustomModelsFromONNX.cxx +# LIBRARIES +# MathCore +# SOFIE_core +# BLAS::BLAS +# GTest::gtest +# GTest::gtest_main +# FIXTURES_REQUIRED sofie-compile-models-onnx +# FIXTURES_SETUP sofie-test-models-onnx-build +# ) + +# target_include_directories(TestCustomModelsFromONNX PRIVATE ${CMAKE_CURRENT_BINARY_DIR}) + +# ROOTTEST_ADD_TEST(TestCustomModelsFromONNX +# EXEC ./TestCustomModelsFromONNX +# FIXTURES_REQUIRED sofie-test-models-onnx-build +# ) +# endif() + +# # --- Python-based generators --- +# ROOT_FIND_PYTHON_MODULE(torch) +# if (ROOT_TORCH_FOUND) +# configure_file(Conv1dModelGenerator.py Conv1dModelGenerator.py COPYONLY) +# configure_file(Conv2dModelGenerator.py Conv2dModelGenerator.py COPYONLY) +# configure_file(Conv3dModelGenerator.py Conv3dModelGenerator.py COPYONLY) +# configure_file(ConvTrans2dModelGenerator.py ConvTrans2dModelGenerator.py COPYONLY) +# configure_file(LinearModelGenerator.py LinearModelGenerator.py COPYONLY) +# configure_file(RecurrentModelGenerator.py RecurrentModelGenerator.py COPYONLY) + +# if (BLAS_FOUND) +# ROOT_ADD_GTEST(TestSofieModels TestSofieModels.cxx +# LIBRARIES SOFIE_core SOFIE_parsers BLAS::BLAS +# INCLUDE_DIRS ${CMAKE_CURRENT_BINARY_DIR} +# ) +# endif() +# endif() + +# ROOT_EXECUTABLE(emitGNN GNN/EmitGNN.cxx LIBRARIES SOFIE_core) +# ROOT_ADD_TEST(tmva-sofie-EmitGNN COMMAND emitGNN) + +# ROOT_EXECUTABLE(EmitGraphIndependent GNN/EmitGraphIndependent.cxx LIBRARIES SOFIE_core) +# ROOT_ADD_TEST(tmva-sofie-EmitGraphIndependent COMMAND EmitGraphIndependent) # ========================= # ALPAKA TESTS @@ -192,13 +192,15 @@ if (ENABLE_ALPAKA_TESTS) PROPERTIES LANGUAGE CUDA ) - ROOTTEST_GENERATE_EXECUTABLE( - TestCustomModelsFromONNXForAlpakaCuda - TestCustomModelsFromONNXForAlpakaCuda.cxx - LIBRARIES MathCore SOFIE_core GTest::gtest GTest::gtest_main - FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka - FIXTURES_SETUP sofie-test-models-onnx-alpaka-build - ) + +ROOTTEST_GENERATE_EXECUTABLE( + TestCustomModelsFromONNXForAlpakaCuda + TestCustomModelsFromONNXForAlpakaCuda.cxx + LIBRARIES MathCore SOFIE_core GTest::gtest GTest::gtest_main + FIXTURES_REQUIRED sofie-compile-models-onnx-alpaka + FIXTURES_SETUP sofie-test-models-onnx-alpaka-build +) + target_include_directories( TestCustomModelsFromONNXForAlpakaCuda PRIVATE diff --git a/src/SOFIE_core/test/TestCustomModelsFromONNX.cxx b/src/SOFIE_core/test/TestCustomModelsFromONNX.cxx index ba9a42a..14eb6a3 100644 --- a/src/SOFIE_core/test/TestCustomModelsFromONNX.cxx +++ b/src/SOFIE_core/test/TestCustomModelsFromONNX.cxx @@ -812,7 +812,7 @@ TEST(ONNX, LinearWithLeakyRelu) { constexpr float TOLERANCE = 1; - // Preparing the standard all-ones input + // Preparing input std::vector input({ 0.4369, -0.6882, 1.0309, -1.0263, -0.1519, 1.2237, -0.7054, -0.1762, -0.6811, -2.2597, 1.0388, -0.7993, 0.1468, 1.3257, -0.4714, -0.0958, diff --git a/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx b/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx index 017a19f..fc2f154 100644 --- a/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx +++ b/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx @@ -10,6 +10,9 @@ #include "Linear_64_FromONNX_GPU_ALPAKA.hxx" #include "input_models/references/Linear_64.ref.hxx" +#include "LinearWithLeakyRelu_FromONNX_GPU_ALPAKA.hxx" +#include "input_models/references/LinearWithLeakyRelu.ref.hxx" + #include #include #include @@ -154,3 +157,51 @@ TEST_F(SofieAlpakaTest, Linear64) EXPECT_LE(std::abs(res_ptr[i] - correct[i]), TOLERANCE); } } + +TEST_F(SofieAlpakaTest, LinearWithLeakyRelu) +{ + alpaka::PlatformCpu hostPlatform{}; + auto host = alpaka::getDevByIdx(hostPlatform, 0u); + constexpr float TOLERANCE = DEFAULT_TOLERANCE; + + alpaka::PlatformCudaRt platform{}; + alpaka::DevCudaRt device = alpaka::getDevByIdx(platform, 0u); + alpaka::Queue queue{device}; + + std::vector input({ + 0.4369, -0.6882, 1.0309, -1.0263, -0.1519, 1.2237, -0.7054, -0.1762, + -0.6811, -2.2597, 1.0388, -0.7993, 0.1468, 1.3257, -0.4714, -0.0958, + 0.7057, -0.3749, -0.3310, 0.0986, -0.1370, 0.0832, -1.6465, -0.2793 + }); + + auto A = alpaka::allocBuf(host, Ext1D::all(Idx{input.size()})); + float *A_ptr = reinterpret_cast(alpaka::getPtrNative(A)); + + for (Idx i = 0; i < input.size(); ++i) { + A_ptr[i] = input[i]; + } + + auto A_d = alpaka::allocBuf(device, Ext1D::all(Idx{input.size()})); + alpaka::memcpy(queue, A_d, A); + alpaka::wait(queue); + cudaDeviceSynchronize(); + + auto result_h = alpaka::allocBuf(host, Ext1D::all(Idx{24})); + + { + SOFIE_LinearWithLeakyRelu::Session session; + auto result = session.infer(A_d); + alpaka::wait(queue); + cudaDeviceSynchronize(); + + alpaka::memcpy(queue, result_h, result); + alpaka::wait(queue); + } + + float* res_ptr = reinterpret_cast(alpaka::getPtrNative(result_h)); + float *correct = LinearWithLeakyRelu_ExpectedOutput::outputs; + + for (size_t i = 0; i < 24; ++i) { + EXPECT_LE(std::abs(res_ptr[i] - correct[i]), TOLERANCE); + } +} From 671b4b035566b6e8b1243c62e2f3beecc41b648a Mon Sep 17 00:00:00 2001 From: Sanjiban Sengupta Date: Mon, 26 Jan 2026 13:26:23 +0100 Subject: [PATCH 22/22] fix: sigmoid operator gpu implementation and test --- .../inc/SOFIE/ROperator_LeakyRelu.hxx | 2 - .../inc/SOFIE/ROperator_Sigmoid.hxx | 25 ++++++------ .../TestCustomModelsFromONNXForAlpakaCuda.cxx | 39 ++++++++++++++++++- 3 files changed, 51 insertions(+), 15 deletions(-) diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx index 7d12228..0f3b699 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_LeakyRelu.hxx @@ -100,8 +100,6 @@ public: throw std::runtime_error("TMVA SOFIE Operator LeakyRelu called to Generate without being initialized first"); } - - std::stringstream out; auto length = ConvertShapeToLength(fShape); out << "\n//------ LEAKY_RELU_GPU_ALPAKA\n"; diff --git a/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx b/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx index f2e2e25..5edbcf9 100644 --- a/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx +++ b/src/SOFIE_core/inc/SOFIE/ROperator_Sigmoid.hxx @@ -61,14 +61,15 @@ public: return out.str(); } - std::string Generate_GPU_Kernel_ALPAKA() { + std::string Generate_GPU_Kernel_ALPAKA(std::string /*opName*/) override { std::string op; op = "\n//------ SIGMOID_KERNEL_ALPAKA\n"; - op += SP + "struct SigmoidKernel {\n"; - op += SP + SP + "template\n"; - op += SP + SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T* data, std::size_t numElements) const {\n"; - op += SP + SP + SP + "for (auto i : alpaka::uniformElements(acc, numElements)) {\n"; - op += SP + SP + SP + SP + "data[i] = static_cast(1) / (static_cast(1) + exp(-data[i]));\n"; + op += "struct SigmoidKernel {\n"; + op += SP + "template\n"; + op += SP + "ALPAKA_FN_ACC void operator()(TAcc const & acc, T const* __restrict__ data, T* __restrict__ out, std::size_t numElements) const {\n"; + op += SP + SP + "const auto idx = alpaka::getIdx(acc)[0];\n"; + op += SP + SP + "if(idx < numElements) {\n"; + op += SP + SP + SP + SP + "out[idx] = static_cast(1) / (static_cast(1) + exp(-data[idx]));\n"; op += SP + SP + SP + "}\n"; op += SP + SP + "}\n"; op += SP + "};\n"; @@ -89,14 +90,14 @@ public: std::stringstream out; auto length = ConvertShapeToLength(fShape); out << "\n//------ SIGMOID_GPU_ALPAKA\n"; - out << SP << "alpaka::WorkDivMembers workDiv_" << fNX - << "(alpaka::Vec::all((" << length << " + 256 - 1) / 256), " - << "alpaka::Vec::all(256), alpaka::Vec::all(1));\n"; - + out << SP << "auto const elementsPerThread_"<(1));\n"; + out << SP << "auto const elementsPerGrid_"< const kernelCfg_" << fNX << " = {elementsPerGrid_" << fNX << ", elementsPerThread_" << fNX << "};\n"; + out << SP << "auto const workDiv_" << fNX << " = alpaka::getValidWorkDiv(kernelCfg_" << fNX << ", devAcc, sigmoidKernel, alpaka::getPtrNative(deviceBuf_" << fNX + << "), alpaka::getPtrNative(deviceBuf_" << fNY << "), static_cast(" << length << "));\n"; out << SP << "alpaka::exec(queue, workDiv_" << fNX << ", sigmoidKernel, alpaka::getPtrNative(deviceBuf_" << fNX - << "), static_cast(" << length << "));\n"; - + << "), alpaka::getPtrNative(deviceBuf_" << fNY << "), static_cast(" << length << "));\n"; return out.str(); } diff --git a/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx b/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx index fc2f154..1537ea4 100644 --- a/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx +++ b/src/SOFIE_core/test/TestCustomModelsFromONNXForAlpakaCuda.cxx @@ -13,6 +13,9 @@ #include "LinearWithLeakyRelu_FromONNX_GPU_ALPAKA.hxx" #include "input_models/references/LinearWithLeakyRelu.ref.hxx" +#include "LinearWithSigmoid_FromONNX_GPU_ALPAKA.hxx" +#include "input_models/references/LinearWithSigmoid.ref.hxx" + #include #include #include @@ -184,7 +187,6 @@ TEST_F(SofieAlpakaTest, LinearWithLeakyRelu) auto A_d = alpaka::allocBuf(device, Ext1D::all(Idx{input.size()})); alpaka::memcpy(queue, A_d, A); alpaka::wait(queue); - cudaDeviceSynchronize(); auto result_h = alpaka::allocBuf(host, Ext1D::all(Idx{24})); @@ -205,3 +207,38 @@ TEST_F(SofieAlpakaTest, LinearWithLeakyRelu) EXPECT_LE(std::abs(res_ptr[i] - correct[i]), TOLERANCE); } } + +TEST_F(SofieAlpakaTest, LinearWithSigmoid) +{ + + constexpr float TOLERANCE = DEFAULT_TOLERANCE; + + auto A = alpaka::allocBuf(host, Ext1D::all(Idx{48})); + float *A_ptr = reinterpret_cast(alpaka::getPtrNative(A)); + + for (Idx i = 0; i < 48; ++i) { + A_ptr[i] = 1.0; + } + + auto A_d = alpaka::allocBuf(device, Ext1D::all(Idx{48})); + alpaka::memcpy(queue, A_d, A); + alpaka::wait(queue); + + auto result_h = alpaka::allocBuf(host, Ext1D::all(Idx{48})); + + { + SOFIE_LinearWithSigmoid::Session session("LinearWithSigmoid_FromONNX_GPU_ALPAKA.dat"); + auto result = session.infer(A_d); + alpaka::wait(queue); + cudaDeviceSynchronize(); + + alpaka::memcpy(queue, result_h, result); + alpaka::wait(queue); + } + + float* res_ptr = reinterpret_cast(alpaka::getPtrNative(result_h)); + float *correct = LinearWithSigmoid_ExpectedOutput::all_ones; + for (size_t i = 0; i < 24; ++i) { + EXPECT_LE(std::abs(res_ptr[i] - correct[i]), TOLERANCE); + } +}