Skip to content

Commit

Permalink
[ trivial ] apply clang-format & revert some temporary code & fix bug
Browse files Browse the repository at this point in the history
- clang-format is applied.
- revert Android.mk
- fix bug in registerClKernels

Signed-off-by: Eunju Yang <[email protected]>
  • Loading branch information
EunjuYang committed Nov 29, 2024
1 parent 4d663cd commit f778ceb
Show file tree
Hide file tree
Showing 4 changed files with 13 additions and 8 deletions.
7 changes: 3 additions & 4 deletions nntrainer/cl_context.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -44,9 +44,8 @@ static void add_default_object(ClContext &cc) {
ml::train::LayerType::LAYER_ADDITION);

// @todo swiglulayercl also needs to be updated.
cc.registerFactory(nntrainer::createLayer<SwiGLULayerCl>,
SwiGLULayerCl::type,
ml::train::LayerType::LAYER_SWIGLU);
cc.registerFactory(nntrainer::createLayer<SwiGLULayerCl>, SwiGLULayerCl::type,
ml::train::LayerType::LAYER_SWIGLU);

if (ReshapeLayerCl::registerClKernels()) {
cc.registerFactory(nntrainer::createLayer<ReshapeLayerCl>,
Expand All @@ -56,7 +55,7 @@ static void add_default_object(ClContext &cc) {

// @todo rmsnormlayercl also needs to be updated.
cc.registerFactory(nntrainer::createLayer<RMSNormLayerCl>,
RMSNormLayerCl::type, ml::train::LayerType::LAYER_RMSNORM);
RMSNormLayerCl::type, ml::train::LayerType::LAYER_RMSNORM);

if (ConcatLayerCl::registerClKernels()) {
cc.registerFactory(nntrainer::createLayer<ConcatLayerCl>,
Expand Down
6 changes: 4 additions & 2 deletions nntrainer/layers/cl_layers/concat_cl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -242,8 +242,10 @@ static constexpr size_t INPUT_IDX_2 = 1;
bool ConcatLayerCl::registerClKernels() {

// check if already registered
if (!layer_kernel_ptrs.empty())
return true;
if (!layer_kernel_ptrs.empty()) {
ml_loge("kernels for concat layer are already registered");
return false;
}

do {

Expand Down
6 changes: 4 additions & 2 deletions nntrainer/layers/cl_layers/reshape_cl.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -52,8 +52,10 @@ static constexpr size_t SINGLE_INOUT_IDX = 0;
bool ReshapeLayerCl::registerClKernels() {

// check if already registered
if (!layer_kernel_ptrs.empty())
return true;
if (!layer_kernel_ptrs.empty()) {
ml_loge("kernels for reshape layer are already registered");
return false;
}

do {
ClContext::SharedPtrClKernel kernel_copy_ptr = nullptr;
Expand Down
2 changes: 2 additions & 0 deletions test/jni/Android.mk
Original file line number Diff line number Diff line change
Expand Up @@ -444,11 +444,13 @@ LOCAL_SRC_FILES := \
../unittest/layers/unittest_layers_impl.cpp \
../unittest/layers/unittest_layers_transpose_cl.cpp \
../unittest/layers/unittest_layers_concat_cl.cpp \
../unittest/layers/unittest_layers_swiglu_cl.cpp \
../unittest/layers/unittest_layers_fully_connected_cl.cpp \
../unittest/layers/unittest_layers_input.cpp \
../unittest/layers/unittest_layers_loss.cpp \
../unittest/layers/unittest_layers_reshape_cl.cpp \
../unittest/layers/unittest_layers_fully_connected.cpp \
../unittest/layers/unittest_layers_rmsnorm_cl.cpp \
../unittest/layers/unittest_layers_batch_normalization.cpp \
../unittest/layers/unittest_layers_layer_normalization.cpp \
../unittest/layers/unittest_layers_convolution2d.cpp \
Expand Down

0 comments on commit f778ceb

Please sign in to comment.