Skip to content

Commit

Permalink
Merge pull request Tyill#6 from Tyill/AE
Browse files Browse the repository at this point in the history
Ae
  • Loading branch information
Tyill authored Jan 5, 2020
2 parents d773f73 + 07f0067 commit 86ccf50
Show file tree
Hide file tree
Showing 15 changed files with 268 additions and 49 deletions.
Binary file added example/autoEncoder/autoencoder.jpg
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
150 changes: 150 additions & 0 deletions example/autoEncoder/cpp_example.cpp
Original file line number Diff line number Diff line change
@@ -0,0 +1,150 @@

#include <string>
#include <iostream>
#include <sstream>
#include <cstdlib>
#include <map>
#include <filesystem>

#include "../cpp/snNet.h"
#include "../cpp/snTensor.h"
#include "../cpp/snOperator.h"

#include "Lib/OpenCV_3.3.0/opencv2/core/core_c.h"
#include "Lib/OpenCV_3.3.0/opencv2/core/core.hpp"
#include "Lib/OpenCV_3.3.0/opencv2/imgproc/imgproc_c.h"
#include "Lib/OpenCV_3.3.0/opencv2/imgproc/imgproc.hpp"
#include "Lib/OpenCV_3.3.0/opencv2/highgui/highgui_c.h"
#include "Lib/OpenCV_3.3.0/opencv2/highgui/highgui.hpp"

using namespace std;
namespace sn = SN_API;

// Scans imgPath/0/ .. imgPath/(classCnt-1)/ for .png files.
// imgName[i]   receives the file names (not full paths) of the pngs in class dir i.
// imgCntDir[i] receives the number of usable images, i.e. imgName[i].size(),
//              so callers can safely do imgName[i][rand() % imgCntDir[i]].
// images       is an image cache filled lazily by the caller; untouched here.
// Returns true (missing class directories are silently skipped).
bool loadImage(string& imgPath, int classCnt, vector<vector<string>>& imgName, vector<int>& imgCntDir, map<string, cv::Mat>& images){

    // Standard C++17 filesystem (the file already includes <filesystem>);
    // the pre-standard std::tr2::sys namespace is MSVC-only and removed in VS2017+.
    namespace fs = std::filesystem;

    for (int i = 0; i < classCnt; ++i){

        fs::path dir(imgPath + to_string(i) + "/");
        if (!fs::exists(dir)) continue;

        for (const auto& entry : fs::directory_iterator(dir)){

            const fs::path& p = entry.path();
            if (fs::is_regular_file(p) && (p.extension() == ".png"))
                // .string() is required: path's implicit conversion yields
                // wstring on Windows and would not compile there.
                imgName[i].push_back(p.filename().string());
        }

        // Count only the pngs actually stored, not every directory entry:
        // the caller indexes imgName[i] with rand() % imgCntDir[i], so a
        // larger count (e.g. from stray non-png files) would read out of range.
        imgCntDir[i] = (int)imgName[i].size();
    }

    return true;
}

int main(int argc, char* argv[]){

sn::Net snet;

snet.addNode("Input", sn::Input(), "FC1")
.addNode("FC1", sn::FullyConnected(256, sn::active::relu), "FC2")
.addNode("FC2", sn::FullyConnected(128, sn::active::relu), "FC3")
.addNode("FC3", sn::FullyConnected(32, sn::active::relu), "FC4")
.addNode("FC4", sn::FullyConnected(128, sn::active::relu), "FC5")
.addNode("FC5", sn::FullyConnected(256, sn::active::relu), "FC6")
.addNode("FC6", sn::FullyConnected(784, sn::active::sigmoid), "LS")
.addNode("LS", sn::LossFunction(sn::lossType::binaryCrossEntropy), "Output");

string imgPath = "c://cpp//skyNet//example//autoEncoder//images//";

int classCnt = 5, batchSz = 100, w = 28, h = 28;
float lr = 0.001F;

vector<vector<string>> imgName(classCnt);
vector<int> imgCntDir(classCnt);
map<string, cv::Mat> images;

if (!loadImage(imgPath, classCnt, imgName, imgCntDir, images)){
cout << "Error 'loadImage' path: " << imgPath << endl;
system("pause");
return -1;
}

//snet.loadAllWeightFromFile("c:\\cpp\\w.dat");


sn::Tensor inLayer(sn::snLSize(w, h, 1, batchSz));
sn::Tensor outLayer(sn::snLSize(w * h, 1, 1, batchSz));

size_t sum_metric = 0;
size_t num_inst = 0;
float accuratSumm = 0;
for (int k = 0; k < 1000; ++k){

srand(clock());

for (int i = 0; i < batchSz; ++i){

// directory
int ndir = rand() % classCnt;
while (imgCntDir[ndir] == 0)
ndir = rand() % classCnt;

// image
int nimg = rand() % imgCntDir[ndir];

// read
cv::Mat img;
string nm = imgName[ndir][nimg];
if (images.find(nm) != images.end())
img = images[nm];
else{
img = cv::imread(imgPath + to_string(ndir) + "/" + nm, CV_LOAD_IMAGE_UNCHANGED);
images[nm] = img;
}

float* refData = inLayer.data() + i * w * h;

size_t nr = img.rows, nc = img.cols;
for (size_t r = 0; r < nr; ++r){
uchar* pt = img.ptr<uchar>(r);
for (size_t c = 0; c < nc; ++c)
refData[r * nc + c] = pt[c] / 255.0;
}
}

// training
float accurat = 0;
snet.training(lr,
inLayer,
outLayer,
inLayer,
accurat);

float* refData = outLayer.data();

cv::Mat img(w, h, CV_8U);
for (size_t r = 0; r < h; ++r){
uchar* pt = img.ptr<uchar>(r);
for (size_t c = 0; c < w; ++c)
pt[c] = refData[r * w + c] * 255.0;
}

cv::namedWindow("1", 0);
cv::imshow("1", img);
cv::waitKey(1);

accuratSumm += accurat;

cout << k << " accurate " << accuratSumm / k << " " << snet.getLastErrorStr() << endl;
}

snet.saveAllWeightToFile("c:\\cpp\\w.dat");

system("pause");
return 0;
}
Binary file added example/autoEncoder/images.rar
Binary file not shown.
67 changes: 67 additions & 0 deletions example/autoEncoder/python_example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,67 @@

import os

from libskynet import*
import numpy as np
import imageio
import random
import ctypes
import datetime


# Autoencoder: 784 -> 256 -> 128 -> 32 -> 128 -> 256 -> 784,
# binary cross-entropy loss, target = input image.
net = snNet.Net()
net.addNode('In', snOperator.Input(), 'FC1') \
   .addNode('FC1', snOperator.FullyConnected(256), 'FC2') \
   .addNode('FC2', snOperator.FullyConnected(128), 'FC3') \
   .addNode('FC3', snOperator.FullyConnected(32), 'FC4') \
   .addNode('FC4', snOperator.FullyConnected(128), 'FC5') \
   .addNode('FC5', snOperator.FullyConnected(256), 'FC6') \
   .addNode('FC6', snOperator.FullyConnected(784), 'LS') \
   .addNode('LS', snOperator.LossFunction(snType.lossType.binaryCrossEntropy), 'Output')

# load of weight
#if (net.loadAllWeightFromFile('c:/cpp/w.dat')):
#    print('weight is load')
#else:
#    print('error load weight')

# list image file names per class directory 0..9
# (missing directories yield an empty list instead of raising OSError)
classCnt = 10
imgList = []
pathImg = 'c:\\cpp\\skyNet\\example\\autoEncoder\\images\\'
for i in range(classCnt):
    dirPath = pathImg + str(i)
    imgList.append(os.listdir(dirPath) if os.path.isdir(dirPath) else [])

if not any(imgList):
    raise SystemExit('no images found in ' + pathImg)

bsz = 100
lr = 0.001
accuratSumm = 0.
inLayer = np.zeros((bsz, 1, 28, 28), ctypes.c_float)
outLayer = np.zeros((bsz, 1, 1, 28 * 28), ctypes.c_float)
imgMem = {}      # cache: full path -> normalized 28x28 float array

# training cycle
for n in range(1000):

    # fill one random batch
    for i in range(bsz):
        # re-roll until a non-empty class directory is hit
        ndir = random.randint(0, classCnt - 1)
        while not imgList[ndir]:
            ndir = random.randint(0, classCnt - 1)
        nimg = random.randint(0, len(imgList[ndir]) - 1)

        nm = pathImg + str(ndir) + '/' + imgList[ndir][nimg]
        if (nm in imgMem):
            inLayer[i][0] = imgMem[nm]
        else:
            # normalize 8-bit pixels to [0,1]: the net's sigmoid output and
            # binary cross-entropy loss require targets in [0,1]
            # (the C++ twin of this example divides by 255 the same way)
            inLayer[i][0] = np.asarray(imageio.imread(nm), ctypes.c_float) / 255.0
            imgMem[nm] = inLayer[i][0].copy()

    acc = [0]
    # autoencoder training: the input batch is also the target
    net.training(lr, inLayer, outLayer, inLayer, acc)

    accuratSumm += acc[0]/bsz

    print(datetime.datetime.now().strftime('%H:%M:%S'), n, "accurate", accuratSumm / (n + 1))

# save weight
if (net.saveAllWeightToFile('c:/cpp/w.dat')):
    print('weight is save')
else:
    print('error save weight')
6 changes: 5 additions & 1 deletion mswin/VS12/skyNet.sln
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@

Microsoft Visual Studio Solution File, Format Version 12.00
# Visual Studio 2013
VisualStudioVersion = 12.0.31101.0
VisualStudioVersion = 12.0.40629.0
MinimumVisualStudioVersion = 10.0.40219.1
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "snEngine", "SNEngine.vcxproj", "{87E622F8-2436-40B6-A5A6-3F5DDE14BAC5}"
ProjectSection(ProjectDependencies) = postProject
Expand All @@ -28,6 +28,7 @@ Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "snAux", "SNAux.vcxproj", "{
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "skynet", "SkyNet.vcxproj", "{507A9B3C-E5E6-4CEC-A99D-729D2171DF81}"
ProjectSection(ProjectDependencies) = postProject
{2C7FA033-361B-4429-97E9-64F2A0A20CBE} = {2C7FA033-361B-4429-97E9-64F2A0A20CBE}
{C993F645-29F0-4079-ACBA-033BFED8B8F6} = {C993F645-29F0-4079-ACBA-033BFED8B8F6}
{F3E981BD-4BA4-4538-9D21-6ACD252F4895} = {F3E981BD-4BA4-4538-9D21-6ACD252F4895}
{87E622F8-2436-40B6-A5A6-3F5DDE14BAC5} = {87E622F8-2436-40B6-A5A6-3F5DDE14BAC5}
Expand All @@ -39,6 +40,9 @@ EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "snSIMD", "snSIMD.vcxproj", "{30FAF753-DBAE-4701-B5F4-29C7FBCBF9F6}"
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "snOperatorCPU", "snOperatorCPU.vcxproj", "{F3E981BD-4BA4-4538-9D21-6ACD252F4895}"
ProjectSection(ProjectDependencies) = postProject
{30FAF753-DBAE-4701-B5F4-29C7FBCBF9F6} = {30FAF753-DBAE-4701-B5F4-29C7FBCBF9F6}
EndProjectSection
EndProject
Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "snOperatorCUDA", "snOperatorCUDA.vcxproj", "{2C7FA033-361B-4429-97E9-64F2A0A20CBE}"
EndProject
Expand Down
4 changes: 2 additions & 2 deletions mswin/VS12/skyNet.vcxproj
Original file line number Diff line number Diff line change
Expand Up @@ -76,7 +76,7 @@
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>true</GenerateDebugInformation>
<AdditionalLibraryDirectories>$(OutDir)</AdditionalLibraryDirectories>
<AdditionalDependencies>snOperatorCPU.lib;snAux.lib;snEngine.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalDependencies>snOperatorCUDA.lib;snAux.lib;snEngine.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
Expand All @@ -97,7 +97,7 @@
<EnableCOMDATFolding>true</EnableCOMDATFolding>
<OptimizeReferences>true</OptimizeReferences>
<AdditionalLibraryDirectories>$(OutDir)</AdditionalLibraryDirectories>
<AdditionalDependencies>snOperatorCPU.lib;snAux.lib;snEngine.lib;%(AdditionalDependencies)</AdditionalDependencies>
<AdditionalDependencies>snOperatorCUDA.lib;snAux.lib;snEngine.lib;%(AdditionalDependencies)</AdditionalDependencies>
</Link>
</ItemDefinitionGroup>
<Import Project="$(VCTargetsPath)\Microsoft.Cpp.targets" />
Expand Down
6 changes: 4 additions & 2 deletions mswin/VS12/snOperatorCUDA.vcxproj
Original file line number Diff line number Diff line change
Expand Up @@ -64,7 +64,8 @@
</MultiProcessorCompilation>
<MinimalRebuild>true</MinimalRebuild>
<OpenMPSupport>true</OpenMPSupport>
<AdditionalOptions>/arch:AVX</AdditionalOptions>
<AdditionalOptions>
</AdditionalOptions>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
Expand Down Expand Up @@ -132,7 +133,8 @@
<MultiProcessorCompilation>
</MultiProcessorCompilation>
<OpenMPSupport>true</OpenMPSupport>
<AdditionalOptions>/arch:AVX</AdditionalOptions>
<AdditionalOptions>
</AdditionalOptions>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
Expand Down
19 changes: 8 additions & 11 deletions src/skynet/src/snet.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -195,19 +195,14 @@ bool SNet::createNet(Net& inout_net, std::string& out_err){
SNet::SNet(const char* jnNet, char* out_err /*sz 256*/,
SN_API::snStatusCBack sts, SN_API::snUData ud) : stsCBack_(sts), udata_(ud){

string err; SN_Base::Net net;
if (!jnParseNet(jnNet, net, err)){
string err;
SN_Base::Net net;
if (!jnParseNet(jnNet, net, err) || !createNet(net, err)){
statusMess(err);
strcpy(out_err, err.c_str());
return;
}

if (!createNet(net, err)){
statusMess(err);
strcpy(out_err, err.c_str());
return;
}

nodes_ = net.nodes;
operats_ = net.operats;

Expand Down Expand Up @@ -352,7 +347,7 @@ bool SNet::getWeightNode(const char* nodeName, SN_Base::snSize& wsz, SN_Base::sn
return false;
}

auto weight = operats_[nodeName]->getWeight();
const Tensor& weight = operats_[nodeName]->getWeight();

wsz = weight.size();

Expand Down Expand Up @@ -496,8 +491,10 @@ bool SNet::saveAllWeightToFile(const char* filePath){
snSize lSize;
for (auto opr : operats_){

data = opr.second->getWeight().getDataCPU();
lSize = opr.second->getWeight().size();
const Tensor& wt = opr.second->getWeight();

data = wt.getDataCPU();
lSize = wt.size();

if (data){
ofs << opr.first << " w " << lSize.w << " " << lSize.h << " " << lSize.d << endl;
Expand Down
16 changes: 8 additions & 8 deletions src/snBase/snBase.h
Original file line number Diff line number Diff line change
Expand Up @@ -187,23 +187,23 @@ namespace SN_Base{
return true;
}

virtual std::map<std::string, std::string> getInternPrm() const final{
virtual std::map<std::string, std::string> getInternPrm() const{
return basePrms_;
}

virtual const SN_Base::Tensor& getWeight() const final{
return baseWeight_;
}


virtual batchNorm getBatchNorm() const{
return baseBatchNorm_;
}

virtual const SN_Base::Tensor& getOutput() const final{
virtual const SN_Base::Tensor& getWeight() const{
return baseWeight_;
}

virtual const SN_Base::Tensor& getOutput() const{
return baseOut_;
}

virtual const SN_Base::Tensor& getGradient() const final{
virtual const SN_Base::Tensor& getGradient() const{
return baseGrad_;
}

Expand Down
Loading

0 comments on commit 86ccf50

Please sign in to comment.