MNIST : Caffe

Prepare

    wget http://deeplearning.net/data/mnist/mnist.pkl.gz

Loading data from pkl

    import os
    import pickle, gzip
    from matplotlib import pyplot

    print('Loading data from mnist.pkl.gz ...')
    with gzip.open('mnist.pkl.gz', 'rb') as f:
        # the pickle inside mnist.pkl.gz was written by Python 2;
        # under Python 3 load it with: pickle.load(f, encoding='latin1')
        train_set, valid_set, test_set = pickle.load(f)

    imgs_dir = 'mnist'
    os.system('mkdir -p {}'.format(imgs_dir))
    datasets = {'train': train_set, 'val': valid_set, 'test': test_set}

    for dataname, dataset in datasets.items():
        print('Converting {} dataset ...'.format(dataname))
        data_dir = os.sep.join([imgs_dir, dataname])
        os.system('mkdir -p {}'.format(data_dir))

        # each dataset is an (images, labels) pair; write every sample as a
        # 28x28 grayscale image named <index>_<label>.jpg
        for i, (img, label) in enumerate(zip(*dataset)):
            filename = '{:0>6d}_{}.jpg'.format(i, label)
            filepath = os.sep.join([data_dir, filename])
            img = img.reshape((28, 28))
            pyplot.imsave(filepath, img, cmap='gray')
            if (i + 1) % 10000 == 0:
                print('{} images converted!'.format(i + 1))

Prepare imglist for Caffe

    # gen_caffe_imglist.py
    # usage: python gen_caffe_imglist.py <image_dir> <output_list.txt>
    import os
    import sys

    input_path = sys.argv[1].rstrip(os.sep)
    output_path = sys.argv[2]

    filenames = os.listdir(input_path)
    with open(output_path, 'w') as f:
        for filename in filenames:
            filepath = os.sep.join([input_path, filename])
            # the label is encoded in the filename: <index>_<label>.jpg
            label = filename[: filename.rfind('.')].split('_')[1]
            line = '{} {}\n'.format(filepath, label)
            f.write(line)

Convert to LMDB

    python gen_caffe_imglist.py mnist/train train.txt
    python gen_caffe_imglist.py mnist/val val.txt
    python gen_caffe_imglist.py mnist/test test.txt

    /home/d/Documents/caffe/build/tools/convert_imageset ./ train.txt train_lmdb --gray --shuffle
    /home/d/Documents/caffe/build/tools/convert_imageset ./ val.txt val_lmdb --gray --shuffle
    /home/d/Documents/caffe/build/tools/convert_imageset ./ test.txt test_lmdb --gray --shuffle
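
To sanity-check the conversion, the resulting LMDBs can be opened from Python. The snippet below is a minimal sketch, not part of the original steps; it assumes the lmdb package is installed and pycaffe is on PYTHONPATH.

    import lmdb
    import caffe

    # open the training LMDB read-only and decode the first record
    env = lmdb.open('train_lmdb', readonly=True)
    with env.begin() as txn:
        key, value = next(iter(txn.cursor()))
        datum = caffe.proto.caffe_pb2.Datum()
        datum.ParseFromString(value)
        img = caffe.io.datum_to_array(datum)   # shape: (channels, height, width)
        print(key, img.shape, datum.label)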

Train (LeNet-5)

    /home/d/Documents/caffe/build/tools/caffe train -solver lenet_solver.prototxt -log_dir ./
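
lenet_solver.prototxt itself is not reproduced in these notes. Below is a minimal sketch of the kind of solver used here, assuming the LMDBs above and a net definition named lenet_train_val.prototxt; all values are illustrative, adjust as needed.

    # lenet_solver.prototxt -- illustrative sketch, not the original file
    net: "lenet_train_val.prototxt"
    test_iter: 100
    test_interval: 1000
    base_lr: 0.01
    momentum: 0.9
    weight_decay: 0.0005
    lr_policy: "inv"
    gamma: 0.0001
    power: 0.75
    display: 100
    max_iter: 36000
    snapshot: 5000
    snapshot_prefix: "mnist_lenet"
    solver_mode: CPU

max_iter and snapshot_prefix are chosen to match the mnist_lenet_iter_36000.caffemodel used in the test step below.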

Log Visualization

loss_iters

    python /home/d/Documents/caffe/tools/extra/plot_training_log.py.example 2 loss_iters.png caffe.ubuntu.d.log


accuracy_iters

    python /home/d/Documents/caffe/tools/extra/plot_training_log.py.example 0 accuracy_iters.png caffe.ubuntu.d.log


Test Model Accuracy

    /home/d/Documents/caffe/build/tools/caffe test -model lenet_test.prototxt -weights mnist_lenet_iter_36000.caffemodel -iterations 100

Test Model Time

    /home/d/Documents/caffe/build/tools/caffe time -model lenet.prototxt

Train with Augmented Data

loss_iters

    python /home/d/Documents/caffe/tools/extra/plot_training_log.py.example 2 loss_iters_aug.png mnist_train.log mnist_train_with_augmentation.log




accuracy_iters

    python /home/d/Documents/caffe/tools/extra/plot_training_log.py.example 0 accuracy_iters_aug.png mnist_train.log mnist_train_with_augmentation.log



Update solver max_iter, Resume Training

    /home/d/Documents/caffe/build/tools/caffe train -solver lenet_solver_aug.prototxt -snapshot mnist_aug_lenet_iter_36000.solverstate -log_dir ./

Caffe Sample

Gen HDF5 Data

    import pickle
    import numpy as np
    import h5py

    with open('data.pkl', 'rb') as f:
        samples, labels = pickle.load(f)
    sample_size = len(labels)

    # Caffe's HDF5Data layer expects N x dims data and N x 1 labels
    samples = np.array(samples).reshape((sample_size, 2))
    labels = np.array(labels).reshape((sample_size, 1))

    h5_filename = 'data.h5'
    with h5py.File(h5_filename, 'w') as h:
        h.create_dataset('data', data=samples)
        h.create_dataset('label', data=labels)

    # the HDF5Data layer's "source" is a text file listing .h5 files, one per line
    with open('data_h5.txt', 'w') as f:
        f.write(h5_filename)

Network Train

    name: "SimpleMLP"
    layer {
        name:   "data"
        type:   "HDF5Data"
        top:    "data"
        top:    "label"
        include {
            phase:  TRAIN
        }
        hdf5_data_param {
            source: "data_h5.txt"
            batch_size: 41
        }
    }
    layer {
        name:   "fc1"
        type:   "InnerProduct"
        bottom: "data"
        top:    "fc1"
        inner_product_param {
            num_output: 2
            weight_filler   {
                type:   "uniform"
            }
        }
    }
    layer {
        name:   "sigmoid1"
        type:   "Sigmoid"
        bottom: "fc1"
        top:    "sigmoid1"
    }
    layer {
        name:   "fc2"
        type:   "InnerProduct"
        bottom: "sigmoid1"
        top:    "fc2"
        inner_product_param {
            num_output: 2
            weight_filler   {
                type:   "uniform"
            }
        }
    }
    layer {
        name:   "loss"
        type:   "SoftmaxWithLoss"
        bottom: "fc2"
        bottom: "label"
        top:    "loss"
    }

Gen Network Picture

    python /home/d/Documents/caffe/python/draw_net.py train.prototxt mlp_train.png --rankdir BT

Network Solver

    net:    "train.prototxt"
    base_lr:    0.15
    lr_policy:  "fixed"
    display:    100
    max_iter:   2000
    momentum:   0.95
    snapshot_prefix:    "simple_mlp"
    solver_mode:    CPU

Start Train

    /home/d/Documents/caffe/build/tools/caffe train -solver solver.prototxt

Network Test

    name: "SimpleMLP"
    input:  "data"
    input_shape {
        dim:    1
        dim:    2
    }
    layer {
        name:   "fc1"
        type:   "InnerProduct"
        bottom: "data"
        top:    "fc1"
        inner_product_param {
            num_output: 2
        }
    }
    layer {
        name:   "sigmoid1"
        type:   "Sigmoid"
        bottom: "fc1"
        top:    "sigmoid1"
    }
    layer {
        name:   "fc2"
        type:   "InnerProduct"
        bottom: "sigmoid1"
        top:    "fc2"
        inner_product_param {
            num_output: 2
        }
    }
    layer {
        name:   "softmax"
        type:   "Softmax"
        bottom: "fc2"
        top:    "prob"
    }

Start Test

    import sys
    import pickle
    import numpy as np
    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D
    sys.path.append('/home/d/Documents/caffe/python')
    import caffe

    # load the trained weights into the test network definition
    net = caffe.Net('test.prototxt', 'simple_mlp_iter_2000.caffemodel', caffe.TEST)

    with open('data.pkl', 'rb') as f:
        samples, labels = pickle.load(f)
    samples = np.array(samples)
    labels = np.array(labels)

    # evaluate the network on a regular grid to draw the predicted probability surface
    X = np.arange(0, 1.05, 0.05)
    Y = np.arange(0, 1.05, 0.05)
    X, Y = np.meshgrid(X, Y)
    grids = np.array([[X[i][j], Y[i][j]] for i in range(X.shape[0]) for j in range(X.shape[1])])

    grid_probs = []
    for grid in grids:
        net.blobs['data'].data[...] = grid.reshape((1, 2))[...]
        output = net.forward()
        grid_probs.append(output['prob'][0][1])
    grid_probs = np.array(grid_probs).reshape(X.shape)
    fig = plt.figure('Sample Surface')
    ax = fig.gca(projection='3d')
    ax.plot_surface(X, Y, grid_probs, alpha=0.15, color='k', rstride=2, cstride=2, lw=0.5)

    # forward the training samples themselves and scatter them on the surface
    samples0 = samples[labels == 0]
    samples0_probs = []
    for sample in samples0:
        net.blobs['data'].data[...] = sample.reshape((1, 2))[...]
        output = net.forward()
        samples0_probs.append(output['prob'][0][1])
    samples1 = samples[labels == 1]
    samples1_probs = []
    for sample in samples1:
        net.blobs['data'].data[...] = sample.reshape((1, 2))[...]
        output = net.forward()
        samples1_probs.append(output['prob'][0][1])

    ax.scatter(samples0[:, 0], samples0[:, 1], samples0_probs, c='r', marker='o', s=50)
    ax.scatter(samples1[:, 0], samples1[:, 1], samples1_probs, c='b', marker='^', s=50)

    plt.show()

MXNet Sample

MXNet Sample

    import pickle
    import numpy as np

    def cos_curve(x):
        return 0.25 * np.sin(2 * x * np.pi + 0.5 * np.pi) + 0.5

    np.random.seed(123)
    samples = []
    labels = []

    sample_density = 50
    for i in range(sample_density):
        x1, x2 = np.random.random(2)
        bound = cos_curve(x1)
        # drop points too close to the boundary curve, label the rest by which side they fall on
        if bound - 0.1 < x2 <= bound + 0.1:
            continue
        else:
            samples.append((x1, x2))
            if x2 > bound:
                labels.append(1)
            else:
                labels.append(0)

    with open('data.pkl', 'wb') as f:
        pickle.dump((samples, labels), f)

    import matplotlib.pyplot as plt

    for i, sample in enumerate(samples):
        plt.plot(sample[0], sample[1], 'o' if labels[i] else '^',
                 mec='r' if labels[i] else 'b',
                 mfc='none',
                 markersize=10)
    x1 = np.linspace(0, 1)
    plt.plot(x1, cos_curve(x1), 'k--')
    plt.show()

    # define the network symbol

    import numpy as np
    import mxnet as mx

    data = mx.sym.Variable('data')
    fc1 = mx.sym.FullyConnected(data=data, name='fc1', num_hidden=2)
    sigmoid1 = mx.sym.Activation(data=fc1, name='sigmoid1', act_type='sigmoid')
    fc2 = mx.sym.FullyConnected(data=sigmoid1, name='fc2', num_hidden=2)
    mlp = mx.sym.SoftmaxOutput(data=fc2, name='softmax')

    # visualize the symbol with graphviz
    shape = {'data': (2,)}
    mlp_dot = mx.viz.plot_network(symbol=mlp, shape=shape)
    mlp_dot.render('simple_mlp.gv', view=True)

    # train

    import pickle
    import logging

    with open('data.pkl', 'rb') as f:
        samples, labels = pickle.load(f)

    logging.getLogger().setLevel(logging.DEBUG)

    batch_size = len(labels)
    samples = np.array(samples)
    labels = np.array(labels)

    train_iter = mx.io.NDArrayIter(samples, labels, batch_size)

    model = mx.model.FeedForward.create(
        symbol=mlp,
        X=train_iter,
        num_epoch=1000,
        learning_rate=0.1,
        momentum=0.99
    )
    '''
    # equivalent two-step form
    model = mx.model.FeedForward(
        symbol=mlp,
        num_epoch=1000,
        learning_rate=0.1,
        momentum=0.99
    )
    model.fit(X=train_iter)
    '''
    print(model.predict(mx.nd.array([[0.5, 0.5]])))

    # visualize the learned decision surface

    import matplotlib.pyplot as plt
    from mpl_toolkits.mplot3d import Axes3D

    X = np.arange(0, 1.05, 0.05)
    Y = np.arange(0, 1.05, 0.05)
    X, Y = np.meshgrid(X, Y)

    grids = mx.nd.array([[X[i][j], Y[i][j]] for i in range(X.shape[0]) for j in range(X.shape[1])])
    grid_probs = model.predict(grids)[:, 1].reshape(X.shape)

    fig = plt.figure('Sample Surface')
    ax = fig.gca(projection='3d')
    ax.plot_surface(X, Y, grid_probs, alpha=0.15, color='k', rstride=2, cstride=2, lw=0.5)

    samples0 = samples[labels == 0]
    samples0_probs = model.predict(samples0)[:, 1]
    samples1 = samples[labels == 1]
    samples1_probs = model.predict(samples1)[:, 1]

    ax.scatter(samples0[:, 0], samples0[:, 1], samples0_probs, c='r', marker='o', s=50)
    ax.scatter(samples1[:, 0], samples1[:, 1], samples1_probs, c='b', marker='^', s=50)

    plt.show()


Matplotlib RGB & OpenCV BGR

Matplotlib RGB & OpenCV BGR

    import matplotlib.pyplot as plt
    import numpy as np
    import cv2

    # a 3x3 test image; the channel order is written as R, G, B
    img = np.array([
        [[255, 0, 0], [0, 255, 0], [0, 0, 255]],
        [[255, 255, 0], [255, 0, 255], [0, 255, 255]],
        [[255, 255, 255], [128, 128, 128], [0, 0, 0]],
    ], dtype=np.uint8)

    # pyplot interprets the array as RGB, OpenCV as BGR
    plt.imsave('img_pyplot.png', img)
    cv2.imwrite('img_cv2.jpg', img)
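
Because pyplot.imsave assumes RGB while cv2.imwrite assumes BGR, the two files above come out with red and blue swapped. A small sketch of how to get matching colors, assuming the array is meant to be RGB (the output filename is just an example):

    # convert RGB -> BGR before handing the array to OpenCV
    cv2.imwrite('img_cv2_rgb.jpg', cv2.cvtColor(img, cv2.COLOR_RGB2BGR))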


Matplotlib 3D Example 2 : Python

3D Example 2

    import matplotlib.pyplot as plt
    import numpy as np
    from mpl_toolkits.mplot3d import Axes3D

    np.random.seed(42)

    n_samples = 500
    dim = 3

    samples = np.random.multivariate_normal(
        np.zeros(dim),
        np.eye(dim),
        n_samples
    )

    # rescale every point so the samples fill the unit ball uniformly
    for i in range(samples.shape[0]):
        r = np.power(np.random.random(), 1.0 / 3.0)
        samples[i] *= r / np.linalg.norm(samples[i])

    # split the points by the plane z = 3x + 2y - 1
    upper_samples = []
    lower_samples = []
    for x, y, z in samples:
        if z > 3 * x + 2 * y - 1:
            upper_samples.append((x, y, z))
        else:
            lower_samples.append((x, y, z))

    fig = plt.figure('3D scatter plot')
    ax = fig.add_subplot(111, projection='3d')
    uppers = np.array(upper_samples)
    lowers = np.array(lower_samples)

    ax.scatter(uppers[:, 0], uppers[:, 1], uppers[:, 2], c='r', marker='o')
    ax.scatter(lowers[:, 0], lowers[:, 1], lowers[:, 2], c='g', marker='^')

    plt.show()

Matplotlib 3D Example : Python

3D Example

    import matplotlib.pyplot as plt
    import numpy as np
    from mpl_toolkits.mplot3d import Axes3D

    np.random.seed(42)

    n_grids = 51          # number of grid points per axis
    c = n_grids // 2      # index of the zero-frequency bin
    nf = 2                # half-width of the low-frequency band

    x = np.linspace(0, 1, n_grids)
    y = np.linspace(0, 1, n_grids)
    X, Y = np.meshgrid(x, y)

    # build a Hermitian low-frequency spectrum, then inverse-FFT it to get a smooth random surface
    spectrum = np.zeros((n_grids, n_grids), dtype=complex)
    noise = [complex(x, y) for x, y in np.random.uniform(-1, 1, ((2 * nf + 1) ** 2 // 2, 2))]
    noisy_block = np.concatenate((noise, [0j], np.conjugate(noise[::-1])))
    spectrum[c - nf: c + nf + 1, c - nf: c + nf + 1] = noisy_block.reshape((2 * nf + 1, 2 * nf + 1))
    Z = np.real(np.fft.ifft2(np.fft.ifftshift(spectrum)))

    fig = plt.figure('3D surface & wire')
    ax = fig.add_subplot(1, 2, 1, projection='3d')
    ax.plot_surface(X, Y, Z, alpha=0.7, cmap='jet', rstride=1, cstride=1, lw=0)

    ax = fig.add_subplot(1, 2, 2, projection='3d')
    ax.plot_wireframe(X, Y, Z, rstride=3, cstride=3, lw=0.5)

    plt.show()

Matplotlib 2D Example : Python

2D Example

    import numpy as np
    import matplotlib as mpl
    import matplotlib.pyplot as plt

    mpl.rcParams['xtick.labelsize'] = 24
    mpl.rcParams['ytick.labelsize'] = 24
    np.random.seed(42)

    x = np.linspace(0, 5, 100)
    y = 2 * np.sin(x) + 0.3 * x ** 2
    y_data = y + np.random.normal(scale=0.3, size=100)
    plt.figure('data')
    plt.plot(x, y_data, '.')

    plt.figure('model')
    plt.plot(x, y)

    plt.figure('data & model')
    plt.plot(x, y, 'k', lw=3)
    plt.scatter(x, y_data)

    plt.savefig('result.png')

    plt.show()



Caffe Installation : Ubuntu 16.04

Prepare

    sudo apt update
    sudo apt install build-essential git libatlas-base-dev
    sudo apt-get install python-pip
    pip install --upgrade pip
    sudo apt-get install graphviz
    sudo pip install graphviz
    sudo apt install libprotobuf-dev libleveldb-dev libsnappy-dev libboost-all-dev libhdf5-serial-dev protobuf-compiler gfortran libjpeg62 libfreeimage-dev libgoogle-glog-dev libbz2-dev libxml2-dev libxslt-dev libffi-dev libssl-dev libgflags-dev liblmdb-dev python-yaml
    sudo apt-get install libopencv-dev python-opencv

Config

    # git clone https://github.com/BVLC/caffe.git
    # Or
    unzip caffe-master.zip
    cd caffe-master/
    cp Makefile.config.example Makefile.config

    d@ubuntu:~/Documents/caffe$ diff Makefile.config.example Makefile.config
    8c8
    < # CPU_ONLY := 1
    ---
    > CPU_ONLY := 1
    94c94
    < # WITH_PYTHON_LAYER := 1
    ---
    > WITH_PYTHON_LAYER := 1
    97,98c97,98
    < INCLUDE_DIRS := $(PYTHON_INCLUDE) /usr/local/include
    < LIBRARY_DIRS := $(PYTHON_LIB) /usr/local/lib /usr/lib
    ---
    > INCLUDE_DIRS := $(PYTHON_INCLUDE) /usr/local/include /usr/include/hdf5/serial/
    > LIBRARY_DIRS := $(PYTHON_LIB) /usr/local/lib /usr/lib /usr/lib/x86_64-linux-gnu/hdf5/serial/
    d@ubuntu:~/Documents/caffe$

Compile & Test

    export LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/usr/include/hdf5
    export PYTHONPATH=$PYTHONPATH:/home/d/Documents/caffe/python
    make pycaffe -j
    make all -j
    make test -j
    make runtest
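
A quick sanity check that the build and PYTHONPATH are set up correctly (not part of the original steps):

    python -c "import caffe"   # should exit without an ImportError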

"virtual memory exhausted" (add swap)

    sudo mkdir /opt/images/
    sudo rm -rf /opt/images/swap
    sudo dd if=/dev/zero of=/opt/images/swap bs=1024 count=10240000
    sudo mkswap /opt/images/swap
    sudo swapon /opt/images/swap
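
Once the build has finished, the temporary swap file can be disabled and removed again (optional):

    sudo swapoff /opt/images/swap
    sudo rm /opt/images/swap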

Qt + OpenCV Example

Source

    #include <QCoreApplication>

    #include <opencv2/opencv.hpp>

    int main(int argc, char *argv[])
    {
        //QCoreApplication a(argc, argv);

        using namespace cv;
        Mat image = imread("/Users/water/Downloads/Share.jpg");
        imshow("Output", image);

        waitKey(1000);

        return 0;

        //return a.exec();
    }

.pro (qmake project file)

    QT -= gui

    CONFIG += c++11 console
    CONFIG -= app_bundle

    # The following define makes your compiler emit warnings if you use
    # any Qt feature that has been marked deprecated (the exact warnings
    # depend on your compiler). Please consult the documentation of the
    # deprecated API in order to know how to port your code away from it.
    DEFINES += QT_DEPRECATED_WARNINGS

    # You can also make your code fail to compile if it uses deprecated APIs.
    # In order to do so, uncomment the following line.
    # You can also select to disable deprecated APIs only up to a certain version of Qt.
    #DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000    # disables all the APIs deprecated before Qt 6.0.0

    SOURCES += \
            main.cpp

    # Default rules for deployment.
    qnx: target.path = /tmp/$${TARGET}/bin
    else: unix:!android: target.path = /opt/$${TARGET}/bin
    !isEmpty(target.path): INSTALLS += target

    INCLUDEPATH += /usr/local/include
    INCLUDEPATH += /usr/local/include/opencv
    INCLUDEPATH += /usr/local/include/opencv2
    LIBS += -L/usr/local/lib \
        -lopencv_core \
        -lopencv_highgui \
        -lopencv_imgproc \
        -lopencv_imgcodecs \