Collecting Data & Removing Duplicates

Collecting Data from the Web with Python

Remove duplicate images

1. Remove invalid images

  1. import os
  2. import sys
  3. import cv2
  4.  
  5. from collect_data import SUPPORTED_FORMATS
  6.  
  7. input_path = sys.argv[1]
  8.  
  9. for root, dirs, files in os.walk(input_path):
  10.     for filename in files:
  11.         ext = filename[filename.rfind('.')+1:].lower()
  12.         if ext not in SUPPORTED_FORMATS:
  13.             continue
  14.         filepath = os.sep.join([root, filename])
  15.         if cv2.imread(filepath) is None:
  16.             os.system('rm {}'.format(filepath))
  17.             print('{} is not a valid image file. Deleted!'.format(filepath))

2. Remove duplicate files

FDUPES is a program for identifying duplicate files residing
within specified directories.

  1. sudo apt-get install fdupes
  2. fdupes -rdN ./

3. Remove visually similar or duplicate images

3.1 Down scale

  1. import os
  2. import cv2
  3. import sys
  4.  
  5. input_path = sys.argv[1].rstrip(os.sep)
  6.  
  7. target_short_edge = int(sys.argv[2])
  8.  
  9. for root, dirs, files in os.walk(input_path):
  10.     print('scanning {} ...'.format(root))
  11.     for filename in files:
  12.         filepath = os.sep.join([root, filename])
  13.         img = cv2.imread(filepath)
  14.         h, w = img.shape[:2]
  15.         short_edge = min(w, h)
  16.  
  17.         if short_edge > target_short_edge:
  18.             scale = float(target_short_edge) / float(short_edge)
  19.             new_w = int(round(w * scale))
  20.             new_h = int(round(h * scale))
  21.             print('Down sampling {} from {} x {} to {} x {} ...'.format(
  22.                 filepath, w, w, new_w, new_h
  23.             ))
  24.             img = cv2.resize(img, (new_w, new_h))
  25.             cv2.imwrite(filepath, img)
  26. print('Done!')

3.2 Find image dupes

  1. sudo apt-get install findimagedupes
  2. findimagedupes -R train > dup_list

3.3 Remove duplicates

  1. import os
  2. import sys
  3. dup_list = sys.argv[1]
  4. with open(dup_list, 'r') as f:
  5.     lines = f.readlines()
  6.     for line in lines:
  7.         dups = line.split()
  8.         print('Removing duplicates of {}'.format(dups[0]))
  9.         for dup in dups[1:]:
  10.             cmd = 'rm {}'.format(dup)
  11.             os.system(cmd)

MNIST : MXNet

MNIST Data (refer to MNIST : Caffe)

Prepare imglist for MXNet

  1. import os
  2. import sys
  3.  
  4. input_path = sys.argv[1].rstrip(os.sep)
  5.  
  6. out_path = sys.argv[2]
  7.  
  8. filenames = os.listdir(input_path)
  9. with open(out_path, 'w') as f:
  10.     for i, filename in enumerate(filenames):
  11.         filepath = os.sep.join([input_path, filename])
  12.         label = filename[: filename.rfind('.')].split('_')[1]
  13.  
  14.         line = '{}\t{}\t{}\n'.format(i, label, filepath)
  15.         f.write(line)

Convert to rec

  1. python gen_mxnet_imglist.py mnist/train train.lst
  2. python gen_mxnet_imglist.py mnist/val val.lst
  3. python gen_mxnet_imglist.py mnist/test test.lst
  4.  
  5. /home/d/mxnet/bin/im2rec train.lst ./ train.rec color=0
  6. /home/d/mxnet/bin/im2rec val.lst ./ val.rec color=0
  7. /home/d/mxnet/bin/im2rec test.lst ./ test.rec color=0

Train (LeNet-5)

import mxnet as mx

# LeNet-5 style convolutional network for 28x28 single-channel MNIST
# digits: two conv + max-pool stages, one 500-unit ReLU layer, and a
# 10-way softmax output.
data = mx.symbol.Variable('data')

# Stage 1: 20 filters of 5x5, then 2x2 max-pooling with stride 2.
conv1 = mx.symbol.Convolution(data=data, kernel=(5, 5), num_filter=20)
pool1 = mx.symbol.Pooling(data=conv1, pool_type="max", kernel=(2, 2), stride=(2, 2))

# Stage 2: 50 filters of 5x5, then 2x2 max-pooling with stride 2.
conv2 = mx.symbol.Convolution(data=pool1, kernel=(5, 5), num_filter=50)
pool2 = mx.symbol.Pooling(data=conv2, pool_type="max", kernel=(2, 2), stride=(2, 2))

# Classifier head: flatten -> fc(500) + ReLU -> fc(10).
flatten = mx.symbol.Flatten(data=pool2)
fc1 = mx.symbol.FullyConnected(data=flatten, num_hidden=500)
relu1 = mx.symbol.Activation(data=fc1, act_type="relu")

fc2 = mx.symbol.FullyConnected(data=relu1, num_hidden=10)

lenet5 = mx.symbol.SoftmaxOutput(data=fc2, name='softmax')

# Train on the first GPU.
mod = mx.mod.Module(lenet5, context=mx.gpu(0))

#

# Training iterator with light augmentation: random crops of 24-28
# pixels and rotations up to +-15 degrees. mean_r=128 and
# scale=0.00390625 (= 1/256) normalize pixels to roughly [-0.5, 0.5].
train_dataiter = mx.io.ImageRecordIter(
    path_imgrec="train.rec",
    data_shape=(1, 28, 28),
    batch_size=50,
    mean_r=128,
    scale=0.00390625,
    rand_crop=True,
    min_crop_size=24,
    max_crop_size=28,
    max_rotate_angle=15,
    fill_value=0
)
# Validation iterator: same normalization, no augmentation.
val_dataiter = mx.io.ImageRecordIter(
    path_imgrec="val.rec",
    data_shape=(1, 28, 28),
    batch_size=100,
    mean_r=128,
    scale=0.00390625,
)

#

import logging

# Log to both the console (DEBUG level) and a file.
logging.getLogger().setLevel(logging.DEBUG)
fh = logging.FileHandler('train_mnist_lenet.log')
logging.getLogger().addHandler(fh)

# SGD with momentum; learning rate decays by 5% every 1000 batches.
lr_scheduler = mx.lr_scheduler.FactorScheduler(1000, factor=0.95)
optimizer_params = {
    'learning_rate': 0.01,
    'momentum': 0.9,
    'wd': 0.0005,
    'lr_scheduler': lr_scheduler
}

# Save parameters every 5 epochs under the "mnist_lenet" prefix.
checkpoint = mx.callback.do_checkpoint('mnist_lenet', period=5)

mod.fit(train_dataiter,
        eval_data=val_dataiter,
        optimizer_params=optimizer_params,
        num_epoch=36,
        epoch_end_callback=checkpoint)

Log Visualization

Test Model Accuracy

import mxnet as mx

# Evaluate the trained LeNet checkpoint on the held-out test set, using
# the same mean/scale preprocessing as training (scale 0.00390625 = 1/256).
test_dataiter = mx.io.ImageRecordIter(
    path_imgrec="test.rec",
    data_shape=(1, 28, 28),
    batch_size=100,
    mean_r=128,
    scale=0.00390625,
)

# Load the parameters saved at epoch 35 by the training checkpoint.
mod = mx.mod.Module.load('mnist_lenet', 35, context=mx.gpu(0))

# Bind input/label shapes for inference only.
mod.bind(
    data_shapes=test_dataiter.provide_data,
    label_shapes=test_dataiter.provide_label,
    for_training=False)

metric = mx.metric.create('acc')  # classification accuracy

mod.score(test_dataiter, metric)

for name, val in metric.get_name_value():
    print('{} = {:.2f}%'.format(name, val * 100))

Test Model Time

  1. import time
  2. import mxnet as mx
  3.  
  4. benchmark_dataiter = mx.io.ImageRecordIter(
  5.     path_imgrec="test.rec",
  6.     data_shape=(1, 28, 28),
  7.     batch_size=64,
  8.     mean_r=128,
  9.     scale=0.00390625,
  10. )
  11.  
  12. mod = mx.mod.Module.load('mnist_lenet', 35, context=mx.gpu(0))
  13. mod.bind(
  14.     data_shapes=benchmark_dataiter.provide_data,
  15.     label_shapes=benchmark_dataiter.provide_label,
  16.     for_training=False)
  17.  
  18. start = time.time()
  19.  
  20. for i, batch in enumerate(benchmark_dataiter):
  21.     mod.forward(batch)
  22.  
  23. time_elapsed = time.time() - start
  24. msg = '{} batches iterated!\nAverage forward time per batch: {:.6f} ms'
  25. print(msg.format(i + 1, 1000 * time_elapsed / float(i)))

MNIST : Caffe

Prepare

  1. wget http://deeplearning.net/data/mnist/mnist.pkl.gz

Loading data from pkl

  1. import os
  2. import pickle, gzip
  3. from matplotlib import pyplot
  4.  
  5. print('Loading data from mnist.pkl.gz ...')
  6. with gzip.open('mnist.pkl.gz', 'rb') as f:
  7.     train_set, valid_set, test_set = pickle.load(f)
  8.  
  9. imgs_dir = 'mnist'
  10. os.system('mkdir -p {}'.format(imgs_dir))
  11. datasets = {'train': train_set, 'val': valid_set, 'test': test_set}
  12.  
  13. for dataname, dataset in datasets.items():
  14.     print('Converting {} dataset ...'.format(dataname))
  15.     data_dir = os.sep.join([imgs_dir, dataname])
  16.  
  17.     os.system('mkdir -p {}'.format(data_dir))
  18.  
  19.     for i, (img, label) in enumerate(zip(*dataset)):
  20.  
  21.         filename = '{:0>6d}_{}.jpg'.format(i, label)
  22.  
  23.         filepath = os.sep.join([data_dir, filename])
  24.  
  25.         img = img.reshape((28, 28))
  26.  
  27.         pyplot.imsave(filepath, img, cmap='gray')
  28.         if (i + 1) % 10000 == 0:
  29.             print('{} images converted!'.format(i + 1))

Prepare imglist for Caffe

  1. import os
  2. import sys
  3.  
  4. input_path = sys.argv[1].rstrip(os.sep)
  5.  
  6. output_path = sys.argv[2]
  7.  
  8. filenames = os.listdir(input_path)
  9. with open(output_path, 'w') as f:
  10.     for filename in filenames:
  11.  
  12.         filepath = os.sep.join([input_path, filename])
  13.  
  14.         label = filename[: filename.rfind('.')].split('_')[1]
  15.  
  16.         line = '{} {}\n'.format(filepath, label)
  17.         f.write(line)

Convert to LMDB

  1. python gen_caffe_imglist.py mnist/train train.txt
  2. python gen_caffe_imglist.py mnist/val val.txt
  3. python gen_caffe_imglist.py mnist/test test.txt
  4.  
  5. /home/d/Documents/caffe/build/tools/convert_imageset ./ train.txt train_lmdb --gray --shuffle
  6. /home/d/Documents/caffe/build/tools/convert_imageset ./ val.txt val_lmdb --gray --shuffle
  7. /home/d/Documents/caffe/build/tools/convert_imageset ./ test.txt test_lmdb --gray --shuffle

Train (LeNet-5)

  1. /home/d/Documents/caffe/build/tools/caffe train -solver lenet_solver.prototxt -log_dir ./

Log Visualization

loss_iters

  1. python /home/d/Documents/caffe/tools/extra/plot_training_log.py.example 2 loss_iters.png caffe.ubuntu.d.log


accuracy_iters

  1. python /home/d/Documents/caffe/tools/extra/plot_training_log.py.example 0 accuracy_iters.png caffe.ubuntu.d.log


Test Model Accuracy

  1. /home/d/Documents/caffe/build/tools/caffe test -model lenet_test.prototxt -weights mnist_lenet_iter_36000.caffemodel -iterations 100

Test Model Time

  1. /home/d/Documents/caffe/build/tools/caffe time -model lenet.prototxt

Augmented, Train

loss_iters

  1. python /home/d/Documents/caffe/tools/extra/plot_training_log.py.example 2 loss_iters_aug.png mnist_train.log mnist_train_with_augmentation.log




accuracy_iters

  1. python /home/d/Documents/caffe/tools/extra/plot_training_log.py.example 0 accuracy_iters_aug.png mnist_train.log mnist_train_with_augmentation.log



Update solver max_iter, Train

  1. /home/d/Documents/caffe/build/tools/caffe train -solver lenet_solver_aug.prototxt -snapshot mnist_aug_lenet_iter_36000.solverstate -log_dir ./

Caffe Sample

Gen HDF5 Data

  1. import pickle
  2. import numpy as np
  3. import h5py
  4.  
  5. with open('data.pkl', 'rb') as f:
  6.     samples, labels = pickle.load(f)
  7. sample_size = len(labels)
  8.  
  9. samples = np.array(samples).reshape((sample_size, 2))
  10. labels = np.array(labels).reshape((sample_size, 1))
  11.  
  12. h5_filename = 'data.h5'
  13. with h5py.File(h5_filename, 'w') as h:
  14.     h.create_dataset('data', data=samples)
  15.     h.create_dataset('label', data=labels)
  16.  
  17. with open('data_h5.txt', 'w') as f:
  18.     f.write(h5_filename)

Network Train

# Training network: HDF5 input -> fc1(2) -> sigmoid -> fc2(2) -> softmax loss.
name: "SimpleMLP"
layer {
    name:   "data"
    type:   "HDF5Data"
    top:    "data"
    top:    "label"
    include {
        phase:  TRAIN
    }
    hdf5_data_param {
        # Text manifest listing the HDF5 data file(s).
        source: "data_h5.txt"
        batch_size: 41
    }
}
layer {
    # First fully connected layer: 2 inputs -> 2 hidden units.
    name:   "fc1"
    type:   "InnerProduct"
    bottom: "data"
    top:    "fc1"
    inner_product_param {
        num_output: 2
        weight_filler   {
            type:   "uniform"
        }
    }
}
layer {
    name:   "sigmoid1"
    type:   "Sigmoid"
    bottom: "fc1"
    top:    "sigmoid1"
}
layer {
    # Second fully connected layer: 2 hidden units -> 2 class scores.
    name:   "fc2"
    type:   "InnerProduct"
    bottom: "sigmoid1"
    top:    "fc2"
    inner_product_param {
        num_output: 2
        weight_filler   {
            type:   "uniform"
        }
    }
}
layer {
    # Softmax + cross-entropy loss over the two classes.
    name:   "loss"
    type:   "SoftmaxWithLoss"
    bottom: "fc2"
    bottom: "label"
    top:    "loss"
}

Gen Network Picture

  1. python /home/d/Documents/caffe/python/draw_net.py train.prototxt mlp_train.png --rankdir BT

Network Solver

# Solver for the SimpleMLP toy network defined in train.prototxt.
net:    "train.prototxt"
base_lr:    0.15
lr_policy:  "fixed"     # constant learning rate for the whole run
display:    100         # log every 100 iterations
max_iter:   2000
momentum:   0.95
snapshot_prefix:    "simple_mlp"
solver_mode:    CPU

Start Train

  1. /home/d/Documents/caffe/build/tools/caffe train -solver solver.prototxt

Network Test

# Deploy/test network: same fc1 -> sigmoid -> fc2 stack as the training
# definition, but with an explicit single-sample input and a plain
# Softmax "prob" output instead of the loss layer.
name: "SimpleMLP"
input:  "data"
input_shape {
    dim:    1
    dim:    2
}
layer {
    name:   "fc1"
    type:   "InnerProduct"
    bottom: "data"
    top:    "fc1"
    inner_product_param {
        num_output: 2
    }
}
layer {
    name:   "sigmoid1"
    type:   "Sigmoid"
    bottom: "fc1"
    top:    "sigmoid1"
}
layer {
    name:   "fc2"
    type:   "InnerProduct"
    bottom: "sigmoid1"
    top:    "fc2"
    inner_product_param {
        num_output: 2
    }
}
layer {
    # Class probabilities for inference.
    name:   "softmax"
    type:   "Softmax"
    bottom: "fc2"
    top:    "prob"
}

Start Test

  1. import sys
  2. import pickle
  3. import numpy as np
  4. import matplotlib.pyplot as plt
  5. from mpl_toolkits.mplot3d import Axes3D
  6. sys.path.append('/home/d/Documents/caffe/python')
  7. import caffe
  8.  
  9. net = caffe.Net('test.prototxt', 'simple_mlp_iter_2000.caffemodel', caffe.TEST)
  10.  
  11. with open('data.pkl', 'rb') as f:
  12.     samples, labels = pickle.load(f)
  13. samples = np.array(samples)
  14. labels = np.array(labels)
  15.  
  16. X = np.arange(0, 1.05, 0.05)
  17. Y = np.arange(0, 1.05, 0.05)
  18. X, Y = np.meshgrid(X, Y)
  19.  
  20. grids = np.array([[X[i][j], Y[i][j]] for i in range(X.shape[0]) for j in range(X.shape[1])])
  21.  
  22. grid_probs = []
  23. for grid in grids:
  24.     net.blobs['data'].data[...] = grid.reshape((1, 2))[...]
  25.     output = net.forward()
  26.     grid_probs.append(output['prob'][0][1])
  27. grid_probs = np.array(grid_probs).reshape(X.shape)
  28. fig = plt.figure('Sample Surface')
  29. ax = fig.gca(projection='3d')
  30. ax.plot_surface(X, Y, grid_probs, alpha=0.15, color='k', rstride=2, cstride=2, lw=0.5)
  31.  
  32. samples0 = samples[labels==0]
  33. samples0_probs = []
  34. for sample in samples0:
  35.     net.blobs['data'].data[...] = sample.reshape((1, 2))[...]
  36.     output = net.forward()
  37.     samples0_probs.append(output['prob'][0][1])
  38. samples1 = samples[labels==1]
  39. samples1_probs = []
  40. for sample in samples1:
  41.     net.blobs['data'].data[...] = sample.reshape((1, 2))[...]
  42.     output = net.forward()
  43.     samples1_probs.append(output['prob'][0][1])
  44.  
  45. ax.scatter(samples0[:, 0], samples0[:, 1], samples0_probs, c='r', marker='o', s=50)
  46. ax.scatter(samples1[:, 0], samples1[:, 1], samples1_probs, c='b', marker='^', s=50)
  47.  
  48. plt.show()

MXNet Sample

MXNet Sample

import pickle
import numpy as np

# Boundary curve separating the two classes in [0, 1] x [0, 1].
def cos_curve(x):
    return 0.25 * np.sin(2 * x * np.pi + 0.5 * np.pi) + 0.5

np.random.seed(123)  # reproducible data set
samples = []
labels = []

# Draw random 2-D points and label them by which side of the curve they
# fall on, leaving a +-0.1 margin around the boundary unsampled.
sample_density = 50
for i in range(sample_density):
    x1, x2 = np.random.random(2)

    bound = cos_curve(x1)

    if bound - 0.1 < x2 <= bound + 0.1:
        continue
    else:
        samples.append((x1, x2))

        if x2 > bound:
            labels.append(1)
        else:
            labels.append(0)

with open('data.pkl', 'wb') as f:
    pickle.dump((samples, labels), f)

import matplotlib.pyplot as plt

# Visualize: hollow circles for label 1 (red edge), triangles for label 0
# (blue edge), plus the dashed boundary curve.
for i, sample in enumerate(samples):
    plt.plot(sample[0], sample[1], 'o' if labels[i] else '^',
             mec='r' if labels[i] else 'b',
             mfc='none',
             markersize=10)
x1 = np.linspace(0, 1)
plt.plot(x1, cos_curve(x1), 'k--')
plt.show()

#

import numpy as np
import mxnet as mx

# Two-layer MLP: 2 inputs -> fc1(2) + sigmoid -> fc2(2) -> softmax.
data = mx.sym.Variable('data')

fc1 = mx.sym.FullyConnected(data=data, name='fc1', num_hidden=2)

sigmoid1 = mx.sym.Activation(data=fc1, name='sigmoid1', act_type='sigmoid')

fc2 = mx.sym.FullyConnected(data=sigmoid1, name='fc2', num_hidden=2)

mlp = mx.sym.SoftmaxOutput(data=fc2, name='softmax')

# Render the network graph to simple_mlp.gv.
shape = {'data': (2,)}
mlp_dot = mx.viz.plot_network(symbol=mlp, shape=shape)
mlp_dot.render('simple_mlp.gv', view=True)

#

import pickle
import logging

with open('data.pkl', 'rb') as f:
    samples, labels = pickle.load(f)

logging.getLogger().setLevel(logging.DEBUG)  # surface per-epoch training logs

# Full-batch training: one batch holds the entire data set.
batch_size = len(labels)
samples = np.array(samples)
labels = np.array(labels)

train_iter = mx.io.NDArrayIter(samples, labels, batch_size)

model = mx.model.FeedForward.create(
    symbol=mlp,
    X=train_iter,
    num_epoch=1000,
    learning_rate=0.1,
    momentum=0.99
)
'''
model = mx.model.FeedForward(
    symbol=mlp,
    num_epoch=1000,
    learning_rate=0.1
    momentum=0.99
)
model.fit(X=train_iter)
'''
# NOTE(review): the alternative construction kept in the string above is
# missing a comma after learning_rate=0.1 and would not parse if revived.
print(model.predict(mx.nd.array([[0.5, 0.5]])))

#

import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D

# Probability surface of class 1 over a grid covering [0, 1] x [0, 1].
X = np.arange(0, 1.05, 0.05)
Y = np.arange(0, 1.05, 0.05)
X, Y = np.meshgrid(X, Y)

grids = mx.nd.array([[X[i][j], Y[i][j]] for i in range(X.shape[0]) for j in range(X.shape[1])])

grid_probs = model.predict(grids)[:, 1].reshape(X.shape)

fig = plt.figure('Sample Surface')
ax = fig.gca(projection='3d')

ax.plot_surface(X, Y, grid_probs, alpha=0.15, color='k', rstride=2, cstride=2, lw=0.5)

# Scatter the training points at their predicted probabilities.
samples0 = samples[labels==0]
samples0_probs = model.predict(samples0)[:, 1]
samples1 = samples[labels==1]
samples1_probs = model.predict(samples1)[:, 1]

ax.scatter(samples0[:, 0], samples0[:, 1], samples0_probs, c='r', marker='o', s=50)
ax.scatter(samples1[:, 0], samples1[:, 1], samples1_probs, c='b', marker='^', s=50)

plt.show()


Matplotlib RGB & OpenCV BGR

Matplotlib RGB & OpenCV BGR

import matplotlib.pyplot as plt
import numpy as np
import cv2

# A 3x3 test image whose pixel triplets are written in RGB order.
img = np.array([
    [[255, 0, 0], [0, 255, 0], [0, 0, 255]],
    [[255, 255, 0], [255, 0, 255], [0, 255, 255]],
    [[255, 255, 255], [128, 128, 128], [0, 0, 0]],
], dtype=np.uint8)

# matplotlib treats the last axis as RGB while OpenCV treats it as BGR,
# so the two saved files come out with red and blue swapped relative to
# each other — that is the point of the demo.
plt.imsave('img_pyplot.png', img)
cv2.imwrite('img_cv2.jpg', img)


Matplotlib 3D Example 2 : Python

3D Example 2

  1. import matplotlib.pyplot as plt
  2. import numpy as np
  3. from mpl_toolkits.mplot3d import Axes3D
  4.  
  5. np.random.seed(42)
  6.  
  7. n_samples = 500
  8. dim = 3
  9.  
  10. samples = np.random.multivariate_normal(
  11.     np.zeros(dim),
  12.     np.eye(dim),
  13.     n_samples
  14. )
  15.  
  16. for i in range(samples.shape[0]) :
  17.     r = np.power(np.random.random(), 1.0 / 3.0)
  18.     samples[i] *= r / np.linalg.norm(samples[i])
  19.  
  20. upper_samples = []
  21. lower_samples = []
  22. for x, y, z in samples:
  23.     if z > 3 * x + 2 * y - 1:
  24.         upper_samples.append((x, y, z))
  25.     else:
  26.         lower_samples.append((x, y, z))
  27.  
  28. fig = plt.figure('3D scatter plot')
  29. ax = fig.add_subplot(111, projection='3d')
  30. uppers = np.array(upper_samples)
  31. lowers = np.array(lower_samples)
  32.  
  33. ax.scatter(uppers[:, 0], uppers[:, 1], uppers[:, 2], c='r', marker='o')
  34. ax.scatter(uppers[:, 0], uppers[:, 1], uppers[:, 2], c='g', marker='^')
  35.  
  36. plt.show()

Matplotlib 3D Example : Python

3D Example

  1. import matplotlib.pyplot as plt
  2. import numpy as np
  3. from mpl_toolkits.mplot3d import Axes3D
  4.  
  5. np.random.seed(42)
  6.  
  7. n_grids = 51
  8. c = n_grids / 2
  9. nf = 2
  10.  
  11. x = np.linspace(0, 1, n_grids)
  12. y = np.linspace(0, 1, n_grids)
  13. X, Y = np.meshgrid(x, y)
  14.  
  15. spectrum = np.zeros((n_grids, n_grids), dtype=np.complex)
  16. noise = [np.complex(x, y) for x, y in np.random.uniform(-1, 1, ((2 * nf + 1) ** 2 / 2, 2))]
  17. noisy_block = np.concatenate((noise, [0j], np.conjugate(noise[:: -1])))
  18. spectrum[c - nf: c + nf + 1, c - nf: c + nf + 1] = noisy_block.reshape((2 * nf + 1, 2 * nf + 1))
  19. Z = np.real(np.fft.ifft2(np.fft.ifftshift(spectrum)))
  20.  
  21. fig = plt.figure('3D surface & wire')
  22. ax = fig.add_subplot(1, 2, 1, projection='3d')
  23. ax.plot_surface(X, Y, Z, alpha=0.7, cmap='jet', rstride=1, cstride=1, lw=0)
  24.  
  25. ax = fig.add_subplot(1, 2, 2, projection='3d')
  26. ax.plot_wireframe(X, Y, Z, rstride=3, cstride=3, lw=0.5)
  27.  
  28. plt.show()

Matplotlib 2D Example : Python

2D Example

import numpy as np
import matplotlib as mpl
import matplotlib.pyplot as plt

# Enlarge tick labels for all figures in this session.
mpl.rcParams['xtick.labelsize'] = 24
mpl.rcParams['ytick.labelsize'] = 24
np.random.seed(42)  # reproducible noise

# Ground-truth curve and noisy observations of it.
x = np.linspace(0, 5, 100)
y = 2 * np.sin(x) + 0.3 * x ** 2
y_data = y + np.random.normal(scale=0.3, size=100)
# Figure 1: the noisy data alone.
plt.figure('data')
plt.plot(x, y_data, '.')

# Figure 2: the underlying model curve.
plt.figure('model')
plt.plot(x, y)

# Figure 3: model curve and data overlaid.
plt.figure('data & model')
plt.plot(x, y, 'k', lw=3)
plt.scatter(x, y_data)

# savefig() stores the current (last created) figure.
plt.savefig('result.png')

plt.show()



CMake & Arm Cross compilation based Windows systems (Visual Studio)

CMakeLists.txt

  1. cmake_minimum_required(VERSION 3.12)
  2. project(network_information)
  3.  
  4. SET(CMAKE_SYSTEM_NAME Linux)
  5.  
  6. add_definitions("-std=c++17")
  7.  
  8. # specify the cross compiler
  9. SET(CMAKE_C_COMPILER   arm-linux-gnueabihf-gcc)
  10. SET(CMAKE_CXX_COMPILER arm-linux-gnueabihf-g++)
  11.  
  12. if(${CMAKE_HOST_SYSTEM_NAME} MATCHES "Windows")
  13.     set(BOOST_ROOT "C:/MinGW/msys/1.0/home/d/boost_1_68_0")
  14. else()
  15.     set(BOOST_ROOT /root/Downloads/boost_1_68_0)
  16. endif()
  17.  
  18. set(Boost_USE_STATIC_LIBS ON)
  19. set(Boost_USE_MULTITHREADED ON)
  20.  
  21. include_directories(sdk/inc)
  22. link_directories("sdk/lib")
  23.  
  24. add_executable(network_information
  25. 		network_information.cpp
  26. 		)
  27. TARGET_LINK_LIBRARIES(network_information "-lpthread" "-ldl" "-lpos" "-lrt" "-lpng" "-liconv" "-lfreetype" "-lz")

GNUlinux_config.cmake

# Toolchain file for cross-compiling to ARM GNU/Linux targets.
set(GNULINUX_PLATFORM ON)
set(CMAKE_SYSTEM_NAME "Linux")
set(CMAKE_SYSTEM_PROCESSOR "arm")

# Target architecture is taken from the NE10_LINUX_TARGET_ARCH
# environment variable; default to 32-bit armv7.
if(NOT DEFINED ENV{NE10_LINUX_TARGET_ARCH})
   set(NE10_LINUX_TARGET_ARCH "armv7")
else()
   set(NE10_LINUX_TARGET_ARCH $ENV{NE10_LINUX_TARGET_ARCH})
endif()

# Select the matching GNU cross toolchain: hard-float ARM for armv7,
# aarch64-linux-gnu for 64-bit targets. ar/ranlib are resolved via
# find_program so they may live anywhere on PATH.
if(NE10_LINUX_TARGET_ARCH STREQUAL "armv7")
   set(CMAKE_C_COMPILER arm-linux-gnueabihf-gcc)
   set(CMAKE_CXX_COMPILER arm-linux-gnueabihf-g++)
   set(CMAKE_ASM_COMPILER arm-linux-gnueabihf-as)
   find_program(CMAKE_AR NAMES "arm-linux-gnueabihf-ar")
   find_program(CMAKE_RANLIB NAMES "arm-linux-gnueabihf-ranlib")
elseif(NE10_LINUX_TARGET_ARCH STREQUAL "aarch64")
   set(CMAKE_C_COMPILER aarch64-linux-gnu-gcc)
   set(CMAKE_CXX_COMPILER aarch64-linux-gnu-g++)
   set(CMAKE_ASM_COMPILER aarch64-linux-gnu-as)
   find_program(CMAKE_AR NAMES "aarch64-linux-gnu-ar")
   find_program(CMAKE_RANLIB NAMES "aarch64-linux-gnu-ranlib")
endif()

# Hide the resolved tool paths from the default cache view.
mark_as_advanced(CMAKE_AR)
mark_as_advanced(CMAKE_RANLIB)

CMakeSettings.json

  1. {
  2.   "configurations": [
  3.     {
  4.       "environments": [
  5.         {
  6.           "MINGW64_ROOT": "C:\\MinGW\\msys\\1.0\\home\\d\\gcc-linaro-7.3.1-2018.05-i686-mingw32_arm-linux-gnueabihf",
  7.           "BIN_ROOT": "${env.MINGW64_ROOT}\\bin",
  8.           "FLAVOR": "x86_64-w64-mingw32",
  9.           "TOOLSET_VERSION": "7.3.0",
  10.           "PATH": "${env.MINGW64_ROOT}\\bin;${env.MINGW64_ROOT}\\..\\usr\\local\\bin;${env.MINGW64_ROOT}\\..\\usr\\bin;${env.MINGW64_ROOT}\\..\\bin;${env.PATH}",
  11.           "INCLUDE": "${env.INCLUDE};${env.MINGW64_ROOT}\\include\\c++\\${env.TOOLSET_VERSION};${env.MINGW64_ROOT}\\include\\c++\\${env.TOOLSET_VERSION}\\tr1;${env.MINGW64_ROOT}\\include\\c++\\${env.TOOLSET_VERSION}\\${env.FLAVOR}",
  12.           "environment": "mingw_64"
  13.         }
  14.       ],
  15.       "name": "Mingw64-Release",
  16.       "generator": "Ninja",
  17.       "configurationType": "Release",
  18.       "inheritEnvironments": [
  19.         "mingw_64"
  20.       ],
  21.       "buildRoot": "${env.USERPROFILE}\\CMakeBuilds\\${workspaceHash}\\build\\${name}",
  22.       "installRoot": "${env.USERPROFILE}\\CMakeBuilds\\${workspaceHash}\\install\\${name}",
  23.       "cmakeCommandArgs": "",
  24.       "buildCommandArgs": "-v",
  25.       "ctestCommandArgs": "",
  26.       "intelliSenseMode": "linux-gcc-x64",
  27.       "variables": [
  28.         {
  29.           "name": "CMAKE_TOOLCHAIN_FILE",
  30.           "value": "GNUlinux_config.cmake"
  31.         },
  32.         {
  33.           "name": "CMAKE_C_COMPILER",
  34.           "value": "${env.BIN_ROOT}\\arm-linux-gnueabihf-gcc.exe"
  35.         },
  36.         {
  37.           "name": "CMAKE_CXX_COMPILER",
  38.           "value": "${env.BIN_ROOT}\\arm-linux-gnueabihf-g++.exe"
  39.         }
  40.       ]
  41.     }
  42.   ]
  43. }