
Set ENV

export HOST=arm-linux-gnueabihf
export CPP="${HOST}-gcc -E"
export STRIP="${HOST}-strip"
export OBJCOPY="${HOST}-objcopy"
export AR="${HOST}-ar"
export RANLIB="${HOST}-ranlib"
export LD="${HOST}-ld"
export OBJDUMP="${HOST}-objdump"
export CC="${HOST}-gcc"
export CXX="${HOST}-g++"
export NM="${HOST}-nm"
export AS="${HOST}-as"
export LD="$CXX"
export LINK="$CXX"
export GYP_DEFINES="armv7=0"

Build

node-gyp --arch arm configure build

Test

root@imx6ul7d:~# node hello.js
world

TIP 0

{
  "targets": [
    {
      "target_name": "addon",
      "sources": [ "test_api.cc" ],
      "include_dirs": [ "sdk/inc" ],
      "link_settings": { "libraries": [ "-L/mnt/hgfs/tmp0511/sdk/lib" ] },
      "libraries": [ "-lpos", "-lpthread", "-lrt", "-lpng", "-liconv", "-lfreetype", "-lz" ]
    }
  ]
}

TIP 1

using v8::FunctionCallbackInfo;
using v8::Isolate;
using v8::Local;
using v8::Object;
using v8::String;
using v8::Value;
using v8::Number;
using v8::Exception;

void js_Exchange(const FunctionCallbackInfo<Value>& args) {
  Isolate* isolate = args.GetIsolate();

  // Slot number (first argument)
  int slot = args[0]->NumberValue();

  // APDU as an ASCII hex string (second argument)
  String::Utf8Value param1(args[1]->ToString());
  char *szApdu = *param1;

  uint8_t apdu[300] = {0};
  MyAHex((uint8_t *)szApdu, apdu, strlen(szApdu));

  uint8_t rpdu[300] = {0};
  uint32_t rpdu_len = 0;
  char szResult[300] = {0};

  int ret = Exchange(slot, apdu, strlen(szApdu) / 2, rpdu, &rpdu_len);
  if(0 == ret) {
    MyHexA(rpdu, (uint8_t *)szResult, rpdu_len);
    args.GetReturnValue().Set(String::NewFromUtf8(isolate, szResult));
  } else {
    args.GetReturnValue().Set(String::NewFromUtf8(isolate, ""));
  }
}
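
MyAHex and MyHexA are helper routines from the vendor SDK and are not shown here; judging from how they are called, they convert an ASCII hex string into raw bytes and back. A rough sketch of that assumed behaviour (not the SDK's actual implementation):

// Requires <cstdio>; these are hypothetical stand-ins for the SDK helpers.

// Assumed behaviour: "9000" -> {0x90, 0x00}
static void MyAHex(const uint8_t *ascii, uint8_t *bin, size_t asciiLen) {
  for (size_t i = 0; i + 1 < asciiLen; i += 2) {
    unsigned int byte = 0;
    sscanf((const char *)ascii + i, "%2x", &byte);
    bin[i / 2] = (uint8_t)byte;
  }
}

// Assumed behaviour: {0x90, 0x00} -> "9000"
static void MyHexA(const uint8_t *bin, uint8_t *ascii, size_t binLen) {
  for (size_t i = 0; i < binLen; i++) {
    sprintf((char *)ascii + 2 * i, "%02X", (unsigned int)bin[i]);
  }
}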

Install node-gyp

npm install -g node-gyp

Prepare SourceCode

[root@localhost test_nodejs]# ls
binding.gyp hello.cc hello.js

hello.cc

#include <node.h>

namespace demo {

using v8::FunctionCallbackInfo;
using v8::Isolate;
using v8::Local;
using v8::Object;
using v8::String;
using v8::Value;

void Method(const FunctionCallbackInfo<Value>& args) {
  Isolate* isolate = args.GetIsolate();
  args.GetReturnValue().Set(String::NewFromUtf8(isolate, "world"));
}

void init(Local<Object> exports) {
  NODE_SET_METHOD(exports, "hello", Method);
}

NODE_MODULE(NODE_GYP_MODULE_NAME, init)

}  // namespace demo

hello.js

// hello.js
const addon = require('./build/Release/addon');

console.log(addon.hello());
// Prints: 'world'

binding.gyp

{
  "targets": [
    {
      "target_name": "addon",
      "sources": [ "hello.cc" ]
    }
  ]
}

Compile & Test

[root@localhost test_nodejs]# node-gyp configure
gyp info it worked if it ends with ok
gyp info using node-gyp@3.6.2
gyp info using node@8.11.1 linux x64
gyp info spawn /usr/bin/python2
gyp info spawn args [ '/usr/local/node-v8.11.1-linux-x64/lib/node_modules/node-gyp/gyp/gyp_main.py',
gyp info spawn args 'binding.gyp',
gyp info spawn args '-f',
gyp info spawn args 'make',
gyp info spawn args '-I',
gyp info spawn args '/mnt/hgfs/linux_tmp/test_nodejs/build/config.gypi',
gyp info spawn args '-I',
gyp info spawn args '/usr/local/node-v8.11.1-linux-x64/lib/node_modules/node-gyp/addon.gypi',
gyp info spawn args '-I',
gyp info spawn args '/root/.node-gyp/8.11.1/include/node/common.gypi',
gyp info spawn args '-Dlibrary=shared_library',
gyp info spawn args '-Dvisibility=default',
gyp info spawn args '-Dnode_root_dir=/root/.node-gyp/8.11.1',
gyp info spawn args '-Dnode_gyp_dir=/usr/local/node-v8.11.1-linux-x64/lib/node_modules/node-gyp',
gyp info spawn args '-Dnode_lib_file=/root/.node-gyp/8.11.1/<(target_arch)/node.lib',
gyp info spawn args '-Dmodule_root_dir=/mnt/hgfs/linux_tmp/test_nodejs',
gyp info spawn args '-Dnode_engine=v8',
gyp info spawn args '--depth=.',
gyp info spawn args '--no-parallel',
gyp info spawn args '--generator-output',
gyp info spawn args 'build',
gyp info spawn args '-Goutput_dir=.' ]
gyp info ok
[root@localhost test_nodejs]#
[root@localhost test_nodejs]# ls
binding.gyp build hello.cc hello.js
[root@localhost test_nodejs]# ls build/
addon.target.mk binding.Makefile config.gypi Makefile

[root@localhost test_nodejs]# node-gyp build
gyp info it worked if it ends with ok
gyp info using node-gyp@3.6.2
gyp info using node@8.11.1 linux x64
gyp info spawn make
gyp info spawn args [ 'BUILDTYPE=Release', '-C', 'build' ]
make: Entering directory `/mnt/hgfs/linux_tmp/test_nodejs/build'
CXX(target) Release/obj.target/addon/hello.o
SOLINK_MODULE(target) Release/obj.target/addon.node
COPY Release/addon.node
make: Leaving directory `/mnt/hgfs/linux_tmp/test_nodejs/build'
gyp info ok

[root@localhost test_nodejs]# ls build/
addon.target.mk binding.Makefile config.gypi Makefile Release
[root@localhost test_nodejs]# ls build/Release/
addon.node obj.target

[root@localhost test_nodejs]# node hello.js
world

Source Code Dir

CMake-GUI Configure

Configure/Generate

Choose "Unix Makefiles" and select "Specify options for cross-compiling"

Choose ARM - LINUX

Operating System: arm-linux
C Compilers: the cross C compiler (arm-linux-gnueabihf-gcc)
C++ Compilers: the cross C++ compiler (arm-linux-gnueabihf-g++)
Target Root: the cross-compiler bin directory

Set CMAKE_INSTALL_PREFIX to the directory where the ARM build should be installed

make & make install

make
make install

You may also need this:

<>

The END

tar jcvf opencv-arm.tar.bz2 opencv-arm/

LucasKanade Tracker

    // Check if there are points to track
    if(!trackingPoints[0].empty())
    {
        // Status vector to indicate whether the flow for the corresponding features has been found
        vector<uchar> statusVector;

        // Error vector to indicate the error for the corresponding feature
        vector<float> errorVector;

        // Check if the previous image is empty
        if(prevGrayImage.empty())
        {
            curGrayImage.copyTo(prevGrayImage);
        }

        // Calculate the optical flow using the Lucas-Kanade algorithm
        calcOpticalFlowPyrLK(prevGrayImage, curGrayImage, trackingPoints[0], trackingPoints[1], statusVector, errorVector, windowSize, 3, terminationCriteria, 0, 0.001);

        int count = 0;

        // Minimum distance between any two tracking points
        int minDist = 7;

        for(int i = 0; i < trackingPoints[1].size(); i++)
        {
            if(pointTrackingFlag)
            {
                // If the new point is within 'minDist' distance from an existing point, it will not be tracked
                if(norm(currentPoint - trackingPoints[1][i]) <= minDist)
                {
                    pointTrackingFlag = false;
                    continue;
                }
            }

            // Check if the status vector is good
            if(!statusVector[i])
                continue;

            trackingPoints[1][count++] = trackingPoints[1][i];

            // Draw a circle for each of the tracking points
            int radius = 8;
            int thickness = 2;
            int lineType = 8;
            circle(image, trackingPoints[1][i], radius, Scalar(0,255,0), thickness, lineType);
        }

        trackingPoints[1].resize(count);
    }

    // Refine the location of the feature points
    if(pointTrackingFlag && trackingPoints[1].size() < maxNumPoints)
    {
        vector<Point2f> tempPoints;
        tempPoints.push_back(currentPoint);

        // Function to refine the location of the corners to subpixel accuracy.
        // Here, 'pixel' refers to the image patch of size 'windowSize' and not the actual image pixel
        cornerSubPix(curGrayImage, tempPoints, windowSize, Size(-1,-1), terminationCriteria);

        trackingPoints[1].push_back(tempPoints[0]);
        pointTrackingFlag = false;
    }
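
The tracking loop above relies on some shared state and a mouse callback that supplies new points to track. A minimal sketch of that setup (the values and the callback name onMouse are assumptions, not part of the original):

// Shared state assumed by the tracking loop above (values are examples)
vector<Point2f> trackingPoints[2];       // [0] = previous points, [1] = current points
Point2f currentPoint;                    // last clicked position
bool pointTrackingFlag = false;          // set when a new point should be added
const int maxNumPoints = 75;             // upper bound on tracked points
Size windowSize(25, 25);                 // search window for calcOpticalFlowPyrLK / cornerSubPix
TermCriteria terminationCriteria(TermCriteria::COUNT | TermCriteria::EPS, 10, 0.02);

// Mouse callback (hypothetical name): remember the click and ask the loop to track it
void onMouse(int event, int x, int y, int, void*)
{
    if(event == EVENT_LBUTTONDOWN)
    {
        currentPoint = Point2f((float)x, (float)y);
        pointTrackingFlag = true;
    }
}

Register the callback with setMouseCallback(windowName, onMouse); at the end of each iteration the usual pattern is to swap trackingPoints[1] into trackingPoints[0] and curGrayImage into prevGrayImage before grabbing the next frame.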

Farneback Tracker

    // Check if the image is valid
    if(prevGray.data)
    {
        // Initialize parameters for the optical flow algorithm
        float pyrScale = 0.5;
        int numLevels = 3;
        int windowSize = 15;
        int numIterations = 3;
        int neighborhoodSize = 5;
        float stdDeviation = 1.2;

        // Calculate the optical flow map using the Farneback algorithm
        calcOpticalFlowFarneback(prevGray, curGray, flowImage, pyrScale, numLevels, windowSize, numIterations, neighborhoodSize, stdDeviation, OPTFLOW_USE_INITIAL_FLOW);

        // Convert to a 3-channel BGR image
        cvtColor(prevGray, flowImageGray, COLOR_GRAY2BGR);

        // Draw the optical flow map
        drawOpticalFlow(flowImage, flowImageGray);

        // Display the output image
        imshow(windowName, flowImageGray);
    }
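
drawOpticalFlow is called above but not shown. A minimal sketch (grid spacing and color are arbitrary choices) that samples the dense flow field and draws one vector per grid cell could be:

void drawOpticalFlow(const Mat& flowImage, Mat& flowImageGray)
{
    int stride = 16;               // grid spacing in pixels (arbitrary)
    Scalar color(0, 255, 0);

    for(int y = 0; y < flowImageGray.rows; y += stride)
    {
        for(int x = 0; x < flowImageGray.cols; x += stride)
        {
            // Flow vector (dx, dy) at this pixel
            const Point2f& fxy = flowImage.at<Point2f>(y, x);

            // Draw a line from the pixel to where the flow points, plus a dot at the start
            line(flowImageGray, Point(x, y), Point(cvRound(x + fxy.x), cvRound(y + fxy.y)), color);
            circle(flowImageGray, Point(x, y), 1, color, -1);
        }
    }
}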

Opening

Mat performOpening(Mat inputImage, int morphologyElement, int morphologySize)
{
    Mat outputImage, tempImage;
    int morphologyType;

    if(morphologyElement == 0)
        morphologyType = MORPH_RECT;
    else if(morphologyElement == 1)
        morphologyType = MORPH_CROSS;
    else if(morphologyElement == 2)
        morphologyType = MORPH_ELLIPSE;

    // Create the structuring element
    Mat element = getStructuringElement(morphologyType, Size(2*morphologySize + 1, 2*morphologySize + 1), Point(morphologySize, morphologySize));

    // Apply morphological opening (erosion followed by dilation) using the structuring element
    erode(inputImage, tempImage, element);
    dilate(tempImage, outputImage, element);

    // Return the output image
    return outputImage;
}

Closing

Mat performClosing(Mat inputImage, int morphologyElement, int morphologySize)
{
    Mat outputImage, tempImage;
    int morphologyType;

    if(morphologyElement == 0)
        morphologyType = MORPH_RECT;
    else if(morphologyElement == 1)
        morphologyType = MORPH_CROSS;
    else if(morphologyElement == 2)
        morphologyType = MORPH_ELLIPSE;

    // Create the structuring element
    Mat element = getStructuringElement(morphologyType, Size(2*morphologySize + 1, 2*morphologySize + 1), Point(morphologySize, morphologySize));

    // Apply morphological closing (dilation followed by erosion) using the structuring element
    dilate(inputImage, tempImage, element);
    erode(tempImage, outputImage, element);

    // Return the output image
    return outputImage;
}

Morphological Gradient

Mat performMorphologicalGradient(Mat inputImage, int morphologyElement, int morphologySize)
{
    Mat tempImage1, tempImage2;
    int morphologyType;

    if(morphologyElement == 0)
        morphologyType = MORPH_RECT;
    else if(morphologyElement == 1)
        morphologyType = MORPH_CROSS;
    else if(morphologyElement == 2)
        morphologyType = MORPH_ELLIPSE;

    // Create the structuring element
    Mat element = getStructuringElement(morphologyType, Size(2*morphologySize + 1, 2*morphologySize + 1), Point(morphologySize, morphologySize));

    // Morphological gradient = dilation minus erosion
    dilate(inputImage, tempImage1, element);
    erode(inputImage, tempImage2, element);

    // Return the output image
    return tempImage1 - tempImage2;
}

TopHat

Mat performTopHat(Mat inputImage, int morphologyElement, int morphologySize)
{
    Mat outputImage;

    // Top hat = original image minus its morphological opening
    outputImage = inputImage - performOpening(inputImage, morphologyElement, morphologySize);

    // Return the output image
    return outputImage;
}

BlackHat

Mat performBlackHat(Mat inputImage, int morphologyElement, int morphologySize)
{
    Mat outputImage;

    // Black hat = morphological closing of the image minus the original image
    outputImage = performClosing(inputImage, morphologyElement, morphologySize) - inputImage;

    // Return the output image
    return outputImage;
}

Erosion

Mat performErosion(Mat inputImage, int erosionElement, int erosionSize)
{
    Mat outputImage;
    int erosionType;

    if(erosionElement == 0)
        erosionType = MORPH_RECT;
    else if(erosionElement == 1)
        erosionType = MORPH_CROSS;
    else if(erosionElement == 2)
        erosionType = MORPH_ELLIPSE;

    // Create the structuring element for erosion
    Mat element = getStructuringElement(erosionType, Size(2*erosionSize + 1, 2*erosionSize + 1), Point(erosionSize, erosionSize));

    // Erode the image using the structuring element
    erode(inputImage, outputImage, element);

    // Return the output image
    return outputImage;
}

Dilation

Mat performDilation(Mat inputImage, int dilationElement, int dilationSize)
{
    Mat outputImage;
    int dilationType;

    if(dilationElement == 0)
        dilationType = MORPH_RECT;
    else if(dilationElement == 1)
        dilationType = MORPH_CROSS;
    else if(dilationElement == 2)
        dilationType = MORPH_ELLIPSE;

    // Create the structuring element for dilation
    Mat element = getStructuringElement(dilationType, Size(2*dilationSize + 1, 2*dilationSize + 1), Point(dilationSize, dilationSize));

    // Dilate the image using the structuring element
    dilate(inputImage, outputImage, element);

    // Return the output image
    return outputImage;
}
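
All the helpers above share the same calling convention: an input image, an element shape selector (0 = rectangle, 1 = cross, 2 = ellipse), and an element size. A short usage sketch that ties them together (the file name and parameter values are just examples):

    // Usage sketch for the morphology helpers above
    Mat inputImage = imread("input.jpg");    // example file name
    int morphologyElement = 2;               // 0 = rectangle, 1 = cross, 2 = ellipse
    int morphologySize = 5;                  // kernel is (2*size + 1) x (2*size + 1)

    imshow("Opening", performOpening(inputImage, morphologyElement, morphologySize));
    imshow("Closing", performClosing(inputImage, morphologyElement, morphologySize));
    imshow("Gradient", performMorphologicalGradient(inputImage, morphologyElement, morphologySize));
    imshow("Top Hat", performTopHat(inputImage, morphologyElement, morphologySize));
    imshow("Black Hat", performBlackHat(inputImage, morphologyElement, morphologySize));
    imshow("Erosion", performErosion(inputImage, morphologyElement, morphologySize));
    imshow("Dilation", performDilation(inputImage, morphologyElement, morphologySize));
    waitKey(0);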

Background Subtraction

// Create MOG Background Subtractor object
pMOG= cv::bgsegm::createBackgroundSubtractorMOG();//new BackgroundSubtractorMOG();

// Create MOG2 Background Subtractor object
pMOG2 = createBackgroundSubtractorMOG2(20, 16, true);//new BackgroundSubtractorMOG2();

    // Capture the current frame
    cap >> frame;
    
    // Resize the frame
    resize(frame, frame, Size(), scalingFactor, scalingFactor, INTER_AREA);
    
    // Update the MOG background model based on the current frame
    pMOG->apply(frame, fgMaskMOG);
    
    // Update the MOG2 background model based on the current frame
    pMOG2->apply(frame, fgMaskMOG2);
    
    // Show the current frame
    //imshow("Frame", frame);
    
    // Show the MOG foreground mask
    //imshow("FG Mask MOG", fgMaskMOG);

    // Show the MOG2 foreground mask
    imshow("FG Mask MOG 2", fgMaskMOG2);

Frame Differencing

Mat frameDiff(Mat prevFrame, Mat curFrame, Mat nextFrame)
{
    Mat diffFrames1, diffFrames2, output;

    // Compute the absolute difference between the current frame and the next frame
    absdiff(nextFrame, curFrame, diffFrames1);

    // Compute the absolute difference between the current frame and the previous frame
    absdiff(curFrame, prevFrame, diffFrames2);

    // Bitwise "AND" operation between the above two diff images
    bitwise_and(diffFrames1, diffFrames2, output);

    return output;
}
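
frameDiff needs three consecutive grayscale frames. A minimal driver loop (the camera index and key handling are assumptions) could look like this:

    // Usage sketch: keep a sliding window of three grayscale frames
    VideoCapture cap(0);
    Mat frame, prevFrame, curFrame, nextFrame;

    // Prime the three-frame window
    cap >> frame; cvtColor(frame, prevFrame, CV_BGR2GRAY);
    cap >> frame; cvtColor(frame, curFrame, CV_BGR2GRAY);
    cap >> frame; cvtColor(frame, nextFrame, CV_BGR2GRAY);

    while(true)
    {
        // Motion is the AND of the two consecutive differences
        imshow("Motion", frameDiff(prevFrame, curFrame, nextFrame));

        // Slide the window forward by one frame
        prevFrame = curFrame.clone();
        curFrame = nextFrame.clone();
        cap >> frame;
        cvtColor(frame, nextFrame, CV_BGR2GRAY);

        // Quit on Esc
        if(waitKey(30) == 27)
            break;
    }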

Tracking Your Ears (Two Ears, \(^o^)/~)

    // Capture the current frame
    cap >> frame;

    // Resize the frame
    resize(frame, frame, Size(), scalingFactor, scalingFactor, INTER_AREA);

    // Convert to grayscale
    cvtColor(frame, frameGray, CV_BGR2GRAY);

    // Equalize the histogram
    equalizeHist(frameGray, frameGray);

    // Detect the left ear
    leftEarCascade.detectMultiScale(frameGray, leftEars, 1.1, 2, 0|CV_HAAR_SCALE_IMAGE, Size(30, 30));

    // Detect the right ear
    rightEarCascade.detectMultiScale(frameGray, rightEars, 1.1, 2, 0|CV_HAAR_SCALE_IMAGE, Size(30, 30));

    // Draw a green rectangle around the left ear
    for(int i = 0; i < leftEars.size(); i++)
    {
        Rect leftEarRect(leftEars[i].x, leftEars[i].y, leftEars[i].width, leftEars[i].height);
        rectangle(frame, leftEarRect, Scalar(0,255,0), 4);
    }

    // Draw a green rectangle around the right ear
    for(int i = 0; i < rightEars.size(); i++)
    {
        Rect rightEarRect(rightEars[i].x, rightEars[i].y, rightEars[i].width, rightEars[i].height);
        rectangle(frame, rightEarRect, Scalar(0,255,0), 4);
    }
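
The detection code above assumes the two cascade classifiers have already been loaded and the result vectors declared. A minimal setup sketch (the XML file names and the scaling factor are examples, not part of the original):

    // Setup assumed by the detection loop above (file names are examples)
    CascadeClassifier leftEarCascade, rightEarCascade;
    vector<Rect> leftEars, rightEars;
    Mat frame, frameGray;
    float scalingFactor = 0.75;

    if(!leftEarCascade.load("haarcascade_mcs_leftear.xml"))
        cerr << "Error loading left ear cascade file" << endl;

    if(!rightEarCascade.load("haarcascade_mcs_rightear.xml"))
        cerr << "Error loading right ear cascade file" << endl;

    VideoCapture cap(0);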

Detect Faces

    // Capture the current frame
    cap >> frame;

    // Resize the frame
    resize(frame, frame, Size(), scalingFactor, scalingFactor, INTER_AREA);

    // Convert to grayscale
    cvtColor(frame, frameGray, CV_BGR2GRAY);

    // Equalize the histogram
    equalizeHist(frameGray, frameGray);

    // Detect faces
    faceCascade.detectMultiScale(frameGray, faces, 1.1, 2, 0|CV_HAAR_SCALE_IMAGE, Size(30, 30));

Add Mask Image

    // Draw a green rectangle around the face
    for(int i = 0; i < faces.size(); i++)
    {
        //Rect faceRect(faces[i].x, faces[i].y, faces[i].width, faces[i].height);

        // Custom parameters to make the mask fit your face. You may have to play around with them to make sure it works.
        int x = faces[i].x - int(0.1*faces[i].width);
        int y = faces[i].y - int(0.1*faces[i].height); // 0.0*faces[i].height
        int w = int(1.1 * faces[i].width);
        int h = int(1.3 * faces[i].height);

        // Extract the region of interest (ROI) covering the face
        frameROI = frame(Rect(x,y,w,h));

        // Resize the face mask image based on the dimensions of the above ROI
        resize(faceMask, faceMaskSmall, Size(w,h));

        // Convert the above image to grayscale
        cvtColor(faceMaskSmall, grayMaskSmall, CV_BGR2GRAY);

        // Threshold the above image to isolate the pixels associated only with the face mask
        threshold(grayMaskSmall, grayMaskSmallThresh, 230, 255, CV_THRESH_BINARY_INV);

        // Create a mask by inverting the above image (because we don't want the background to affect the overlay)
        bitwise_not(grayMaskSmallThresh, grayMaskSmallThreshInv);

        // Use the bitwise "AND" operator to extract the precise boundary of the face mask
        bitwise_and(faceMaskSmall, faceMaskSmall, maskedFace, grayMaskSmallThresh);

        // Use the bitwise "AND" operator to overlay the face mask
        bitwise_and(frameROI, frameROI, maskedFrame, grayMaskSmallThreshInv);

        // Add the above masked images and place the result in the original frame ROI to create the final image
        add(maskedFace, maskedFrame, frame(Rect(x,y,w,h)));
    }
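
The overlay loop relies on a face mask image and a number of scratch Mats that are declared outside the snippet. A minimal sketch of that setup (the file names are examples, not part of the original):

    // Setup assumed by the overlay loop above (file names are examples)
    CascadeClassifier faceCascade;
    vector<Rect> faces;
    Mat frame, frameGray, frameROI;
    Mat faceMask, faceMaskSmall, grayMaskSmall, grayMaskSmallThresh, grayMaskSmallThreshInv;
    Mat maskedFace, maskedFrame;

    if(!faceCascade.load("haarcascade_frontalface_alt.xml"))
        cerr << "Error loading face cascade file" << endl;

    // The overlay works best with a mask image on a near-white background,
    // which is what the 230/255 threshold above isolates
    faceMask = imread("mask.jpg");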