update_fasta

This commit is contained in: E41202233_FASTA
BIQUL HOIROT 2024-05-31 16:01:26 +07:00
parent a54e7c023d
commit d7e8a2c95d
213 changed files with 88570 additions and 59 deletions

76
.gitignore vendored
@@ -1,61 +1,19 @@
# ---> Android
# Gradle files
.gradle/
build/
# Local configuration file (sdk path, etc)
*.iml
.gradle
/local.properties
/.idea/caches
/.idea/libraries
/.idea/modules.xml
/.idea/workspace.xml
/.idea/navEditor.xml
/.idea/assetWizardSettings.xml
.DS_Store
/build
/captures
.externalNativeBuild
.cxx
local.properties
# Log/OS Files
*.log
# Android Studio generated files and folders
captures/
.externalNativeBuild/
.cxx/
*.apk
output.json
# IntelliJ
*.iml
.idea/
misc.xml
deploymentTargetDropDown.xml
render.experimental.xml
# Keystore files
*.jks
*.keystore
# Google Services (e.g. APIs or Firebase)
google-services.json
# Android Profiling
*.hprof
# ---> Java
# Compiled class file
*.class
# Log file
*.log
# BlueJ files
*.ctxt
# Mobile Tools for Java (J2ME)
.mtj.tmp/
# Package Files #
*.jar
*.war
*.nar
*.ear
*.zip
*.tar.gz
*.rar
# virtual machine crash logs, see http://www.java.com/en/download/help/error_hotspot.xml
hs_err_pid*
replay_pid*
# Project exclude paths
/openCVLibrary3413/build/
/openCVLibrary3413/build/intermediates/javac/debug/classes/

3
.idea/.gitignore vendored Normal file
@@ -0,0 +1,3 @@
# Default ignored files
/shelf/
/workspace.xml

1
.idea/.name Normal file
@@ -0,0 +1 @@
ETech

6
.idea/compiler.xml Normal file
@@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="CompilerConfiguration">
<bytecodeTargetLevel target="1.8" />
</component>
</project>

21
.idea/gradle.xml Normal file
@@ -0,0 +1,21 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="GradleMigrationSettings" migrationVersion="1" />
<component name="GradleSettings">
<option name="linkedExternalProjectsSettings">
<GradleProjectSettings>
<option name="testRunner" value="GRADLE" />
<option name="distributionType" value="DEFAULT_WRAPPED" />
<option name="externalProjectPath" value="$PROJECT_DIR$" />
<option name="gradleJvm" value="1.8" />
<option name="modules">
<set>
<option value="$PROJECT_DIR$" />
<option value="$PROJECT_DIR$/app" />
<option value="$PROJECT_DIR$/openCVLibrary3413" />
</set>
</option>
</GradleProjectSettings>
</option>
</component>
</project>

25
.idea/jarRepositories.xml Normal file
@@ -0,0 +1,25 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="RemoteRepositoriesConfiguration">
<remote-repository>
<option name="id" value="central" />
<option name="name" value="Maven Central repository" />
<option name="url" value="https://repo1.maven.org/maven2" />
</remote-repository>
<remote-repository>
<option name="id" value="jboss.community" />
<option name="name" value="JBoss Community repository" />
<option name="url" value="https://repository.jboss.org/nexus/content/repositories/public/" />
</remote-repository>
<remote-repository>
<option name="id" value="BintrayJCenter" />
<option name="name" value="BintrayJCenter" />
<option name="url" value="https://jcenter.bintray.com/" />
</remote-repository>
<remote-repository>
<option name="id" value="Google" />
<option name="name" value="Google" />
<option name="url" value="https://dl.google.com/dl/android/maven2/" />
</remote-repository>
</component>
</project>

8
.idea/misc.xml Normal file
@@ -0,0 +1,8 @@
<project version="4">
<component name="ProjectRootManager" version="2" languageLevel="JDK_1_8" default="true" project-jdk-name="corretto-1.8" project-jdk-type="JavaSDK">
<output url="file://$PROJECT_DIR$/build/classes" />
</component>
<component name="ProjectType">
<option name="id" value="Android" />
</component>
</project>

33
.vscode/launch.json vendored Normal file
@@ -0,0 +1,33 @@
{
// Use IntelliSense to learn about possible attributes.
// Hover to view descriptions of existing attributes.
// For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
"version": "0.2.0",
"configurations": [
{
"type": "android",
"request": "attach",
"name": "Face",
"appSrcRoot": "${workspaceRoot}/app/src/main",
"adbPort": 5037,
"processId": "${command:PickAndroidProcess}"
},
{
"type": "android",
"request": "launch",
"name": "Face",
"appSrcRoot": "${workspaceRoot}/app/src/main",
"apkFile": "${workspaceRoot}/app/build/outputs/apk/debug/app-debug.apk",
"adbPort": 5037
},
{
"type": "java",
"name": "Face",
"request": "launch",
"mainClass": "${file}"
}
]
}

11
.vscode/sftp.json vendored Normal file
@@ -0,0 +1,11 @@
{
"name": "My Server",
"host": "localhost",
"protocol": "sftp",
"port": 22,
"username": "username",
"remotePath": "/",
"uploadOnSave": false,
"useTempFile": false,
"openSsh": false
}

1
app/.gitignore vendored Normal file
@@ -0,0 +1 @@
/build

54
app/build.gradle Normal file
@@ -0,0 +1,54 @@
plugins {
id 'com.android.application'
}
android {
compileSdkVersion 30
buildToolsVersion "30.0.2"
defaultConfig {
applicationId 'com.example.ETech'
minSdkVersion 23
targetSdkVersion 30
versionCode 1
versionName "1.0"
testInstrumentationRunner "androidx.test.runner.AndroidJUnitRunner"
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android-optimize.txt'), 'proguard-rules.pro'
}
}
compileOptions {
sourceCompatibility JavaVersion.VERSION_1_8
targetCompatibility JavaVersion.VERSION_1_8
}
sourceSets {
main {
jni {
srcDirs 'src/main/jni', 'src/main/jnilibs' // forward slashes work on every OS Gradle runs on
}
}
}
}
dependencies {
// TensorFlow Lite dependencies, including GPU delegate support
// (re-sync Gradle after changing these)
implementation 'org.tensorflow:tensorflow-lite-metadata:0.1.0-rc1'
implementation 'org.tensorflow:tensorflow-lite-gpu:2.2.0'
implementation 'org.tensorflow:tensorflow-lite-support:0.1.0'
implementation 'org.tensorflow:tensorflow-lite-task-vision:0.1.0'
implementation 'org.tensorflow:tensorflow-lite-task-text:0.1.0'
implementation 'androidx.appcompat:appcompat:1.2.0'
implementation 'com.google.android.material:material:1.3.0'
implementation 'androidx.constraintlayout:constraintlayout:2.0.4'
implementation project(path: ':openCVLibrary3413')
testImplementation 'junit:junit:4.+'
androidTestImplementation 'androidx.test.ext:junit:1.1.2'
androidTestImplementation 'androidx.test.espresso:espresso-core:3.3.0'
}

21
app/proguard-rules.pro vendored Normal file
@@ -0,0 +1,21 @@
# Add project specific ProGuard rules here.
# You can control the set of applied configuration files using the
# proguardFiles setting in build.gradle.
#
# For more details, see
# http://developer.android.com/guide/developing/tools/proguard.html
# If your project uses WebView with JS, uncomment the following
# and specify the fully qualified class name to the JavaScript interface
# class:
#-keepclassmembers class fqcn.of.javascript.interface.for.webview {
# public *;
#}
# Uncomment this to preserve the line number information for
# debugging stack traces.
#-keepattributes SourceFile,LineNumberTable
# If you keep the line number information, uncomment this to
# hide the original source file name.
#-renamesourcefileattribute SourceFile

26
app/src/androidTest/java/com/example/ETech/ExampleInstrumentedTest.java Normal file

@@ -0,0 +1,26 @@
package com.example.ETech;
import android.content.Context;
import androidx.test.platform.app.InstrumentationRegistry;
import androidx.test.ext.junit.runners.AndroidJUnit4;
import org.junit.Test;
import org.junit.runner.RunWith;
import static org.junit.Assert.*;
/**
* Instrumented test, which will execute on an Android device.
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
@RunWith(AndroidJUnit4.class)
public class ExampleInstrumentedTest {
@Test
public void useAppContext() {
// Context of the app under test.
Context appContext = InstrumentationRegistry.getInstrumentation().getTargetContext();
assertEquals("com.example.imagepro", appContext.getPackageName());
}
}

50
app/src/main/AndroidManifest.xml Normal file

@@ -0,0 +1,50 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.example.ETech">
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.INTERNET" />
<uses-feature
android:name="android.hardware.camera"
android:required="false" />
<uses-feature
android:name="android.hardware.camera.autofocus"
android:required="false" />
<uses-feature
android:name="android.hardware.camera.front"
android:required="false" />
<uses-feature
android:name="android.hardware.camera.front.autofocus"
android:required="false" />
<application
android:allowBackup="true"
android:icon="@drawable/gass"
android:label="E-Tech"
android:roundIcon="@mipmap/ic_launcher_round"
android:supportsRtl="true"
android:theme="@style/Theme.ImagePro"
android:usesCleartextTraffic="true">
<activity
android:name="com.example.ETech.MainActivity"
android:exported="false" />
<activity
android:name="com.example.ETech.cara"
android:exported="false" />
<activity
android:name="com.example.ETech.about"
android:exported="false" />
<activity android:name="com.example.ETech.CameraActivity" />
<activity android:name="com.example.ETech.splash">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

170
app/src/main/java/com/example/ETech/CameraActivity.java Normal file

@@ -0,0 +1,170 @@
package com.example.ETech;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import android.Manifest;
import android.app.Activity;
import android.content.pm.PackageManager;
import android.os.Bundle;
import android.util.Log;
import android.view.SurfaceView;
import android.view.Window;
import android.view.WindowManager;
import android.widget.Button;
import org.opencv.android.BaseLoaderCallback;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.LoaderCallbackInterface;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import java.io.IOException;
public class CameraActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2{
private static final String TAG="MainActivity";
private Mat mRgba;
private Mat mGray;
private CameraBridgeViewBase mOpenCvCameraView;
private wajah facialExpressionRecognition;
private Button btnFliip;
private int cameraIndex = 0;
private BaseLoaderCallback mLoaderCallback =new BaseLoaderCallback(this) {
@Override
public void onManagerConnected(int status) {
switch (status){
case LoaderCallbackInterface.SUCCESS: {
Log.i(TAG,"OpenCV is loaded");
mOpenCvCameraView.enableView();
// break so a successful load does not fall through into the default branch
break;
}
default: {
super.onManagerConnected(status);
break;
}
}
}
};
public CameraActivity(){
Log.i(TAG,"Instantiated new "+this.getClass());
}
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
requestWindowFeature(Window.FEATURE_NO_TITLE);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
int MY_PERMISSIONS_REQUEST_CAMERA=0;
// if camera permission is not given it will ask for it on device
if (ContextCompat.checkSelfPermission(CameraActivity.this, Manifest.permission.CAMERA)
== PackageManager.PERMISSION_DENIED){
ActivityCompat.requestPermissions(CameraActivity.this, new String[] {Manifest.permission.CAMERA}, MY_PERMISSIONS_REQUEST_CAMERA);
}
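// note: onRequestPermissionsResult is not overridden here, so a denied request is not
// handled and the preview stays blank until the app is relaunched with permission granted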
setContentView(R.layout.activity_camera);
// btnFliip = findViewById(R.id.btn_flip);
// btnFliip.setOnClickListener(new View.OnClickListener() {
// @Override
// public void onClick(View view) {
// flipcamera();
// }
// });
mOpenCvCameraView=(CameraBridgeViewBase) findViewById(R.id.frame_Surface);
mOpenCvCameraView.setVisibility(SurfaceView.VISIBLE);
mOpenCvCameraView.setCameraIndex(cameraIndex);
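// camera index 0 usually selects the back camera; 1 would select the front camera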
mOpenCvCameraView.setCvCameraViewListener(this);
mOpenCvCameraView.enableFpsMeter();
// cascade classifier and model
try{
// input size of model is 48
int inputSize=48;
facialExpressionRecognition=new wajah(getAssets(),CameraActivity.this,
"model300.tflite",inputSize);
}
catch (IOException e){
e.printStackTrace();
}
}
// for flip camera
// private void flipcamera() {
// if (cameraIndex == 1){
// cameraIndex = 0;
// mOpenCvCameraView.disableView();
// mOpenCvCameraView.setCameraIndex(cameraIndex);
// Core.flip(mRgba,mRgba,1);
// mOpenCvCameraView.enableView();
//
//
// }else {
// cameraIndex = 1;
// mOpenCvCameraView.disableView();
// mOpenCvCameraView.setCameraIndex(cameraIndex);
// mOpenCvCameraView.enableView();
// }
// }
@Override
protected void onResume() {
super.onResume();
if (OpenCVLoader.initDebug()){
//if load success
Log.d(TAG,"Opencv initialization is done");
mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
}
else{
//if not loaded
Log.d(TAG,"Opencv is not loaded. try again");
OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0,this,mLoaderCallback);
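// note: the async path above loads OpenCV through the separate OpenCV Manager app, which
// must be installed on the device; 3_4_0 is the nearest version constant to the bundled 3.4.13 library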
}
}
@Override
protected void onPause() {
super.onPause();
if (mOpenCvCameraView !=null){
mOpenCvCameraView.disableView();
}
}
@Override
public void onDestroy(){
super.onDestroy();
if(mOpenCvCameraView !=null){
mOpenCvCameraView.disableView();
}
}
public void onCameraViewStarted(int width ,int height){
mRgba=new Mat(height,width, CvType.CV_8UC4);
mGray =new Mat(height,width,CvType.CV_8UC1);
}
public void onCameraViewStopped(){
mRgba.release();
mGray.release(); // release both Mats allocated in onCameraViewStarted
}
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame){
mRgba=inputFrame.rgba();
mGray=inputFrame.gray();
// run recognition on the RGBA frame and draw the results onto it
mRgba=facialExpressionRecognition.recognizeImage(mRgba);
return mRgba;
}
}

64
app/src/main/java/com/example/ETech/MainActivity.java Normal file

@@ -0,0 +1,64 @@
package com.example.ETech;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.util.Log;
import android.view.View;
import android.webkit.WebView;
import android.widget.Button;
import org.opencv.android.OpenCVLoader;
public class MainActivity extends AppCompatActivity {
static {
if(OpenCVLoader.initDebug()){
Log.d("MainActivity: ","Opencv is loaded");
}
else {
Log.d("MainActivity: ","Opencv failed to load");
}
}
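// this static initializer runs once when the class is loaded, so OpenCV's native
// libraries are available before any activity lifecycle callback runs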
private Button camera_button, Babout, Batur;
private WebView webView;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_main);
Babout = findViewById(R.id.about);
Batur = findViewById(R.id.tutorial);
// webView = findViewById(R.id.webView);
camera_button=findViewById(R.id.camera_button);
camera_button.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(MainActivity.this,CameraActivity.class).addFlags(Intent.FLAG_ACTIVITY_CLEAR_TASK | Intent.FLAG_ACTIVITY_CLEAR_TOP));
}
});
// open the About page
Babout.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(MainActivity.this, about.class);
startActivity(intent);
}
});
Batur.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
Intent intent = new Intent(MainActivity.this, cara.class);
startActivity(intent);
}
});
// webView.setWebViewClient(new WebViewClient());
// webView.loadUrl("http://www.google.com");
}
}

25
app/src/main/java/com/example/ETech/about.java Normal file

@@ -0,0 +1,25 @@
package com.example.ETech;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
public class about extends AppCompatActivity {
private WebView AboutWeb;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_about);
AboutWeb = findViewById(R.id.aboutweb);
AboutWeb.setWebViewClient(new WebViewClient());
// enable JavaScript before loading so the page renders correctly
WebSettings webSettings = AboutWeb.getSettings();
webSettings.setJavaScriptEnabled(true);
AboutWeb.loadUrl("http://www.bersyukur.cloud/autis/about/index.html");
}
}

25
app/src/main/java/com/example/ETech/cara.java Normal file

@@ -0,0 +1,25 @@
package com.example.ETech;
import androidx.appcompat.app.AppCompatActivity;
import android.os.Bundle;
import android.webkit.WebSettings;
import android.webkit.WebView;
import android.webkit.WebViewClient;
public class cara extends AppCompatActivity {
private WebView CaraWeb;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_cara);
CaraWeb = findViewById(R.id.caraweb);
CaraWeb.setWebViewClient(new WebViewClient());
// enable JavaScript before loading so the page renders correctly
WebSettings webSettings = CaraWeb.getSettings();
webSettings.setJavaScriptEnabled(true);
CaraWeb.loadUrl("https://www.bersyukur.cloud/autis/cara_penggunaan/index.html");
}
}

27
app/src/main/java/com/example/ETech/splash.java Normal file

@@ -0,0 +1,27 @@
package com.example.ETech;
import androidx.appcompat.app.AppCompatActivity;
import android.content.Intent;
import android.os.Bundle;
import android.os.Handler;
public class splash extends AppCompatActivity {
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_splash);
new Handler().postDelayed(new Runnable() {
@Override
public void run() {
Intent intent = new Intent(splash.this, MainActivity.class);
startActivity(intent);
finish();
}
}, 5000); // show the splash screen for 5 seconds before opening MainActivity
}
}

244
app/src/main/java/com/example/ETech/wajah.java Normal file

@@ -0,0 +1,244 @@
package com.example.ETech;
import android.content.Context;
import android.content.res.AssetFileDescriptor;
import android.content.res.AssetManager;
import android.graphics.Bitmap;
import android.util.Log;
import org.opencv.android.Utils;
import org.opencv.core.Core;
import org.opencv.core.Mat;
import org.opencv.core.MatOfRect;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
import org.opencv.objdetect.CascadeClassifier;
import org.tensorflow.lite.Interpreter;
import org.tensorflow.lite.gpu.GpuDelegate;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.MappedByteBuffer;
import java.nio.channels.FileChannel;
public class wajah {
private Interpreter interpreter;
private int INPUT_SIZE;
private int height=0;
private int width=0;
private GpuDelegate gpuDelegate=null;
private CascadeClassifier cascadeClassifier;
wajah(AssetManager assetManager, Context context, String modelPath, int inputSize) throws IOException {
INPUT_SIZE=inputSize;
Interpreter.Options options=new Interpreter.Options();
gpuDelegate=new GpuDelegate();
options.addDelegate(gpuDelegate);
options.setNumThreads(4); // set this according to your phone
interpreter=new Interpreter(loadModelFile(assetManager,modelPath),options);
// model is loaded
Log.d("facial_Expression","Model is loaded");
try {
InputStream is=context.getResources().openRawResource(R.raw.haarcascade_frontalface_alt);
File cascadeDir=context.getDir("cascade",Context.MODE_PRIVATE);
File mCascadeFile=new File(cascadeDir,"haarcascade_frontalface_alt");
FileOutputStream os=new FileOutputStream(mCascadeFile);
byte[] buffer=new byte[4096];
int byteRead;
// read() returns -1 when there is no more data
while ((byteRead=is.read(buffer)) !=-1){
// writing on mCascade file
os.write(buffer,0,byteRead);
}
// close input and output stream
is.close();
os.close();
cascadeClassifier=new CascadeClassifier(mCascadeFile.getAbsolutePath());
Log.d("facial_Expression","Classifier is loaded");
}
catch (IOException e){
e.printStackTrace();
}
}
public Mat recognizeImage(Mat mat_image){
// rotate it by 90 degree for proper prediction
Core.flip(mat_image.t(),mat_image,1);// rotate mat_image by 90 degree
Mat grayscaleImage=new Mat();
Imgproc.cvtColor(mat_image,grayscaleImage,Imgproc.COLOR_RGBA2GRAY);
// set height and width
height=grayscaleImage.height();
width=grayscaleImage.width();
// minimum height of face in original image
int absoluteFaceSize=(int)(height*0.1);
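// i.e. faces smaller than 10% of the frame height are ignored by the detector below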
MatOfRect faces=new MatOfRect();
// check if cascadeClassifier is loaded or not
if(cascadeClassifier !=null){
// detect faces in the grayscale frame
cascadeClassifier.detectMultiScale(grayscaleImage,faces,1.1,2,2,
new Size(absoluteFaceSize,absoluteFaceSize),new Size());
// minimum size
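// arguments: 1.1 = pyramid scale step, 2 = minNeighbors, 2 = legacy flags (ignored by
// modern OpenCV), and the two Size values bound the smallest and largest face returned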
}
// convert it to array
Rect[] faceArray=faces.toArray();
// loop through each face
for (int i=0;i<faceArray.length;i++){
// draw a rectangle: input/output image, top-left point, bottom-right point, color (R,G,B,alpha), thickness
Imgproc.rectangle(mat_image,faceArray[i].tl(),faceArray[i].br(),new Scalar(0,255,0,255),2);
// build the face ROI from the detection's top-left corner and its width and height
Rect roi=new Rect((int)faceArray[i].tl().x,(int)faceArray[i].tl().y,
((int)faceArray[i].br().x)-(int)(faceArray[i].tl().x),
((int)faceArray[i].br().y)-(int)(faceArray[i].tl().y));
// crop the detected face region out of the full frame
Mat cropped_rgba=new Mat(mat_image,roi);
// now convert cropped_rgba to bitmap
Bitmap bitmap=null;
bitmap=Bitmap.createBitmap(cropped_rgba.cols(),cropped_rgba.rows(),Bitmap.Config.ARGB_8888);
Utils.matToBitmap(cropped_rgba,bitmap);
// resize bitmap to (48,48)
Bitmap scaledBitmap=Bitmap.createScaledBitmap(bitmap,48,48,false);
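// note: the hard-coded 48 matches the inputSize passed from CameraActivity; reusing
// INPUT_SIZE here instead of a literal would keep the two in sync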
ByteBuffer byteBuffer=convertBitmapToByteBuffer(scaledBitmap);
float[][] emotion=new float[1][1];
interpreter.run(byteBuffer,emotion);
// read the single regression output
float emotion_v=emotion[0][0];
Log.d("facial_expression","Output: "+ emotion_v);
// map the numeric output to a text label
String emotion_s=get_emotion_text(emotion_v);
// draw the emotion label on the original frame (mat_image)
// arguments: image, text, origin point, font face, font scale, color, thickness
Imgproc.putText(mat_image,emotion_s,
new Point((int)faceArray[i].tl().x+10,(int)faceArray[i].tl().y+20),
1,1.5,new Scalar(0,0,255,150),2);
}
// rotate back so the frame is returned in portrait orientation
Core.flip(mat_image.t(),mat_image,0);
return mat_image;
}
private String get_emotion_text(float emotion_v) {
String val="";
// map the single regression output to a label;
// the range boundaries can be tuned to get better results
if(emotion_v>=0 && emotion_v<0.5){
val="terkejut"; // surprised
}
else if(emotion_v>=0.5 && emotion_v<1.5){
val="takut"; // afraid
}
else if(emotion_v>=1.5 && emotion_v<2.5){
val="marah"; // angry
}
else if(emotion_v>=2.5 && emotion_v<3.5){
val="netral"; // neutral
}
else if(emotion_v>=3.5 && emotion_v<4.5){
val="sedih"; // sad
}
else if(emotion_v>=4.5 && emotion_v<5.5){
val="sedih"; // sad (same label as the previous range in the original source)
}
else {
val="bahagia"; // happy
}
return val;
}
private ByteBuffer convertBitmapToByteBuffer(Bitmap scaledBitmap) {
ByteBuffer byteBuffer;
int size_image=INPUT_SIZE;//48
// 4 bytes per float * 1 image * height * width * 3 colour channels (RGB)
byteBuffer=ByteBuffer.allocateDirect(4*1*size_image*size_image*3);
byteBuffer.order(ByteOrder.nativeOrder());
int[] intValues=new int[size_image*size_image];
scaledBitmap.getPixels(intValues,0,scaledBitmap.getWidth(),0,0,scaledBitmap.getWidth(),scaledBitmap.getHeight());
int pixel=0;
for(int i =0;i<size_image;++i){
for(int j=0;j<size_image;++j){
final int val=intValues[pixel++];
// read each pixel and normalize its R, G and B channels to [0,1]
byteBuffer.putFloat(((val>>16)&0xFF)/255.0f);
byteBuffer.putFloat(((val>>8)&0xFF)/255.0f);
byteBuffer.putFloat((val&0xFF)/255.0f);
}
}
return byteBuffer;
}
private MappedByteBuffer loadModelFile(AssetManager assetManager, String modelPath) throws IOException{
// description of file
AssetFileDescriptor assetFileDescriptor=assetManager.openFd(modelPath);
// input stream to read the model file
FileInputStream inputStream=new FileInputStream(assetFileDescriptor.getFileDescriptor());
FileChannel fileChannel=inputStream.getChannel();
long startOffset=assetFileDescriptor.getStartOffset();
long declaredLength=assetFileDescriptor.getDeclaredLength();
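// memory-map only the model's byte range inside the APK so TFLite can read the weights without copying them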
return fileChannel.map(FileChannel.MapMode.READ_ONLY,startOffset,declaredLength);
}
}

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

Binary file not shown.

30
app/src/main/res/drawable-v24/ic_launcher_foreground.xml Normal file

@@ -0,0 +1,30 @@
<vector xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:aapt="http://schemas.android.com/aapt"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path android:pathData="M31,63.928c0,0 6.4,-11 12.1,-13.1c7.2,-2.6 26,-1.4 26,-1.4l38.1,38.1L107,108.928l-32,-1L31,63.928z">
<aapt:attr name="android:fillColor">
<gradient
android:endX="85.84757"
android:endY="92.4963"
android:startX="42.9492"
android:startY="49.59793"
android:type="linear">
<item
android:color="#44000000"
android:offset="0.0" />
<item
android:color="#00000000"
android:offset="1.0" />
</gradient>
</aapt:attr>
</path>
<path
android:fillColor="#FFFFFF"
android:fillType="nonZero"
android:pathData="M65.3,45.828l3.8,-6.6c0.2,-0.4 0.1,-0.9 -0.3,-1.1c-0.4,-0.2 -0.9,-0.1 -1.1,0.3l-3.9,6.7c-6.3,-2.8 -13.4,-2.8 -19.7,0l-3.9,-6.7c-0.2,-0.4 -0.7,-0.5 -1.1,-0.3C38.8,38.328 38.7,38.828 38.9,39.228l3.8,6.6C36.2,49.428 31.7,56.028 31,63.928h46C76.3,56.028 71.8,49.428 65.3,45.828zM43.4,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2c-0.3,-0.7 -0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C45.3,56.528 44.5,57.328 43.4,57.328L43.4,57.328zM64.6,57.328c-0.8,0 -1.5,-0.5 -1.8,-1.2s-0.1,-1.5 0.4,-2.1c0.5,-0.5 1.4,-0.7 2.1,-0.4c0.7,0.3 1.2,1 1.2,1.8C66.5,56.528 65.6,57.328 64.6,57.328L64.6,57.328z"
android:strokeWidth="1"
android:strokeColor="#00000000" />
</vector>

Binary file not shown (new file, 208 KiB)

Binary file not shown (new file, 104 KiB)

Binary file not shown (new file, 68 KiB)

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<shape xmlns:android="http://schemas.android.com/apk/res/android">
<corners
android:radius="50dp"
>
</corners>
</shape>

Binary file not shown (new file, 27 KiB)

170
app/src/main/res/drawable/ic_launcher_background.xml Normal file

@@ -0,0 +1,170 @@
<?xml version="1.0" encoding="utf-8"?>
<vector xmlns:android="http://schemas.android.com/apk/res/android"
android:width="108dp"
android:height="108dp"
android:viewportWidth="108"
android:viewportHeight="108">
<path
android:fillColor="#3DDC84"
android:pathData="M0,0h108v108h-108z" />
<path
android:fillColor="#00000000"
android:pathData="M9,0L9,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,0L19,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,0L29,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,0L39,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,0L49,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,0L59,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,0L69,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,0L79,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M89,0L89,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M99,0L99,108"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,9L108,9"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,19L108,19"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,29L108,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,39L108,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,49L108,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,59L108,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,69L108,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,79L108,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,89L108,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M0,99L108,99"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,29L89,29"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,39L89,39"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,49L89,49"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,59L89,59"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,69L89,69"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M19,79L89,79"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M29,19L29,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M39,19L39,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M49,19L49,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M59,19L59,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M69,19L69,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
<path
android:fillColor="#00000000"
android:pathData="M79,19L79,89"
android:strokeWidth="0.8"
android:strokeColor="#33FFFFFF" />
</vector>

Binary file not shown (new file, 200 KiB)

Binary file not shown (new file, 153 KiB)

14
app/src/main/res/layout/activity_about.xml Normal file

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".about">
<WebView
android:id="@+id/aboutweb"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</androidx.constraintlayout.widget.ConstraintLayout>

19
app/src/main/res/layout/activity_camera.xml Normal file

@@ -0,0 +1,19 @@
<?xml version="1.0" encoding="utf-8"?>
<FrameLayout
xmlns:android="http://schemas.android.com/apk/res/android"
android:layout_width="match_parent"
android:layout_height="match_parent">
<org.opencv.android.JavaCameraView
android:layout_width="match_parent"
android:layout_height="match_parent"
android:id="@+id/frame_Surface"/>
<!-- <Button-->
<!-- android:id="@+id/btn_flip"-->
<!-- android:layout_gravity="center"-->
<!-- android:layout_width="wrap_content"-->
<!-- android:layout_height="wrap_content"-->
<!-- android:text="flip" />-->
</FrameLayout>

14
app/src/main/res/layout/activity_cara.xml Normal file

@@ -0,0 +1,14 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".cara">
<WebView
android:id="@+id/caraweb"
android:layout_width="match_parent"
android:layout_height="match_parent"/>
</androidx.constraintlayout.widget.ConstraintLayout>

57
app/src/main/res/layout/activity_main.xml Normal file

@@ -0,0 +1,57 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".MainActivity"
android:gravity="center"
android:background="@drawable/back2"
>
<Button
android:id="@+id/camera_button"
android:layout_width="279dp"
android:layout_height="191dp"
android:background="@android:color/transparent"
android:backgroundTint="@android:color/transparent"
android:text=""
android:textColor="@android:color/transparent"
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintHorizontal_bias="0.496"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintVertical_bias="0.411" />
<Button
android:id="@+id/tutorial"
android:layout_width="117dp"
android:layout_height="128dp"
android:background="@android:color/transparent"
android:backgroundTint="@android:color/transparent"
android:text=""
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintHorizontal_bias="0.224"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintVertical_bias="0.794" />
<Button
android:id="@+id/about"
android:layout_width="123dp"
android:layout_height="125dp"
android:background="@android:color/transparent"
android:backgroundTint="@android:color/transparent"
android:text=""
app:layout_constraintBottom_toBottomOf="parent"
app:layout_constraintEnd_toEndOf="parent"
app:layout_constraintHorizontal_bias="0.812"
app:layout_constraintStart_toStartOf="parent"
app:layout_constraintTop_toTopOf="parent"
app:layout_constraintVertical_bias="0.795" />
</androidx.constraintlayout.widget.ConstraintLayout>

10
app/src/main/res/layout/activity_splash.xml Normal file

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:app="http://schemas.android.com/apk/res-auto"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
tools:context=".splash"
android:background="@drawable/slide">
</androidx.constraintlayout.widget.ConstraintLayout>

5
app/src/main/res/mipmap-anydpi-v26/ic_launcher.xml Normal file

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

5
app/src/main/res/mipmap-anydpi-v26/ic_launcher_round.xml Normal file

@@ -0,0 +1,5 @@
<?xml version="1.0" encoding="utf-8"?>
<adaptive-icon xmlns:android="http://schemas.android.com/apk/res/android">
<background android:drawable="@drawable/ic_launcher_background" />
<foreground android:drawable="@drawable/ic_launcher_foreground" />
</adaptive-icon>

Binary file not shown (new file, 3.5 KiB)

Binary file not shown (new file, 5.2 KiB)

Binary file not shown (new file, 2.6 KiB)

Binary file not shown (new file, 3.3 KiB)

Binary file not shown (new file, 4.8 KiB)

Binary file not shown (new file, 7.3 KiB)

Binary file not shown (new file, 7.7 KiB)

Binary file not shown (new file, 12 KiB)

Binary file not shown (new file, 10 KiB)

Binary file not shown (new file, 16 KiB)

app/src/main/res/raw/haarcascade_frontalface_alt.xml Normal file

File diff suppressed because it is too large

16
app/src/main/res/values-night/themes.xml Normal file

@@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.ImagePro" parent="Theme.MaterialComponents.DayNight.NoActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_200</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/black</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_200</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor" tools:targetApi="l">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>

10
app/src/main/res/values/colors.xml Normal file

@@ -0,0 +1,10 @@
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="purple_200">#FFBB86FC</color>
<color name="purple_500">#FF6200EE</color>
<color name="purple_700">#FF3700B3</color>
<color name="teal_200">#FF03DAC5</color>
<color name="teal_700">#FF018786</color>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
</resources>

3
app/src/main/res/values/strings.xml Normal file

@@ -0,0 +1,3 @@
<resources>
<string name="app_name">ImagePro</string>
</resources>

16
app/src/main/res/values/themes.xml Normal file

@@ -0,0 +1,16 @@
<resources xmlns:tools="http://schemas.android.com/tools">
<!-- Base application theme. -->
<style name="Theme.ImagePro" parent="Theme.MaterialComponents.DayNight.NoActionBar">
<!-- Primary brand color. -->
<item name="colorPrimary">@color/purple_500</item>
<item name="colorPrimaryVariant">@color/purple_700</item>
<item name="colorOnPrimary">@color/white</item>
<!-- Secondary brand color. -->
<item name="colorSecondary">@color/teal_200</item>
<item name="colorSecondaryVariant">@color/teal_700</item>
<item name="colorOnSecondary">@color/black</item>
<!-- Status bar color. -->
<item name="android:statusBarColor" tools:targetApi="l">?attr/colorPrimaryVariant</item>
<!-- Customize your theme here. -->
</style>
</resources>

17
app/src/test/java/com/example/ETech/ExampleUnitTest.java Normal file

@@ -0,0 +1,17 @@
package com.example.ETech;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Example local unit test, which will execute on the development machine (host).
*
* @see <a href="http://d.android.com/tools/testing">Testing documentation</a>
*/
public class ExampleUnitTest {
@Test
public void addition_isCorrect() {
assertEquals(4, 2 + 2);
}
}

24
build.gradle Normal file
@@ -0,0 +1,24 @@
// Top-level build file where you can add configuration options common to all sub-projects/modules.
buildscript {
repositories {
google()
jcenter()
}
dependencies {
classpath "com.android.tools.build:gradle:4.1.2"
// NOTE: Do not place your application dependencies here; they belong
// in the individual module build.gradle files
}
}
allprojects {
repositories {
google()
jcenter()
}
}
task clean(type: Delete) {
delete rootProject.buildDir
}

19
gradle.properties Normal file
@@ -0,0 +1,19 @@
# Project-wide Gradle settings.
# IDE (e.g. Android Studio) users:
# Gradle settings configured through the IDE *will override*
# any settings specified in this file.
# For more details on how to configure your build environment visit
# http://www.gradle.org/docs/current/userguide/build_environment.html
# Specifies the JVM arguments used for the daemon process.
# The setting is particularly useful for tweaking memory settings.
org.gradle.jvmargs=-Xmx2048m -Dfile.encoding=UTF-8
# When configured, Gradle will run in incubating parallel mode.
# This option should only be used with decoupled projects. More details, visit
# http://www.gradle.org/docs/current/userguide/multi_project_builds.html#sec:decoupled_projects
# org.gradle.parallel=true
# AndroidX package structure to make it clearer which packages are bundled with the
# Android operating system, and which are packaged with your app's APK
# https://developer.android.com/topic/libraries/support-library/androidx-rn
android.useAndroidX=true
# Automatically convert third-party libraries to use AndroidX
android.enableJetifier=true

BIN
gradle/wrapper/gradle-wrapper.jar vendored Normal file

Binary file not shown.

6
gradle/wrapper/gradle-wrapper.properties Normal file

@@ -0,0 +1,6 @@
#Wed Mar 31 07:39:32 PDT 2021
distributionBase=GRADLE_USER_HOME
distributionPath=wrapper/dists
zipStoreBase=GRADLE_USER_HOME
zipStorePath=wrapper/dists
distributionUrl=https\://services.gradle.org/distributions/gradle-6.5-bin.zip
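# note: Gradle 6.5 is the minimum version supported by the Android Gradle plugin 4.1.2 declared in build.gradle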

84
gradlew.bat vendored Normal file
@@ -0,0 +1,84 @@
@if "%DEBUG%" == "" @echo off
@rem ##########################################################################
@rem
@rem Gradle startup script for Windows
@rem
@rem ##########################################################################
@rem Set local scope for the variables with windows NT shell
if "%OS%"=="Windows_NT" setlocal
set DIRNAME=%~dp0
if "%DIRNAME%" == "" set DIRNAME=.
set APP_BASE_NAME=%~n0
set APP_HOME=%DIRNAME%
@rem Add default JVM options here. You can also use JAVA_OPTS and GRADLE_OPTS to pass JVM options to this script.
set DEFAULT_JVM_OPTS=
@rem Find java.exe
if defined JAVA_HOME goto findJavaFromJavaHome
set JAVA_EXE=java.exe
%JAVA_EXE% -version >NUL 2>&1
if "%ERRORLEVEL%" == "0" goto init
echo.
echo ERROR: JAVA_HOME is not set and no 'java' command could be found in your PATH.
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:findJavaFromJavaHome
set JAVA_HOME=%JAVA_HOME:"=%
set JAVA_EXE=%JAVA_HOME%/bin/java.exe
if exist "%JAVA_EXE%" goto init
echo.
echo ERROR: JAVA_HOME is set to an invalid directory: %JAVA_HOME%
echo.
echo Please set the JAVA_HOME variable in your environment to match the
echo location of your Java installation.
goto fail
:init
@rem Get command-line arguments, handling Windows variants
if not "%OS%" == "Windows_NT" goto win9xME_args
:win9xME_args
@rem Slurp the command line arguments.
set CMD_LINE_ARGS=
set _SKIP=2
:win9xME_args_slurp
if "x%~1" == "x" goto execute
set CMD_LINE_ARGS=%*
:execute
@rem Setup the command line
set CLASSPATH=%APP_HOME%\gradle\wrapper\gradle-wrapper.jar
@rem Execute Gradle
"%JAVA_EXE%" %DEFAULT_JVM_OPTS% %JAVA_OPTS% %GRADLE_OPTS% "-Dorg.gradle.appname=%APP_BASE_NAME%" -classpath "%CLASSPATH%" org.gradle.wrapper.GradleWrapperMain %CMD_LINE_ARGS%
:end
@rem End local scope for the variables with windows NT shell
if "%ERRORLEVEL%"=="0" goto mainEnd
:fail
rem Set variable GRADLE_EXIT_CONSOLE if you need the _script_ return code instead of
rem the _cmd.exe /c_ return code!
if not "" == "%GRADLE_EXIT_CONSOLE%" exit 1
exit /b 1
:mainEnd
if "%OS%"=="Windows_NT" endlocal
:omega

253
import-summary.txt Normal file
@@ -0,0 +1,253 @@
ECLIPSE ANDROID PROJECT IMPORT SUMMARY
======================================
Ignored Files:
--------------
The following files were *not* copied into the new Gradle project; you
should evaluate whether these are still needed in your project and if
so manually move them:
* javadoc\
* javadoc\allclasses-frame.html
* javadoc\allclasses-noframe.html
* javadoc\constant-values.html
* javadoc\help-doc.html
* javadoc\index-all.html
* javadoc\index.html
* javadoc\mymath.js
* javadoc\org\
* javadoc\org\opencv\
* javadoc\org\opencv\android\
* javadoc\org\opencv\android\BaseLoaderCallback.html
* javadoc\org\opencv\android\Camera2Renderer.html
* javadoc\org\opencv\android\CameraBridgeViewBase.CvCameraViewFrame.html
* javadoc\org\opencv\android\CameraBridgeViewBase.CvCameraViewListener.html
* javadoc\org\opencv\android\CameraBridgeViewBase.CvCameraViewListener2.html
* javadoc\org\opencv\android\CameraBridgeViewBase.ListItemAccessor.html
* javadoc\org\opencv\android\CameraBridgeViewBase.html
* javadoc\org\opencv\android\CameraGLRendererBase.html
* javadoc\org\opencv\android\CameraGLSurfaceView.CameraTextureListener.html
* javadoc\org\opencv\android\CameraGLSurfaceView.html
* javadoc\org\opencv\android\CameraRenderer.html
* javadoc\org\opencv\android\FpsMeter.html
* javadoc\org\opencv\android\InstallCallbackInterface.html
* javadoc\org\opencv\android\JavaCamera2View.JavaCameraSizeAccessor.html
* javadoc\org\opencv\android\JavaCamera2View.html
* javadoc\org\opencv\android\JavaCameraView.JavaCameraSizeAccessor.html
* javadoc\org\opencv\android\JavaCameraView.html
* javadoc\org\opencv\android\LoaderCallbackInterface.html
* javadoc\org\opencv\android\OpenCVLoader.html
* javadoc\org\opencv\android\Utils.html
* javadoc\org\opencv\android\package-frame.html
* javadoc\org\opencv\android\package-summary.html
* javadoc\org\opencv\android\package-tree.html
* javadoc\org\opencv\calib3d\
* javadoc\org\opencv\calib3d\Calib3d.html
* javadoc\org\opencv\calib3d\StereoBM.html
* javadoc\org\opencv\calib3d\StereoMatcher.html
* javadoc\org\opencv\calib3d\StereoSGBM.html
* javadoc\org\opencv\calib3d\package-frame.html
* javadoc\org\opencv\calib3d\package-summary.html
* javadoc\org\opencv\calib3d\package-tree.html
* javadoc\org\opencv\core\
* javadoc\org\opencv\core\Algorithm.html
* javadoc\org\opencv\core\Core.MinMaxLocResult.html
* javadoc\org\opencv\core\Core.html
* javadoc\org\opencv\core\CvException.html
* javadoc\org\opencv\core\CvType.html
* javadoc\org\opencv\core\DMatch.html
* javadoc\org\opencv\core\KeyPoint.html
* javadoc\org\opencv\core\Mat.html
* javadoc\org\opencv\core\MatOfByte.html
* javadoc\org\opencv\core\MatOfDMatch.html
* javadoc\org\opencv\core\MatOfDouble.html
* javadoc\org\opencv\core\MatOfFloat.html
* javadoc\org\opencv\core\MatOfFloat4.html
* javadoc\org\opencv\core\MatOfFloat6.html
* javadoc\org\opencv\core\MatOfInt.html
* javadoc\org\opencv\core\MatOfInt4.html
* javadoc\org\opencv\core\MatOfKeyPoint.html
* javadoc\org\opencv\core\MatOfPoint.html
* javadoc\org\opencv\core\MatOfPoint2f.html
* javadoc\org\opencv\core\MatOfPoint3.html
* javadoc\org\opencv\core\MatOfPoint3f.html
* javadoc\org\opencv\core\MatOfRect.html
* javadoc\org\opencv\core\MatOfRect2d.html
* javadoc\org\opencv\core\MatOfRotatedRect.html
* javadoc\org\opencv\core\Point.html
* javadoc\org\opencv\core\Point3.html
* javadoc\org\opencv\core\Range.html
* javadoc\org\opencv\core\Rect.html
* javadoc\org\opencv\core\Rect2d.html
* javadoc\org\opencv\core\RotatedRect.html
* javadoc\org\opencv\core\Scalar.html
* javadoc\org\opencv\core\Size.html
* javadoc\org\opencv\core\TermCriteria.html
* javadoc\org\opencv\core\TickMeter.html
* javadoc\org\opencv\core\package-frame.html
* javadoc\org\opencv\core\package-summary.html
* javadoc\org\opencv\core\package-tree.html
* javadoc\org\opencv\dnn\
* javadoc\org\opencv\dnn\DictValue.html
* javadoc\org\opencv\dnn\Dnn.html
* javadoc\org\opencv\dnn\Layer.html
* javadoc\org\opencv\dnn\Net.html
* javadoc\org\opencv\dnn\package-frame.html
* javadoc\org\opencv\dnn\package-summary.html
* javadoc\org\opencv\dnn\package-tree.html
* javadoc\org\opencv\features2d\
* javadoc\org\opencv\features2d\AKAZE.html
* javadoc\org\opencv\features2d\AffineFeature.html
* javadoc\org\opencv\features2d\AgastFeatureDetector.html
* javadoc\org\opencv\features2d\BFMatcher.html
* javadoc\org\opencv\features2d\BOWImgDescriptorExtractor.html
* javadoc\org\opencv\features2d\BOWKMeansTrainer.html
* javadoc\org\opencv\features2d\BOWTrainer.html
* javadoc\org\opencv\features2d\BRISK.html
* javadoc\org\opencv\features2d\DescriptorMatcher.html
* javadoc\org\opencv\features2d\FastFeatureDetector.html
* javadoc\org\opencv\features2d\Feature2D.html
* javadoc\org\opencv\features2d\Features2d.html
* javadoc\org\opencv\features2d\FlannBasedMatcher.html
* javadoc\org\opencv\features2d\GFTTDetector.html
* javadoc\org\opencv\features2d\KAZE.html
* javadoc\org\opencv\features2d\MSER.html
* javadoc\org\opencv\features2d\ORB.html
* javadoc\org\opencv\features2d\Params.html
* javadoc\org\opencv\features2d\SIFT.html
* javadoc\org\opencv\features2d\SimpleBlobDetector.html
* javadoc\org\opencv\features2d\package-frame.html
* javadoc\org\opencv\features2d\package-summary.html
* javadoc\org\opencv\features2d\package-tree.html
* javadoc\org\opencv\imgcodecs\
* javadoc\org\opencv\imgcodecs\Imgcodecs.html
* javadoc\org\opencv\imgcodecs\package-frame.html
* javadoc\org\opencv\imgcodecs\package-summary.html
* javadoc\org\opencv\imgcodecs\package-tree.html
* javadoc\org\opencv\imgproc\
* javadoc\org\opencv\imgproc\CLAHE.html
* javadoc\org\opencv\imgproc\GeneralizedHough.html
* javadoc\org\opencv\imgproc\GeneralizedHoughBallard.html
* javadoc\org\opencv\imgproc\GeneralizedHoughGuil.html
* javadoc\org\opencv\imgproc\Imgproc.html
* javadoc\org\opencv\imgproc\LineSegmentDetector.html
* javadoc\org\opencv\imgproc\Moments.html
* javadoc\org\opencv\imgproc\Subdiv2D.html
* javadoc\org\opencv\imgproc\package-frame.html
* javadoc\org\opencv\imgproc\package-summary.html
* javadoc\org\opencv\imgproc\package-tree.html
* javadoc\org\opencv\ml\
* javadoc\org\opencv\ml\ANN_MLP.html
* javadoc\org\opencv\ml\ANN_MLP_ANNEAL.html
* javadoc\org\opencv\ml\Boost.html
* javadoc\org\opencv\ml\DTrees.html
* javadoc\org\opencv\ml\EM.html
* javadoc\org\opencv\ml\KNearest.html
* javadoc\org\opencv\ml\LogisticRegression.html
* javadoc\org\opencv\ml\Ml.html
* javadoc\org\opencv\ml\NormalBayesClassifier.html
* javadoc\org\opencv\ml\ParamGrid.html
* javadoc\org\opencv\ml\RTrees.html
* javadoc\org\opencv\ml\SVM.html
* javadoc\org\opencv\ml\SVMSGD.html
* javadoc\org\opencv\ml\StatModel.html
* javadoc\org\opencv\ml\TrainData.html
* javadoc\org\opencv\ml\package-frame.html
* javadoc\org\opencv\ml\package-summary.html
* javadoc\org\opencv\ml\package-tree.html
* javadoc\org\opencv\objdetect\
* javadoc\org\opencv\objdetect\BaseCascadeClassifier.html
* javadoc\org\opencv\objdetect\CascadeClassifier.html
* javadoc\org\opencv\objdetect\HOGDescriptor.html
* javadoc\org\opencv\objdetect\Objdetect.html
* javadoc\org\opencv\objdetect\QRCodeDetector.html
* javadoc\org\opencv\objdetect\package-frame.html
* javadoc\org\opencv\objdetect\package-summary.html
* javadoc\org\opencv\objdetect\package-tree.html
* javadoc\org\opencv\osgi\
* javadoc\org\opencv\osgi\OpenCVInterface.html
* javadoc\org\opencv\osgi\OpenCVNativeLoader.html
* javadoc\org\opencv\osgi\package-frame.html
* javadoc\org\opencv\osgi\package-summary.html
* javadoc\org\opencv\osgi\package-tree.html
* javadoc\org\opencv\photo\
* javadoc\org\opencv\photo\AlignExposures.html
* javadoc\org\opencv\photo\AlignMTB.html
* javadoc\org\opencv\photo\CalibrateCRF.html
* javadoc\org\opencv\photo\CalibrateDebevec.html
* javadoc\org\opencv\photo\CalibrateRobertson.html
* javadoc\org\opencv\photo\MergeDebevec.html
* javadoc\org\opencv\photo\MergeExposures.html
* javadoc\org\opencv\photo\MergeMertens.html
* javadoc\org\opencv\photo\MergeRobertson.html
* javadoc\org\opencv\photo\Photo.html
* javadoc\org\opencv\photo\Tonemap.html
* javadoc\org\opencv\photo\TonemapDrago.html
* javadoc\org\opencv\photo\TonemapMantiuk.html
* javadoc\org\opencv\photo\TonemapReinhard.html
* javadoc\org\opencv\photo\package-frame.html
* javadoc\org\opencv\photo\package-summary.html
* javadoc\org\opencv\photo\package-tree.html
* javadoc\org\opencv\utils\
* javadoc\org\opencv\utils\Converters.html
* javadoc\org\opencv\utils\package-frame.html
* javadoc\org\opencv\utils\package-summary.html
* javadoc\org\opencv\utils\package-tree.html
* javadoc\org\opencv\video\
* javadoc\org\opencv\video\BackgroundSubtractor.html
* javadoc\org\opencv\video\BackgroundSubtractorKNN.html
* javadoc\org\opencv\video\BackgroundSubtractorMOG2.html
* javadoc\org\opencv\video\DenseOpticalFlow.html
* javadoc\org\opencv\video\DualTVL1OpticalFlow.html
* javadoc\org\opencv\video\FarnebackOpticalFlow.html
* javadoc\org\opencv\video\KalmanFilter.html
* javadoc\org\opencv\video\SparseOpticalFlow.html
* javadoc\org\opencv\video\SparsePyrLKOpticalFlow.html
* javadoc\org\opencv\video\Video.html
* javadoc\org\opencv\video\package-frame.html
* javadoc\org\opencv\video\package-summary.html
* javadoc\org\opencv\video\package-tree.html
* javadoc\org\opencv\videoio\
* javadoc\org\opencv\videoio\VideoCapture.html
* javadoc\org\opencv\videoio\VideoWriter.html
* javadoc\org\opencv\videoio\Videoio.html
* javadoc\org\opencv\videoio\package-frame.html
* javadoc\org\opencv\videoio\package-summary.html
* javadoc\org\opencv\videoio\package-tree.html
* javadoc\overview-frame.html
* javadoc\overview-summary.html
* javadoc\overview-tree.html
* javadoc\package-list
* javadoc\resources\
* javadoc\resources\background.gif
* javadoc\resources\tab.gif
* javadoc\resources\titlebar.gif
* javadoc\resources\titlebar_end.gif
* javadoc\serialized-form.html
* javadoc\stylesheet.css
Moved Files:
------------
Android Gradle projects use a different directory structure than ADT
Eclipse projects. Here's how the projects were restructured:
* AndroidManifest.xml => openCVLibrary3413\src\main\AndroidManifest.xml
* lint.xml => openCVLibrary3413\lint.xml
* res\ => openCVLibrary3413\src\main\res\
* src\ => openCVLibrary3413\src\main\java\
* src\org\opencv\engine\OpenCVEngineInterface.aidl => openCVLibrary3413\src\main\aidl\org\opencv\engine\OpenCVEngineInterface.aidl
Next Steps:
-----------
You can now build the project. The Gradle project needs network
connectivity to download dependencies.
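For example, running the wrapper from the project root builds the debug APK once the
dependencies have been downloaded:
* gradlew.bat assembleDebug (Windows)
* ./gradlew assembleDebug (Linux/macOS, if the Unix wrapper script is present)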
Bugs:
-----
If for some reason your project does not build, and you determine that
it is due to a bug or limitation of the Eclipse to Gradle importer,
please file a bug at http://b.android.com with category
Component-Tools.
(This import summary is for your information only, and can be deleted
after import once you are satisfied with the results.)

18
openCVLibrary3413/build.gradle Normal file

@@ -0,0 +1,18 @@
apply plugin: 'com.android.library'
android {
compileSdkVersion 33
buildToolsVersion "30.0.2"
defaultConfig {
minSdkVersion 23
targetSdkVersion 33
}
buildTypes {
release {
minifyEnabled false
proguardFiles getDefaultProguardFile('proguard-android.txt'), 'proguard-rules.txt'
}
}
}

9
openCVLibrary3413/lint.xml Normal file

@@ -0,0 +1,9 @@
<?xml version="1.0" encoding="utf-8"?>
<lint>
<issue id="InlinedApi">
<ignore path="src\org\opencv\android\JavaCameraView.java" />
</issue>
<issue id="NewApi">
<ignore path="src\org\opencv\android\JavaCameraView.java" />
</issue>
</lint>

8
openCVLibrary3413/src/main/AndroidManifest.xml Normal file

@@ -0,0 +1,8 @@
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="org.opencv"
android:versionCode="34130"
android:versionName="3.4.13">
<uses-sdk android:minSdkVersion="8" android:targetSdkVersion="21" />
</manifest>

33
openCVLibrary3413/src/main/aidl/org/opencv/engine/OpenCVEngineInterface.aidl Normal file

@@ -0,0 +1,33 @@
package org.opencv.engine;
/**
* Class provides a Java interface for OpenCV Engine Service. It's synchronous with native OpenCVEngine class.
*/
interface OpenCVEngineInterface
{
/**
* @return Returns service version.
*/
int getEngineVersion();
/**
* Finds an installed OpenCV library.
* @param version OpenCV version.
* @return Returns path to OpenCV native libs or an empty string if OpenCV can not be found.
*/
String getLibPathByVersion(String version);
/**
* Tries to install defined version of OpenCV from Google Play Market.
* @param version OpenCV version.
* @return Returns true if installation was successful or OpenCV package has been already installed.
*/
boolean installVersion(String version);
/**
* Returns list of libraries in loading order, separated by semicolon.
* @param version OpenCV version.
* @return Returns names of OpenCV libraries, separated by semicolon.
*/
String getLibraryList(String version);
}

391
openCVLibrary3413/src/main/java/org/opencv/android/AsyncServiceHelper.java Normal file

@@ -0,0 +1,391 @@
package org.opencv.android;
import java.io.File;
import java.util.StringTokenizer;
import org.opencv.core.Core;
import org.opencv.engine.OpenCVEngineInterface;
import android.content.ComponentName;
import android.content.Context;
import android.content.Intent;
import android.content.ServiceConnection;
import android.net.Uri;
import android.os.IBinder;
import android.os.RemoteException;
import android.util.Log;
class AsyncServiceHelper
{
public static boolean initOpenCV(String Version, final Context AppContext,
final LoaderCallbackInterface Callback)
{
AsyncServiceHelper helper = new AsyncServiceHelper(Version, AppContext, Callback);
Intent intent = new Intent("org.opencv.engine.BIND");
intent.setPackage("org.opencv.engine");
if (AppContext.bindService(intent, helper.mServiceConnection, Context.BIND_AUTO_CREATE))
{
return true;
}
else
{
AppContext.unbindService(helper.mServiceConnection);
InstallService(AppContext, Callback);
return false;
}
}
protected AsyncServiceHelper(String Version, Context AppContext, LoaderCallbackInterface Callback)
{
mOpenCVersion = Version;
mUserAppCallback = Callback;
mAppContext = AppContext;
}
protected static final String TAG = "OpenCVManager/Helper";
protected static final int MINIMUM_ENGINE_VERSION = 2;
protected OpenCVEngineInterface mEngineService;
protected LoaderCallbackInterface mUserAppCallback;
protected String mOpenCVersion;
protected Context mAppContext;
protected static boolean mServiceInstallationProgress = false;
protected static boolean mLibraryInstallationProgress = false;
protected static boolean InstallServiceQuiet(Context context)
{
boolean result = true;
try
{
Intent intent = new Intent(Intent.ACTION_VIEW, Uri.parse(OPEN_CV_SERVICE_URL));
intent.addFlags(Intent.FLAG_ACTIVITY_NEW_TASK);
context.startActivity(intent);
}
catch(Exception e)
{
result = false;
}
return result;
}
protected static void InstallService(final Context AppContext, final LoaderCallbackInterface Callback)
{
if (!mServiceInstallationProgress)
{
Log.d(TAG, "Request new service installation");
InstallCallbackInterface InstallQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
{
return "OpenCV Manager";
}
public void install() {
Log.d(TAG, "Trying to install OpenCV Manager via Google Play");
boolean result = InstallServiceQuiet(AppContext);
if (result)
{
mServiceInstallationProgress = true;
Log.d(TAG, "Package installation started");
}
else
{
Log.d(TAG, "OpenCV package was not installed!");
int Status = LoaderCallbackInterface.MARKET_ERROR;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Unbind from service");
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
}
public void cancel()
{
Log.d(TAG, "OpenCV library installation was canceled");
int Status = LoaderCallbackInterface.INSTALL_CANCELED;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
public void wait_install()
{
Log.e(TAG, "Installation was not started! Nothing to wait!");
}
};
Callback.onPackageInstall(InstallCallbackInterface.NEW_INSTALLATION, InstallQuery);
}
else
{
Log.d(TAG, "Waiting current installation process");
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
private LoaderCallbackInterface mUserAppCallback = Callback;
public String getPackageName()
{
return "OpenCV Manager";
}
public void install()
{
Log.e(TAG, "Nothing to install we just wait current installation");
}
public void cancel()
{
Log.d(TAG, "Waiting for OpenCV canceled by user");
mServiceInstallationProgress = false;
int Status = LoaderCallbackInterface.INSTALL_CANCELED;
Log.d(TAG, "Init finished with status " + Status);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(Status);
}
public void wait_install()
{
InstallServiceQuiet(AppContext);
}
};
Callback.onPackageInstall(InstallCallbackInterface.INSTALLATION_PROGRESS, WaitQuery);
}
}
/**
* URL of OpenCV Manager page on Google Play Market.
*/
protected static final String OPEN_CV_SERVICE_URL = "market://details?id=org.opencv.engine";
protected ServiceConnection mServiceConnection = new ServiceConnection()
{
public void onServiceConnected(ComponentName className, IBinder service)
{
Log.d(TAG, "Service connection created");
mEngineService = OpenCVEngineInterface.Stub.asInterface(service);
if (null == mEngineService)
{
Log.d(TAG, "OpenCV Manager Service connection fails. May be service was not installed?");
InstallService(mAppContext, mUserAppCallback);
}
else
{
mServiceInstallationProgress = false;
try
{
if (mEngineService.getEngineVersion() < MINIMUM_ENGINE_VERSION)
{
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION);
return;
}
Log.d(TAG, "Trying to get library path");
String path = mEngineService.getLibPathByVersion(mOpenCVersion);
if ((null == path) || (path.length() == 0))
{
if (!mLibraryInstallationProgress)
{
InstallCallbackInterface InstallQuery = new InstallCallbackInterface() {
public String getPackageName()
{
return "OpenCV library";
}
public void install() {
Log.d(TAG, "Trying to install OpenCV lib via Google Play");
try
{
if (mEngineService.installVersion(mOpenCVersion))
{
mLibraryInstallationProgress = true;
Log.d(TAG, "Package installation started");
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
}
else
{
Log.d(TAG, "OpenCV package was not installed!");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.MARKET_ERROR);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.MARKET_ERROR);
}
} catch (RemoteException e) {
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
public void cancel() {
Log.d(TAG, "OpenCV library installation was canceled");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INSTALL_CANCELED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INSTALL_CANCELED);
}
public void wait_install() {
Log.e(TAG, "Installation was not started! Nothing to wait!");
}
};
mUserAppCallback.onPackageInstall(InstallCallbackInterface.NEW_INSTALLATION, InstallQuery);
}
else
{
InstallCallbackInterface WaitQuery = new InstallCallbackInterface() {
public String getPackageName()
{
return "OpenCV library";
}
public void install() {
Log.e(TAG, "Nothing to install we just wait current installation");
}
public void cancel()
{
Log.d(TAG, "OpenCV library installation was canceled");
mLibraryInstallationProgress = false;
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INSTALL_CANCELED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INSTALL_CANCELED);
}
public void wait_install() {
Log.d(TAG, "Waiting for current installation");
try
{
if (!mEngineService.installVersion(mOpenCVersion))
{
Log.d(TAG, "OpenCV package was not installed!");
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.MARKET_ERROR);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.MARKET_ERROR);
}
else
{
Log.d(TAG, "Waiting for package installation");
}
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
} catch (RemoteException e) {
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
};
mUserAppCallback.onPackageInstall(InstallCallbackInterface.INSTALLATION_PROGRESS, WaitQuery);
}
return;
}
else
{
Log.d(TAG, "Trying to get library list");
mLibraryInstallationProgress = false;
String libs = mEngineService.getLibraryList(mOpenCVersion);
Log.d(TAG, "Library list: \"" + libs + "\"");
Log.d(TAG, "First attempt to load libs");
int status;
if (initOpenCVLibs(path, libs))
{
Log.d(TAG, "First attempt to load libs is OK");
String eol = System.getProperty("line.separator");
for (String str : Core.getBuildInformation().split(eol))
Log.i(TAG, str);
status = LoaderCallbackInterface.SUCCESS;
}
else
{
Log.d(TAG, "First attempt to load libs fails");
status = LoaderCallbackInterface.INIT_FAILED;
}
Log.d(TAG, "Init finished with status " + status);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(status);
}
}
catch (RemoteException e)
{
e.printStackTrace();
Log.d(TAG, "Init finished with status " + LoaderCallbackInterface.INIT_FAILED);
Log.d(TAG, "Unbind from service");
mAppContext.unbindService(mServiceConnection);
Log.d(TAG, "Calling using callback");
mUserAppCallback.onManagerConnected(LoaderCallbackInterface.INIT_FAILED);
}
}
}
public void onServiceDisconnected(ComponentName className)
{
mEngineService = null;
}
};
private boolean loadLibrary(String AbsPath)
{
boolean result = true;
Log.d(TAG, "Trying to load library " + AbsPath);
try
{
System.load(AbsPath);
Log.d(TAG, "OpenCV libs init was ok!");
}
catch(UnsatisfiedLinkError e)
{
Log.d(TAG, "Cannot load library \"" + AbsPath + "\"");
e.printStackTrace();
result = false;
}
return result;
}
private boolean initOpenCVLibs(String Path, String Libs)
{
Log.d(TAG, "Trying to init OpenCV libs");
if ((null != Path) && (Path.length() != 0))
{
boolean result = true;
if ((null != Libs) && (Libs.length() != 0))
{
Log.d(TAG, "Trying to load libs by dependency list");
StringTokenizer splitter = new StringTokenizer(Libs, ";");
while(splitter.hasMoreTokens())
{
String AbsLibraryPath = Path + File.separator + splitter.nextToken();
result &= loadLibrary(AbsLibraryPath);
}
}
else
{
// If the dependencies list is not defined or empty.
String AbsLibraryPath = Path + File.separator + "libopencv_java3.so";
result = loadLibrary(AbsLibraryPath);
}
return result;
}
else
{
Log.d(TAG, "Library path \"" + Path + "\" is empty");
return false;
}
}
}
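Applications normally do not use AsyncServiceHelper directly; in the OpenCV samples the entry point is OpenCVLoader, which delegates to this helper. A typical onResume() sketch, assuming mLoaderCallback is a BaseLoaderCallback like the one in the next file:

@Override
protected void onResume() {
    super.onResume();
    if (!OpenCVLoader.initDebug()) {
        // OpenCV libs are not bundled with the APK: ask OpenCV Manager asynchronously.
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0, this, mLoaderCallback);
    } else {
        // Statically linked libs were found; report success immediately.
        mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    }
}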

View File

@@ -0,0 +1,141 @@
package org.opencv.android;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.DialogInterface.OnClickListener;
import android.util.Log;
/**
* Basic implementation of LoaderCallbackInterface.
*/
public abstract class BaseLoaderCallback implements LoaderCallbackInterface {
public BaseLoaderCallback(Context AppContext) {
mAppContext = AppContext;
}
public void onManagerConnected(int status)
{
switch (status)
{
/** OpenCV initialization was successful. **/
case LoaderCallbackInterface.SUCCESS:
{
/** Application must override this method to handle successful library initialization. **/
} break;
/** OpenCV loader can not start Google Play Market. **/
case LoaderCallbackInterface.MARKET_ERROR:
{
Log.e(TAG, "Package installation failed!");
AlertDialog MarketErrorMessage = new AlertDialog.Builder(mAppContext).create();
MarketErrorMessage.setTitle("OpenCV Manager");
MarketErrorMessage.setMessage("Package installation failed!");
MarketErrorMessage.setCancelable(false); // This blocks the 'BACK' button
MarketErrorMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
MarketErrorMessage.show();
} break;
/** Package installation has been canceled. **/
case LoaderCallbackInterface.INSTALL_CANCELED:
{
Log.d(TAG, "OpenCV library installation was canceled by user");
finish();
} break;
/** Application is incompatible with this version of OpenCV Manager. Possibly, a service update is required. **/
case LoaderCallbackInterface.INCOMPATIBLE_MANAGER_VERSION:
{
Log.d(TAG, "OpenCV Manager Service is uncompatible with this app!");
AlertDialog IncomatibilityMessage = new AlertDialog.Builder(mAppContext).create();
IncomatibilityMessage.setTitle("OpenCV Manager");
IncomatibilityMessage.setMessage("OpenCV Manager service is incompatible with this app. Try to update it via Google Play.");
IncomatibilityMessage.setCancelable(false); // This blocks the 'BACK' button
IncomatibilityMessage.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
IncomatibilityMessage.show();
} break;
/** Other status, i.e. INIT_FAILED. **/
default:
{
Log.e(TAG, "OpenCV loading failed!");
AlertDialog InitFailedDialog = new AlertDialog.Builder(mAppContext).create();
InitFailedDialog.setTitle("OpenCV error");
InitFailedDialog.setMessage("OpenCV was not initialised correctly. Application will be shut down");
InitFailedDialog.setCancelable(false); // This blocks the 'BACK' button
InitFailedDialog.setButton(AlertDialog.BUTTON_POSITIVE, "OK", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
finish();
}
});
InitFailedDialog.show();
} break;
}
}
public void onPackageInstall(final int operation, final InstallCallbackInterface callback)
{
switch (operation)
{
case InstallCallbackInterface.NEW_INSTALLATION:
{
AlertDialog InstallMessage = new AlertDialog.Builder(mAppContext).create();
InstallMessage.setTitle("Package not found");
InstallMessage.setMessage(callback.getPackageName() + " package was not found! Try to install it?");
InstallMessage.setCancelable(false); // This blocks the 'BACK' button
InstallMessage.setButton(AlertDialog.BUTTON_POSITIVE, "Yes", new OnClickListener()
{
public void onClick(DialogInterface dialog, int which)
{
callback.install();
}
});
InstallMessage.setButton(AlertDialog.BUTTON_NEGATIVE, "No", new OnClickListener() {
public void onClick(DialogInterface dialog, int which)
{
callback.cancel();
}
});
InstallMessage.show();
} break;
case InstallCallbackInterface.INSTALLATION_PROGRESS:
{
AlertDialog WaitMessage = new AlertDialog.Builder(mAppContext).create();
WaitMessage.setTitle("OpenCV is not ready");
WaitMessage.setMessage("Installation is in progress. Wait or exit?");
WaitMessage.setCancelable(false); // This blocks the 'BACK' button
WaitMessage.setButton(AlertDialog.BUTTON_POSITIVE, "Wait", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
callback.wait_install();
}
});
WaitMessage.setButton(AlertDialog.BUTTON_NEGATIVE, "Exit", new OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
callback.cancel();
}
});
WaitMessage.show();
} break;
}
}
void finish()
{
((Activity) mAppContext).finish();
}
protected Context mAppContext;
private final static String TAG = "OCV/BaseLoaderCallback";
}
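A concrete callback usually overrides only the SUCCESS branch and lets everything else fall through to the default dialog handling above. A minimal sketch, assuming the host Activity holds a CameraBridgeViewBase field named mOpenCvCameraView (the field name is an assumption):

private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        if (status == LoaderCallbackInterface.SUCCESS) {
            mOpenCvCameraView.enableView();   // start frame delivery
        } else {
            super.onManagerConnected(status); // default error dialogs / finish()
        }
    }
};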

View File

@@ -0,0 +1,302 @@
package org.opencv.android;
import java.util.Arrays;
import java.util.concurrent.Semaphore;
import java.util.concurrent.TimeUnit;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.view.Surface;
@TargetApi(21)
public class Camera2Renderer extends CameraGLRendererBase {
protected final String LOGTAG = "Camera2Renderer";
private CameraDevice mCameraDevice;
private CameraCaptureSession mCaptureSession;
private CaptureRequest.Builder mPreviewRequestBuilder;
private String mCameraID;
private Size mPreviewSize = new Size(-1, -1);
private HandlerThread mBackgroundThread;
private Handler mBackgroundHandler;
private Semaphore mCameraOpenCloseLock = new Semaphore(1);
Camera2Renderer(CameraGLSurfaceView view) {
super(view);
}
@Override
protected void doStart() {
Log.d(LOGTAG, "doStart");
startBackgroundThread();
super.doStart();
}
@Override
protected void doStop() {
Log.d(LOGTAG, "doStop");
super.doStop();
stopBackgroundThread();
}
boolean cacPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "cacPreviewSize: "+width+"x"+height);
if(mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
CameraManager manager = (CameraManager) mView.getContext()
.getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager
.getCameraCharacteristics(mCameraID);
StreamConfigurationMap map = characteristics
.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
int bestWidth = 0, bestHeight = 0;
float aspect = (float)width / height;
for (Size psize : map.getOutputSizes(SurfaceTexture.class)) {
int w = psize.getWidth(), h = psize.getHeight();
Log.d(LOGTAG, "trying size: "+w+"x"+h);
if ( width >= w && height >= h &&
bestWidth <= w && bestHeight <= h &&
Math.abs(aspect - (float)w/h) < 0.2 ) {
bestWidth = w;
bestHeight = h;
}
}
Log.i(LOGTAG, "best size: "+bestWidth+"x"+bestHeight);
if( bestWidth == 0 || bestHeight == 0 ||
mPreviewSize.getWidth() == bestWidth &&
mPreviewSize.getHeight() == bestHeight )
return false;
else {
mPreviewSize = new Size(bestWidth, bestHeight);
return true;
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "cacPreviewSize - Camera Access Exception");
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "cacPreviewSize - Illegal Argument Exception");
} catch (SecurityException e) {
Log.e(LOGTAG, "cacPreviewSize - Security Exception");
}
return false;
}
@Override
protected void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
CameraManager manager = (CameraManager) mView.getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String camList[] = manager.getCameraIdList();
if(camList.length == 0) {
Log.e(LOGTAG, "Error: camera isn't detected.");
return;
}
if(id == CameraBridgeViewBase.CAMERA_ID_ANY) {
mCameraID = camList[0];
} else {
for (String cameraID : camList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
if( id == CameraBridgeViewBase.CAMERA_ID_BACK &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK ||
id == CameraBridgeViewBase.CAMERA_ID_FRONT &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT) {
mCameraID = cameraID;
break;
}
}
}
if(mCameraID != null) {
if (!mCameraOpenCloseLock.tryAcquire(2500, TimeUnit.MILLISECONDS)) {
throw new RuntimeException(
"Time out waiting to lock camera opening.");
}
Log.i(LOGTAG, "Opening camera: " + mCameraID);
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "OpenCamera - Camera Access Exception");
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception");
} catch (SecurityException e) {
Log.e(LOGTAG, "OpenCamera - Security Exception");
} catch (InterruptedException e) {
Log.e(LOGTAG, "OpenCamera - Interrupted Exception");
}
}
@Override
protected void closeCamera() {
Log.i(LOGTAG, "closeCamera");
try {
mCameraOpenCloseLock.acquire();
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != mCameraDevice) {
mCameraDevice.close();
mCameraDevice = null;
}
} catch (InterruptedException e) {
throw new RuntimeException("Interrupted while trying to lock camera closing.", e);
} finally {
mCameraOpenCloseLock.release();
}
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
mCameraOpenCloseLock.release();
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
mCameraOpenCloseLock.release();
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
mCameraOpenCloseLock.release();
}
};
private void createCameraPreviewSession() {
int w=mPreviewSize.getWidth(), h=mPreviewSize.getHeight();
Log.i(LOGTAG, "createCameraPreviewSession("+w+"x"+h+")");
if(w<0 || h<0)
return;
try {
mCameraOpenCloseLock.acquire();
if (null == mCameraDevice) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
return;
}
if (null != mCaptureSession) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
return;
}
if(null == mSTexture) {
mCameraOpenCloseLock.release();
Log.e(LOGTAG, "createCameraPreviewSession: preview SurfaceTexture is null");
return;
}
mSTexture.setDefaultBufferSize(w, h);
Surface surface = new Surface(mSTexture);
mPreviewRequestBuilder = mCameraDevice
.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured( CameraCaptureSession cameraCaptureSession) {
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE, CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE, CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
Log.i(LOGTAG, "CameraPreviewSession has been started");
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCaptureSession failed");
}
mCameraOpenCloseLock.release();
}
@Override
public void onConfigureFailed(
CameraCaptureSession cameraCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession failed");
mCameraOpenCloseLock.release();
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCameraPreviewSession");
} catch (InterruptedException e) {
throw new RuntimeException(
"Interrupted while createCameraPreviewSession", e);
}
finally {
//mCameraOpenCloseLock.release();
}
}
private void startBackgroundThread() {
Log.i(LOGTAG, "startBackgroundThread");
stopBackgroundThread();
mBackgroundThread = new HandlerThread("CameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
Log.i(LOGTAG, "stopBackgroundThread");
if(mBackgroundThread == null)
return;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(LOGTAG, "stopBackgroundThread");
}
}
@Override
protected void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize("+width+"x"+height+")");
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
try {
mCameraOpenCloseLock.acquire();
boolean needReconfig = cacPreviewSize(width, height);
mCameraWidth = mPreviewSize.getWidth();
mCameraHeight = mPreviewSize.getHeight();
if( !needReconfig ) {
mCameraOpenCloseLock.release();
return;
}
if (null != mCaptureSession) {
Log.d(LOGTAG, "closing existing previewSession");
mCaptureSession.close();
mCaptureSession = null;
}
mCameraOpenCloseLock.release();
createCameraPreviewSession();
} catch (InterruptedException e) {
mCameraOpenCloseLock.release();
throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
}
}
}

View File

@@ -0,0 +1,523 @@
package org.opencv.android;
import java.util.List;
import org.opencv.BuildConfig;
import org.opencv.R;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.Context;
import android.content.DialogInterface;
import android.content.res.TypedArray;
import android.graphics.Bitmap;
import android.graphics.Canvas;
import android.graphics.Rect;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
import android.view.SurfaceView;
/**
* This is a basic class implementing the interaction between the camera and the OpenCV library.
* Its main responsibility is to control when the camera can be enabled, to process each frame,
* to call an external listener to make any adjustments to the frame, and then to draw the
* resulting frame on the screen.
* Clients shall implement CvCameraViewListener.
*/
public abstract class CameraBridgeViewBase extends SurfaceView implements SurfaceHolder.Callback {
private static final String TAG = "CameraBridge";
protected static final int MAX_UNSPECIFIED = -1;
private static final int STOPPED = 0;
private static final int STARTED = 1;
private int mState = STOPPED;
private Bitmap mCacheBitmap;
private CvCameraViewListener2 mListener;
private boolean mSurfaceExist;
private final Object mSyncObject = new Object();
protected int mFrameWidth;
protected int mFrameHeight;
protected int mMaxHeight;
protected int mMaxWidth;
protected float mScale = 0;
protected int mPreviewFormat = RGBA;
protected int mCameraIndex = CAMERA_ID_ANY;
protected boolean mEnabled;
protected FpsMeter mFpsMeter = null;
public static final int CAMERA_ID_ANY = -1;
public static final int CAMERA_ID_BACK = 99;
public static final int CAMERA_ID_FRONT = 98;
public static final int RGBA = 1;
public static final int GRAY = 2;
public CameraBridgeViewBase(Context context, int cameraId) {
super(context);
mCameraIndex = cameraId;
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
}
public CameraBridgeViewBase(Context context, AttributeSet attrs) {
super(context, attrs);
int count = attrs.getAttributeCount();
Log.d(TAG, "Attr count: " + Integer.valueOf(count));
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
if (styledAttrs.getBoolean(R.styleable.CameraBridgeViewBase_show_fps, false))
enableFpsMeter();
mCameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
getHolder().addCallback(this);
mMaxWidth = MAX_UNSPECIFIED;
mMaxHeight = MAX_UNSPECIFIED;
styledAttrs.recycle();
}
/**
* Sets the camera index
* @param cameraIndex new camera index
*/
public void setCameraIndex(int cameraIndex) {
this.mCameraIndex = cameraIndex;
}
public interface CvCameraViewListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned value is a modified frame which needs to be displayed on the screen.
* TODO: pass parameters specifying the format of the frame (BPP, YUV or RGB, etc.)
*/
public Mat onCameraFrame(Mat inputFrame);
}
public interface CvCameraViewListener2 {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to client via the onCameraFrame() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via onCameraFrame() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when delivery of the frame needs to be done.
* The returned value is a modified frame which needs to be displayed on the screen.
* TODO: pass parameters specifying the format of the frame (BPP, YUV or RGB, etc.)
*/
public Mat onCameraFrame(CvCameraViewFrame inputFrame);
};
protected class CvCameraViewListenerAdapter implements CvCameraViewListener2 {
public CvCameraViewListenerAdapter(CvCameraViewListener oldStyleListener) {
mOldStyleListener = oldStyleListener;
}
public void onCameraViewStarted(int width, int height) {
mOldStyleListener.onCameraViewStarted(width, height);
}
public void onCameraViewStopped() {
mOldStyleListener.onCameraViewStopped();
}
public Mat onCameraFrame(CvCameraViewFrame inputFrame) {
Mat result = null;
switch (mPreviewFormat) {
case RGBA:
result = mOldStyleListener.onCameraFrame(inputFrame.rgba());
break;
case GRAY:
result = mOldStyleListener.onCameraFrame(inputFrame.gray());
break;
default:
Log.e(TAG, "Invalid frame format! Only RGBA and Gray Scale are supported!");
};
return result;
}
public void setFrameFormat(int format) {
mPreviewFormat = format;
}
private int mPreviewFormat = RGBA;
private CvCameraViewListener mOldStyleListener;
};
/**
* This interface is an abstract representation of a single camera frame passed to the onCameraFrame callback.
* Attention: do not use objects implementing this interface outside of the onCameraFrame callback!
*/
public interface CvCameraViewFrame {
/**
* Returns the frame as an RGBA Mat.
*/
public Mat rgba();
/**
* Returns the frame as a single-channel grayscale Mat.
*/
public Mat gray();
};
public void surfaceChanged(SurfaceHolder arg0, int arg1, int arg2, int arg3) {
Log.d(TAG, "call surfaceChanged event");
synchronized(mSyncObject) {
if (!mSurfaceExist) {
mSurfaceExist = true;
checkCurrentState();
} else {
/** Surface changed. We need to stop camera and restart with new parameters */
/* Pretend that old surface has been destroyed */
mSurfaceExist = false;
checkCurrentState();
/* Now use new surface. Say we have it now */
mSurfaceExist = true;
checkCurrentState();
}
}
}
public void surfaceCreated(SurfaceHolder holder) {
/* Do nothing. Wait until surfaceChanged delivered */
}
public void surfaceDestroyed(SurfaceHolder holder) {
synchronized(mSyncObject) {
mSurfaceExist = false;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can enable the camera connection.
* The actual onCameraViewStarted callback will be delivered only after this method is called and the surface is available.
*/
public void enableView() {
synchronized(mSyncObject) {
mEnabled = true;
checkCurrentState();
}
}
/**
* This method is provided for clients, so they can disable the camera connection and stop
* the delivery of frames, even though the surface view itself is not destroyed and still stays on the screen.
*/
public void disableView() {
synchronized(mSyncObject) {
mEnabled = false;
checkCurrentState();
}
}
/**
* This method enables label with fps value on the screen
*/
public void enableFpsMeter() {
if (mFpsMeter == null) {
mFpsMeter = new FpsMeter();
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
}
public void disableFpsMeter() {
mFpsMeter = null;
}
/**
* Sets the listener that will receive camera view events and frames.
* @param listener the new listener
*/
public void setCvCameraViewListener(CvCameraViewListener2 listener) {
mListener = listener;
}
public void setCvCameraViewListener(CvCameraViewListener listener) {
CvCameraViewListenerAdapter adapter = new CvCameraViewListenerAdapter(listener);
adapter.setFrameFormat(mPreviewFormat);
mListener = adapter;
}
/**
* This method sets the maximum size that the camera frame is allowed to be. When selecting
* a size, the biggest size which is less than or equal to the set size will be selected.
* For example, with setMaxFrameSize(200,200) and supported sizes of 176x152 and 320x240,
* the 176x152 preview frame will be selected.
* This method is useful when one needs to restrict the preview frame size for some reason (for example, for video recording).
* @param maxWidth - the maximum width allowed for the camera frame.
* @param maxHeight - the maximum height allowed for the camera frame.
*/
public void setMaxFrameSize(int maxWidth, int maxHeight) {
mMaxWidth = maxWidth;
mMaxHeight = maxHeight;
}
public void SetCaptureFormat(int format)
{
mPreviewFormat = format;
if (mListener instanceof CvCameraViewListenerAdapter) {
CvCameraViewListenerAdapter adapter = (CvCameraViewListenerAdapter) mListener;
adapter.setFrameFormat(mPreviewFormat);
}
}
/**
* Called when mSyncObject lock is held
*/
private void checkCurrentState() {
Log.d(TAG, "call checkCurrentState");
int targetState;
if (mEnabled && mSurfaceExist && getVisibility() == VISIBLE) {
targetState = STARTED;
} else {
targetState = STOPPED;
}
if (targetState != mState) {
/* The state change detected. Need to exit the current state and enter target state */
processExitState(mState);
mState = targetState;
processEnterState(mState);
}
}
private void processEnterState(int state) {
Log.d(TAG, "call processEnterState: " + state);
switch(state) {
case STARTED:
onEnterStartedState();
if (mListener != null) {
mListener.onCameraViewStarted(mFrameWidth, mFrameHeight);
}
break;
case STOPPED:
onEnterStoppedState();
if (mListener != null) {
mListener.onCameraViewStopped();
}
break;
};
}
private void processExitState(int state) {
Log.d(TAG, "call processExitState: " + state);
switch(state) {
case STARTED:
onExitStartedState();
break;
case STOPPED:
onExitStoppedState();
break;
};
}
private void onEnterStoppedState() {
/* nothing to do */
}
private void onExitStoppedState() {
/* nothing to do */
}
// NOTE: The order of bitmap constructor and camera connection is important for android 4.1.x
// Bitmap must be constructed before surface
private void onEnterStartedState() {
Log.d(TAG, "call onEnterStartedState");
/* Connect camera */
if (!connectCamera(getWidth(), getHeight())) {
AlertDialog ad = new AlertDialog.Builder(getContext()).create();
ad.setCancelable(false); // This blocks the 'BACK' button
ad.setMessage("It seems that you device does not support camera (or it is locked). Application will be closed.");
ad.setButton(DialogInterface.BUTTON_NEUTRAL, "OK", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
((Activity) getContext()).finish();
}
});
ad.show();
}
}
private void onExitStartedState() {
disconnectCamera();
if (mCacheBitmap != null) {
mCacheBitmap.recycle();
}
}
/**
* This method shall be called by the subclasses when they have valid
* object and want it to be delivered to external client (via callback) and
* then displayed on the screen.
* @param frame - the current frame to be delivered
*/
protected void deliverAndDrawFrame(CvCameraViewFrame frame) {
Mat modified;
if (mListener != null) {
modified = mListener.onCameraFrame(frame);
} else {
modified = frame.rgba();
}
boolean bmpValid = true;
if (modified != null) {
try {
Utils.matToBitmap(modified, mCacheBitmap);
} catch(Exception e) {
Log.e(TAG, "Mat type: " + modified);
Log.e(TAG, "Bitmap type: " + mCacheBitmap.getWidth() + "*" + mCacheBitmap.getHeight());
Log.e(TAG, "Utils.matToBitmap() throws an exception: " + e.getMessage());
bmpValid = false;
}
}
if (bmpValid && mCacheBitmap != null) {
Canvas canvas = getHolder().lockCanvas();
if (canvas != null) {
// rotate the canvas by 90 degrees when the surface is in portrait orientation
float mScale1;
float mScale2;
if(canvas.getHeight()>canvas.getWidth()){
canvas.rotate(90f,canvas.getWidth()/2,canvas.getHeight()/2);
// device-specific scale factors tuned by the author for a particular phone
mScale1=1.8f;
mScale2=1.5f;
}
else{
mScale1=1.4f;
mScale2=1.3f;
}
canvas.drawColor(0, android.graphics.PorterDuff.Mode.CLEAR);
if (BuildConfig.DEBUG)
Log.d(TAG, "mStretch value: " + mScale);
// scale canvas to fit phone
if (mScale1 != 0) {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((int)((canvas.getWidth() - mScale1*mCacheBitmap.getWidth()) / 2),
(int)((canvas.getHeight() - mScale2*mCacheBitmap.getHeight()) / 2),
(int)((canvas.getWidth() - mScale1*mCacheBitmap.getWidth()) / 2 + mScale1*mCacheBitmap.getWidth()),
(int)((canvas.getHeight() - mScale2*mCacheBitmap.getHeight()) / 2 + mScale2*mCacheBitmap.getHeight())), null);
}
else {
canvas.drawBitmap(mCacheBitmap, new Rect(0,0,mCacheBitmap.getWidth(), mCacheBitmap.getHeight()),
new Rect((canvas.getWidth() - mCacheBitmap.getWidth()) / 2,
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2,
(canvas.getWidth() - mCacheBitmap.getWidth()) / 2 + mCacheBitmap.getWidth(),
(canvas.getHeight() - mCacheBitmap.getHeight()) / 2 + mCacheBitmap.getHeight()), null);
}
if (mFpsMeter != null) {
mFpsMeter.measure();
mFpsMeter.draw(canvas, 20, 30);
}
getHolder().unlockCanvasAndPost(canvas);
}
}
}
/**
* This method, when invoked, shall perform the concrete operations needed to initialize the camera.
* CONTRACT: as a result of this method variables mFrameWidth and mFrameHeight MUST be
* initialized with the size of the Camera frames that will be delivered to external processor.
* @param width - the width of this SurfaceView
* @param height - the height of this SurfaceView
*/
protected abstract boolean connectCamera(int width, int height);
/**
* Disconnects and releases the particular camera object connected to this surface view.
* Called when syncObject lock is held
*/
protected abstract void disconnectCamera();
// NOTE: On Android 4.1.x the function must be called before SurfaceTexture constructor!
protected void AllocateCache()
{
mCacheBitmap = Bitmap.createBitmap(mFrameWidth, mFrameHeight, Bitmap.Config.ARGB_8888);
}
public interface ListItemAccessor {
public int getWidth(Object obj);
public int getHeight(Object obj);
};
/**
* This helper method can be called by subclasses to select the camera preview size.
* It goes over the list of supported preview sizes and selects the largest one that
* fits both the values set via setMaxFrameSize() and the surface frame allocated for this view.
* @param supportedSizes the list of supported preview sizes
* @param accessor the accessor used to read the width and height of a size object
* @param surfaceWidth the width of this SurfaceView
* @param surfaceHeight the height of this SurfaceView
* @return optimal frame size
*/
protected Size calculateCameraFrameSize(List<?> supportedSizes, ListItemAccessor accessor, int surfaceWidth, int surfaceHeight) {
int calcWidth = 0;
int calcHeight = 0;
int maxAllowedWidth = (mMaxWidth != MAX_UNSPECIFIED && mMaxWidth < surfaceWidth)? mMaxWidth : surfaceWidth;
int maxAllowedHeight = (mMaxHeight != MAX_UNSPECIFIED && mMaxHeight < surfaceHeight)? mMaxHeight : surfaceHeight;
for (Object size : supportedSizes) {
int width = accessor.getWidth(size);
int height = accessor.getHeight(size);
Log.d(TAG, "trying size: " + width + "x" + height);
if (width <= maxAllowedWidth && height <= maxAllowedHeight) {
if (width >= calcWidth && height >= calcHeight) {
calcWidth = (int) width;
calcHeight = (int) height;
}
}
}
if ((calcWidth == 0 || calcHeight == 0) && supportedSizes.size() > 0)
{
Log.i(TAG, "fallback to the first frame size");
Object size = supportedSizes.get(0);
calcWidth = accessor.getWidth(size);
calcHeight = accessor.getHeight(size);
}
return new Size(calcWidth, calcHeight);
}
}
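For reference, a minimal client of this class in the style of the OpenCV samples: an Activity implementing CvCameraViewListener2 whose layout contains a concrete CameraBridgeViewBase subclass such as JavaCameraView (the layout and view ids are assumptions):

import android.app.Activity;
import android.os.Bundle;
import org.opencv.core.Mat;

public class PreviewActivity extends Activity implements CameraBridgeViewBase.CvCameraViewListener2 {
    private CameraBridgeViewBase mOpenCvCameraView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_preview);                         // hypothetical layout
        mOpenCvCameraView = (CameraBridgeViewBase) findViewById(R.id.camera_view);
        mOpenCvCameraView.setCvCameraViewListener(this);
    }

    public void onCameraViewStarted(int width, int height) { }
    public void onCameraViewStopped() { }

    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        return inputFrame.rgba(); // pass the frame through unmodified
    }
}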

View File

@@ -0,0 +1,440 @@
package org.opencv.android;
import java.nio.ByteBuffer;
import java.nio.ByteOrder;
import java.nio.FloatBuffer;
import javax.microedition.khronos.egl.EGLConfig;
import javax.microedition.khronos.opengles.GL10;
import org.opencv.android.CameraGLSurfaceView.CameraTextureListener;
import android.annotation.TargetApi;
import android.graphics.SurfaceTexture;
import android.opengl.GLES11Ext;
import android.opengl.GLES20;
import android.opengl.GLSurfaceView;
import android.util.Log;
import android.view.View;
@TargetApi(15)
public abstract class CameraGLRendererBase implements GLSurfaceView.Renderer, SurfaceTexture.OnFrameAvailableListener {
protected final String LOGTAG = "CameraGLRendererBase";
// shaders
private final String vss = ""
+ "attribute vec2 vPosition;\n"
+ "attribute vec2 vTexCoord;\n" + "varying vec2 texCoord;\n"
+ "void main() {\n" + " texCoord = vTexCoord;\n"
+ " gl_Position = vec4 ( vPosition.x, vPosition.y, 0.0, 1.0 );\n"
+ "}";
private final String fssOES = ""
+ "#extension GL_OES_EGL_image_external : require\n"
+ "precision mediump float;\n"
+ "uniform samplerExternalOES sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
private final String fss2D = ""
+ "precision mediump float;\n"
+ "uniform sampler2D sTexture;\n"
+ "varying vec2 texCoord;\n"
+ "void main() {\n"
+ " gl_FragColor = texture2D(sTexture,texCoord);\n" + "}";
// coord-s
private final float vertices[] = {
-1, -1,
-1, 1,
1, -1,
1, 1 };
private final float texCoordOES[] = {
0, 1,
0, 0,
1, 1,
1, 0 };
private final float texCoord2D[] = {
0, 0,
0, 1,
1, 0,
1, 1 };
private int[] texCamera = {0}, texFBO = {0}, texDraw = {0};
private int[] FBO = {0};
private int progOES = -1, prog2D = -1;
private int vPosOES, vTCOES, vPos2D, vTC2D;
private FloatBuffer vert, texOES, tex2D;
protected int mCameraWidth = -1, mCameraHeight = -1;
protected int mFBOWidth = -1, mFBOHeight = -1;
protected int mMaxCameraWidth = -1, mMaxCameraHeight = -1;
protected int mCameraIndex = CameraBridgeViewBase.CAMERA_ID_ANY;
protected SurfaceTexture mSTexture;
protected boolean mHaveSurface = false;
protected boolean mHaveFBO = false;
protected boolean mUpdateST = false;
protected boolean mEnabled = true;
protected boolean mIsStarted = false;
protected CameraGLSurfaceView mView;
protected abstract void openCamera(int id);
protected abstract void closeCamera();
protected abstract void setCameraPreviewSize(int width, int height); // updates mCameraWidth & mCameraHeight
public CameraGLRendererBase(CameraGLSurfaceView view) {
mView = view;
int bytes = vertices.length * Float.SIZE / Byte.SIZE;
vert = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
texOES = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
tex2D = ByteBuffer.allocateDirect(bytes).order(ByteOrder.nativeOrder()).asFloatBuffer();
vert.put(vertices).position(0);
texOES.put(texCoordOES).position(0);
tex2D.put(texCoord2D).position(0);
}
@Override
public synchronized void onFrameAvailable(SurfaceTexture surfaceTexture) {
//Log.i(LOGTAG, "onFrameAvailable");
mUpdateST = true;
mView.requestRender();
}
@Override
public void onDrawFrame(GL10 gl) {
//Log.i(LOGTAG, "onDrawFrame start");
if (!mHaveFBO)
return;
synchronized(this) {
if (mUpdateST) {
mSTexture.updateTexImage();
mUpdateST = false;
}
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
CameraTextureListener texListener = mView.getCameraTextureListener();
if(texListener != null) {
//Log.d(LOGTAG, "haveUserCallback");
// texCamera(OES) -> texFBO
drawTex(texCamera[0], true, FBO[0]);
// call user code (texFBO -> texDraw)
boolean modified = texListener.onCameraTexture(texFBO[0], texDraw[0], mCameraWidth, mCameraHeight);
if(modified) {
// texDraw -> screen
drawTex(texDraw[0], false, 0);
} else {
// texFBO -> screen
drawTex(texFBO[0], false, 0);
}
} else {
Log.d(LOGTAG, "texCamera(OES) -> screen");
// texCamera(OES) -> screen
drawTex(texCamera[0], true, 0);
}
//Log.i(LOGTAG, "onDrawFrame end");
}
}
@Override
public void onSurfaceChanged(GL10 gl, int surfaceWidth, int surfaceHeight) {
Log.i(LOGTAG, "onSurfaceChanged("+surfaceWidth+"x"+surfaceHeight+")");
mHaveSurface = true;
updateState();
setPreviewSize(surfaceWidth, surfaceHeight);
}
@Override
public void onSurfaceCreated(GL10 gl, EGLConfig config) {
Log.i(LOGTAG, "onSurfaceCreated");
initShaders();
}
private void initShaders() {
String strGLVersion = GLES20.glGetString(GLES20.GL_VERSION);
if (strGLVersion != null)
Log.i(LOGTAG, "OpenGL ES version: " + strGLVersion);
GLES20.glClearColor(1.0f, 1.0f, 1.0f, 1.0f);
progOES = loadShader(vss, fssOES);
vPosOES = GLES20.glGetAttribLocation(progOES, "vPosition");
vTCOES = GLES20.glGetAttribLocation(progOES, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPosOES);
GLES20.glEnableVertexAttribArray(vTCOES);
prog2D = loadShader(vss, fss2D);
vPos2D = GLES20.glGetAttribLocation(prog2D, "vPosition");
vTC2D = GLES20.glGetAttribLocation(prog2D, "vTexCoord");
GLES20.glEnableVertexAttribArray(vPos2D);
GLES20.glEnableVertexAttribArray(vTC2D);
}
private void initSurfaceTexture() {
Log.d(LOGTAG, "initSurfaceTexture");
deleteSurfaceTexture();
initTexOES(texCamera);
mSTexture = new SurfaceTexture(texCamera[0]);
mSTexture.setOnFrameAvailableListener(this);
}
private void deleteSurfaceTexture() {
Log.d(LOGTAG, "deleteSurfaceTexture");
if(mSTexture != null) {
mSTexture.release();
mSTexture = null;
deleteTex(texCamera);
}
}
private void initTexOES(int[] tex) {
if(tex.length == 1) {
GLES20.glGenTextures(1, tex, 0);
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex[0]);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
}
}
private static void deleteTex(int[] tex) {
if(tex.length == 1) {
GLES20.glDeleteTextures(1, tex, 0);
}
}
private static int loadShader(String vss, String fss) {
Log.d("CameraGLRendererBase", "loadShader");
int vshader = GLES20.glCreateShader(GLES20.GL_VERTEX_SHADER);
GLES20.glShaderSource(vshader, vss);
GLES20.glCompileShader(vshader);
int[] status = new int[1];
GLES20.glGetShaderiv(vshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not compile vertex shader: "+GLES20.glGetShaderInfoLog(vshader));
GLES20.glDeleteShader(vshader);
vshader = 0;
return 0;
}
int fshader = GLES20.glCreateShader(GLES20.GL_FRAGMENT_SHADER);
GLES20.glShaderSource(fshader, fss);
GLES20.glCompileShader(fshader);
GLES20.glGetShaderiv(fshader, GLES20.GL_COMPILE_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not compile fragment shader:"+GLES20.glGetShaderInfoLog(fshader));
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
fshader = 0;
return 0;
}
int program = GLES20.glCreateProgram();
GLES20.glAttachShader(program, vshader);
GLES20.glAttachShader(program, fshader);
GLES20.glLinkProgram(program);
GLES20.glDeleteShader(vshader);
GLES20.glDeleteShader(fshader);
GLES20.glGetProgramiv(program, GLES20.GL_LINK_STATUS, status, 0);
if (status[0] == 0) {
Log.e("CameraGLRendererBase", "Could not link shader program: "+GLES20.glGetProgramInfoLog(program));
program = 0;
return 0;
}
GLES20.glValidateProgram(program);
GLES20.glGetProgramiv(program, GLES20.GL_VALIDATE_STATUS, status, 0);
if (status[0] == 0)
{
Log.e("CameraGLRendererBase", "Shader program validation error: "+GLES20.glGetProgramInfoLog(program));
GLES20.glDeleteProgram(program);
program = 0;
return 0;
}
Log.d("CameraGLRendererBase", "Shader program is built OK");
return program;
}
private void deleteFBO()
{
Log.d(LOGTAG, "deleteFBO("+mFBOWidth+"x"+mFBOHeight+")");
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, 0);
GLES20.glDeleteFramebuffers(1, FBO, 0);
deleteTex(texFBO);
deleteTex(texDraw);
mFBOWidth = mFBOHeight = 0;
}
private void initFBO(int width, int height)
{
Log.d(LOGTAG, "initFBO("+width+"x"+height+")");
deleteFBO();
GLES20.glGenTextures(1, texDraw, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texDraw[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
GLES20.glGenTextures(1, texFBO, 0);
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, texFBO[0]);
GLES20.glTexImage2D(GLES20.GL_TEXTURE_2D, 0, GLES20.GL_RGBA, width, height, 0, GLES20.GL_RGBA, GLES20.GL_UNSIGNED_BYTE, null);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_S, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_WRAP_T, GLES20.GL_CLAMP_TO_EDGE);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MIN_FILTER, GLES20.GL_NEAREST);
GLES20.glTexParameteri(GLES20.GL_TEXTURE_2D, GLES20.GL_TEXTURE_MAG_FILTER, GLES20.GL_NEAREST);
//int hFBO;
GLES20.glGenFramebuffers(1, FBO, 0);
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, FBO[0]);
GLES20.glFramebufferTexture2D(GLES20.GL_FRAMEBUFFER, GLES20.GL_COLOR_ATTACHMENT0, GLES20.GL_TEXTURE_2D, texFBO[0], 0);
Log.d(LOGTAG, "initFBO error status: " + GLES20.glGetError());
int FBOstatus = GLES20.glCheckFramebufferStatus(GLES20.GL_FRAMEBUFFER);
if (FBOstatus != GLES20.GL_FRAMEBUFFER_COMPLETE)
Log.e(LOGTAG, "initFBO failed, status: " + FBOstatus);
mFBOWidth = width;
mFBOHeight = height;
}
// draw texture to FBO or to screen if fbo == 0
private void drawTex(int tex, boolean isOES, int fbo)
{
GLES20.glBindFramebuffer(GLES20.GL_FRAMEBUFFER, fbo);
if(fbo == 0)
GLES20.glViewport(0, 0, mView.getWidth(), mView.getHeight());
else
GLES20.glViewport(0, 0, mFBOWidth, mFBOHeight);
GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT);
if(isOES) {
GLES20.glUseProgram(progOES);
GLES20.glVertexAttribPointer(vPosOES, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTCOES, 2, GLES20.GL_FLOAT, false, 4*2, texOES);
} else {
GLES20.glUseProgram(prog2D);
GLES20.glVertexAttribPointer(vPos2D, 2, GLES20.GL_FLOAT, false, 4*2, vert);
GLES20.glVertexAttribPointer(vTC2D, 2, GLES20.GL_FLOAT, false, 4*2, tex2D);
}
GLES20.glActiveTexture(GLES20.GL_TEXTURE0);
if(isOES) {
GLES20.glBindTexture(GLES11Ext.GL_TEXTURE_EXTERNAL_OES, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(progOES, "sTexture"), 0);
} else {
GLES20.glBindTexture(GLES20.GL_TEXTURE_2D, tex);
GLES20.glUniform1i(GLES20.glGetUniformLocation(prog2D, "sTexture"), 0);
}
GLES20.glDrawArrays(GLES20.GL_TRIANGLE_STRIP, 0, 4);
GLES20.glFlush();
}
public synchronized void enableView() {
Log.d(LOGTAG, "enableView");
mEnabled = true;
updateState();
}
public synchronized void disableView() {
Log.d(LOGTAG, "disableView");
mEnabled = false;
updateState();
}
protected void updateState() {
Log.d(LOGTAG, "updateState");
Log.d(LOGTAG, "mEnabled="+mEnabled+", mHaveSurface="+mHaveSurface);
boolean willStart = mEnabled && mHaveSurface && mView.getVisibility() == View.VISIBLE;
if (willStart != mIsStarted) {
if(willStart) doStart();
else doStop();
} else {
Log.d(LOGTAG, "keeping State unchanged");
}
Log.d(LOGTAG, "updateState end");
}
protected synchronized void doStart() {
Log.d(LOGTAG, "doStart");
initSurfaceTexture();
openCamera(mCameraIndex);
mIsStarted = true;
if(mCameraWidth>0 && mCameraHeight>0)
setPreviewSize(mCameraWidth, mCameraHeight); // start preview and call listener.onCameraViewStarted()
}
protected void doStop() {
Log.d(LOGTAG, "doStop");
synchronized(this) {
mUpdateST = false;
mIsStarted = false;
mHaveFBO = false;
closeCamera();
deleteSurfaceTexture();
}
CameraTextureListener listener = mView.getCameraTextureListener();
if(listener != null) listener.onCameraViewStopped();
}
protected void setPreviewSize(int width, int height) {
synchronized(this) {
mHaveFBO = false;
mCameraWidth = width;
mCameraHeight = height;
setCameraPreviewSize(width, height); // can change mCameraWidth & mCameraHeight
initFBO(mCameraWidth, mCameraHeight);
mHaveFBO = true;
}
CameraTextureListener listener = mView.getCameraTextureListener();
if(listener != null) listener.onCameraViewStarted(mCameraWidth, mCameraHeight);
}
public void setCameraIndex(int cameraIndex) {
disableView();
mCameraIndex = cameraIndex;
enableView();
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
disableView();
mMaxCameraWidth = maxWidth;
mMaxCameraHeight = maxHeight;
enableView();
}
public void onResume() {
Log.i(LOGTAG, "onResume");
}
public void onPause() {
Log.i(LOGTAG, "onPause");
mHaveSurface = false;
updateState();
mCameraWidth = mCameraHeight = -1;
}
}

View File

@@ -0,0 +1,119 @@
package org.opencv.android;
import org.opencv.R;
import android.content.Context;
import android.content.res.TypedArray;
import android.opengl.GLSurfaceView;
import android.util.AttributeSet;
import android.util.Log;
import android.view.SurfaceHolder;
public class CameraGLSurfaceView extends GLSurfaceView {
private static final String LOGTAG = "CameraGLSurfaceView";
public interface CameraTextureListener {
/**
* This method is invoked when camera preview has started. After this method is invoked
* the frames will start to be delivered to the client via the onCameraTexture() callback.
* @param width - the width of the frames that will be delivered
* @param height - the height of the frames that will be delivered
*/
public void onCameraViewStarted(int width, int height);
/**
* This method is invoked when camera preview has been stopped for some reason.
* No frames will be delivered via the onCameraTexture() callback after this method is called.
*/
public void onCameraViewStopped();
/**
* This method is invoked when a new preview frame from Camera is ready.
* @param texIn - the OpenGL texture ID that contains frame in RGBA format
* @param texOut - the OpenGL texture ID that can be used to store the modified frame image to display
* @param width - the width of the frame
* @param height - the height of the frame
* @return `true` if `texOut` should be displayed, `false` to show `texIn`
*/
public boolean onCameraTexture(int texIn, int texOut, int width, int height);
};
private CameraTextureListener mTexListener;
private CameraGLRendererBase mRenderer;
public CameraGLSurfaceView(Context context, AttributeSet attrs) {
super(context, attrs);
TypedArray styledAttrs = getContext().obtainStyledAttributes(attrs, R.styleable.CameraBridgeViewBase);
int cameraIndex = styledAttrs.getInt(R.styleable.CameraBridgeViewBase_camera_id, -1);
styledAttrs.recycle();
if(android.os.Build.VERSION.SDK_INT >= 21)
mRenderer = new Camera2Renderer(this);
else
mRenderer = new CameraRenderer(this);
setCameraIndex(cameraIndex);
setEGLContextClientVersion(2);
setRenderer(mRenderer);
setRenderMode(GLSurfaceView.RENDERMODE_WHEN_DIRTY);
}
public void setCameraTextureListener(CameraTextureListener texListener)
{
mTexListener = texListener;
}
public CameraTextureListener getCameraTextureListener()
{
return mTexListener;
}
public void setCameraIndex(int cameraIndex) {
mRenderer.setCameraIndex(cameraIndex);
}
public void setMaxCameraPreviewSize(int maxWidth, int maxHeight) {
mRenderer.setMaxCameraPreviewSize(maxWidth, maxHeight);
}
@Override
public void surfaceCreated(SurfaceHolder holder) {
super.surfaceCreated(holder);
}
@Override
public void surfaceDestroyed(SurfaceHolder holder) {
mRenderer.mHaveSurface = false;
super.surfaceDestroyed(holder);
}
@Override
public void surfaceChanged(SurfaceHolder holder, int format, int w, int h) {
super.surfaceChanged(holder, format, w, h);
}
@Override
public void onResume() {
Log.i(LOGTAG, "onResume");
super.onResume();
mRenderer.onResume();
}
@Override
public void onPause() {
Log.i(LOGTAG, "onPause");
mRenderer.onPause();
super.onPause();
}
public void enableView() {
mRenderer.enableView();
}
public void disableView() {
mRenderer.disableView();
}
}
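A minimal texture-listener sketch: returning false from onCameraTexture tells the renderer to display the untouched camera texture (texIn) instead of texOut (the view id is an assumption):

CameraGLSurfaceView glView = (CameraGLSurfaceView) findViewById(R.id.gl_camera_view);
glView.setCameraTextureListener(new CameraGLSurfaceView.CameraTextureListener() {
    @Override public void onCameraViewStarted(int width, int height) { }
    @Override public void onCameraViewStopped() { }
    @Override public boolean onCameraTexture(int texIn, int texOut, int width, int height) {
        return false; // no GL post-processing; show the raw preview
    }
});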

View File

@@ -0,0 +1,166 @@
package org.opencv.android;
import java.io.IOException;
import java.util.List;
import android.annotation.TargetApi;
import android.hardware.Camera;
import android.hardware.Camera.Size;
import android.os.Build;
import android.util.Log;
@TargetApi(15)
@SuppressWarnings("deprecation")
public class CameraRenderer extends CameraGLRendererBase {
public static final String LOGTAG = "CameraRenderer";
private Camera mCamera;
private boolean mPreviewStarted = false;
CameraRenderer(CameraGLSurfaceView view) {
super(view);
}
@Override
protected synchronized void closeCamera() {
Log.i(LOGTAG, "closeCamera");
if(mCamera != null) {
mCamera.stopPreview();
mPreviewStarted = false;
mCamera.release();
mCamera = null;
}
}
@Override
protected synchronized void openCamera(int id) {
Log.i(LOGTAG, "openCamera");
closeCamera();
if (id == CameraBridgeViewBase.CAMERA_ID_ANY) {
Log.d(LOGTAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(LOGTAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(LOGTAG, "Trying to open camera with new open(" + camIdx + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.i(LOGTAG, "Trying to open BACK camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.i(LOGTAG, "Trying to open FRONT camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK) {
Log.e(LOGTAG, "Back camera not found!");
} else if (localCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT) {
Log.e(LOGTAG, "Front camera not found!");
} else {
Log.d(LOGTAG, "Trying to open camera with new open(" + localCameraIndex + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(LOGTAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if(mCamera == null) {
Log.e(LOGTAG, "Error: can't open camera");
return;
}
Camera.Parameters params = mCamera.getParameters();
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
try {
mCamera.setPreviewTexture(mSTexture);
} catch (IOException ioe) {
Log.e(LOGTAG, "setPreviewTexture() failed: " + ioe.getMessage());
}
}
@Override
public synchronized void setCameraPreviewSize(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize: "+width+"x"+height);
if(mCamera == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return;
}
if(mMaxCameraWidth > 0 && mMaxCameraWidth < width) width = mMaxCameraWidth;
if(mMaxCameraHeight > 0 && mMaxCameraHeight < height) height = mMaxCameraHeight;
Camera.Parameters param = mCamera.getParameters();
List<Size> psize = param.getSupportedPreviewSizes();
int bestWidth = 0, bestHeight = 0;
if (psize.size() > 0) {
float aspect = (float)width / height;
for (Size size : psize) {
int w = size.width, h = size.height;
Log.d(LOGTAG, "checking camera preview size: "+w+"x"+h);
if ( w <= width && h <= height &&
w >= bestWidth && h >= bestHeight &&
Math.abs(aspect - (float)w/h) < 0.2 ) {
bestWidth = w;
bestHeight = h;
}
}
if(bestWidth <= 0 || bestHeight <= 0) {
bestWidth = psize.get(0).width;
bestHeight = psize.get(0).height;
Log.e(LOGTAG, "Error: best size was not selected, using "+bestWidth+" x "+bestHeight);
} else {
Log.i(LOGTAG, "Selected best size: "+bestWidth+" x "+bestHeight);
}
if(mPreviewStarted) {
mCamera.stopPreview();
mPreviewStarted = false;
}
mCameraWidth = bestWidth;
mCameraHeight = bestHeight;
param.setPreviewSize(bestWidth, bestHeight);
}
param.set("orientation", "landscape");
mCamera.setParameters(param);
mCamera.startPreview();
mPreviewStarted = true;
}
}
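The best-size loop in setCameraPreviewSize() above is the heart of this class, so here is the same rule lifted into a standalone sketch (the helper name selectBestPreviewSize is an assumption, not part of this commit): pick the largest supported size that fits the surface while staying within 0.2 of its aspect ratio.
static Camera.Size selectBestPreviewSize(List<Camera.Size> sizes, int width, int height) {
    float aspect = (float) width / height;
    Camera.Size best = null;
    for (Camera.Size size : sizes) {
        // same three conditions as above: fits the surface, at least as large as the
        // current best, and within 0.2 of the requested aspect ratio
        if (size.width <= width && size.height <= height
                && (best == null || (size.width >= best.width && size.height >= best.height))
                && Math.abs(aspect - (float) size.width / size.height) < 0.2f) {
            best = size;
        }
    }
    return best != null ? best : sizes.get(0); // same fallback as above: first supported size
}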


@@ -0,0 +1,66 @@
package org.opencv.android;
import java.text.DecimalFormat;
import org.opencv.core.Core;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.util.Log;
public class FpsMeter {
private static final String TAG = "FpsMeter";
private static final int STEP = 20;
private static final DecimalFormat FPS_FORMAT = new DecimalFormat("0.00");
private int mFramesCounter;
private double mFrequency;
private long mprevFrameTime;
private String mStrfps;
Paint mPaint;
boolean mIsInitialized = false;
int mWidth = 0;
int mHeight = 0;
public void init() {
mFramesCounter = 0;
mFrequency = Core.getTickFrequency();
mprevFrameTime = Core.getTickCount();
mStrfps = "";
mPaint = new Paint();
mPaint.setColor(Color.BLUE);
mPaint.setTextSize(20);
}
public void measure() {
if (!mIsInitialized) {
init();
mIsInitialized = true;
} else {
mFramesCounter++;
if (mFramesCounter % STEP == 0) {
long time = Core.getTickCount();
double fps = STEP * mFrequency / (time - mprevFrameTime);
mprevFrameTime = time;
if (mWidth != 0 && mHeight != 0)
mStrfps = FPS_FORMAT.format(fps) + " FPS@" + Integer.valueOf(mWidth) + "x" + Integer.valueOf(mHeight);
else
mStrfps = FPS_FORMAT.format(fps) + " FPS";
Log.i(TAG, mStrfps);
}
}
}
public void setResolution(int width, int height) {
mWidth = width;
mHeight = height;
}
public void draw(Canvas canvas, float offsetx, float offsety) {
Log.d(TAG, mStrfps);
canvas.drawText(mStrfps, offsetx, offsety, mPaint);
}
}
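A minimal usage sketch (not part of this commit; the preview size, canvas, and offsets are assumed values). CameraBridgeViewBase drives its own FpsMeter the same way, once per delivered frame:
FpsMeter meter = new FpsMeter();
meter.setResolution(1280, 720);   // assumed preview size, shown in the "FPS@WxH" string
// per delivered frame:
meter.measure();                  // recomputes the FPS string every STEP (20) frames
meter.draw(canvas, 20f, 30f);     // 'canvas' is the surface canvas; offsets are in pixels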


@@ -0,0 +1,34 @@
package org.opencv.android;
/**
* Installation callback interface.
*/
public interface InstallCallbackInterface
{
/**
* New package installation is required.
*/
static final int NEW_INSTALLATION = 0;
/**
* Current package installation is in progress.
*/
static final int INSTALLATION_PROGRESS = 1;
/**
* Target package name.
* @return Return target package name.
*/
public String getPackageName();
/**
* Installation is approved.
*/
public void install();
/**
* Installation is canceled.
*/
public void cancel();
/**
* Wait for package installation.
*/
public void wait_install();
};
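OpenCV Manager hands this callback to LoaderCallbackInterface.onPackageInstall() (see that interface later in this diff); the app then approves, defers, or cancels. A hedged sketch of such an override:
@Override
public void onPackageInstall(int operation, InstallCallbackInterface callback) {
    if (operation == InstallCallbackInterface.NEW_INSTALLATION)
        callback.install();       // approve: OpenCV Manager proceeds with the installation
    else if (operation == InstallCallbackInterface.INSTALLATION_PROGRESS)
        callback.wait_install();  // an installation is already running; wait for it
    else
        callback.cancel();
}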


@@ -0,0 +1,447 @@
package org.opencv.android;
import java.nio.ByteBuffer;
import java.util.Arrays;
import java.util.List;
import android.annotation.TargetApi;
import android.content.Context;
import android.graphics.ImageFormat;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.AttributeSet;
import android.util.Log;
import android.view.Surface;
import android.view.ViewGroup.LayoutParams;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
/**
* This class is an implementation of the Bridge View between OpenCV and the Java Camera.
* It relies on the functionality available in the base class and implements only the
* required functions:
* connectCamera - opens the Java camera and sets the PreviewCallback to be delivered.
* disconnectCamera - closes the camera and stops the preview.
* When a frame is delivered via the camera callback, it is processed via OpenCV, converted
* to RGBA32, and then passed to the external callback for modification if required.
*/
@TargetApi(21)
public class JavaCamera2View extends CameraBridgeViewBase {
private static final String LOGTAG = "JavaCamera2View";
protected ImageReader mImageReader;
protected int mPreviewFormat = ImageFormat.YUV_420_888;
protected CameraDevice mCameraDevice;
protected CameraCaptureSession mCaptureSession;
protected CaptureRequest.Builder mPreviewRequestBuilder;
protected String mCameraID;
protected android.util.Size mPreviewSize = new android.util.Size(-1, -1);
private HandlerThread mBackgroundThread;
protected Handler mBackgroundHandler;
public JavaCamera2View(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCamera2View(Context context, AttributeSet attrs) {
super(context, attrs);
}
private void startBackgroundThread() {
Log.i(LOGTAG, "startBackgroundThread");
stopBackgroundThread();
mBackgroundThread = new HandlerThread("OpenCVCameraBackground");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
private void stopBackgroundThread() {
Log.i(LOGTAG, "stopBackgroundThread");
if (mBackgroundThread == null)
return;
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
Log.e(LOGTAG, "stopBackgroundThread", e);
}
}
protected boolean initializeCamera() {
Log.i(LOGTAG, "initializeCamera");
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
String camList[] = manager.getCameraIdList();
if (camList.length == 0) {
Log.e(LOGTAG, "Error: camera isn't detected.");
return false;
}
if (mCameraIndex == CameraBridgeViewBase.CAMERA_ID_ANY) {
mCameraID = camList[0];
} else {
for (String cameraID : camList) {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraID);
if ((mCameraIndex == CameraBridgeViewBase.CAMERA_ID_BACK &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_BACK) ||
(mCameraIndex == CameraBridgeViewBase.CAMERA_ID_FRONT &&
characteristics.get(CameraCharacteristics.LENS_FACING) == CameraCharacteristics.LENS_FACING_FRONT)
) {
mCameraID = cameraID;
break;
}
}
}
if (mCameraID != null) {
Log.i(LOGTAG, "Opening camera: " + mCameraID);
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
} else { // make JavaCamera2View behave in the same way as JavaCameraView
Log.i(LOGTAG, "Trying to open camera with the value (" + mCameraIndex + ")");
if (mCameraIndex < camList.length) {
mCameraID = camList[mCameraIndex];
manager.openCamera(mCameraID, mStateCallback, mBackgroundHandler);
} else {
// CAMERA_DISCONNECTED is used when the camera id is no longer valid
throw new CameraAccessException(CameraAccessException.CAMERA_DISCONNECTED);
}
}
return true;
} catch (CameraAccessException e) {
Log.e(LOGTAG, "OpenCamera - Camera Access Exception", e);
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "OpenCamera - Illegal Argument Exception", e);
} catch (SecurityException e) {
Log.e(LOGTAG, "OpenCamera - Security Exception", e);
}
return false;
}
private final CameraDevice.StateCallback mStateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice cameraDevice) {
mCameraDevice = cameraDevice;
createCameraPreviewSession();
}
@Override
public void onDisconnected(CameraDevice cameraDevice) {
cameraDevice.close();
mCameraDevice = null;
}
@Override
public void onError(CameraDevice cameraDevice, int error) {
cameraDevice.close();
mCameraDevice = null;
}
};
private void createCameraPreviewSession() {
final int w = mPreviewSize.getWidth(), h = mPreviewSize.getHeight();
Log.i(LOGTAG, "createCameraPreviewSession(" + w + "x" + h + ")");
if (w < 0 || h < 0)
return;
try {
if (null == mCameraDevice) {
Log.e(LOGTAG, "createCameraPreviewSession: camera isn't opened");
return;
}
if (null != mCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession: mCaptureSession is already started");
return;
}
mImageReader = ImageReader.newInstance(w, h, mPreviewFormat, 2);
mImageReader.setOnImageAvailableListener(new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = reader.acquireLatestImage();
if (image == null)
return;
// sanity checks - 3 planes
Image.Plane[] planes = image.getPlanes();
assert (planes.length == 3);
assert (image.getFormat() == mPreviewFormat);
JavaCamera2Frame tempFrame = new JavaCamera2Frame(image);
deliverAndDrawFrame(tempFrame);
tempFrame.release();
image.close();
}
}, mBackgroundHandler);
Surface surface = mImageReader.getSurface();
mPreviewRequestBuilder = mCameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
mPreviewRequestBuilder.addTarget(surface);
mCameraDevice.createCaptureSession(Arrays.asList(surface),
new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession cameraCaptureSession) {
Log.i(LOGTAG, "createCaptureSession::onConfigured");
if (null == mCameraDevice) {
return; // camera is already closed
}
mCaptureSession = cameraCaptureSession;
try {
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AF_MODE,
CaptureRequest.CONTROL_AF_MODE_CONTINUOUS_PICTURE);
mPreviewRequestBuilder.set(CaptureRequest.CONTROL_AE_MODE,
CaptureRequest.CONTROL_AE_MODE_ON_AUTO_FLASH);
mCaptureSession.setRepeatingRequest(mPreviewRequestBuilder.build(), null, mBackgroundHandler);
Log.i(LOGTAG, "CameraPreviewSession has been started");
} catch (Exception e) {
Log.e(LOGTAG, "createCaptureSession failed", e);
}
}
@Override
public void onConfigureFailed(CameraCaptureSession cameraCaptureSession) {
Log.e(LOGTAG, "createCameraPreviewSession failed");
}
},
null
);
} catch (CameraAccessException e) {
Log.e(LOGTAG, "createCameraPreviewSession", e);
}
}
@Override
protected void disconnectCamera() {
Log.i(LOGTAG, "close camera");
try {
CameraDevice c = mCameraDevice;
mCameraDevice = null;
if (null != mCaptureSession) {
mCaptureSession.close();
mCaptureSession = null;
}
if (null != c) {
c.close();
}
} finally {
stopBackgroundThread();
if (null != mImageReader) {
mImageReader.close();
mImageReader = null;
}
}
Log.i(LOGTAG, "camera closed!");
}
public static class JavaCameraSizeAccessor implements ListItemAccessor {
@Override
public int getWidth(Object obj) {
android.util.Size size = (android.util.Size)obj;
return size.getWidth();
}
@Override
public int getHeight(Object obj) {
android.util.Size size = (android.util.Size)obj;
return size.getHeight();
}
}
boolean calcPreviewSize(final int width, final int height) {
Log.i(LOGTAG, "calcPreviewSize: " + width + "x" + height);
if (mCameraID == null) {
Log.e(LOGTAG, "Camera isn't initialized!");
return false;
}
CameraManager manager = (CameraManager) getContext().getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(mCameraID);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
android.util.Size[] sizes = map.getOutputSizes(ImageReader.class);
List<android.util.Size> sizes_list = Arrays.asList(sizes);
Size frameSize = calculateCameraFrameSize(sizes_list, new JavaCameraSizeAccessor(), width, height);
Log.i(LOGTAG, "Selected preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
assert(!(frameSize.width == 0 || frameSize.height == 0));
if (mPreviewSize.getWidth() == frameSize.width && mPreviewSize.getHeight() == frameSize.height)
return false;
else {
mPreviewSize = new android.util.Size((int)frameSize.width, (int)frameSize.height);
return true;
}
} catch (CameraAccessException e) {
Log.e(LOGTAG, "calcPreviewSize - Camera Access Exception", e);
} catch (IllegalArgumentException e) {
Log.e(LOGTAG, "calcPreviewSize - Illegal Argument Exception", e);
} catch (SecurityException e) {
Log.e(LOGTAG, "calcPreviewSize - Security Exception", e);
}
return false;
}
@Override
protected boolean connectCamera(int width, int height) {
Log.i(LOGTAG, "setCameraPreviewSize(" + width + "x" + height + ")");
startBackgroundThread();
initializeCamera();
try {
boolean needReconfig = calcPreviewSize(width, height);
mFrameWidth = mPreviewSize.getWidth();
mFrameHeight = mPreviewSize.getHeight();
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
AllocateCache();
if (needReconfig) {
if (null != mCaptureSession) {
Log.d(LOGTAG, "closing existing previewSession");
mCaptureSession.close();
mCaptureSession = null;
}
createCameraPreviewSession();
}
} catch (RuntimeException e) {
throw new RuntimeException("Interrupted while setCameraPreviewSize.", e);
}
return true;
}
private class JavaCamera2Frame implements CvCameraViewFrame {
@Override
public Mat gray() {
Image.Plane[] planes = mImage.getPlanes();
int w = mImage.getWidth();
int h = mImage.getHeight();
assert(planes[0].getPixelStride() == 1);
ByteBuffer y_plane = planes[0].getBuffer();
int y_plane_step = planes[0].getRowStride();
mGray = new Mat(h, w, CvType.CV_8UC1, y_plane, y_plane_step);
return mGray;
}
@Override
public Mat rgba() {
Image.Plane[] planes = mImage.getPlanes();
int w = mImage.getWidth();
int h = mImage.getHeight();
int chromaPixelStride = planes[1].getPixelStride();
if (chromaPixelStride == 2) { // Chroma channels are interleaved
assert(planes[0].getPixelStride() == 1);
assert(planes[2].getPixelStride() == 2);
ByteBuffer y_plane = planes[0].getBuffer();
int y_plane_step = planes[0].getRowStride();
ByteBuffer uv_plane1 = planes[1].getBuffer();
int uv_plane1_step = planes[1].getRowStride();
ByteBuffer uv_plane2 = planes[2].getBuffer();
int uv_plane2_step = planes[2].getRowStride();
Mat y_mat = new Mat(h, w, CvType.CV_8UC1, y_plane, y_plane_step);
Mat uv_mat1 = new Mat(h / 2, w / 2, CvType.CV_8UC2, uv_plane1, uv_plane1_step);
Mat uv_mat2 = new Mat(h / 2, w / 2, CvType.CV_8UC2, uv_plane2, uv_plane2_step);
long addr_diff = uv_mat2.dataAddr() - uv_mat1.dataAddr();
if (addr_diff > 0) {
assert(addr_diff == 1);
Imgproc.cvtColorTwoPlane(y_mat, uv_mat1, mRgba, Imgproc.COLOR_YUV2RGBA_NV12);
} else {
assert(addr_diff == -1);
Imgproc.cvtColorTwoPlane(y_mat, uv_mat2, mRgba, Imgproc.COLOR_YUV2RGBA_NV21);
}
return mRgba;
} else { // Chroma channels are not interleaved
byte[] yuv_bytes = new byte[w*(h+h/2)];
ByteBuffer y_plane = planes[0].getBuffer();
ByteBuffer u_plane = planes[1].getBuffer();
ByteBuffer v_plane = planes[2].getBuffer();
int yuv_bytes_offset = 0;
int y_plane_step = planes[0].getRowStride();
if (y_plane_step == w) {
y_plane.get(yuv_bytes, 0, w*h);
yuv_bytes_offset = w*h;
} else {
int padding = y_plane_step - w;
for (int i = 0; i < h; i++){
y_plane.get(yuv_bytes, yuv_bytes_offset, w);
yuv_bytes_offset += w;
if (i < h - 1) {
y_plane.position(y_plane.position() + padding);
}
}
assert(yuv_bytes_offset == w * h);
}
int chromaRowStride = planes[1].getRowStride();
int chromaRowPadding = chromaRowStride - w/2;
if (chromaRowPadding == 0){
// When the row stride of the chroma channels equals their width, we can copy
// the entire channels in one go
u_plane.get(yuv_bytes, yuv_bytes_offset, w*h/4);
yuv_bytes_offset += w*h/4;
v_plane.get(yuv_bytes, yuv_bytes_offset, w*h/4);
} else {
// When not equal, we need to copy the channels row by row
for (int i = 0; i < h/2; i++){
u_plane.get(yuv_bytes, yuv_bytes_offset, w/2);
yuv_bytes_offset += w/2;
if (i < h/2-1){
u_plane.position(u_plane.position() + chromaRowPadding);
}
}
for (int i = 0; i < h/2; i++){
v_plane.get(yuv_bytes, yuv_bytes_offset, w/2);
yuv_bytes_offset += w/2;
if (i < h/2-1){
v_plane.position(v_plane.position() + chromaRowPadding);
}
}
}
Mat yuv_mat = new Mat(h+h/2, w, CvType.CV_8UC1);
yuv_mat.put(0, 0, yuv_bytes);
Imgproc.cvtColor(yuv_mat, mRgba, Imgproc.COLOR_YUV2RGBA_I420, 4);
return mRgba;
}
}
public JavaCamera2Frame(Image image) {
super();
mImage = image;
mRgba = new Mat();
mGray = new Mat();
}
public void release() {
mRgba.release();
mGray.release();
}
private Image mImage;
private Mat mRgba;
private Mat mGray;
};
}
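A minimal host Activity for the view above, sketched along the lines of the stock OpenCV samples (the class name is an assumption, and the CAMERA runtime permission is assumed to be granted already):
import android.app.Activity;
import android.os.Bundle;
import org.opencv.android.CameraBridgeViewBase;
import org.opencv.android.JavaCamera2View;
import org.opencv.android.OpenCVLoader;
import org.opencv.core.Mat;

public class Camera2PreviewActivity extends Activity
        implements CameraBridgeViewBase.CvCameraViewListener2 {
    private JavaCamera2View mView;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        mView = new JavaCamera2View(this, CameraBridgeViewBase.CAMERA_ID_ANY);
        mView.setCvCameraViewListener(this);
        setContentView(mView);
    }

    @Override
    public void onResume() {
        super.onResume();
        if (OpenCVLoader.initDebug())
            mView.enableView();   // ends up in connectCamera() -> createCameraPreviewSession()
    }

    @Override
    public void onPause() {
        super.onPause();
        if (mView != null)
            mView.disableView();  // ends up in disconnectCamera() above
    }

    @Override
    public void onCameraViewStarted(int width, int height) { }

    @Override
    public void onCameraViewStopped() { }

    @Override
    public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
        return inputFrame.rgba(); // JavaCamera2Frame.rgba() above does the YUV conversion
    }
}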


@@ -0,0 +1,379 @@
package org.opencv.android;
import java.util.List;
import android.content.Context;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.PreviewCallback;
import android.os.Build;
import android.util.AttributeSet;
import android.util.Log;
import android.view.ViewGroup.LayoutParams;
import org.opencv.BuildConfig;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.core.Size;
import org.opencv.imgproc.Imgproc;
/**
* This class is an implementation of the Bridge View between OpenCV and the Java Camera.
* It relies on the functionality available in the base class and implements only the
* required functions:
* connectCamera - opens the Java camera and sets the PreviewCallback to be delivered.
* disconnectCamera - closes the camera and stops the preview.
* When a frame is delivered via the camera callback, it is processed via OpenCV, converted
* to RGBA32, and then passed to the external callback for modification if required.
*/
public class JavaCameraView extends CameraBridgeViewBase implements PreviewCallback {
private static final int MAGIC_TEXTURE_ID = 10;
private static final String TAG = "JavaCameraView";
private byte mBuffer[];
private Mat[] mFrameChain;
private int mChainIdx = 0;
private Thread mThread;
private boolean mStopThread;
protected Camera mCamera;
protected JavaCameraFrame[] mCameraFrame;
private SurfaceTexture mSurfaceTexture;
private int mPreviewFormat = ImageFormat.NV21;
public static class JavaCameraSizeAccessor implements ListItemAccessor {
@Override
public int getWidth(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.width;
}
@Override
public int getHeight(Object obj) {
Camera.Size size = (Camera.Size) obj;
return size.height;
}
}
public JavaCameraView(Context context, int cameraId) {
super(context, cameraId);
}
public JavaCameraView(Context context, AttributeSet attrs) {
super(context, attrs);
}
protected boolean initializeCamera(int width, int height) {
Log.d(TAG, "Initialize java camera");
boolean result = true;
synchronized (this) {
mCamera = null;
if (mCameraIndex == CAMERA_ID_ANY) {
Log.d(TAG, "Trying to open camera with old open()");
try {
mCamera = Camera.open();
}
catch (Exception e){
Log.e(TAG, "Camera is not available (in use or does not exist): " + e.getLocalizedMessage());
}
if(mCamera == null && Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
boolean connected = false;
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(camIdx) + ")");
try {
mCamera = Camera.open(camIdx);
connected = true;
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + camIdx + "failed to open: " + e.getLocalizedMessage());
}
if (connected) break;
}
}
} else {
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.GINGERBREAD) {
int localCameraIndex = mCameraIndex;
if (mCameraIndex == CAMERA_ID_BACK) {
Log.i(TAG, "Trying to open back camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_BACK) {
localCameraIndex = camIdx;
break;
}
}
} else if (mCameraIndex == CAMERA_ID_FRONT) {
Log.i(TAG, "Trying to open front camera");
Camera.CameraInfo cameraInfo = new Camera.CameraInfo();
for (int camIdx = 0; camIdx < Camera.getNumberOfCameras(); ++camIdx) {
Camera.getCameraInfo( camIdx, cameraInfo );
if (cameraInfo.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) {
localCameraIndex = camIdx;
break;
}
}
}
if (localCameraIndex == CAMERA_ID_BACK) {
Log.e(TAG, "Back camera not found!");
} else if (localCameraIndex == CAMERA_ID_FRONT) {
Log.e(TAG, "Front camera not found!");
} else {
Log.d(TAG, "Trying to open camera with new open(" + Integer.valueOf(localCameraIndex) + ")");
try {
mCamera = Camera.open(localCameraIndex);
} catch (RuntimeException e) {
Log.e(TAG, "Camera #" + localCameraIndex + "failed to open: " + e.getLocalizedMessage());
}
}
}
}
if (mCamera == null)
return false;
/* Now set camera parameters */
try {
Camera.Parameters params = mCamera.getParameters();
Log.d(TAG, "getSupportedPreviewSizes()");
List<android.hardware.Camera.Size> sizes = params.getSupportedPreviewSizes();
if (sizes != null) {
/* Select the size that fits surface considering maximum size allowed */
Size frameSize = calculateCameraFrameSize(sizes, new JavaCameraSizeAccessor(), width, height);
/* Image format NV21 causes issues in the Android emulators */
if (Build.FINGERPRINT.startsWith("generic")
|| Build.FINGERPRINT.startsWith("unknown")
|| Build.MODEL.contains("google_sdk")
|| Build.MODEL.contains("Emulator")
|| Build.MODEL.contains("Android SDK built for x86")
|| Build.MANUFACTURER.contains("Genymotion")
|| (Build.BRAND.startsWith("generic") && Build.DEVICE.startsWith("generic"))
|| "google_sdk".equals(Build.PRODUCT))
params.setPreviewFormat(ImageFormat.YV12); // "generic" or "android" = android emulator
else
params.setPreviewFormat(ImageFormat.NV21);
mPreviewFormat = params.getPreviewFormat();
Log.d(TAG, "Set preview size to " + Integer.valueOf((int)frameSize.width) + "x" + Integer.valueOf((int)frameSize.height));
params.setPreviewSize((int)frameSize.width, (int)frameSize.height);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.ICE_CREAM_SANDWICH && !android.os.Build.MODEL.equals("GT-I9100"))
params.setRecordingHint(true);
List<String> FocusModes = params.getSupportedFocusModes();
if (FocusModes != null && FocusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO))
{
params.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_VIDEO);
}
mCamera.setParameters(params);
params = mCamera.getParameters();
mFrameWidth = params.getPreviewSize().width;
mFrameHeight = params.getPreviewSize().height;
if ((getLayoutParams().width == LayoutParams.MATCH_PARENT) && (getLayoutParams().height == LayoutParams.MATCH_PARENT))
mScale = Math.min(((float)height)/mFrameHeight, ((float)width)/mFrameWidth);
else
mScale = 0;
if (mFpsMeter != null) {
mFpsMeter.setResolution(mFrameWidth, mFrameHeight);
}
int size = mFrameWidth * mFrameHeight;
size = size * ImageFormat.getBitsPerPixel(params.getPreviewFormat()) / 8;
mBuffer = new byte[size];
mCamera.addCallbackBuffer(mBuffer);
mCamera.setPreviewCallbackWithBuffer(this);
mFrameChain = new Mat[2];
mFrameChain[0] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
mFrameChain[1] = new Mat(mFrameHeight + (mFrameHeight/2), mFrameWidth, CvType.CV_8UC1);
AllocateCache();
mCameraFrame = new JavaCameraFrame[2];
mCameraFrame[0] = new JavaCameraFrame(mFrameChain[0], mFrameWidth, mFrameHeight);
mCameraFrame[1] = new JavaCameraFrame(mFrameChain[1], mFrameWidth, mFrameHeight);
if (Build.VERSION.SDK_INT >= Build.VERSION_CODES.HONEYCOMB) {
mSurfaceTexture = new SurfaceTexture(MAGIC_TEXTURE_ID);
mCamera.setPreviewTexture(mSurfaceTexture);
} else
mCamera.setPreviewDisplay(null);
/* Finally we are ready to start the preview */
Log.d(TAG, "startPreview");
mCamera.startPreview();
}
else
result = false;
} catch (Exception e) {
result = false;
e.printStackTrace();
}
}
return result;
}
protected void releaseCamera() {
synchronized (this) {
if (mCamera != null) {
mCamera.stopPreview();
mCamera.setPreviewCallback(null);
mCamera.release();
}
mCamera = null;
if (mFrameChain != null) {
mFrameChain[0].release();
mFrameChain[1].release();
}
if (mCameraFrame != null) {
mCameraFrame[0].release();
mCameraFrame[1].release();
}
}
}
private boolean mCameraFrameReady = false;
@Override
protected boolean connectCamera(int width, int height) {
/* 1. We need to instantiate the camera
* 2. We need to start the thread which will be getting frames
*/
/* First step - initialize camera connection */
Log.d(TAG, "Connecting to camera");
if (!initializeCamera(width, height))
return false;
mCameraFrameReady = false;
/* now we can start update thread */
Log.d(TAG, "Starting processing thread");
mStopThread = false;
mThread = new Thread(new CameraWorker());
mThread.start();
return true;
}
@Override
protected void disconnectCamera() {
/* 1. We need to stop the thread which is updating the frames
* 2. Stop the camera and release it
*/
Log.d(TAG, "Disconnecting from camera");
try {
mStopThread = true;
Log.d(TAG, "Notify thread");
synchronized (this) {
this.notify();
}
Log.d(TAG, "Waiting for thread");
if (mThread != null)
mThread.join();
} catch (InterruptedException e) {
e.printStackTrace();
} finally {
mThread = null;
}
/* Now release camera */
releaseCamera();
mCameraFrameReady = false;
}
@Override
public void onPreviewFrame(byte[] frame, Camera arg1) {
if (BuildConfig.DEBUG)
Log.d(TAG, "Preview Frame received. Frame size: " + frame.length);
synchronized (this) {
mFrameChain[mChainIdx].put(0, 0, frame);
mCameraFrameReady = true;
this.notify();
}
if (mCamera != null)
mCamera.addCallbackBuffer(mBuffer);
}
private class JavaCameraFrame implements CvCameraViewFrame {
@Override
public Mat gray() {
return mYuvFrameData.submat(0, mHeight, 0, mWidth);
}
@Override
public Mat rgba() {
if (mPreviewFormat == ImageFormat.NV21)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGBA_NV21, 4);
else if (mPreviewFormat == ImageFormat.YV12)
Imgproc.cvtColor(mYuvFrameData, mRgba, Imgproc.COLOR_YUV2RGB_I420, 4); // COLOR_YUV2RGBA_YV12 produces inverted colors
else
throw new IllegalArgumentException("Preview Format can be NV21 or YV12");
return mRgba;
}
public JavaCameraFrame(Mat Yuv420sp, int width, int height) {
super();
mWidth = width;
mHeight = height;
mYuvFrameData = Yuv420sp;
mRgba = new Mat();
}
public void release() {
mRgba.release();
}
private Mat mYuvFrameData;
private Mat mRgba;
private int mWidth;
private int mHeight;
};
private class CameraWorker implements Runnable {
@Override
public void run() {
do {
boolean hasFrame = false;
synchronized (JavaCameraView.this) {
try {
while (!mCameraFrameReady && !mStopThread) {
JavaCameraView.this.wait();
}
} catch (InterruptedException e) {
e.printStackTrace();
}
if (mCameraFrameReady)
{
mChainIdx = 1 - mChainIdx;
mCameraFrameReady = false;
hasFrame = true;
}
}
if (!mStopThread && hasFrame) {
if (!mFrameChain[1 - mChainIdx].empty())
deliverAndDrawFrame(mCameraFrame[1 - mChainIdx]);
}
} while (!mStopThread);
Log.d(TAG, "Finish processing thread");
}
}
}
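The same listener pattern as for JavaCamera2View applies here; the only difference visible to the app is which frame accessor it calls. A short fragment (mEdges is an assumed Mat field, allocated in onCameraViewStarted and released in onCameraViewStopped):
@Override
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    // gray() above is a zero-copy submat of the NV21/YV12 luma plane
    Imgproc.Canny(inputFrame.gray(), mEdges, 80, 100);
    return mEdges; // deliverAndDrawFrame() renders whatever Mat is returned here
}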


@@ -0,0 +1,40 @@
package org.opencv.android;
/**
* Interface for callback object in case of asynchronous initialization of OpenCV.
*/
public interface LoaderCallbackInterface
{
/**
* OpenCV initialization finished successfully.
*/
static final int SUCCESS = 0;
/**
* Google Play Market cannot be invoked.
*/
static final int MARKET_ERROR = 2;
/**
* OpenCV library installation has been canceled by the user.
*/
static final int INSTALL_CANCELED = 3;
/**
* This version of OpenCV Manager Service is incompatible with the app. Possibly, a service update is required.
*/
static final int INCOMPATIBLE_MANAGER_VERSION = 4;
/**
* OpenCV library initialization has failed.
*/
static final int INIT_FAILED = 0xff;
/**
* Callback method, called after OpenCV library initialization.
* @param status status of initialization (see initialization status constants).
*/
public void onManagerConnected(int status);
/**
* Callback method, called in case the package installation is needed.
* @param operation the installation operation: NEW_INSTALLATION or INSTALLATION_PROGRESS.
* @param callback answer object with approve and cancel methods and the package description.
*/
public void onPackageInstall(final int operation, InstallCallbackInterface callback);
};
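Apps rarely implement this interface directly; the SDK ships a BaseLoaderCallback adapter (not shown in this excerpt) that supplies default dialogs, so typically only SUCCESS needs handling. A sketch inside an Activity (mOpenCvCameraView is an assumed CameraBridgeViewBase field):
private final BaseLoaderCallback mLoaderCallback = new BaseLoaderCallback(this) {
    @Override
    public void onManagerConnected(int status) {
        if (status == LoaderCallbackInterface.SUCCESS) {
            mOpenCvCameraView.enableView(); // native libs loaded; safe to start the preview
        } else {
            super.onManagerConnected(status); // default handling for the error statuses above
        }
    }
};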


@@ -0,0 +1,132 @@
package org.opencv.android;
import android.content.Context;
/**
* Helper class provides common initialization methods for OpenCV library.
*/
public class OpenCVLoader
{
/**
* OpenCV Library version 2.4.2.
*/
public static final String OPENCV_VERSION_2_4_2 = "2.4.2";
/**
* OpenCV Library version 2.4.3.
*/
public static final String OPENCV_VERSION_2_4_3 = "2.4.3";
/**
* OpenCV Library version 2.4.4.
*/
public static final String OPENCV_VERSION_2_4_4 = "2.4.4";
/**
* OpenCV Library version 2.4.5.
*/
public static final String OPENCV_VERSION_2_4_5 = "2.4.5";
/**
* OpenCV Library version 2.4.6.
*/
public static final String OPENCV_VERSION_2_4_6 = "2.4.6";
/**
* OpenCV Library version 2.4.7.
*/
public static final String OPENCV_VERSION_2_4_7 = "2.4.7";
/**
* OpenCV Library version 2.4.8.
*/
public static final String OPENCV_VERSION_2_4_8 = "2.4.8";
/**
* OpenCV Library version 2.4.9.
*/
public static final String OPENCV_VERSION_2_4_9 = "2.4.9";
/**
* OpenCV Library version 2.4.10.
*/
public static final String OPENCV_VERSION_2_4_10 = "2.4.10";
/**
* OpenCV Library version 2.4.11.
*/
public static final String OPENCV_VERSION_2_4_11 = "2.4.11";
/**
* OpenCV Library version 2.4.12.
*/
public static final String OPENCV_VERSION_2_4_12 = "2.4.12";
/**
* OpenCV Library version 2.4.13.
*/
public static final String OPENCV_VERSION_2_4_13 = "2.4.13";
/**
* OpenCV Library version 3.0.0.
*/
public static final String OPENCV_VERSION_3_0_0 = "3.0.0";
/**
* OpenCV Library version 3.1.0.
*/
public static final String OPENCV_VERSION_3_1_0 = "3.1.0";
/**
* OpenCV Library version 3.2.0.
*/
public static final String OPENCV_VERSION_3_2_0 = "3.2.0";
/**
* OpenCV Library version 3.3.0.
*/
public static final String OPENCV_VERSION_3_3_0 = "3.3.0";
/**
* OpenCV Library version 3.4.0.
*/
public static final String OPENCV_VERSION_3_4_0 = "3.4.0";
/**
* Current OpenCV Library version
*/
public static final String OPENCV_VERSION = "3.4.13";
/**
* Loads and initializes the OpenCV library from the current application package. Roughly, it's an analog of System.loadLibrary("opencv_java").
* @return Returns true if initialization of OpenCV was successful.
*/
public static boolean initDebug()
{
return StaticHelper.initOpenCV(false);
}
/**
* Loads and initializes the OpenCV library from the current application package. Roughly, it's an analog of System.loadLibrary("opencv_java").
* @param InitCuda load and initialize CUDA runtime libraries.
* @return Returns true if initialization of OpenCV was successful.
*/
public static boolean initDebug(boolean InitCuda)
{
return StaticHelper.initOpenCV(InitCuda);
}
/**
* Loads and initializes OpenCV library using OpenCV Engine service.
* @param Version OpenCV library version.
* @param AppContext application context for connecting to the service.
* @param Callback object, that implements LoaderCallbackInterface for handling the connection status.
* @return Returns true if initialization of OpenCV is successful.
*/
public static boolean initAsync(String Version, Context AppContext,
LoaderCallbackInterface Callback)
{
return AsyncServiceHelper.initOpenCV(Version, AppContext, Callback);
}
}
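The two entry points are normally combined in onResume(): try the statically linked libraries first and fall back to the OpenCV Manager service only if they are absent. A sketch reusing the mLoaderCallback from the previous example:
@Override
public void onResume() {
    super.onResume();
    if (OpenCVLoader.initDebug()) {
        // opencv_java3 was bundled with the APK; no service round-trip needed
        mLoaderCallback.onManagerConnected(LoaderCallbackInterface.SUCCESS);
    } else {
        OpenCVLoader.initAsync(OpenCVLoader.OPENCV_VERSION_3_4_0, this, mLoaderCallback);
    }
}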


@@ -0,0 +1,104 @@
package org.opencv.android;
import org.opencv.core.Core;
import java.util.StringTokenizer;
import android.util.Log;
class StaticHelper {
public static boolean initOpenCV(boolean InitCuda)
{
boolean result;
String libs = "";
if(InitCuda)
{
loadLibrary("cudart");
loadLibrary("nppc");
loadLibrary("nppi");
loadLibrary("npps");
loadLibrary("cufft");
loadLibrary("cublas");
}
Log.d(TAG, "Trying to get library list");
try
{
System.loadLibrary("opencv_info");
libs = getLibraryList();
}
catch(UnsatisfiedLinkError e)
{
Log.e(TAG, "OpenCV error: Cannot load info library for OpenCV");
}
Log.d(TAG, "Library list: \"" + libs + "\"");
Log.d(TAG, "First attempt to load libs");
if (initOpenCVLibs(libs))
{
Log.d(TAG, "First attempt to load libs is OK");
String eol = System.getProperty("line.separator");
for (String str : Core.getBuildInformation().split(eol))
Log.i(TAG, str);
result = true;
}
else
{
Log.d(TAG, "First attempt to load libs fails");
result = false;
}
return result;
}
private static boolean loadLibrary(String Name)
{
boolean result = true;
Log.d(TAG, "Trying to load library " + Name);
try
{
System.loadLibrary(Name);
Log.d(TAG, "Library " + Name + " loaded");
}
catch(UnsatisfiedLinkError e)
{
Log.d(TAG, "Cannot load library \"" + Name + "\"");
e.printStackTrace();
result = false;
}
return result;
}
private static boolean initOpenCVLibs(String Libs)
{
Log.d(TAG, "Trying to init OpenCV libs");
boolean result = true;
if ((null != Libs) && (Libs.length() != 0))
{
Log.d(TAG, "Trying to load libs by dependency list");
StringTokenizer splitter = new StringTokenizer(Libs, ";");
while(splitter.hasMoreTokens())
{
result &= loadLibrary(splitter.nextToken());
}
}
else
{
// If dependencies list is not defined or empty.
result = loadLibrary("opencv_java3");
}
return result;
}
private static final String TAG = "OpenCV/StaticHelper";
private static native String getLibraryList();
}


@@ -0,0 +1,139 @@
package org.opencv.android;
import android.content.Context;
import android.graphics.Bitmap;
import org.opencv.core.CvException;
import org.opencv.core.CvType;
import org.opencv.core.Mat;
import org.opencv.imgcodecs.Imgcodecs;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
public class Utils {
public static String exportResource(Context context, int resourceId) {
return exportResource(context, resourceId, "OpenCV_data");
}
public static String exportResource(Context context, int resourceId, String dirname) {
String fullname = context.getResources().getString(resourceId);
String resName = fullname.substring(fullname.lastIndexOf("/") + 1);
try {
InputStream is = context.getResources().openRawResource(resourceId);
File resDir = context.getDir(dirname, Context.MODE_PRIVATE);
File resFile = new File(resDir, resName);
FileOutputStream os = new FileOutputStream(resFile);
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
os.close();
return resFile.getAbsolutePath();
} catch (IOException e) {
e.printStackTrace();
throw new CvException("Failed to export resource " + resName
+ ". Exception thrown: " + e);
}
}
public static Mat loadResource(Context context, int resourceId) throws IOException
{
return loadResource(context, resourceId, -1);
}
public static Mat loadResource(Context context, int resourceId, int flags) throws IOException
{
InputStream is = context.getResources().openRawResource(resourceId);
ByteArrayOutputStream os = new ByteArrayOutputStream(is.available());
byte[] buffer = new byte[4096];
int bytesRead;
while ((bytesRead = is.read(buffer)) != -1) {
os.write(buffer, 0, bytesRead);
}
is.close();
Mat encoded = new Mat(1, os.size(), CvType.CV_8U);
encoded.put(0, 0, os.toByteArray());
os.close();
Mat decoded = Imgcodecs.imdecode(encoded, flags);
encoded.release();
return decoded;
}
/**
* Converts Android Bitmap to OpenCV Mat.
* <p>
* This function converts an Android Bitmap image to the OpenCV Mat.
* <br>'ARGB_8888' and 'RGB_565' input Bitmap formats are supported.
* <br>The output Mat is always created of the same size as the input Bitmap and of the 'CV_8UC4' type,
* it keeps the image in RGBA format.
* <br>This function throws an exception if the conversion fails.
* @param bmp is a valid input Bitmap object of the type 'ARGB_8888' or 'RGB_565'.
* @param mat is a valid output Mat object, it will be reallocated if needed, so it may be empty.
* @param unPremultiplyAlpha is a flag that determines whether the bitmap needs to be converted from the alpha-premultiplied format (in which Android keeps 'ARGB_8888' bitmaps) to the regular one; this flag is ignored for 'RGB_565' bitmaps.
*/
public static void bitmapToMat(Bitmap bmp, Mat mat, boolean unPremultiplyAlpha) {
if (bmp == null)
throw new IllegalArgumentException("bmp == null");
if (mat == null)
throw new IllegalArgumentException("mat == null");
nBitmapToMat2(bmp, mat.nativeObj, unPremultiplyAlpha);
}
/**
* Short form of the bitmapToMat(bmp, mat, unPremultiplyAlpha=false).
* @param bmp is a valid input Bitmap object of the type 'ARGB_8888' or 'RGB_565'.
* @param mat is a valid output Mat object, it will be reallocated if needed, so Mat may be empty.
*/
public static void bitmapToMat(Bitmap bmp, Mat mat) {
bitmapToMat(bmp, mat, false);
}
/**
* Converts OpenCV Mat to Android Bitmap.
* <p>
* <br>This function converts an image in the OpenCV Mat representation to the Android Bitmap.
* <br>The input Mat object has to be of the types 'CV_8UC1' (gray-scale), 'CV_8UC3' (RGB) or 'CV_8UC4' (RGBA).
* <br>The output Bitmap object has to be of the same size as the input Mat and of the types 'ARGB_8888' or 'RGB_565'.
* <br>This function throws an exception if the conversion fails.
*
* @param mat is a valid input Mat object of types 'CV_8UC1', 'CV_8UC3' or 'CV_8UC4'.
* @param bmp is a valid Bitmap object of the same size as the Mat and of type 'ARGB_8888' or 'RGB_565'.
* @param premultiplyAlpha is a flag that determines whether the Mat needs to be converted to the alpha-premultiplied format (in which Android keeps 'ARGB_8888' bitmaps); the flag is ignored for 'RGB_565' bitmaps.
*/
public static void matToBitmap(Mat mat, Bitmap bmp, boolean premultiplyAlpha) {
if (mat == null)
throw new IllegalArgumentException("mat == null");
if (bmp == null)
throw new IllegalArgumentException("bmp == null");
nMatToBitmap2(mat.nativeObj, bmp, premultiplyAlpha);
}
/**
* Short form of the <b>matToBitmap(mat, bmp, premultiplyAlpha=false)</b>
* @param mat is a valid input Mat object of the types 'CV_8UC1', 'CV_8UC3' or 'CV_8UC4'.
* @param bmp is a valid Bitmap object of the same size as the Mat and of type 'ARGB_8888' or 'RGB_565'.
*/
public static void matToBitmap(Mat mat, Bitmap bmp) {
matToBitmap(mat, bmp, false);
}
private static native void nBitmapToMat2(Bitmap b, long m_addr, boolean unPremultiplyAlpha);
private static native void nMatToBitmap2(long m_addr, Bitmap b, boolean premultiplyAlpha);
}
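A round-trip sketch for the two converters above (bmp is an assumed ARGB_8888 Bitmap):
Mat rgba = new Mat();
Utils.bitmapToMat(bmp, rgba);                          // CV_8UC4, RGBA channel order
Imgproc.cvtColor(rgba, rgba, Imgproc.COLOR_RGBA2GRAY); // any processing goes here
Imgproc.cvtColor(rgba, rgba, Imgproc.COLOR_GRAY2RGBA); // back to 4 channels for matToBitmap
Bitmap out = Bitmap.createBitmap(rgba.cols(), rgba.rows(), Bitmap.Config.ARGB_8888);
Utils.matToBitmap(rgba, out);                          // sizes and types must match, see docs above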

File diff suppressed because it is too large


@@ -0,0 +1,294 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
import org.opencv.calib3d.StereoBM;
import org.opencv.calib3d.StereoMatcher;
import org.opencv.core.Rect;
// C++: class StereoBM
/**
* Class for computing stereo correspondence using the block matching algorithm, introduced and
* contributed to OpenCV by K. Konolige.
*/
public class StereoBM extends StereoMatcher {
protected StereoBM(long addr) { super(addr); }
// internal usage only
public static StereoBM __fromPtr__(long addr) { return new StereoBM(addr); }
// C++: enum <unnamed>
public static final int
PREFILTER_NORMALIZED_RESPONSE = 0,
PREFILTER_XSOBEL = 1;
//
// C++: int cv::StereoBM::getPreFilterType()
//
public int getPreFilterType() {
return getPreFilterType_0(nativeObj);
}
//
// C++: void cv::StereoBM::setPreFilterType(int preFilterType)
//
public void setPreFilterType(int preFilterType) {
setPreFilterType_0(nativeObj, preFilterType);
}
//
// C++: int cv::StereoBM::getPreFilterSize()
//
public int getPreFilterSize() {
return getPreFilterSize_0(nativeObj);
}
//
// C++: void cv::StereoBM::setPreFilterSize(int preFilterSize)
//
public void setPreFilterSize(int preFilterSize) {
setPreFilterSize_0(nativeObj, preFilterSize);
}
//
// C++: int cv::StereoBM::getPreFilterCap()
//
public int getPreFilterCap() {
return getPreFilterCap_0(nativeObj);
}
//
// C++: void cv::StereoBM::setPreFilterCap(int preFilterCap)
//
public void setPreFilterCap(int preFilterCap) {
setPreFilterCap_0(nativeObj, preFilterCap);
}
//
// C++: int cv::StereoBM::getTextureThreshold()
//
public int getTextureThreshold() {
return getTextureThreshold_0(nativeObj);
}
//
// C++: void cv::StereoBM::setTextureThreshold(int textureThreshold)
//
public void setTextureThreshold(int textureThreshold) {
setTextureThreshold_0(nativeObj, textureThreshold);
}
//
// C++: int cv::StereoBM::getUniquenessRatio()
//
public int getUniquenessRatio() {
return getUniquenessRatio_0(nativeObj);
}
//
// C++: void cv::StereoBM::setUniquenessRatio(int uniquenessRatio)
//
public void setUniquenessRatio(int uniquenessRatio) {
setUniquenessRatio_0(nativeObj, uniquenessRatio);
}
//
// C++: int cv::StereoBM::getSmallerBlockSize()
//
public int getSmallerBlockSize() {
return getSmallerBlockSize_0(nativeObj);
}
//
// C++: void cv::StereoBM::setSmallerBlockSize(int blockSize)
//
public void setSmallerBlockSize(int blockSize) {
setSmallerBlockSize_0(nativeObj, blockSize);
}
//
// C++: Rect cv::StereoBM::getROI1()
//
public Rect getROI1() {
return new Rect(getROI1_0(nativeObj));
}
//
// C++: void cv::StereoBM::setROI1(Rect roi1)
//
public void setROI1(Rect roi1) {
setROI1_0(nativeObj, roi1.x, roi1.y, roi1.width, roi1.height);
}
//
// C++: Rect cv::StereoBM::getROI2()
//
public Rect getROI2() {
return new Rect(getROI2_0(nativeObj));
}
//
// C++: void cv::StereoBM::setROI2(Rect roi2)
//
public void setROI2(Rect roi2) {
setROI2_0(nativeObj, roi2.x, roi2.y, roi2.width, roi2.height);
}
//
// C++: static Ptr_StereoBM cv::StereoBM::create(int numDisparities = 0, int blockSize = 21)
//
/**
* Creates StereoBM object
*
* @param numDisparities the disparity search range. For each pixel algorithm will find the best
* disparity from 0 (default minimum disparity) to numDisparities. The search range can then be
* shifted by changing the minimum disparity.
* @param blockSize the linear size of the blocks compared by the algorithm. The size should be odd
* (as the block is centered at the current pixel). Larger block size implies smoother, though less
* accurate disparity map. Smaller block size gives more detailed disparity map, but there is higher
* chance for algorithm to find a wrong correspondence.
*
* The function creates a StereoBM object. You can then call StereoBM::compute() to compute disparity for
* a specific stereo pair.
* @return automatically generated
*/
public static StereoBM create(int numDisparities, int blockSize) {
return StereoBM.__fromPtr__(create_0(numDisparities, blockSize));
}
/**
* Creates StereoBM object
*
* @param numDisparities the disparity search range. For each pixel algorithm will find the best
* disparity from 0 (default minimum disparity) to numDisparities. The search range can then be
* shifted by changing the minimum disparity.
* (as the block is centered at the current pixel). Larger block size implies smoother, though less
* accurate disparity map. Smaller block size gives more detailed disparity map, but there is higher
* chance for algorithm to find a wrong correspondence.
*
* The function creates a StereoBM object. You can then call StereoBM::compute() to compute disparity for
* a specific stereo pair.
* @return automatically generated
*/
public static StereoBM create(int numDisparities) {
return StereoBM.__fromPtr__(create_1(numDisparities));
}
/**
* Creates StereoBM object
*
* disparity from 0 (default minimum disparity) to numDisparities. The search range can then be
* shifted by changing the minimum disparity.
* (as the block is centered at the current pixel). Larger block size implies smoother, though less
* accurate disparity map. Smaller block size gives more detailed disparity map, but there is higher
* chance for algorithm to find a wrong correspondence.
*
* The function creates a StereoBM object. You can then call StereoBM::compute() to compute disparity for
* a specific stereo pair.
* @return automatically generated
*/
public static StereoBM create() {
return StereoBM.__fromPtr__(create_2());
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: int cv::StereoBM::getPreFilterType()
private static native int getPreFilterType_0(long nativeObj);
// C++: void cv::StereoBM::setPreFilterType(int preFilterType)
private static native void setPreFilterType_0(long nativeObj, int preFilterType);
// C++: int cv::StereoBM::getPreFilterSize()
private static native int getPreFilterSize_0(long nativeObj);
// C++: void cv::StereoBM::setPreFilterSize(int preFilterSize)
private static native void setPreFilterSize_0(long nativeObj, int preFilterSize);
// C++: int cv::StereoBM::getPreFilterCap()
private static native int getPreFilterCap_0(long nativeObj);
// C++: void cv::StereoBM::setPreFilterCap(int preFilterCap)
private static native void setPreFilterCap_0(long nativeObj, int preFilterCap);
// C++: int cv::StereoBM::getTextureThreshold()
private static native int getTextureThreshold_0(long nativeObj);
// C++: void cv::StereoBM::setTextureThreshold(int textureThreshold)
private static native void setTextureThreshold_0(long nativeObj, int textureThreshold);
// C++: int cv::StereoBM::getUniquenessRatio()
private static native int getUniquenessRatio_0(long nativeObj);
// C++: void cv::StereoBM::setUniquenessRatio(int uniquenessRatio)
private static native void setUniquenessRatio_0(long nativeObj, int uniquenessRatio);
// C++: int cv::StereoBM::getSmallerBlockSize()
private static native int getSmallerBlockSize_0(long nativeObj);
// C++: void cv::StereoBM::setSmallerBlockSize(int blockSize)
private static native void setSmallerBlockSize_0(long nativeObj, int blockSize);
// C++: Rect cv::StereoBM::getROI1()
private static native double[] getROI1_0(long nativeObj);
// C++: void cv::StereoBM::setROI1(Rect roi1)
private static native void setROI1_0(long nativeObj, int roi1_x, int roi1_y, int roi1_width, int roi1_height);
// C++: Rect cv::StereoBM::getROI2()
private static native double[] getROI2_0(long nativeObj);
// C++: void cv::StereoBM::setROI2(Rect roi2)
private static native void setROI2_0(long nativeObj, int roi2_x, int roi2_y, int roi2_width, int roi2_height);
// C++: static Ptr_StereoBM cv::StereoBM::create(int numDisparities = 0, int blockSize = 21)
private static native long create_0(int numDisparities, int blockSize);
private static native long create_1(int numDisparities);
private static native long create_2();
// native support for java finalize()
private static native void delete(long nativeObj);
}
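End-to-end usage of the wrapper above (a desktop-style sketch; the file names are placeholders, and the input must be a rectified, single-channel stereo pair per the StereoMatcher docs below):
System.loadLibrary(Core.NATIVE_LIBRARY_NAME);
Mat left  = Imgcodecs.imread("left.png",  Imgcodecs.IMREAD_GRAYSCALE);
Mat right = Imgcodecs.imread("right.png", Imgcodecs.IMREAD_GRAYSCALE);
StereoBM matcher = StereoBM.create(64, 21);   // 64-disparity search range, 21-pixel blocks
Mat disparity = new Mat();
matcher.compute(left, right, disparity);      // CV_16S, 4 fractional bits per value
Mat disparity8U = new Mat();
Core.normalize(disparity, disparity8U, 0, 255, Core.NORM_MINMAX, CvType.CV_8U);
Imgcodecs.imwrite("disparity.png", disparity8U);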


@@ -0,0 +1,201 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
import org.opencv.core.Algorithm;
import org.opencv.core.Mat;
// C++: class StereoMatcher
/**
* The base class for stereo correspondence algorithms.
*/
public class StereoMatcher extends Algorithm {
protected StereoMatcher(long addr) { super(addr); }
// internal usage only
public static StereoMatcher __fromPtr__(long addr) { return new StereoMatcher(addr); }
// C++: enum <unnamed>
public static final int
DISP_SHIFT = 4,
DISP_SCALE = (1 << DISP_SHIFT);
//
// C++: void cv::StereoMatcher::compute(Mat left, Mat right, Mat& disparity)
//
/**
* Computes disparity map for the specified stereo pair
*
* @param left Left 8-bit single-channel image.
* @param right Right image of the same size and the same type as the left one.
* @param disparity Output disparity map. It has the same size as the input images. Some algorithms,
* like StereoBM or StereoSGBM compute 16-bit fixed-point disparity map (where each disparity value
* has 4 fractional bits), whereas other algorithms output 32-bit floating-point disparity map.
*/
public void compute(Mat left, Mat right, Mat disparity) {
compute_0(nativeObj, left.nativeObj, right.nativeObj, disparity.nativeObj);
}
//
// C++: int cv::StereoMatcher::getMinDisparity()
//
public int getMinDisparity() {
return getMinDisparity_0(nativeObj);
}
//
// C++: void cv::StereoMatcher::setMinDisparity(int minDisparity)
//
public void setMinDisparity(int minDisparity) {
setMinDisparity_0(nativeObj, minDisparity);
}
//
// C++: int cv::StereoMatcher::getNumDisparities()
//
public int getNumDisparities() {
return getNumDisparities_0(nativeObj);
}
//
// C++: void cv::StereoMatcher::setNumDisparities(int numDisparities)
//
public void setNumDisparities(int numDisparities) {
setNumDisparities_0(nativeObj, numDisparities);
}
//
// C++: int cv::StereoMatcher::getBlockSize()
//
public int getBlockSize() {
return getBlockSize_0(nativeObj);
}
//
// C++: void cv::StereoMatcher::setBlockSize(int blockSize)
//
public void setBlockSize(int blockSize) {
setBlockSize_0(nativeObj, blockSize);
}
//
// C++: int cv::StereoMatcher::getSpeckleWindowSize()
//
public int getSpeckleWindowSize() {
return getSpeckleWindowSize_0(nativeObj);
}
//
// C++: void cv::StereoMatcher::setSpeckleWindowSize(int speckleWindowSize)
//
public void setSpeckleWindowSize(int speckleWindowSize) {
setSpeckleWindowSize_0(nativeObj, speckleWindowSize);
}
//
// C++: int cv::StereoMatcher::getSpeckleRange()
//
public int getSpeckleRange() {
return getSpeckleRange_0(nativeObj);
}
//
// C++: void cv::StereoMatcher::setSpeckleRange(int speckleRange)
//
public void setSpeckleRange(int speckleRange) {
setSpeckleRange_0(nativeObj, speckleRange);
}
//
// C++: int cv::StereoMatcher::getDisp12MaxDiff()
//
public int getDisp12MaxDiff() {
return getDisp12MaxDiff_0(nativeObj);
}
//
// C++: void cv::StereoMatcher::setDisp12MaxDiff(int disp12MaxDiff)
//
public void setDisp12MaxDiff(int disp12MaxDiff) {
setDisp12MaxDiff_0(nativeObj, disp12MaxDiff);
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: void cv::StereoMatcher::compute(Mat left, Mat right, Mat& disparity)
private static native void compute_0(long nativeObj, long left_nativeObj, long right_nativeObj, long disparity_nativeObj);
// C++: int cv::StereoMatcher::getMinDisparity()
private static native int getMinDisparity_0(long nativeObj);
// C++: void cv::StereoMatcher::setMinDisparity(int minDisparity)
private static native void setMinDisparity_0(long nativeObj, int minDisparity);
// C++: int cv::StereoMatcher::getNumDisparities()
private static native int getNumDisparities_0(long nativeObj);
// C++: void cv::StereoMatcher::setNumDisparities(int numDisparities)
private static native void setNumDisparities_0(long nativeObj, int numDisparities);
// C++: int cv::StereoMatcher::getBlockSize()
private static native int getBlockSize_0(long nativeObj);
// C++: void cv::StereoMatcher::setBlockSize(int blockSize)
private static native void setBlockSize_0(long nativeObj, int blockSize);
// C++: int cv::StereoMatcher::getSpeckleWindowSize()
private static native int getSpeckleWindowSize_0(long nativeObj);
// C++: void cv::StereoMatcher::setSpeckleWindowSize(int speckleWindowSize)
private static native void setSpeckleWindowSize_0(long nativeObj, int speckleWindowSize);
// C++: int cv::StereoMatcher::getSpeckleRange()
private static native int getSpeckleRange_0(long nativeObj);
// C++: void cv::StereoMatcher::setSpeckleRange(int speckleRange)
private static native void setSpeckleRange_0(long nativeObj, int speckleRange);
// C++: int cv::StereoMatcher::getDisp12MaxDiff()
private static native int getDisp12MaxDiff_0(long nativeObj);
// C++: void cv::StereoMatcher::setDisp12MaxDiff(int disp12MaxDiff)
private static native void setDisp12MaxDiff_0(long nativeObj, int disp12MaxDiff);
// native support for java finalize()
private static native void delete(long nativeObj);
}
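DISP_SHIFT and DISP_SCALE encode the fixed-point format mentioned in the compute() docs: a raw value of 16 means exactly one pixel of disparity. Converting to float pixels (disparity16S is an assumed CV_16S output of compute()):
Mat disparityF32 = new Mat();
disparity16S.convertTo(disparityF32, CvType.CV_32F, 1.0 / StereoMatcher.DISP_SCALE);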


@@ -0,0 +1,657 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.calib3d;
import org.opencv.calib3d.StereoMatcher;
import org.opencv.calib3d.StereoSGBM;
// C++: class StereoSGBM
/**
* The class implements the modified H. Hirschmuller algorithm CITE: HH08 that differs from the original
* one as follows:
*
* <ul>
* <li>
* By default, the algorithm is single-pass, which means that you consider only 5 directions
* instead of 8. Set mode=StereoSGBM::MODE_HH in createStereoSGBM to run the full variant of the
* algorithm but beware that it may consume a lot of memory.
* </li>
* <li>
* The algorithm matches blocks, not individual pixels. Though, setting blockSize=1 reduces the
* blocks to single pixels.
* </li>
* <li>
* Mutual information cost function is not implemented. Instead, a simpler Birchfield-Tomasi
* sub-pixel metric from CITE: BT98 is used. Color images are supported as well, though.
* </li>
* <li>
* Some pre- and post- processing steps from K. Konolige algorithm StereoBM are included, for
* example: pre-filtering (StereoBM::PREFILTER_XSOBEL type) and post-filtering (uniqueness
* check, quadratic interpolation and speckle filtering).
* </li>
* </ul>
*
* <b>Note:</b>
* <ul>
* <li>
* (Python) An example illustrating the use of the StereoSGBM matching algorithm can be found
* at opencv_source_code/samples/python/stereo_match.py
* </li>
* </ul>
*/
public class StereoSGBM extends StereoMatcher {
protected StereoSGBM(long addr) { super(addr); }
// internal usage only
public static StereoSGBM __fromPtr__(long addr) { return new StereoSGBM(addr); }
// C++: enum <unnamed>
public static final int
MODE_SGBM = 0,
MODE_HH = 1,
MODE_SGBM_3WAY = 2,
MODE_HH4 = 3;
//
// C++: int cv::StereoSGBM::getPreFilterCap()
//
public int getPreFilterCap() {
return getPreFilterCap_0(nativeObj);
}
//
// C++: void cv::StereoSGBM::setPreFilterCap(int preFilterCap)
//
public void setPreFilterCap(int preFilterCap) {
setPreFilterCap_0(nativeObj, preFilterCap);
}
//
// C++: int cv::StereoSGBM::getUniquenessRatio()
//
public int getUniquenessRatio() {
return getUniquenessRatio_0(nativeObj);
}
//
// C++: void cv::StereoSGBM::setUniquenessRatio(int uniquenessRatio)
//
public void setUniquenessRatio(int uniquenessRatio) {
setUniquenessRatio_0(nativeObj, uniquenessRatio);
}
//
// C++: int cv::StereoSGBM::getP1()
//
public int getP1() {
return getP1_0(nativeObj);
}
//
// C++: void cv::StereoSGBM::setP1(int P1)
//
public void setP1(int P1) {
setP1_0(nativeObj, P1);
}
//
// C++: int cv::StereoSGBM::getP2()
//
public int getP2() {
return getP2_0(nativeObj);
}
//
// C++: void cv::StereoSGBM::setP2(int P2)
//
public void setP2(int P2) {
setP2_0(nativeObj, P2);
}
//
// C++: int cv::StereoSGBM::getMode()
//
public int getMode() {
return getMode_0(nativeObj);
}
//
// C++: void cv::StereoSGBM::setMode(int mode)
//
public void setMode(int mode) {
setMode_0(nativeObj, mode);
}
//
// C++: static Ptr_StereoSGBM cv::StereoSGBM::create(int minDisparity = 0, int numDisparities = 16, int blockSize = 3, int P1 = 0, int P2 = 0, int disp12MaxDiff = 0, int preFilterCap = 0, int uniquenessRatio = 0, int speckleWindowSize = 0, int speckleRange = 0, int mode = StereoSGBM::MODE_SGBM)
//
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
* @param P1 The first parameter controlling the disparity smoothness. See below.
* @param P2 The second parameter controlling the disparity smoothness. The larger the values are,
* the smoother the disparity is. P1 is the penalty on the disparity change by plus or minus 1
* between neighbor pixels. P2 is the penalty on the disparity change by more than 1 between neighbor
* pixels. The algorithm requires P2 &gt; P1 . See stereo_match.cpp sample where some reasonably good
* P1 and P2 values are shown (like 8\*number_of_image_channels\*blockSize\*blockSize and
* 32\*number_of_image_channels\*blockSize\*blockSize , respectively).
* @param disp12MaxDiff Maximum allowed difference (in integer pixel units) in the left-right
* disparity check. Set it to a non-positive value to disable the check.
* @param preFilterCap Truncation value for the prefiltered image pixels. The algorithm first
* computes x-derivative at each pixel and clips its value by [-preFilterCap, preFilterCap] interval.
* The result values are passed to the Birchfield-Tomasi pixel cost function.
* @param uniquenessRatio Margin in percentage by which the best (minimum) computed cost function
* value should "win" the second best value to consider the found match correct. Normally, a value
* within the 5-15 range is good enough.
* @param speckleWindowSize Maximum size of smooth disparity regions to consider their noise speckles
* and invalidate. Set it to 0 to disable speckle filtering. Otherwise, set it somewhere in the
* 50-200 range.
* @param speckleRange Maximum disparity variation within each connected component. If you do speckle
* filtering, set the parameter to a positive value, it will be implicitly multiplied by 16.
* Normally, 1 or 2 is good enough.
 * @param mode Set it to StereoSGBM::MODE_HH to run the full-scale two-pass dynamic programming
 * algorithm. It will consume O(W\*H\*numDisparities) bytes, which is large for 640x480 stereo and
 * huge for HD-size pictures. By default, it is set to StereoSGBM::MODE_SGBM.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange, int mode) {
return StereoSGBM.__fromPtr__(create_0(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange, mode));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
* @param P1 The first parameter controlling the disparity smoothness. See below.
* @param P2 The second parameter controlling the disparity smoothness. The larger the values are,
* the smoother the disparity is. P1 is the penalty on the disparity change by plus or minus 1
* between neighbor pixels. P2 is the penalty on the disparity change by more than 1 between neighbor
* pixels. The algorithm requires P2 &gt; P1 . See stereo_match.cpp sample where some reasonably good
* P1 and P2 values are shown (like 8\*number_of_image_channels\*blockSize\*blockSize and
* 32\*number_of_image_channels\*blockSize\*blockSize , respectively).
* @param disp12MaxDiff Maximum allowed difference (in integer pixel units) in the left-right
* disparity check. Set it to a non-positive value to disable the check.
* @param preFilterCap Truncation value for the prefiltered image pixels. The algorithm first
* computes x-derivative at each pixel and clips its value by [-preFilterCap, preFilterCap] interval.
* The result values are passed to the Birchfield-Tomasi pixel cost function.
* @param uniquenessRatio Margin in percentage by which the best (minimum) computed cost function
* value should "win" the second best value to consider the found match correct. Normally, a value
* within the 5-15 range is good enough.
* @param speckleWindowSize Maximum size of smooth disparity regions to consider their noise speckles
* and invalidate. Set it to 0 to disable speckle filtering. Otherwise, set it somewhere in the
* 50-200 range.
* @param speckleRange Maximum disparity variation within each connected component. If you do speckle
* filtering, set the parameter to a positive value, it will be implicitly multiplied by 16.
* Normally, 1 or 2 is good enough.
 * The mode parameter is left at its default value, StereoSGBM::MODE_SGBM.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange) {
return StereoSGBM.__fromPtr__(create_1(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
* @param P1 The first parameter controlling the disparity smoothness. See below.
* @param P2 The second parameter controlling the disparity smoothness. The larger the values are,
* the smoother the disparity is. P1 is the penalty on the disparity change by plus or minus 1
* between neighbor pixels. P2 is the penalty on the disparity change by more than 1 between neighbor
* pixels. The algorithm requires P2 &gt; P1 . See stereo_match.cpp sample where some reasonably good
* P1 and P2 values are shown (like 8\*number_of_image_channels\*blockSize\*blockSize and
* 32\*number_of_image_channels\*blockSize\*blockSize , respectively).
* @param disp12MaxDiff Maximum allowed difference (in integer pixel units) in the left-right
* disparity check. Set it to a non-positive value to disable the check.
* @param preFilterCap Truncation value for the prefiltered image pixels. The algorithm first
* computes x-derivative at each pixel and clips its value by [-preFilterCap, preFilterCap] interval.
* The result values are passed to the Birchfield-Tomasi pixel cost function.
* @param uniquenessRatio Margin in percentage by which the best (minimum) computed cost function
* value should "win" the second best value to consider the found match correct. Normally, a value
* within the 5-15 range is good enough.
* @param speckleWindowSize Maximum size of smooth disparity regions to consider their noise speckles
* and invalidate. Set it to 0 to disable speckle filtering. Otherwise, set it somewhere in the
* 50-200 range.
 * The remaining parameters (speckleRange and mode) are left at their default values.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize) {
return StereoSGBM.__fromPtr__(create_2(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
* @param P1 The first parameter controlling the disparity smoothness. See below.
* @param P2 The second parameter controlling the disparity smoothness. The larger the values are,
* the smoother the disparity is. P1 is the penalty on the disparity change by plus or minus 1
* between neighbor pixels. P2 is the penalty on the disparity change by more than 1 between neighbor
* pixels. The algorithm requires P2 &gt; P1 . See stereo_match.cpp sample where some reasonably good
* P1 and P2 values are shown (like 8\*number_of_image_channels\*blockSize\*blockSize and
* 32\*number_of_image_channels\*blockSize\*blockSize , respectively).
* @param disp12MaxDiff Maximum allowed difference (in integer pixel units) in the left-right
* disparity check. Set it to a non-positive value to disable the check.
* @param preFilterCap Truncation value for the prefiltered image pixels. The algorithm first
* computes x-derivative at each pixel and clips its value by [-preFilterCap, preFilterCap] interval.
* The result values are passed to the Birchfield-Tomasi pixel cost function.
* @param uniquenessRatio Margin in percentage by which the best (minimum) computed cost function
* value should "win" the second best value to consider the found match correct. Normally, a value
* within the 5-15 range is good enough.
 * The remaining parameters (speckleWindowSize, speckleRange and mode) are left at their default
 * values.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio) {
return StereoSGBM.__fromPtr__(create_3(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
* @param P1 The first parameter controlling the disparity smoothness. See below.
* @param P2 The second parameter controlling the disparity smoothness. The larger the values are,
* the smoother the disparity is. P1 is the penalty on the disparity change by plus or minus 1
* between neighbor pixels. P2 is the penalty on the disparity change by more than 1 between neighbor
* pixels. The algorithm requires P2 &gt; P1 . See stereo_match.cpp sample where some reasonably good
* P1 and P2 values are shown (like 8\*number_of_image_channels\*blockSize\*blockSize and
* 32\*number_of_image_channels\*blockSize\*blockSize , respectively).
* @param disp12MaxDiff Maximum allowed difference (in integer pixel units) in the left-right
* disparity check. Set it to a non-positive value to disable the check.
* @param preFilterCap Truncation value for the prefiltered image pixels. The algorithm first
* computes x-derivative at each pixel and clips its value by [-preFilterCap, preFilterCap] interval.
* The result values are passed to the Birchfield-Tomasi pixel cost function.
* value should "win" the second best value to consider the found match correct. Normally, a value
* within the 5-15 range is good enough.
* and invalidate. Set it to 0 to disable speckle filtering. Otherwise, set it somewhere in the
* 50-200 range.
* filtering, set the parameter to a positive value, it will be implicitly multiplied by 16.
* Normally, 1 or 2 is good enough.
* algorithm. It will consume O(W\*H\*numDisparities) bytes, which is large for 640x480 stereo and
* huge for HD-size pictures. By default, it is set to false .
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap) {
return StereoSGBM.__fromPtr__(create_4(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
* @param P1 The first parameter controlling the disparity smoothness. See below.
* @param P2 The second parameter controlling the disparity smoothness. The larger the values are,
* the smoother the disparity is. P1 is the penalty on the disparity change by plus or minus 1
* between neighbor pixels. P2 is the penalty on the disparity change by more than 1 between neighbor
* pixels. The algorithm requires P2 &gt; P1 . See stereo_match.cpp sample where some reasonably good
* P1 and P2 values are shown (like 8\*number_of_image_channels\*blockSize\*blockSize and
* 32\*number_of_image_channels\*blockSize\*blockSize , respectively).
* @param disp12MaxDiff Maximum allowed difference (in integer pixel units) in the left-right
* disparity check. Set it to a non-positive value to disable the check.
 * The remaining parameters (preFilterCap, uniquenessRatio, speckleWindowSize, speckleRange and
 * mode) are left at their default values.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff) {
return StereoSGBM.__fromPtr__(create_5(minDisparity, numDisparities, blockSize, P1, P2, disp12MaxDiff));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
* @param P1 The first parameter controlling the disparity smoothness. See below.
* @param P2 The second parameter controlling the disparity smoothness. The larger the values are,
* the smoother the disparity is. P1 is the penalty on the disparity change by plus or minus 1
* between neighbor pixels. P2 is the penalty on the disparity change by more than 1 between neighbor
* pixels. The algorithm requires P2 &gt; P1 . See stereo_match.cpp sample where some reasonably good
* P1 and P2 values are shown (like 8\*number_of_image_channels\*blockSize\*blockSize and
* 32\*number_of_image_channels\*blockSize\*blockSize , respectively).
 * The remaining parameters (disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize,
 * speckleRange and mode) are left at their default values.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1, int P2) {
return StereoSGBM.__fromPtr__(create_6(minDisparity, numDisparities, blockSize, P1, P2));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
 * @param P1 The first parameter controlling the disparity smoothness (see the full create
 * overload above for how P1 and P2 interact).
 * The remaining parameters (P2, disp12MaxDiff, preFilterCap, uniquenessRatio, speckleWindowSize,
 * speckleRange and mode) are left at their default values.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize, int P1) {
return StereoSGBM.__fromPtr__(create_7(minDisparity, numDisparities, blockSize, P1));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
* @param blockSize Matched block size. It must be an odd number &gt;=1 . Normally, it should be
* somewhere in the 3..11 range.
 * The remaining parameters (P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio,
 * speckleWindowSize, speckleRange and mode) are left at their default values.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities, int blockSize) {
return StereoSGBM.__fromPtr__(create_8(minDisparity, numDisparities, blockSize));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
* @param numDisparities Maximum disparity minus minimum disparity. The value is always greater than
* zero. In the current implementation, this parameter must be divisible by 16.
 * The remaining parameters (blockSize, P1, P2, disp12MaxDiff, preFilterCap, uniquenessRatio,
 * speckleWindowSize, speckleRange and mode) are left at their default values.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity, int numDisparities) {
return StereoSGBM.__fromPtr__(create_9(minDisparity, numDisparities));
}
/**
* Creates StereoSGBM object
*
* @param minDisparity Minimum possible disparity value. Normally, it is zero but sometimes
* rectification algorithms can shift images, so this parameter needs to be adjusted accordingly.
 * The remaining parameters (numDisparities, blockSize, P1, P2, disp12MaxDiff, preFilterCap,
 * uniquenessRatio, speckleWindowSize, speckleRange and mode) are left at their default values.
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create(int minDisparity) {
return StereoSGBM.__fromPtr__(create_10(minDisparity));
}
/**
* Creates StereoSGBM object
*
 * All parameters are left at their default values (see the full create overload above for their
 * meaning).
*
* The first constructor initializes StereoSGBM with all the default parameters. So, you only have to
* set StereoSGBM::numDisparities at minimum. The second constructor enables you to set each parameter
* to a custom value.
* @return automatically generated
*/
public static StereoSGBM create() {
return StereoSGBM.__fromPtr__(create_11());
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: int cv::StereoSGBM::getPreFilterCap()
private static native int getPreFilterCap_0(long nativeObj);
// C++: void cv::StereoSGBM::setPreFilterCap(int preFilterCap)
private static native void setPreFilterCap_0(long nativeObj, int preFilterCap);
// C++: int cv::StereoSGBM::getUniquenessRatio()
private static native int getUniquenessRatio_0(long nativeObj);
// C++: void cv::StereoSGBM::setUniquenessRatio(int uniquenessRatio)
private static native void setUniquenessRatio_0(long nativeObj, int uniquenessRatio);
// C++: int cv::StereoSGBM::getP1()
private static native int getP1_0(long nativeObj);
// C++: void cv::StereoSGBM::setP1(int P1)
private static native void setP1_0(long nativeObj, int P1);
// C++: int cv::StereoSGBM::getP2()
private static native int getP2_0(long nativeObj);
// C++: void cv::StereoSGBM::setP2(int P2)
private static native void setP2_0(long nativeObj, int P2);
// C++: int cv::StereoSGBM::getMode()
private static native int getMode_0(long nativeObj);
// C++: void cv::StereoSGBM::setMode(int mode)
private static native void setMode_0(long nativeObj, int mode);
// C++: static Ptr_StereoSGBM cv::StereoSGBM::create(int minDisparity = 0, int numDisparities = 16, int blockSize = 3, int P1 = 0, int P2 = 0, int disp12MaxDiff = 0, int preFilterCap = 0, int uniquenessRatio = 0, int speckleWindowSize = 0, int speckleRange = 0, int mode = StereoSGBM::MODE_SGBM)
private static native long create_0(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange, int mode);
private static native long create_1(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize, int speckleRange);
private static native long create_2(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio, int speckleWindowSize);
private static native long create_3(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap, int uniquenessRatio);
private static native long create_4(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff, int preFilterCap);
private static native long create_5(int minDisparity, int numDisparities, int blockSize, int P1, int P2, int disp12MaxDiff);
private static native long create_6(int minDisparity, int numDisparities, int blockSize, int P1, int P2);
private static native long create_7(int minDisparity, int numDisparities, int blockSize, int P1);
private static native long create_8(int minDisparity, int numDisparities, int blockSize);
private static native long create_9(int minDisparity, int numDisparities);
private static native long create_10(int minDisparity);
private static native long create_11();
// native support for java finalize()
private static native void delete(long nativeObj);
}
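
A minimal usage sketch (not part of this commit's generated bindings): it builds a matcher with
the 8*channels*blockSize*blockSize / 32*channels*blockSize*blockSize heuristic for P1/P2 quoted in
the javadoc above, then computes a disparity map via compute(), which StereoSGBM inherits from
StereoMatcher. The class name, input Mats and parameter values are illustrative assumptions, not
tuned settings.

import org.opencv.calib3d.StereoSGBM;
import org.opencv.core.CvType;
import org.opencv.core.Mat;

public class SgbmExample {
    // left/right must be rectified single-channel (CV_8UC1) images of equal size.
    public static Mat disparityOf(Mat left, Mat right) {
        int blockSize = 5, channels = 1;
        StereoSGBM sgbm = StereoSGBM.create(
                0,                                      // minDisparity
                64,                                     // numDisparities, divisible by 16
                blockSize,
                8 * channels * blockSize * blockSize,   // P1, per the javadoc heuristic
                32 * channels * blockSize * blockSize,  // P2 (must exceed P1)
                1,                                      // disp12MaxDiff
                63,                                     // preFilterCap
                10,                                     // uniquenessRatio
                100,                                    // speckleWindowSize
                32,                                     // speckleRange
                StereoSGBM.MODE_SGBM);
        Mat fixedPoint = new Mat();
        sgbm.compute(left, right, fixedPoint);          // CV_16S output, disparity scaled by 16
        Mat disparity = new Mat();
        fixedPoint.convertTo(disparity, CvType.CV_32F, 1.0 / 16.0); // undo the x16 scaling
        return disparity;
    }
}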

View File

@ -0,0 +1,120 @@
//
// This file is auto-generated. Please don't modify it!
//
package org.opencv.core;
// C++: class Algorithm
/**
 * This is a base class for all more or less complex algorithms in OpenCV,
 * especially for classes of algorithms for which there can be multiple implementations. Examples
 * are stereo correspondence (block matching, semi-global block matching, graph-cut etc.),
 * background subtraction (mixture-of-gaussians models, codebook-based algorithms etc.) and
 * optical flow (block matching, Lucas-Kanade, Horn-Schunck etc.).
 *
 * Here is an example of using SimpleBlobDetector in your application via the Algorithm interface:
* SNIPPET: snippets/core_various.cpp Algorithm
*/
public class Algorithm {
protected final long nativeObj;
protected Algorithm(long addr) { nativeObj = addr; }
public long getNativeObjAddr() { return nativeObj; }
// internal usage only
public static Algorithm __fromPtr__(long addr) { return new Algorithm(addr); }
//
// C++: void cv::Algorithm::clear()
//
/**
* Clears the algorithm state
*/
public void clear() {
clear_0(nativeObj);
}
//
// C++: void cv::Algorithm::write(Ptr_FileStorage fs, String name = String())
//
// Unknown type 'Ptr_FileStorage' (I), skipping the function
//
// C++: void cv::Algorithm::read(FileNode fn)
//
// Unknown type 'FileNode' (I), skipping the function
//
// C++: bool cv::Algorithm::empty()
//
/**
 * Returns true if the Algorithm is empty (e.g. in the very beginning or after an unsuccessful read).
* @return automatically generated
*/
public boolean empty() {
return empty_0(nativeObj);
}
//
// C++: void cv::Algorithm::save(String filename)
//
/**
* Saves the algorithm to a file.
* In order to make this method work, the derived class must implement Algorithm::write(FileStorage&amp; fs).
* @param filename automatically generated
*/
public void save(String filename) {
save_0(nativeObj, filename);
}
//
// C++: String cv::Algorithm::getDefaultName()
//
/**
* Returns the algorithm string identifier.
* This string is used as top level xml/yml node tag when the object is saved to a file or string.
* @return automatically generated
*/
public String getDefaultName() {
return getDefaultName_0(nativeObj);
}
@Override
protected void finalize() throws Throwable {
delete(nativeObj);
}
// C++: void cv::Algorithm::clear()
private static native void clear_0(long nativeObj);
// C++: bool cv::Algorithm::empty()
private static native boolean empty_0(long nativeObj);
// C++: void cv::Algorithm::save(String filename)
private static native void save_0(long nativeObj, String filename);
// C++: String cv::Algorithm::getDefaultName()
private static native String getDefaultName_0(long nativeObj);
// native support for java finalize()
private static native void delete(long nativeObj);
}
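
A quick illustration (a sketch under stated assumptions, not part of the generated file): Java
code cannot instantiate Algorithm directly, so its methods are reached through a concrete
subclass such as StereoSGBM (StereoSGBM extends StereoMatcher, which extends Algorithm). The
native library name assumes a desktop 3.4.13 build; on Android the usual route is OpenCVLoader.
The output file name is hypothetical.

import org.opencv.calib3d.StereoSGBM;

public class AlgorithmDemo {
    public static void main(String[] args) {
        System.loadLibrary("opencv_java3413");     // assumed library name for OpenCV 3.4.13
        StereoSGBM sgbm = StereoSGBM.create();
        System.out.println(sgbm.getDefaultName()); // top-level xml/yml node tag
        System.out.println(sgbm.empty());          // typically false for a constructed matcher
        sgbm.save("sgbm_params.yml");              // requires the subclass to implement write()
    }
}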

File diff suppressed because it is too large.

View File

@ -0,0 +1,15 @@
package org.opencv.core;
public class CvException extends RuntimeException {
private static final long serialVersionUID = 1L;
public CvException(String msg) {
super(msg);
}
@Override
public String toString() {
return "CvException [" + super.toString() + "]";
}
}

View File

@ -0,0 +1,136 @@
package org.opencv.core;
public final class CvType {
// type depth constants
public static final int
CV_8U = 0, CV_8S = 1,
CV_16U = 2, CV_16S = 3,
CV_32S = 4,
CV_32F = 5,
CV_64F = 6,
CV_USRTYPE1 = 7;
// predefined type constants
public static final int
CV_8UC1 = CV_8UC(1), CV_8UC2 = CV_8UC(2), CV_8UC3 = CV_8UC(3), CV_8UC4 = CV_8UC(4),
CV_8SC1 = CV_8SC(1), CV_8SC2 = CV_8SC(2), CV_8SC3 = CV_8SC(3), CV_8SC4 = CV_8SC(4),
CV_16UC1 = CV_16UC(1), CV_16UC2 = CV_16UC(2), CV_16UC3 = CV_16UC(3), CV_16UC4 = CV_16UC(4),
CV_16SC1 = CV_16SC(1), CV_16SC2 = CV_16SC(2), CV_16SC3 = CV_16SC(3), CV_16SC4 = CV_16SC(4),
CV_32SC1 = CV_32SC(1), CV_32SC2 = CV_32SC(2), CV_32SC3 = CV_32SC(3), CV_32SC4 = CV_32SC(4),
CV_32FC1 = CV_32FC(1), CV_32FC2 = CV_32FC(2), CV_32FC3 = CV_32FC(3), CV_32FC4 = CV_32FC(4),
CV_64FC1 = CV_64FC(1), CV_64FC2 = CV_64FC(2), CV_64FC3 = CV_64FC(3), CV_64FC4 = CV_64FC(4);
private static final int CV_CN_MAX = 512, CV_CN_SHIFT = 3, CV_DEPTH_MAX = (1 << CV_CN_SHIFT);
public static final int makeType(int depth, int channels) {
if (channels <= 0 || channels >= CV_CN_MAX) {
throw new UnsupportedOperationException(
"Channels count should be 1.." + (CV_CN_MAX - 1));
}
if (depth < 0 || depth >= CV_DEPTH_MAX) {
throw new UnsupportedOperationException(
"Data type depth should be 0.." + (CV_DEPTH_MAX - 1));
}
return (depth & (CV_DEPTH_MAX - 1)) + ((channels - 1) << CV_CN_SHIFT);
}
public static final int CV_8UC(int ch) {
return makeType(CV_8U, ch);
}
public static final int CV_8SC(int ch) {
return makeType(CV_8S, ch);
}
public static final int CV_16UC(int ch) {
return makeType(CV_16U, ch);
}
public static final int CV_16SC(int ch) {
return makeType(CV_16S, ch);
}
public static final int CV_32SC(int ch) {
return makeType(CV_32S, ch);
}
public static final int CV_32FC(int ch) {
return makeType(CV_32F, ch);
}
public static final int CV_64FC(int ch) {
return makeType(CV_64F, ch);
}
public static final int channels(int type) {
return (type >> CV_CN_SHIFT) + 1;
}
public static final int depth(int type) {
return type & (CV_DEPTH_MAX - 1);
}
public static final boolean isInteger(int type) {
return depth(type) < CV_32F;
}
public static final int ELEM_SIZE(int type) {
switch (depth(type)) {
case CV_8U:
case CV_8S:
return channels(type);
case CV_16U:
case CV_16S:
return 2 * channels(type);
case CV_32S:
case CV_32F:
return 4 * channels(type);
case CV_64F:
return 8 * channels(type);
default:
throw new UnsupportedOperationException(
"Unsupported CvType value: " + type);
}
}
public static final String typeToString(int type) {
String s;
switch (depth(type)) {
case CV_8U:
s = "CV_8U";
break;
case CV_8S:
s = "CV_8S";
break;
case CV_16U:
s = "CV_16U";
break;
case CV_16S:
s = "CV_16S";
break;
case CV_32S:
s = "CV_32S";
break;
case CV_32F:
s = "CV_32F";
break;
case CV_64F:
s = "CV_64F";
break;
case CV_USRTYPE1:
s = "CV_USRTYPE1";
break;
default:
throw new UnsupportedOperationException(
"Unsupported CvType value: " + type);
}
int ch = channels(type);
if (ch <= 4)
return s + "C" + ch;
else
return s + "C(" + ch + ")";
}
}
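
CvType is plain bit-packing: the depth sits in the low three bits and the channel count (minus
one) is shifted above them, so the class can be exercised without loading the native library. A
small sketch (the class name is assumed):

import org.opencv.core.CvType;

public class CvTypeDemo {
    public static void main(String[] args) {
        int t = CvType.makeType(CvType.CV_8U, 3);             // equivalent to CvType.CV_8UC3
        System.out.println(CvType.typeToString(t));           // prints "CV_8UC3"
        System.out.println(CvType.channels(t));               // 3
        System.out.println(CvType.depth(t) == CvType.CV_8U);  // true
        System.out.println(CvType.ELEM_SIZE(t));              // 3 bytes per element
    }
}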

View File

@ -0,0 +1,58 @@
package org.opencv.core;
//C++: class DMatch
/**
* Structure for matching: query descriptor index, train descriptor index, train
* image index and distance between descriptors.
*/
public class DMatch {
/**
* Query descriptor index.
*/
public int queryIdx;
/**
* Train descriptor index.
*/
public int trainIdx;
/**
* Train image index.
*/
public int imgIdx;
// javadoc: DMatch::distance
public float distance;
// javadoc: DMatch::DMatch()
public DMatch() {
this(-1, -1, Float.MAX_VALUE);
}
// javadoc: DMatch::DMatch(_queryIdx, _trainIdx, _distance)
public DMatch(int _queryIdx, int _trainIdx, float _distance) {
queryIdx = _queryIdx;
trainIdx = _trainIdx;
imgIdx = -1;
distance = _distance;
}
// javadoc: DMatch::DMatch(_queryIdx, _trainIdx, _imgIdx, _distance)
public DMatch(int _queryIdx, int _trainIdx, int _imgIdx, float _distance) {
queryIdx = _queryIdx;
trainIdx = _trainIdx;
imgIdx = _imgIdx;
distance = _distance;
}
public boolean lessThan(DMatch it) {
return distance < it.distance;
}
@Override
public String toString() {
return "DMatch [queryIdx=" + queryIdx + ", trainIdx=" + trainIdx
+ ", imgIdx=" + imgIdx + ", distance=" + distance + "]";
}
}
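
Since DMatch exposes lessThan() rather than implementing Comparable, sorting candidate matches by
descriptor distance needs an explicit comparator. A small sketch (class name and values are
illustrative):

import java.util.ArrayList;
import java.util.List;
import org.opencv.core.DMatch;

public class DMatchDemo {
    public static void main(String[] args) {
        List<DMatch> matches = new ArrayList<>();
        matches.add(new DMatch(0, 5, 0.8f));                  // imgIdx defaults to -1
        matches.add(new DMatch(1, 2, 0.3f));
        // Ascending distance: the best (closest) match comes first.
        matches.sort((a, b) -> Float.compare(a.distance, b.distance));
        System.out.println(matches.get(0));                   // the distance-0.3 match
    }
}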

View File

@ -0,0 +1,77 @@
package org.opencv.core;
import org.opencv.core.Point;
//javadoc: KeyPoint
public class KeyPoint {
/**
* Coordinates of the keypoint.
*/
public Point pt;
/**
* Diameter of the useful keypoint adjacent area.
*/
public float size;
/**
* Computed orientation of the keypoint (-1 if not applicable).
*/
public float angle;
/**
* The response, by which the strongest keypoints have been selected. Can
* be used for further sorting or subsampling.
*/
public float response;
/**
* Octave (pyramid layer), from which the keypoint has been extracted.
*/
public int octave;
/**
 * Object ID that can be used to cluster keypoints by the object they
 * belong to.
*/
public int class_id;
// javadoc:KeyPoint::KeyPoint(x,y,_size,_angle,_response,_octave,_class_id)
public KeyPoint(float x, float y, float _size, float _angle, float _response, int _octave, int _class_id) {
pt = new Point(x, y);
size = _size;
angle = _angle;
response = _response;
octave = _octave;
class_id = _class_id;
}
// javadoc: KeyPoint::KeyPoint()
public KeyPoint() {
this(0, 0, 0, -1, 0, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle, _response, _octave)
public KeyPoint(float x, float y, float _size, float _angle, float _response, int _octave) {
this(x, y, _size, _angle, _response, _octave, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle, _response)
public KeyPoint(float x, float y, float _size, float _angle, float _response) {
this(x, y, _size, _angle, _response, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size, _angle)
public KeyPoint(float x, float y, float _size, float _angle) {
this(x, y, _size, _angle, 0, 0, -1);
}
// javadoc: KeyPoint::KeyPoint(x, y, _size)
public KeyPoint(float x, float y, float _size) {
this(x, y, _size, -1, 0, 0, -1);
}
@Override
public String toString() {
return "KeyPoint [pt=" + pt + ", size=" + size + ", angle=" + angle
+ ", response=" + response + ", octave=" + octave
+ ", class_id=" + class_id + "]";
}
}
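
For completeness, a sketch of the telescoping constructors (class name and values are
illustrative): fields omitted from a shorter constructor fall back to the defaults visible above
(angle -1, response 0, octave 0, class_id -1).

import org.opencv.core.KeyPoint;

public class KeyPointDemo {
    public static void main(String[] args) {
        KeyPoint kp = new KeyPoint(12.5f, 40.0f, 7.0f);       // x, y, size only
        System.out.println(kp.angle);                         // -1.0 (orientation not applicable)
        System.out.println(kp);                               // full toString() dump
    }
}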

File diff suppressed because it is too large.

Some files were not shown because too many files have changed in this diff.