This post lays out the steps for building and running a camera2 project in Java. If you actually follow the steps in order and run the project, I think you will be able to understand how it flows. A Java program like this can later be converted to Kotlin, and Java gives closer access to the hardware, which I expect will help when implementing things like ARCore and ToF (Time-of-Flight). (Written 2023/10/20)
1. Create the project
Create a new project with the minimum API level set to 21 or higher.
Make a note of the package name you choose (or save it somewhere).
The package name used here is
package com.mogusatech.camera2injava;
Replace it with your own package name wherever it appears.
2. Edit the AndroidManifest.xml file
Changes:
Set package="com.mogusatech.camera2injava" to your own package name,
make AndroidCameraApi the main Activity, and ignore the error on AndroidCameraApi for now (the class is added in step 6).
Set android:theme="@style/Theme.Camera2InJava" to your own
Theme.Camera2InJava theme (see themes.xml).
<?xml version="1.0" encoding="utf-8"?>
<manifest xmlns:android="http://schemas.android.com/apk/res/android"
package="com.mogusatech.camera2injava">
<uses-sdk
android:minSdkVersion="21"
android:targetSdkVersion="21" />
<uses-permission android:name="android.permission.CAMERA" />
<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" />
<uses-feature android:name="android.hardware.camera.level.full" />
<uses-feature
android:name="android.hardware.camera"
android:required="false" />
<application
android:allowBackup="true"
android:icon="@mipmap/ic_launcher"
android:label="@string/app_name"
android:supportsRtl="true"
android:theme="@style/Theme.Camera2InJava">
<activity android:name=".AndroidCameraApi"
android:exported="true">
<intent-filter>
<action android:name="android.intent.action.MAIN" />
<category android:name="android.intent.category.LAUNCHER" />
</intent-filter>
</activity>
</application>
</manifest>
3. strings.xml in the values folder
<resources>
<string name="app_name">Camera2InJava</string>
<string name="take_picture">Take picture</string>
</resources>
4. colors.xml in the values folder
<?xml version="1.0" encoding="utf-8"?>
<resources>
<color name="black">#FF000000</color>
<color name="white">#FFFFFFFF</color>
<color name="colorPrimary">#3F51B5</color>
<color name="colorPrimaryDark">#303F9F</color>
<color name="colorAccent">#FF4081</color>
</resources>
5. Create the following file in the layout folder:
activity_android_camera_api.xml
Changes:
Set tools:context="com.mogusatech.camera2injava.AndroidCameraApi" to your own package.
The AndroidCameraApi.java file itself is added in the next step.
<?xml version="1.0" encoding="utf-8"?>
<RelativeLayout xmlns:android="http://schemas.android.com/apk/res/android"
xmlns:tools="http://schemas.android.com/tools"
android:layout_width="match_parent"
android:layout_height="match_parent"
android:orientation="vertical"
tools:context="com.mogusatech.camera2injava.AndroidCameraApi">
<TextureView
android:id="@+id/texture"
android:layout_width="match_parent"
android:layout_height="wrap_content"
android:layout_above="@+id/btn_takepicture"
android:layout_alignParentTop="true"/>
<Button
android:id="@+id/btn_takepicture"
android:layout_width="wrap_content"
android:layout_height="wrap_content"
android:layout_alignParentBottom="true"
android:layout_centerHorizontal="true"
android:layout_marginBottom="16dp"
android:layout_marginTop="16dp"
android:text="@string/take_picture" />
</RelativeLayout>
6. Create the AndroidCameraApi.java file
Copy the contents below into the project's package folder (Camera2InJava/app/src/main/java/com/mogusatech/camera2injava).
Changes:
Use your own package declaration:
package com.mogusatech.camera2injava;
package com.mogusatech.camera2injava;
import android.Manifest;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.ImageFormat;
import android.graphics.SurfaceTexture;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCaptureSession;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.CameraMetadata;
import android.hardware.camera2.CaptureRequest;
import android.hardware.camera2.TotalCaptureResult;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.media.Image;
import android.media.ImageReader;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
//import android.support.annotation.NonNull;
//import android.support.v4.app.ActivityCompat;
//import android.support.v7.app.AppCompatActivity;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.widget.Button;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.app.ActivityCompat;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.OutputStream;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
public class AndroidCameraApi extends AppCompatActivity {
private static final String TAG = "AndroidCameraApi";
private Button takePictureButton;
private TextureView textureView;
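// Maps the display rotation (Surface.ROTATION_*) to the JPEG orientation applied in takePicture().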
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String cameraId;
protected CameraDevice cameraDevice;
protected CameraCaptureSession cameraCaptureSessions;
protected CaptureRequest captureRequest;
protected CaptureRequest.Builder captureRequestBuilder;
private Size imageDimension;
private ImageReader imageReader;
private File file;
private static final int REQUEST_CAMERA_PERMISSION = 200;
private boolean mFlashSupported;
private Handler mBackgroundHandler;
private HandlerThread mBackgroundThread;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.activity_android_camera_api);
textureView = (TextureView) findViewById(R.id.texture);
assert textureView != null;
textureView.setSurfaceTextureListener(textureListener);
takePictureButton = (Button) findViewById(R.id.btn_takepicture);
assert takePictureButton != null;
takePictureButton.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
takePicture();
}
});
}
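// Opens the camera as soon as the TextureView's SurfaceTexture becomes available.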
TextureView.SurfaceTextureListener textureListener = new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(SurfaceTexture surface, int width, int height) {
//open your camera here
openCamera();
}
@Override
public void onSurfaceTextureSizeChanged(SurfaceTexture surface, int width, int height) {
// Transform you image captured size according to the surface width and height
}
@Override
public boolean onSurfaceTextureDestroyed(SurfaceTexture surface) {
return false;
}
@Override
public void onSurfaceTextureUpdated(SurfaceTexture surface) {
// Bitmap frameBitmap = textureView.getBitmap();
}
};
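// Camera device state callback: starts the preview when the camera opens, and closes the device on disconnect or error.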
private final CameraDevice.StateCallback stateCallback = new CameraDevice.StateCallback() {
@Override
public void onOpened(CameraDevice camera) {
//This is called when the camera is open
Log.e(TAG, "onOpened");
cameraDevice = camera;
createCameraPreview();
}
@Override
public void onDisconnected(CameraDevice camera) {
cameraDevice.close();
}
@Override
public void onError(CameraDevice camera, int error) {
cameraDevice.close();
cameraDevice = null;
}
};
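// Note: this callback is declared but not used below; takePicture() creates its own local captureListener.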
final CameraCaptureSession.CaptureCallback captureCallbackListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(AndroidCameraApi.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
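// Background HandlerThread used for camera and ImageReader callbacks so they do not block the UI thread.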
protected void startBackgroundThread() {
mBackgroundThread = new HandlerThread("Camera Background");
mBackgroundThread.start();
mBackgroundHandler = new Handler(mBackgroundThread.getLooper());
}
protected void stopBackgroundThread() {
mBackgroundThread.quitSafely();
try {
mBackgroundThread.join();
mBackgroundThread = null;
mBackgroundHandler = null;
} catch (InterruptedException e) {
e.printStackTrace();
}
}
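// Captures a single JPEG still: uses the first JPEG output size reported by the camera (falling back to 640x480),
// saves the frame as pic.jpg in external storage via an ImageReader, then restarts the preview.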
protected void takePicture() {
if(null == cameraDevice) {
Log.e(TAG, "cameraDevice is null");
return;
}
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
try {
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraDevice.getId());
Size[] jpegSizes = null;
if (characteristics != null) {
jpegSizes = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP).getOutputSizes(ImageFormat.JPEG);
}
int width = 640;
int height = 480;
if (jpegSizes != null && 0 < jpegSizes.length) {
width = jpegSizes[0].getWidth();
height = jpegSizes[0].getHeight();
}
ImageReader reader = ImageReader.newInstance(width, height, ImageFormat.JPEG, 1);
List<Surface> outputSurfaces = new ArrayList<Surface>(2);
outputSurfaces.add(reader.getSurface());
outputSurfaces.add(new Surface(textureView.getSurfaceTexture()));
final CaptureRequest.Builder captureBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_STILL_CAPTURE);
captureBuilder.addTarget(reader.getSurface());
captureBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
// Orientation
int rotation = getWindowManager().getDefaultDisplay().getRotation();
captureBuilder.set(CaptureRequest.JPEG_ORIENTATION, ORIENTATIONS.get(rotation));
final File file = new File(Environment.getExternalStorageDirectory()+"/pic.jpg");
ImageReader.OnImageAvailableListener readerListener = new ImageReader.OnImageAvailableListener() {
@Override
public void onImageAvailable(ImageReader reader) {
Image image = null;
try {
image = reader.acquireLatestImage();
ByteBuffer buffer = image.getPlanes()[0].getBuffer();
byte[] bytes = new byte[buffer.capacity()];
buffer.get(bytes);
save(bytes);
} catch (FileNotFoundException e) {
e.printStackTrace();
} catch (IOException e) {
e.printStackTrace();
} finally {
if (image != null) {
image.close();
}
}
}
private void save(byte[] bytes) throws IOException {
OutputStream output = null;
try {
output = new FileOutputStream(file);
output.write(bytes);
} finally {
if (null != output) {
output.close();
}
}
}
};
reader.setOnImageAvailableListener(readerListener, mBackgroundHandler);
final CameraCaptureSession.CaptureCallback captureListener = new CameraCaptureSession.CaptureCallback() {
@Override
public void onCaptureCompleted(CameraCaptureSession session, CaptureRequest request, TotalCaptureResult result) {
super.onCaptureCompleted(session, request, result);
Toast.makeText(AndroidCameraApi.this, "Saved:" + file, Toast.LENGTH_SHORT).show();
createCameraPreview();
}
};
cameraDevice.createCaptureSession(outputSurfaces, new CameraCaptureSession.StateCallback() {
@Override
public void onConfigured(CameraCaptureSession session) {
try {
session.capture(captureBuilder.build(), captureListener, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
@Override
public void onConfigureFailed(CameraCaptureSession session) {
}
}, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
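// Builds a preview request targeting the TextureView's surface and creates the capture session that drives it.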
protected void createCameraPreview() {
try {
SurfaceTexture texture = textureView.getSurfaceTexture();
assert texture != null;
texture.setDefaultBufferSize(imageDimension.getWidth(), imageDimension.getHeight());
Surface surface = new Surface(texture);
captureRequestBuilder = cameraDevice.createCaptureRequest(CameraDevice.TEMPLATE_PREVIEW);
captureRequestBuilder.addTarget(surface);
cameraDevice.createCaptureSession(Arrays.asList(surface), new CameraCaptureSession.StateCallback(){
@Override
public void onConfigured(@NonNull CameraCaptureSession cameraCaptureSession) {
//The camera is already closed
if (null == cameraDevice) {
return;
}
// When the session is ready, we start displaying the preview.
cameraCaptureSessions = cameraCaptureSession;
updatePreview();
}
@Override
public void onConfigureFailed(@NonNull CameraCaptureSession cameraCaptureSession) {
Toast.makeText(AndroidCameraApi.this, "Configuration change", Toast.LENGTH_SHORT).show();
}
}, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
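// Opens the first camera in CameraManager's list; requests the CAMERA and WRITE_EXTERNAL_STORAGE
// permissions first if they have not yet been granted.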
private void openCamera() {
CameraManager manager = (CameraManager) getSystemService(Context.CAMERA_SERVICE);
Log.e(TAG, "is camera open");
try {
cameraId = manager.getCameraIdList()[0];
CameraCharacteristics characteristics = manager.getCameraCharacteristics(cameraId);
StreamConfigurationMap map = characteristics.get(CameraCharacteristics.SCALER_STREAM_CONFIGURATION_MAP);
assert map != null;
imageDimension = map.getOutputSizes(SurfaceTexture.class)[0];
// Add permission for camera and let user grant the permission
if (ActivityCompat.checkSelfPermission(this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED && ActivityCompat.checkSelfPermission(this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
ActivityCompat.requestPermissions(AndroidCameraApi.this, new String[]{Manifest.permission.CAMERA, Manifest.permission.WRITE_EXTERNAL_STORAGE}, REQUEST_CAMERA_PERMISSION);
return;
}
manager.openCamera(cameraId, stateCallback, null);
} catch (CameraAccessException e) {
e.printStackTrace();
}
Log.e(TAG, "openCamera X");
}
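// Starts the repeating preview request on the current capture session.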
protected void updatePreview() {
if(null == cameraDevice) {
Log.e(TAG, "updatePreview error, return");
return;
}
captureRequestBuilder.set(CaptureRequest.CONTROL_MODE, CameraMetadata.CONTROL_MODE_AUTO);
try {
cameraCaptureSessions.setRepeatingRequest(captureRequestBuilder.build(), null, mBackgroundHandler);
} catch (CameraAccessException e) {
e.printStackTrace();
}
}
private void closeCamera() {
if (null != cameraDevice) {
cameraDevice.close();
cameraDevice = null;
}
if (null != imageReader) {
imageReader.close();
imageReader = null;
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == REQUEST_CAMERA_PERMISSION) {
if (grantResults[0] == PackageManager.PERMISSION_DENIED) {
// close the app
Toast.makeText(AndroidCameraApi.this, "Sorry!!!, you can't use this app without granting permission", Toast.LENGTH_LONG).show();
finish();
}
}
}
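// Restarts the background thread and reopens the camera (or waits for the surface) when the activity returns to the foreground.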
@Override
protected void onResume() {
super.onResume();
Log.e(TAG, "onResume");
startBackgroundThread();
if (textureView.isAvailable()) {
openCamera();
} else {
textureView.setSurfaceTextureListener(textureListener);
}
}
@Override
protected void onPause() {
Log.e(TAG, "onPause");
//closeCamera();
stopBackgroundThread();
super.onPause();
}
}