현재 버튼을 누르면 카메라가 열리게하여 detection을 하고있습니다.
하는 중 계속 저런에러가 발생하는데 button값에 null을 주지도않았고, onClick button 에 openCamera()라는 클래스를 생성하여 가져오게 하는데 Log도 찍히지 않는 상황입니다 이유를 모르겠습니다
package kr.co.kpst.mec_digtwin.activity;
import android.Manifest;
import android.annotation.SuppressLint;
import android.app.Activity;
import android.app.Fragment;
import android.content.Context;
import android.content.pm.PackageManager;
import android.graphics.SurfaceTexture;
import android.hardware.Camera;
import android.hardware.Camera.CameraInfo;
import android.hardware.camera2.CameraAccessException;
import android.hardware.camera2.CameraCharacteristics;
import android.hardware.camera2.CameraDevice;
import android.hardware.camera2.CameraManager;
import android.hardware.camera2.params.StreamConfigurationMap;
import android.os.Bundle;
import android.os.Handler;
import android.os.HandlerThread;
import android.util.Log;
import android.util.Size;
import android.util.SparseIntArray;
import android.view.LayoutInflater;
import android.view.Surface;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import androidx.core.app.ActivityCompat;
import androidx.core.content.ContextCompat;
import kr.co.kpst.mec_digtwin.customview.AutoFitTextureView;
import kr.co.kpst.mec_digtwin.env.ImageUtils;
import kr.co.kpst.mec_digtwin.env.Logger;
import java.io.IOException;
import java.util.List;
import kr.co.kpst.mec_digtwin.R;
import kr.co.kpst.mec_digtwin.env.ImageUtils;
@SuppressLint("ValidFragment")
public class LegacyCameraConnectionFragment extends Fragment {
private static final Logger LOGGER = new Logger();
/**
* Conversion from screen rotation to JPEG orientation.
*/
private static final SparseIntArray ORIENTATIONS = new SparseIntArray();
public Context mContext;
static {
ORIENTATIONS.append(Surface.ROTATION_0, 90);
ORIENTATIONS.append(Surface.ROTATION_90, 0);
ORIENTATIONS.append(Surface.ROTATION_180, 270);
ORIENTATIONS.append(Surface.ROTATION_270, 180);
}
private String mCameraId = "0";
private Camera camera;
private Camera.PreviewCallback imageListener;
private Size desiredSize;
/**
* The layout identifier to inflate for this Fragment.
*/
private int layout;
/**
* An {@link AutoFitTextureView} for camera preview.
*/
private AutoFitTextureView textureView;
/**
* {@link TextureView.SurfaceTextureListener} handles several lifecycle events on a {@link
* TextureView}.
*/
private final TextureView.SurfaceTextureListener surfaceTextureListener =
new TextureView.SurfaceTextureListener() {
@Override
public void onSurfaceTextureAvailable(
final SurfaceTexture texture, final int width, final int height) {
int index = getCameraId();
camera = Camera.open(index);
try {
Camera.Parameters parameters = camera.getParameters();
List<String> focusModes = parameters.getSupportedFocusModes();
if (focusModes != null
&& focusModes.contains(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE)) {
parameters.setFocusMode(Camera.Parameters.FOCUS_MODE_CONTINUOUS_PICTURE);
}
List<Camera.Size> cameraSizes = parameters.getSupportedPreviewSizes();
Size[] sizes = new Size[cameraSizes.size()];
int i = 0;
for (Camera.Size size : cameraSizes) {
sizes[i++] = new Size(size.width, size.height);
}
Size previewSize =
CameraConnectionFragment.chooseOptimalSize(
sizes, desiredSize.getWidth(), desiredSize.getHeight());
parameters.setPreviewSize(previewSize.getWidth(), previewSize.getHeight());
camera.setDisplayOrientation(0);
camera.setParameters(parameters);
camera.setPreviewTexture(texture);
} catch (IOException exception) {
camera.release();
}
camera.setPreviewCallbackWithBuffer(imageListener);
Camera.Size s = camera.getParameters().getPreviewSize();
camera.addCallbackBuffer(new byte[ImageUtils.getYUVByteSize(s.height, s.width)]);
textureView.setAspectRatio(2000, 800);
openCamera();
camera.startPreview();
}
@Override
public void onSurfaceTextureSizeChanged(
final SurfaceTexture texture, final int width, final int height) {
}
@Override
public boolean onSurfaceTextureDestroyed(final SurfaceTexture texture) {
return true;
}
@Override
public void onSurfaceTextureUpdated(final SurfaceTexture texture) {
}
};
/**
* An additional thread for running tasks that shouldn't block the UI.
*/
private HandlerThread backgroundThread;
public void openCamera() {
Log.e("로그찍어보기","5555");
// camera.startPreview();
}
@Override
public void onPause() {
stopCamera();
stopBackgroundThread();
super.onPause();
}
@SuppressLint("ValidFragment")
public LegacyCameraConnectionFragment(
final Camera.PreviewCallback imageListener, final int layout, final Size desiredSize) {
this.imageListener = imageListener;
this.layout = layout;
this.desiredSize = desiredSize;
}
@Override
public View onCreateView(
final LayoutInflater inflater, final ViewGroup container, final Bundle savedInstanceState) {
return inflater.inflate(layout, container, false);
}
@Override
public void onViewCreated(final View view, final Bundle savedInstanceState) {
textureView = (AutoFitTextureView) view.findViewById(R.id.texture);
}
@Override
public void onActivityCreated(final Bundle savedInstanceState) {
super.onActivityCreated(savedInstanceState);
}
@Override
public void onResume() {
super.onResume();
startBackgroundThread();
if (textureView.isAvailable()) {
if (camera != null) {
camera.startPreview();
}
} else {
textureView.setSurfaceTextureListener(surfaceTextureListener);
}
}
/** Starts a background thread and its {@link Handler}. */
private void startBackgroundThread() {
backgroundThread = new HandlerThread("CameraBackground");
backgroundThread.start();
}
/** Stops the background thread and its {@link Handler}. */
private void stopBackgroundThread() {
backgroundThread.quitSafely();
try {
backgroundThread.join();
backgroundThread = null;
} catch (final InterruptedException e) {
LOGGER.e(e, "Exception!");
}
}
protected void stopCamera() {
if (camera != null) {
camera.stopPreview();
camera.setPreviewCallback(null);
camera.release();
camera = null;
}
}
private int getCameraId() {
CameraInfo ci = new CameraInfo();
for (int i = 0; i < Camera.getNumberOfCameras(); i++) {
Camera.getCameraInfo(i, ci);
if (ci.facing == CameraInfo.CAMERA_FACING_BACK) return i;
}
return -1; // No camera found
}
}
package kr.co.kpst.mec_digtwin.activity;
import android.Manifest;
import android.animation.ObjectAnimator;
import android.annotation.TargetApi;
import android.app.Activity;
import android.app.AlertDialog;
import android.content.DialogInterface;
import android.content.Intent;
import android.content.pm.PackageManager;
import android.graphics.Bitmap;
import android.graphics.BitmapFactory;
import android.graphics.Camera;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Matrix;
import android.graphics.Paint;
import android.graphics.PixelFormat;
import android.graphics.RectF;
import android.graphics.Typeface;
import android.hardware.camera2.CameraDevice;
import android.media.ImageReader;
import android.media.MediaPlayer;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Environment;
import android.os.Handler;
import android.os.HandlerThread;
import android.os.Message;
import android.os.SystemClock;
import android.provider.Settings;
import android.util.Log;
import android.util.Size;
import android.util.TypedValue;
import android.view.Gravity;
import android.view.LayoutInflater;
import android.view.TextureView;
import android.view.View;
import android.view.ViewGroup;
import android.view.WindowManager;
import android.widget.Button;
import android.widget.CompoundButton;
import android.widget.ImageButton;
import android.widget.ImageView;
import android.widget.LinearLayout;
import android.widget.RelativeLayout;
import android.widget.Spinner;
import android.widget.Switch;
import android.widget.Switch;
import android.widget.TextView;
import android.widget.Toast;
import androidx.annotation.NonNull;
import androidx.appcompat.app.AppCompatActivity;
import androidx.core.graphics.BitmapCompat;
import androidx.fragment.app.FragmentManager;
import androidx.fragment.app.FragmentTransaction;
import java.io.DataOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.net.Socket;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import kr.co.kpst.mec_digtwin.customview.OverlayView;
import kr.co.kpst.mec_digtwin.env.BorderedText;
import kr.co.kpst.mec_digtwin.env.ImageUtils;
import kr.co.kpst.mec_digtwin.env.Logger;
import kr.co.kpst.mec_digtwin.R;
import kr.co.kpst.mec_digtwin.data.InitData;
import kr.co.kpst.mec_digtwin.data.SimulationResult;
import kr.co.kpst.mec_digtwin.tflite.Classifier;
import kr.co.kpst.mec_digtwin.tracking.MultiBoxTracker;
import kr.co.kpst.mec_digtwin.util.Constant;
import kr.co.kpst.mec_digtwin.util.PreferenceUtils;
import kr.co.kpst.mec_digtwin.view.MECIconView;
import kr.co.kpst.mec_digtwin.view.MECTollgateView;
import kr.co.kpst.mec_digtwin.tflite.TFLiteObjectDetectionAPIModel;
import static android.Manifest.permission.WRITE_EXTERNAL_STORAGE;
import static android.Manifest.permission_group.CAMERA;
public class MainActivity extends CameraActivity implements ImageReader.OnImageAvailableListener {
// static final int REQUEST_CAMERA = 1;
// static final int REQUEST_STORAGE = 2;
private static final Logger LOGGER = new Logger();
private static final Size DESIRED_PREVIEW_SIZE = new Size(640, 480);
// Configuration values for the prepackaged SSD model.
private static final int TF_OD_API_INPUT_SIZE = 300;
private static final boolean TF_OD_API_IS_QUANTIZED = true;
private static final String TF_OD_API_MODEL_FILE = "detect.tflite";
// FileInputStream f_input_stream= new FileInputStream(new File("file:///android_asset/detect.tflite"));
// FileChannel f_channel = f_input_stream.getChannel();
// MappedByteBuffer tflite_model = f_channel.map(FileChannel.MapMode.READ_ONLY, 0, f_channel .size());
private static final String TF_OD_API_LABELS_FILE = "labelmap.txt";
private static final DetectorMode MODE = DetectorMode.tflite_model;
// Minimum detection confidence to track a detection.
private static final float MINIMUM_CONFIDENCE_TF_OD_API = 0.5f;
private static final boolean MAINTAIN_ASPECT = false;
private static final boolean SAVE_PREVIEW_BITMAP = false;
private static final float TEXT_SIZE_DIP = 10;
// OverlayView trackingOverlay;
private Integer sensorOrientation;
private Classifier detector;
private long lastProcessingTimeMs;
private Bitmap rgbFrameBitmap = null;
private Bitmap croppedBitmap = null;
private Bitmap cropCopyBitmap = null;
private boolean computingDetection = false;
private long timestamp = 0;
private Matrix frameToCropTransform;
private Matrix cropToFrameTransform;
private MultiBoxTracker tracker;
private BorderedText borderedText;
public MainActivity() {
}
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
}
//
// private Handler handler;
// private HandlerThread handlerThread;
// private boolean useCamera2API;
// private boolean isProcessingFrame = false;
// private byte[][] yuvBytes = new byte[3][];
// private int[] rgbBytes = null;
// private int yRowStride;
// private Runnable postInferenceCallback;
// private Runnable imageConverter;
// private LinearLayout bottomSheetLayout;
// private LinearLayout gestureLayout;
// private BottomSheetBehavior<LinearLayout> sheetBehavior;
// protected TextView recognitionTextView,
// recognition1TextView,
// recognition2TextView,
// recognitionValueTextView,
// recognition1ValueTextView,
// recognition2ValueTextView;
// protected TextView frameValueTextView,
// cropValueTextView,
// cameraResolutionTextView,
// rotationTextView,
// inferenceTimeTextView;
// protected ImageView bottomSheetArrowImageView;
// private ImageView plusImageView, minusImageView;
// private Spinner modelSpinner;
// private Spinner deviceSpinner;
// private TextView threadsTextView;
// private CameraDevice cameraDevice;
// private Camera camera;
private enum TIME_ENUM {
// TIME_3SEC("톨게이트에 진입합니다.", 3000),
TITLE("MEC DigTwin으로부터 디지털 트윈 정보를 불러옵니다.", 5000),
START("MEC DigTwin으로부터 최적 게이트를 시뮬레이션 합니다.", 7000),
END_POINT("MEC DigTwin으로부터 %s번 게이트가 결정되었습니다.", 10000);
// final private String title;
private String mTitle;
private int mSec;
TIME_ENUM(String title, int sec) {
mTitle = title;
mSec = sec;
}
}
private static final int UPDATE_TITLE = 0x01;
// private final static String mediaPath = "/mec_media/tollgate.mp4";
// private VideoView videoView;
private TextView tvNoti;
private ImageButton btSetting;
private MECTollgateView testAni;
/* 주변정보 뷰 */
private InitData data;
private MECIconView infoCar;
private MECIconView infoSelfDrive;
private MECIconView infoManualDrive;
private MECIconView infoAutoPay;
private MECIconView infoManualPay;
/* 톨게이트 정보 뷰 */
private LinearLayout infoTollgates; // addView로 동적으로 뷰 추가할 것.
/* 시뮬레이션 관련 */
private Button buttonSimulation;
private Switch switchSelfDrive;
private Switch switchAutoPay;
private ImageView switchDirection;
private int selfDrive, autoPay, direction;
private MECTollgateView selectedTollgate;
private ViewGroup rootView;
private DataThread dataThread;
private SimulationResult simulationResult;
private Button buttonResult;
private int tollgateNum;
private Classifier tflite;
@Override
protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
getWindow().addFlags(WindowManager.LayoutParams.FLAG_KEEP_SCREEN_ON);
setContentView(R.layout.activity_main);
// Frame textureView = findViewById(R.id.container);
int rotation = getWindowManager().getDefaultDisplay().getRotation();
// textureView.bringToFront();
//fragmentManager = getSupportFragmentManager();
//transaction = fragmentManager.beginTransaction();
//getSupportFragmentManager().beginTransaction().replace(R.id.Textureview, P);
//transaction.replace(R.id.Textureview, P).commitAllowingStateLoss();
boolean is_recording;
is_recording = false;
// transaction = fragmentManager.beginTransaction();
// SettingActivity로부터 데이터 받음.
if (getIntent() != null && getIntent().getSerializableExtra(Constant.EXTRA_NAME_DATA) != null) {
data = (InitData) getIntent().getSerializableExtra(Constant.EXTRA_NAME_DATA);
} else {
data = new InitData();
int[] temp = new int[]{0, 1, 0, 0, 1};
// data.setTollgate(temp);
data.setAutoPaymentRate(25);
data.setSelfDrivingRate(25);
data.setSessionCount(100);
data.setWaitingCars(25);
data.setTollgate(temp);
// Toast.makeText(getApplicationContext(), "데이터를 가져오지 못했습니다.", Toast.LENGTH_SHORT).show();
}
// Log.e("Data received from SettingActivity", data.toString());
// 뷰 바인딩
rootView = findViewById(R.id.layout_root);
tvNoti = findViewById(R.id.tv_notification);
btSetting = findViewById(R.id.ibt_logo);
infoCar = findViewById(R.id.info_car);
infoSelfDrive = findViewById(R.id.info_hipass);
infoManualDrive = findViewById(R.id.info_not_hipass);
infoAutoPay = findViewById(R.id.info_unmanned);
infoManualPay = findViewById(R.id.info_manned);
infoTollgates = findViewById(R.id.ll_middle_center);
buttonSimulation = findViewById(R.id.button_simulation);
buttonSimulation.setOnClickListener(clickListener);
// 새로추가
// mPreview = new int[].....
// getSupportFrame...add replace ... (Fragment);
// updateTitle(TIME_ENUM.TITLE);
// 아이콘 뷰 값 셋트
// infoCar.setValue(String.valueOf(data.getWaitingCars())); // 대기 자동차 수
//
// infoSelfDrive.setValue(String.valueOf(data.getSelfDrivingCars())); // 자율주행 자동차 수
// infoManualDrive.setValue(String.valueOf(data.getManualDrivingCars())); // 수동주행 자동차 수
//
// infoAutoPay.setValue(String.valueOf(data.getAutoPaymentCars())); // 자율주행 자동차 수
// infoManualPay.setValue(String.valueOf(data.getManualPaymentCars())); // 수동주행 자동차 수
// infoSelfDrive.setValue(String.valueOf(data.getSelfDrivingCars())); // 자율주행 자동차 수
// infoManualDrive.setValue(String.valueOf(data.getManualDrivingCars())); // 수동주행 자동차 수
// int sum = data.getSelfDrivingCars() + data.getManualDrivingCars();
// if(sum > data.getWaitingCars()){
// infoSelfDrive.setValue(String.valueOf(data.getSelfDrivingCars())); // 자율주행 자동차 수
// infoManualDrive.setValue(String.valueOf(data.getManualDrivingCars()-1)); // 수동주행 자동차 수
// }else if(sum < data.getWaitingCars()){
// infoSelfDrive.setValue(String.valueOf(data.getSelfDrivingCars() +1)); // 자율주행 자동차 수
// infoManualDrive.setValue(String.valueOf(data.getManualDrivingCars())); // 수동주행 자동차 수
// }else{
// infoSelfDrive.setValue(String.valueOf(data.getSelfDrivingCars())); // 자율주행 자동차 수
// infoManualDrive.setValue(String.valueOf(data.getManualDrivingCars())); // 수동주행 자동차 수
// }
//sum = data.getAutoPaymentCars() + data.getManualPaymentCars();
// if(sum > data.getWaitingCars()){
// infoAutoPay.setValue(String.valueOf(data.getAutoPaymentCars())); // 자율주행 자동차 수
// infoManualPay.setValue(String.valueOf(data.getManualPaymentCars()-1)); // 수동주행 자동차 수
// }else if(sum < data.getWaitingCars()){
// infoAutoPay.setValue(String.valueOf(data.getAutoPaymentCars() +1)); // 자율주행 자동차 수
// infoManualPay.setValue(String.valueOf(data.getManualPaymentCars())); // 수동주행 자동차 수
// }else{
// infoSelfDrive.setValue(String.valueOf(data.getAutoPaymentCars())); // 자율주행 자동차 수
// infoManualPay.setValue(String.valueOf(data.getManualPaymentCars())); // 수동주행 자동차 수
// }
// 톨게이트 뷰 동적 추가
for (int i = 0; i < data.getTotalTollgates(); i++) {
final MECTollgateView childView = new MECTollgateView(this);
childView.setLayoutParams(new LinearLayout.LayoutParams(
RelativeLayout.LayoutParams.WRAP_CONTENT/*width*/,
RelativeLayout.LayoutParams.MATCH_PARENT/*height*/,
1.0f));
childView.setTollgateType(data.getTollgateType(i)); // 톨게이트 이미지 설정
childView.setTollgateNum(i + 1); // 톨게이트 번호 설정
infoTollgates.addView(childView);
}
// textDirection = findViewById(R.id.tv_direction);
// buttonSimulation = findViewById(R.id.button_simulation);
// buttonSimulation.setOnClickListener(clickListener);
switchDirection = findViewById(R.id.iv_arrow);
switchDirection.setOnClickListener(clickListener);
switchSelfDrive = findViewById(R.id.switch_self_drive);
switchSelfDrive.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
selfDrive = (isChecked) ? 0/* 자율주행 ON */ : 1/* 자율주행 OFF */;
}
});
switchAutoPay = findViewById(R.id.switch_auto_pay);
switchAutoPay.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton buttonView, boolean isChecked) {
autoPay = (isChecked) ? 0/* 자동결제 ON */ : 1/* 자동결제 OFF */;
}
});
// videoView.requestFocus();
btSetting.setOnClickListener(clickListener);
buttonResult = findViewById(R.id.button);
buttonResult.setOnClickListener(clickListener);
}
//
// @Overrride
// public void onRequestPermissionsResult(int requestCode, String[] permissions, int[] grantResults) {
// super.onRequestPermissionsResult(requestCode, permissions, grantResults);
// Log.d(TAG, "onRequestPermissionsResult()");
// switch (requestCode) {
// case REQUEST_CAMERA:
// for (int i = 0; i < permissions.length; i++) {
// String permission = permissions[i];
// int grantResult = grantResults[i];
// if (permission.equals(Manifest.permission.CAMERA)) {
// Log.d(TAG, "CAMERA");
// if (grantResult == PackageManager.PERMISSION_GRANTED) {
// mPreview.openCamera();
// } else {
// Toast.makeText(this, "Should have camera permission to run", Toast.LENGTH_LONG).show();
// finish();
// }
// }
// }
// break;
@TargetApi(Build.VERSION_CODES.M)
public void checkVerify() {
if (
checkSelfPermission(Manifest.permission.READ_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED
) {
requestPermissions(new String[]{Manifest.permission.READ_EXTERNAL_STORAGE, CAMERA, WRITE_EXTERNAL_STORAGE},
1);
} else {
// setup();
}
}
@Override
public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions, @NonNull int[] grantResults) {
super.onRequestPermissionsResult(requestCode, permissions, grantResults);
if (requestCode == 1) {
if (grantResults.length > 0) {
for (int i = 0; i < grantResults.length; ++i) {
if (grantResults[i] == PackageManager.PERMISSION_DENIED) {
// 하나라도 거부한다면.
new AlertDialog.Builder(this).setTitle("알림").setMessage("권한을 허용해주셔야 앱을 이용할 수 있습니다.")
.setPositiveButton("종료", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
}).setNegativeButton("권한 설정", new DialogInterface.OnClickListener() {
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
Intent intent = new Intent(Settings.ACTION_APPLICATION_DETAILS_SETTINGS)
.setData(Uri.parse("package:" + getApplicationContext().getPackageName()));
getApplicationContext().startActivity(intent);
}
}).setCancelable(false).show();
return;
}
}
// setup();
}
}
}
@Override
protected void processImage() {
++timestamp;
final long currTimestamp = timestamp;
// trackingOverlay.postInvalidate();
// No mutex needed as this method is not reentrant.
if (computingDetection) {
readyForNextImage();
return;
}
computingDetection = true;
LOGGER.i("Preparing image " + currTimestamp + " for detection in bg thread.");
rgbFrameBitmap.setPixels(getRgbBytes(), 0, previewWidth, 0, 0, previewWidth, previewHeight);
readyForNextImage();
final Canvas canvas = new Canvas(croppedBitmap);
canvas.drawBitmap(rgbFrameBitmap, frameToCropTransform, null);
// For examining the actual TF input.
if (SAVE_PREVIEW_BITMAP) {
ImageUtils.saveBitmap(croppedBitmap);
}
runInBackground(
new Runnable() {
@Override
public void run() {
LOGGER.i("Running detection on image " + currTimestamp);
final long startTime = SystemClock.uptimeMillis();
final List<Classifier.Recognition> results = detector.recognizeImage(croppedBitmap);
lastProcessingTimeMs = SystemClock.uptimeMillis() - startTime;
Log.e("Result" , Arrays.toString(results.toArray()) );
cropCopyBitmap = Bitmap.createBitmap(croppedBitmap);
final Canvas canvas = new Canvas(cropCopyBitmap);
final Paint paint = new Paint();
paint.setColor(Color.RED);
paint.setStyle(Paint.Style.STROKE);
paint.setStrokeWidth(2.0f);
float minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
switch (MODE) {
case TF_OD_API:
minimumConfidence = MINIMUM_CONFIDENCE_TF_OD_API;
break;
}
final List<Classifier.Recognition> mappedRecognitions =
new LinkedList<Classifier.Recognition>();
for (final Classifier.Recognition result : results) {
final RectF location = result.getLocation();
if (location != null && result.getConfidence() >= minimumConfidence) {
canvas.drawRect(location, paint);
cropToFrameTransform.mapRect(location);
result.setLocation(location);
mappedRecognitions.add(result);
}
}
tracker.trackResults(mappedRecognitions, currTimestamp);
// trackingOverlay.postInvalidate();
computingDetection = false;
// runOnUiThread(
// new Runnable() {
// @Override
// public void run() {
//// showFrameInfo(previewWidth + "x" + previewHeight);
// showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
// showInference(lastProcessingTimeMs + "ms");
// }
// });
}
});
}
@Override
protected int getLayoutId() {
return R.layout.camera_connection_fragment;
}
private LegacyCameraConnectionFragment plz;
@Override
protected Size getDesiredPreviewFrameSize() {
return DESIRED_PREVIEW_SIZE;
}
private enum DetectorMode {
TF_OD_API, tflite_model;
}
@Override
protected void setUseNNAPI(final boolean isChecked) {
runInBackground(() -> detector.setUseNNAPI(isChecked));
}
@Override
protected void setNumThreads(final int numThreads) {
runInBackground(() -> detector.setNumThreads(numThreads));
}
@Override
protected void onPreviewSizeChosen(Size size, int rotation) {
final float textSizePx =
TypedValue.applyDimension(
TypedValue.COMPLEX_UNIT_DIP, TEXT_SIZE_DIP, getResources().getDisplayMetrics());
borderedText = new BorderedText(textSizePx);
borderedText.setTypeface(Typeface.MONOSPACE);
tracker = new MultiBoxTracker(this);
int cropSize = TF_OD_API_INPUT_SIZE;
try {
detector =
TFLiteObjectDetectionAPIModel.create(
getAssets(),
TF_OD_API_MODEL_FILE,
TF_OD_API_LABELS_FILE,
TF_OD_API_INPUT_SIZE,
TF_OD_API_IS_QUANTIZED);
cropSize = TF_OD_API_INPUT_SIZE;
} catch (final IOException e) {
e.printStackTrace();
LOGGER.e(e, "Exception initializing classifier!");
Toast toast =
Toast.makeText(
getApplicationContext(), "Classifier could not be initialized", Toast.LENGTH_SHORT);
toast.show();
finish();
}
previewWidth = size.getWidth();
previewHeight = size.getHeight();
sensorOrientation = rotation - getScreenOrientation();
LOGGER.i("Camera orientation relative to screen canvas: %d", sensorOrientation);
LOGGER.i("Initializing at size %dx%d", previewWidth, previewHeight);
rgbFrameBitmap = Bitmap.createBitmap(previewWidth, previewHeight, Bitmap.Config.ARGB_8888);
croppedBitmap = Bitmap.createBitmap(cropSize, cropSize, Bitmap.Config.ARGB_8888);
frameToCropTransform =
ImageUtils.getTransformationMatrix(
previewWidth, previewHeight,
cropSize, cropSize,
sensorOrientation, MAINTAIN_ASPECT);
cropToFrameTransform = new Matrix();
frameToCropTransform.invert(cropToFrameTransform);
// trackingOverlay = findViewById(R.id.tracking_overlay);
// trackingOverlay.addCallback(
// new OverlayView.DrawCallback() {
// @Override
// public void drawCallback(final Canvas canvas) {
// tracker.draw(canvas);
// if (isDebug()) {
// tracker.drawDebug(canvas);
// }
// }
// });
tracker.setFrameConfiguration(previewWidth, previewHeight, sensorOrientation);
}
private void updateTitle(TIME_ENUM info) {
Message message = mHandler.obtainMessage();
message.what = UPDATE_TITLE;
message.obj = info;
mHandler.sendMessage(message);
}
private Handler mHandler = new Handler() {
// message
@Override
public void handleMessage(Message msg) {
super.handleMessage(msg);
Log.e("HANDLER", msg.what + "");
switch (msg.what) {
case UPDATE_TITLE:
TIME_ENUM info = (TIME_ENUM) msg.obj;
tvNoti.setText(String.format(info.mTitle, tollgateNum + 1));
if (info == TIME_ENUM.END_POINT) {
ObjectAnimator animator = ObjectAnimator.ofFloat(selectedTollgate, "rotationY", 0, 720);
animator.setRepeatCount(ObjectAnimator.INFINITE);
animator.setDuration(3000);
animator.start();
}
break;
}
}
};
private LegacyCameraConnectionFragment lf;
private FragmentTransaction transaction;
private FragmentManager fragmentManager;
// private CameraActivity P ;//= new Preview();
// private CameraConnectionFragment cf;
private WindowManager.LayoutParams layoutParameter;
private View.OnClickListener clickListener = new View.OnClickListener() {
@Override
public void onClick(View view) {
switch (view.getId()) {
case R.id.ibt_logo:
Intent intent = new Intent(getApplicationContext(), SettingActivity.class);
startActivity(intent);
finish();
break;
case R.id.iv_arrow: // 진행방향 변경
direction = ++direction % 3; // 0 -> 1 -> 2 -> 0 -> ...
switch (direction) {
case 0: // 좌회전
switchDirection.setImageResource(R.drawable.left);
break;
case 1: // 직진
switchDirection.setImageResource(R.drawable.straight);
break;
case 2: // 우회전
switchDirection.setImageResource(R.drawable.right);
break;
}
break;
case R.id.button_simulation: // 시뮬레이션 시작
updateTitle(TIME_ENUM.START);
// dataThread = new DataThread();
// dataThread.start();
view.setVisibility(view.GONE);
Log.e("123","3333");
plz.openCamera();
break;
case R.id.button: // 상세정보 버튼
if (simulationResult != null) {
createSimulationResultDialog(); // 시뮬레이션 결과 dialog 표시
} else {
Toast.makeText(MainActivity.this, "시뮬레이션이 완료되지 않았습니다.", Toast.LENGTH_SHORT).show();
}
break;
default:
}
}
};
// @Override
// public synchronized void onResume() {
// super.onResume();
// try {
// detector =
// TFLiteObjectDetectionAPIModel.create(
// getAssets(),
// TF_OD_API_MODEL_FILE,
// TF_OD_API_LABELS_FILE,
// TF_OD_API_INPUT_SIZE,
// TF_OD_API_IS_QUANTIZED);
// }catch (Exception e){
// e.printStackTrace();
// }
// }
private class DataThread extends Thread {
Socket socket;
String ip = PreferenceUtils.getStringValue(Constant.PREFS_KEY_IP);
int port = Integer.parseInt(PreferenceUtils.getStringValue(Constant.PREFS_KEY_DATA_PORT));
private InputStream in;
private DataOutputStream out;
//
// private boolean connect() {
// try {
// socket = new Socket();
// SocketAddress addrs = new InetSocketAddress(ip, port);
// socket.connect(addrs, 5000);
//
// } catch (Exception e) {
// e.printStackTrace();
// Log.e("socket", "Failed connecting to server.");
// return false;
// }
// return true;
// }
private SimulationResult receiveData() {
// 파일 사이즈
int size = 0;
try {
byte[] sizeBuff = new byte[4];
while (true) {
if (in.read(sizeBuff, 0, 4) != -1) {
int MASK = 0xFF;
size = sizeBuff[0] & MASK;
size = size + ((sizeBuff[1] & MASK) << 8);
size = size + ((sizeBuff[2] & MASK) << 16);
size = size + ((sizeBuff[3] & MASK) << 24);
Log.e("socket", "File Size: " + size);
break;
}
}
} catch (IOException e) {
Log.e("socket", "Failed receiving file size.");
e.printStackTrace();
return null;
}
// 파일 받기 (읽기)
try {
byte[] bytes = new byte[size];
String results = null;
while (socket.isConnected()) {
while (in.read(bytes, 0, size) != -1) {
results = new String(bytes);
Log.e("socket", "Received file content: " + results);
break;
}
break;
}
// in.close();
if (results != null && !results.isEmpty()) {
String[] lineSplit = results.split("\n");
if (lineSplit.length >= 2) {
// 결정된 톨게이트 번호
tollgateNum = Integer.parseInt(lineSplit[0]);
selectedTollgate = (MECTollgateView) infoTollgates.getChildAt(tollgateNum);
String[] tollData = lineSplit[1].split("\t");
SimulationResult simulationResult = new SimulationResult(tollData);
simulationResult.setTollgateNum(tollgateNum + 1);
return simulationResult;
}
}
} catch (IOException e) {
Log.e("socket", "Failed receiving simulation data.");
e.printStackTrace();
return null;
}
return null;
}
}
private void createSimulationResultDialog() {
// Bind views
View dialogView = LayoutInflater.from(getApplicationContext()).inflate(R.layout.dialog_result, rootView, false);
TextView tvTollgate = dialogView.findViewById(R.id.tollgate_num);
TextView tvAvgTotalTime = dialogView.findViewById(R.id.avg_total_time);
TextView tvAvgTollgateProcessTime = dialogView.findViewById(R.id.avg_tollgate_process_time);
TextView tvAvgHipassProcessTime = dialogView.findViewById(R.id.avg_hipass_process_time);
TextView tvAvgNonHipassProcessTime = dialogView.findViewById(R.id.avg_non_hipass_process_time);
TextView tvAvgTotalWaitingTime = dialogView.findViewById(R.id.avg_total_waiting_time);
TextView tvAvgHipassWaitingTime = dialogView.findViewById(R.id.avg_hipass_waiting_time);
TextView tvAvgNonHipassWaitingTime = dialogView.findViewById(R.id.avg_non_hipass_waiting_time);
TextView tvAvgQueueLength = dialogView.findViewById(R.id.avg_queue_length);
TextView tvAvgHipassQueueLength = dialogView.findViewById(R.id.avg_hipass_queue_length);
TextView tvAvgNonHipassQueueLength = dialogView.findViewById(R.id.avg_non_hipass_queue_length);
// Set values
tvTollgate.setText(String.valueOf(simulationResult.getTollgateNum()));
tvAvgTotalTime.setText(stringToDouble(simulationResult.getAvgTotalTime(), true));
tvAvgTollgateProcessTime.setText(stringToDouble(simulationResult.getAvgTollgateProcessTime(), true));
tvAvgHipassProcessTime.setText(stringToDouble(simulationResult.getAvgHipassProcessTime(), true));
tvAvgNonHipassProcessTime.setText(stringToDouble(simulationResult.getAvgNonHipassProcessTime(), true));
tvAvgTotalWaitingTime.setText(stringToDouble(simulationResult.getAvgTotalWaitingTime(), true));
tvAvgHipassWaitingTime.setText(stringToDouble(simulationResult.getAvgHipassWaitingTime(), true));
tvAvgNonHipassWaitingTime.setText(stringToDouble(simulationResult.getAvgNonHipassWaitingTime(), true));
tvAvgQueueLength.setText(stringToDouble(simulationResult.getAvgQueueLength(), false));
tvAvgHipassQueueLength.setText(stringToDouble(simulationResult.getAvgHipassQueueLength(), false));
tvAvgNonHipassQueueLength.setText(stringToDouble(simulationResult.getAvgNonHipassQueueLength(), false));
final AlertDialog.Builder builder = new AlertDialog.Builder(MainActivity.this)
.setView(dialogView)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
@Override
public void onClick(DialogInterface dialog, int which) {
dialog.dismiss();
}
});
// runOnUiThread(new Runnable() {
// @Override
// public void run() {
//// builder.create().show();
//// showFrameInfo(previewWidth + "x" + previewHeight);
// showCropInfo(cropCopyBitmap.getWidth() + "x" + cropCopyBitmap.getHeight());
// showInference(lastProcessingTimeMs + "ms");
//
// }
// });
}
//잘못된 부분 밝견
private String stringToDouble(String value, boolean sec) {
try {
if (sec)
return String.format("%.2f초", Double.parseDouble(value));
else {
return String.format("%.2f대", Double.parseDouble(value));
}
//return Double.parseDouble(value);
} catch (Exception e) {
e.printStackTrace();
}
return "N/A";
}
}