Firebase 연동은 생략하고 layout과 MainActivity만 올림
<?xml version="1.0" encoding="utf-8"?>
<!-- Main screen: progress spinner, photo preview, Album/Capture buttons, label output.
     Note: the previous app:layout_constraint* attributes were removed — they have no
     effect inside a LinearLayout (they only apply to ConstraintLayout children), and
     some referenced a nonexistent @+id/button. -->
<LinearLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    android:gravity="center"
    android:orientation="vertical"
    tools:context=".MainActivity">

    <!-- Shown while the labeler runs; hidden in onCreate() until then. -->
    <ProgressBar
        android:id="@+id/loading"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content" />

    <!-- Displays the selected or captured photo. -->
    <ImageView
        android:id="@+id/image"
        android:layout_width="350dp"
        android:layout_height="500dp"
        android:scaleType="fitXY" />

    <LinearLayout
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_marginTop="10dp"
        android:orientation="horizontal">

        <Button
            android:id="@+id/button_album"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:layout_marginEnd="10dp"
            android:text="Album" />

        <Button
            android:id="@+id/button_capture"
            android:layout_width="wrap_content"
            android:layout_height="wrap_content"
            android:text="Capture" />
    </LinearLayout>

    <!-- Labeling results ("LABEL : confidence%") are appended here. -->
    <TextView
        android:id="@+id/textView"
        android:layout_width="wrap_content"
        android:layout_height="wrap_content"
        android:layout_marginTop="10dp"
        android:gravity="center"
        android:text="__________"
        android:textStyle="bold" />
</LinearLayout>
public class MainActivity extends AppCompatActivity {
    private ImageView imageView;
    private TextView textView;
    private ProgressBar bar;

    // Request / permission codes for the gallery and camera flows.
    private static final int REQUEST_FILE = 10;
    private static final int PERMISSION_FILE = 20;
    private static final int REQUEST_CAMERA = 30;
    private static final int PERMISSION_CAMERA = 40;

    // Absolute path of the temp file the camera app writes the full-size photo to.
    String currentPhotoPath;

    // Remote AutoML model published from the Firebase console.
    FirebaseAutoMLRemoteModel remoteModel =
            new FirebaseAutoMLRemoteModel.Builder("Fire_Scene_202079143722").build();
    // FirebaseAutoMLLocalModel localModel = new FirebaseAutoMLLocalModel.Builder()
    //         .setAssetFilePath("model/manifest.json").build();

    // Only download the remote model over Wi-Fi.
    FirebaseModelDownloadConditions conditions = new FirebaseModelDownloadConditions.Builder()
            .requireWifi()
            .build();

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);

        Button buttonA = findViewById(R.id.button_album);
        Button buttonC = findViewById(R.id.button_capture);
        imageView = findViewById(R.id.image);
        textView = findViewById(R.id.textView);
        bar = findViewById(R.id.loading);
        bar.setVisibility(View.INVISIBLE);

        // Album: request storage permission if needed, then open a picker.
        buttonA.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
                    ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, PERMISSION_FILE);
                } else {
                    Intent intent = new Intent(Intent.ACTION_GET_CONTENT);
                    intent.setType("image/*");
                    startActivityForResult(Intent.createChooser(intent, "Select Picture"), REQUEST_FILE);
                }
            }
        });

        // Capture: request camera permission if needed, then launch the camera app.
        buttonC.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View v) {
                if (ContextCompat.checkSelfPermission(MainActivity.this, Manifest.permission.CAMERA) != PackageManager.PERMISSION_GRANTED) {
                    ActivityCompat.requestPermissions(MainActivity.this, new String[]{Manifest.permission.CAMERA}, PERMISSION_CAMERA);
                } else {
                    dispatchTakePictureIntent();
                }
            }
        });
    }

    /**
     * Shared result handling for both flows: kick off the (conditional) remote
     * model download, run labeling, clear old results, and show the image.
     */
    private void handlePickedImage(Uri uri) {
        FirebaseModelManager.getInstance().download(remoteModel, conditions);
        setLabelerFromRemoteModel(uri);
        textView.setText("");
        imageView.setImageURI(uri);
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, @Nullable Intent data) {
        super.onActivityResult(requestCode, resultCode, data);
        if (resultCode == RESULT_OK) {
            switch (requestCode) {
                case REQUEST_FILE:
                    // Guard against a null intent/URI from the document picker.
                    if (data != null && data.getData() != null) {
                        handlePickedImage(data.getData());
                    }
                    break;
                case REQUEST_CAMERA:
                    // The camera wrote the full-size photo into our temp file.
                    File imgFile = new File(currentPhotoPath);
                    handlePickedImage(Uri.fromFile(imgFile));
                    break;
            }
        }
    }
}
remoteModel이 아니라 localModel을 사용하기 위해선 assets/model 폴더 안에 모델 관련 파일(txt, json, tflite)이 존재해야 함
onCreate()의 중요 역할: 버튼 별 클릭 리스너
onActivityResult()의 중요 역할: requestCode에 따라 다른 이벤트를 발생시킴
/**
 * Launches the stock camera app to capture a full-size photo.
 * A temp file is created first and handed over via EXTRA_OUTPUT so the
 * camera writes the full-resolution image there (without this, only a
 * low-resolution thumbnail would come back in the result intent).
 */
private void dispatchTakePictureIntent() {
    Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    // 인텐트를 처리하기 위한 카메라 액티비티가 있는지 확인 (Ensure that there's a camera activity to handle the intent)
    if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
        // photo 가 들어갈 임시 파일 생성
        File photoFile = null;
        try {
            photoFile = createImageFile();
        } catch (IOException ex) {
            // Surface the failure instead of silently swallowing it; the
            // user would otherwise see nothing happen at all.
            ex.printStackTrace();
            Toast.makeText(MainActivity.this, "Failed to create image file", Toast.LENGTH_SHORT).show();
        }
        if (photoFile != null) { // photoFile 이 성공적으로 만들어졌으면 진행
            // Content URI via FileProvider so the camera app may write to our app-private file.
            Uri photoURI = FileProvider.getUriForFile(this,
                    "com.example.automlvisionedge.fileprovider",
                    photoFile);
            takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoURI);
            startActivityForResult(takePictureIntent, REQUEST_CAMERA);
        }
    }
}
CAPTURE버튼을 눌렀을 때 동작하는 함수
(기본 카메라 앱을 이용하여 사진을 캡처한 후 해당 이미지를 라벨링 할 수 있도록 전처리)
/**
 * Builds an on-device image labeler backed by the remote AutoML model and
 * runs it on the image at {@code uri}. Shows the progress bar while the
 * (asynchronous) labeling is in flight.
 */
private void setLabelerFromRemoteModel(Uri uri) {
    bar.setVisibility(View.VISIBLE);
    FirebaseVisionImageLabeler labeler;
    try {
        FirebaseVisionOnDeviceAutoMLImageLabelerOptions options =
                new FirebaseVisionOnDeviceAutoMLImageLabelerOptions.Builder(remoteModel)
                        .setConfidenceThreshold(0.0f) // report every label; no filtering
                        .build();
        labeler = FirebaseVision.getInstance().getOnDeviceAutoMLImageLabeler(options);
        FirebaseVisionImage image = FirebaseVisionImage.fromFilePath(MainActivity.this, uri);
        processImageLabeler(labeler, image);
    } catch (FirebaseMLException | IOException e) {
        // Fix: hide the spinner on failure, otherwise it would spin forever
        // since the success/failure listeners never run.
        bar.setVisibility(View.INVISIBLE);
        e.printStackTrace();
    }
}
리모트 모델을 통해 라벨러를 set하는 함수
리모트 모델: Firebase AutoML에 데이터세트를 추가시켜 학습시킨 후 게시된 모델
/**
 * Runs the labeler on {@code image} and appends each label with its
 * confidence percentage to the result TextView. Hides the progress bar
 * once a result (success or failure) arrives.
 */
private void processImageLabeler(FirebaseVisionImageLabeler labeler, FirebaseVisionImage image) {
    labeler.processImage(image)
            .addOnSuccessListener(new OnSuccessListener<List<FirebaseVisionImageLabel>>() {
                @Override // 이미지 라벨링 성공
                public void onSuccess(List<FirebaseVisionImageLabel> labels) {
                    bar.setVisibility(View.INVISIBLE);
                    for (FirebaseVisionImageLabel label : labels) {
                        String eachLabel = label.getText().toUpperCase();
                        float confidence = label.getConfidence();
                        // Fix: "" + value can be shorter than 4 chars (e.g. "0.0"),
                        // which made the unconditional subSequence(0, 4) throw
                        // StringIndexOutOfBoundsException. Truncate only when long enough.
                        String pct = "" + confidence * 100;
                        if (pct.length() > 4) {
                            pct = pct.substring(0, 4);
                        }
                        textView.append(eachLabel + " : " + pct + "%" + "\n");
                    }
                }
            })
            .addOnFailureListener(new OnFailureListener() {
                @Override // 이미지 라벨링 실패
                public void onFailure(@NonNull Exception e) {
                    // Fix: hide the spinner on failure too, so it does not spin forever.
                    bar.setVisibility(View.INVISIBLE);
                    Toast.makeText(MainActivity.this, e.getMessage(), Toast.LENGTH_SHORT).show();
                }
            });
}
불러온 라벨러를 이용하여 이미지를 라벨링 및 confidence를 텍스트로 출력
/**
 * Creates an empty, uniquely-named JPEG file in the app-private Pictures
 * directory and remembers its absolute path in {@code currentPhotoPath}.
 *
 * @return the newly created (empty) image file
 * @throws IOException if the file cannot be created
 */
private File createImageFile() throws IOException {
    // Timestamped prefix keeps successive captures from colliding on the same name.
    String stamp = new SimpleDateFormat("yyyyMMdd_HHmmss").format(new Date());
    File picturesDir = getExternalFilesDir(Environment.DIRECTORY_PICTURES);
    File photo = File.createTempFile("JPEG_" + stamp + "_", ".jpg", picturesDir);
    // Save a file: path for use with ACTION_VIEW intents
    currentPhotoPath = photo.getAbsolutePath();
    return photo;
}
기본 카메라 앱을 통해 촬영한 사진이 들어갈 임시 파일을 생성하는 함수
이 작업을 거치지 않으면 썸네일 수준인 낮은 해상도의 캡처 이미지가 ImageView에 보여지고 라벨링 또한 해당 이미지를 대상으로 처리함
본인은 불, 연기 사진을 각각 10장씩 데이터세트에 넣었음
참고자료: https://firebase.google.com/docs/ml-kit/android/label-images-with-automl?hl=ko