
Firebase ML Kit text detection does not detect text clearly

The project shows the recognized text in a TextView after a photo is taken with the camera, using Firebase ML Kit for text detection. It does not detect the text clearly: it picks up some of the words, but not all of them. I am passing a Bitmap to the detector and I don't know whether the Bitmap is causing the problem. Should I use a SurfaceView for the camera instead? Or what is the solution?
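
Note: the Bitmap pulled from the camera intent's "data" extra (see onActivityResult below) is only a small thumbnail, which may be why recognition struggles. Below is a minimal sketch of requesting a full-resolution capture instead, assuming a FileProvider is declared in the manifest; the authority "com.example.fileprovider", the file name and the photoUri field are placeholders, not part of the original project.

// Hypothetical sketch only; replaces the thumbnail-based capture/handling.
// Needs: androidx.core.content.FileProvider, java.io.File, java.io.IOException,
// android.os.Environment, android.net.Uri.
private Uri photoUri;

private void dispatchFullSizePictureIntent() {
    Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
    if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
        File photoFile = new File(getExternalFilesDir(Environment.DIRECTORY_PICTURES),
                "ocr_capture.jpg");
        // Requires a <provider> (FileProvider) entry in AndroidManifest.xml.
        photoUri = FileProvider.getUriForFile(this, "com.example.fileprovider", photoFile);
        takePictureIntent.putExtra(MediaStore.EXTRA_OUTPUT, photoUri);
        startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE);
    }
}

@Override
protected void onActivityResult(int requestCode, int resultCode, Intent data) {
    super.onActivityResult(requestCode, resultCode, data);
    if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
        try {
            // Full-resolution image read straight from the saved file.
            FirebaseVisionImage image = FirebaseVisionImage.fromFilePath(this, photoUri);
            mImageView.setImageURI(photoUri);
            // Pass `image` to the recognizer instead of FirebaseVisionImage.fromBitmap(thumbnail).
        } catch (IOException e) {
            e.printStackTrace();
        }
    }
}

Building the FirebaseVisionImage with fromFilePath also takes the image's EXIF rotation into account, which a raw Bitmap from the thumbnail extra does not.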


activity_main.xml

<?xml version="1.0" encoding="utf-8"?>
<androidx.constraintlayout.widget.ConstraintLayout xmlns:android="http://schemas.android.com/apk/res/android"
    xmlns:app="http://schemas.android.com/apk/res-auto"
    xmlns:tools="http://schemas.android.com/tools"
    android:layout_width="match_parent"
    android:layout_height="match_parent"
    tools:context=".MainActivity">

    <ImageButton
        android:id="@+id/cameraButton"
        android:layout_width="108dp"
        android:layout_height="72dp"
        android:layout_marginStart="44dp"
        android:layout_marginTop="8dp"
        android:layout_marginBottom="32dp"
        android:background="@color/white"
        android:src="@drawable/ic_baseline_camera_alt_24"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintStart_toStartOf="parent"
        app:layout_constraintTop_toBottomOf="@+id/textView"
        app:layout_constraintVertical_bias="0.427" />

    <ImageButton
        android:id="@+id/detectButton"
        android:layout_width="108dp"
        android:layout_height="72dp"
        android:layout_marginTop="8dp"
        android:layout_marginEnd="44dp"
        android:layout_marginBottom="41dp"
        android:background="@color/white"
        android:src="@drawable/ic_baseline_done_outline_24"
        app:layout_constraintBottom_toBottomOf="parent"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintTop_toBottomOf="@+id/textView"
        app:layout_constraintVertical_bias="0.445" />

    <ImageView
        android:id="@+id/mImageView"
        android:layout_width="0dp"
        android:layout_height="346dp"
        android:layout_marginStart="8dp"
        android:layout_marginTop="8dp"
        android:layout_marginEnd="8dp"
        android:scaleType="fitXY"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintStart_toStartOf="parent"
        app:layout_constraintTop_toTopOf="parent"
        app:srcCompat="@drawable/ic_baseline_image_24" />

    <TextView
        android:id="@+id/textView"
        android:layout_width="0dp"
        android:layout_height="wrap_content"
        android:layout_marginStart="16dp"
        android:layout_marginTop="24dp"
        android:layout_marginEnd="16dp"
        android:fontFamily="@font/segoeui"
        android:textSize="20sp"
        app:layout_constraintEnd_toEndOf="parent"
        app:layout_constraintHorizontal_bias="0.0"
        app:layout_constraintStart_toStartOf="parent"
        app:layout_constraintTop_toBottomOf="@+id/mImageView" />

</androidx.constraintlayout.widget.ConstraintLayout>

MainActivity.java

public class MainActivity extends AppCompatActivity {
    ImageView mImageView;
    ImageButton cameraBtn;
    ImageButton detectBtn;
    Bitmap imageBitmap;
    TextView textView;
    String log = "error";
    static final int REQUEST_IMAGE_CAPTURE = 1;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        mImageView = findViewById(R.id.mImageView);
        cameraBtn = findViewById(R.id.cameraButton);
        detectBtn = findViewById(R.id.detectButton);
        textView = findViewById(R.id.textView);
        textView.setTypeface(ResourcesCompat.getFont(this, R.font.segoeui));

        cameraBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                dispatchTakePictureIntent();
                textView.setText("");
            }
        });
        detectBtn.setOnClickListener(new View.OnClickListener() {
            @Override
            public void onClick(View view) {
                detectTextFromImage();

            }
        });
    }


    private void dispatchTakePictureIntent() {
        Intent takePictureIntent = new Intent(MediaStore.ACTION_IMAGE_CAPTURE);
        if (takePictureIntent.resolveActivity(getPackageManager()) != null) {
            startActivityForResult(takePictureIntent, REQUEST_IMAGE_CAPTURE);
        }
    }

    @Override
    protected void onActivityResult(int requestCode, int resultCode, Intent data)
    {
        super.onActivityResult(requestCode, resultCode, data);

        if (requestCode == REQUEST_IMAGE_CAPTURE && resultCode == RESULT_OK) {
              Bundle extras = data.getExtras();
              imageBitmap = (Bitmap) extras.get("data");
              mImageView.setImageBitmap(imageBitmap);
        }
    }

    private void detectTextFromImage() {
        FirebaseVisionImage firebaseVisionImage = FirebaseVisionImage.fromBitmap(imageBitmap);
        FirebaseVisionCloudTextRecognizerOptions options = new FirebaseVisionCloudTextRecognizerOptions.Builder()
                .setLanguageHints(Arrays.asList("eng","hi"))
                .build();
        FirebaseVisionTextRecognizer detector = FirebaseVision.getInstance()
                .getCloudTextRecognizer(options);
        Task<FirebaseVisionText> result =
                detector.processImage(firebaseVisionImage)
                        .addOnSuccessListener(new OnSuccessListener<FirebaseVisionText>() {
                            @Override
                            public void onSuccess(FirebaseVisionText firebaseVisionText) {
                                // Task completed successfully
                                // ...
                                displayTextFromImage(firebaseVisionText);
                            }
                        })
                        .addOnFailureListener(
                                new OnFailureListener() {
                                    @Override
                                    public void onFailure(@NonNull Exception e) {
                                        // Task failed with an exception
                                    }
                                });

    }

    private void displayTextFromImage(FirebaseVisionText firebaseVisionText) {
        List<FirebaseVisionText.TextBlock> blockList = firebaseVisionText.getTextBlocks();
        if (blockList.size() == 0){
            Toast.makeText(this,"No Text Found in image!",Toast.LENGTH_SHORT).show();
        }
        else{
            for (FirebaseVisionText.TextBlock block: firebaseVisionText.getTextBlocks()) {
                String text = block.getText();
                textView.setText(text);
            }
        }
    }
}

There are text recognition sample apps for both the iOS and Android platforms at https://developers.google.com/ml-kit/samples (Vision Quickstart), and it seems both can successfully detect the text in a given image?

private void displayTextFromImage(FirebaseVisionText firebaseVisionText) {
    List<FirebaseVisionText.TextBlock> blockList = firebaseVisionText.getTextBlocks();
    if (blockList.size() == 0) {
        Toast.makeText(this, "No Text Found in image!", Toast.LENGTH_SHORT).show();
    } else {
        String text = "";
        for (FirebaseVisionText.TextBlock block : firebaseVisionText.getTextBlocks()) {
            text = text + "\n" + block.getText();
            textView.setText(text);
        }
    }
}

Declaring and initializing String text inside the for loop causes you to lose the text of the previous block. ML Kit actually treats each line as one block, and the loop executes so fast that you only see the last line (the last block) of text set on your TextView. Replace your code with the one above to resolve the issue.
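
As a side note (not part of the original answer), a FirebaseVisionText result is structured as blocks, lines and elements, so you can also walk it at line or word level if block-level text is too coarse. A small illustrative sketch:

// Sketch: walking the block -> line -> element hierarchy of a FirebaseVisionText result.
StringBuilder builder = new StringBuilder();
for (FirebaseVisionText.TextBlock block : firebaseVisionText.getTextBlocks()) {
    for (FirebaseVisionText.Line line : block.getLines()) {
        // line.getText() is one recognized line; append them all instead of overwriting.
        builder.append(line.getText()).append("\n");
        for (FirebaseVisionText.Element element : line.getElements()) {
            // element.getText() is a single word/token, element.getBoundingBox() its position.
        }
    }
}
textView.setText(builder.toString());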
