
I can detect the largest contour of an answer sheet (20 questions, each with 4 alternatives). How can I detect the marked (blackened) areas inside that largest rectangular contour?

After drawing the largest contour, what should I do next? Split the rectangle into a 20x4 grid of cells? Or run contour detection again, but this time only inside the rectangle? I am not sure which approach I need; I just want to read which answers are marked.

I have looked at this document.

How do I do the "image splitting and segmentation" part?
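For reference, a minimal sketch of the cell-splitting idea mentioned above, assuming you already have a binarized (CV_8UC1, marks shown as white) and perspective-corrected image of just the answer grid. The helper name `readMarks`, the `binarySheet` argument and the 30% fill threshold are illustrative assumptions, not code from this thread:

    import org.opencv.core.Core;
    import org.opencv.core.Mat;
    import org.opencv.core.Rect;

    // binarySheet: CV_8UC1 image of the answer grid only, marks = white pixels
    static String[] readMarks(Mat binarySheet, int questions, int options) {
        String[] answers = new String[questions];
        int cellW = binarySheet.cols() / options;
        int cellH = binarySheet.rows() / questions;
        for (int q = 0; q < questions; q++) {
            answers[q] = "";
            for (int a = 0; a < options; a++) {
                // cut out the cell for question q, option a
                Mat cell = binarySheet.submat(new Rect(a * cellW, q * cellH, cellW, cellH));
                // count white pixels; treat the cell as marked if ~30% of it is filled (tune this)
                if (Core.countNonZero(cell) > 0.3 * cellW * cellH) {
                    answers[q] += (char) ('A' + a); // more than one letter means a double mark
                }
            }
        }
        return answers;
    }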

(image: the answer sheet)

public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) { 

     return findLargestRectangle(inputFrame.rgba()); 
    } 


    private Mat findLargestRectangle(Mat original_image) { 
     Mat imgSource = original_image; 
     hierarchy = new Mat(); 

     //convert the image to grayscale 
     Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_BGR2GRAY); 

     //detect edges (Canny outputs an 8-bit binary edge image) 
     Imgproc.Canny(imgSource, imgSource, 50, 50); 

     //apply Gaussian blur to smooth the edges and join broken segments 
     Imgproc.GaussianBlur(imgSource, imgSource, new Size(5, 5), 5); 

     //find the contours 
     List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); 
     Imgproc.findContours(imgSource, contours, hierarchy, Imgproc.RETR_LIST, Imgproc.CHAIN_APPROX_SIMPLE); 

     hierarchy.release(); 

     double maxArea = -1; 
     int maxAreaIdx = -1; 
     MatOfPoint temp_contour = contours.get(0); //start with the first contour as the initial candidate 
     MatOfPoint2f approxCurve = new MatOfPoint2f(); 
     Mat largest_contour = contours.get(0); 
     List<MatOfPoint> largest_contours = new ArrayList<MatOfPoint>(); 
     for (int idx = 0; idx < contours.size(); idx++) { 
      temp_contour = contours.get(idx); 
      double contourarea = Imgproc.contourArea(temp_contour); 
      //compare this contour to the previous largest contour found 
      if (contourarea > maxArea) { 
       //check if this contour is a square 
       MatOfPoint2f new_mat = new MatOfPoint2f(temp_contour.toArray()); 
       int contourSize = (int)temp_contour.total(); 
       Imgproc.approxPolyDP(new_mat, approxCurve, contourSize*0.05, true); 
       if (approxCurve.total() == 4) { 
        maxArea = contourarea; 
        maxAreaIdx = idx; 
        largest_contours.add(temp_contour); 
        largest_contour = temp_contour; 
       } 
      } 
     } 
     MatOfPoint temp_largest = largest_contours.get(largest_contours.size()-1); 
     largest_contours = new ArrayList<MatOfPoint>(); 
     largest_contours.add(temp_largest); 


     //convert back to a 3-channel image so the contour can be drawn in color 
     Imgproc.cvtColor(imgSource, imgSource, Imgproc.COLOR_GRAY2RGB); 
     Imgproc.drawContours(imgSource, contours, maxAreaIdx, new Scalar(0, 255, 0), 1); 
     Log.d(TAG, "Largest contour: " + contours.get(maxAreaIdx).toString()); 


     return imgSource; 
    } 

UPDATE 1:

I would like to thank @sturkmen for his answer. Now I can read the sheet and find the black regions. Here is the Android code:

public View onCreateView(LayoutInflater inflater, ViewGroup container, 
         Bundle savedInstanceState) { 
    View _view = inflater.inflate(R.layout.fragment_main, container, false); 
    // Inflate the layout for this fragment 


    Button btnTest = (Button) _view.findViewById(R.id.btnTest); 
    btnTest.setOnClickListener(new View.OnClickListener() { 
     @Override 
     public void onClick(View v) { 

      Mat img = Imgcodecs.imread(mediaStorageDir().getPath() + "/" + "test2.jpg"); 
      if (img.empty()) { 
       Log.d("Fragment", "IMG EMPTY"); 
      } 


      Mat gray = new Mat(); 
      Mat thresh = new Mat(); 

      //convert the image to grayscale 
      Imgproc.cvtColor(img, gray, Imgproc.COLOR_BGR2GRAY); 

      //binarize with an inverted Otsu threshold (marks become white) 
      Imgproc.threshold(gray, thresh, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU); 
      Mat temp = thresh.clone(); 
      //find the contours 
      Mat hierarchy = new Mat(); 

      Mat corners = new Mat(4,1, CvType.CV_32FC2); 
      List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); 
      Imgproc.findContours(temp, contours,hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); 
      hierarchy.release(); 

      for (int idx = 0; idx < contours.size(); idx++) 
      { 
       MatOfPoint contour = contours.get(idx); 
       MatOfPoint2f contour_points = new MatOfPoint2f(contour.toArray()); 
       RotatedRect minRect = Imgproc.minAreaRect(contour_points); 
       Point[] rect_points = new Point[4]; 
       minRect.points(rect_points); 
       if(minRect.size.height > img.width()/2) 
       { 
        List<Point> srcPoints = new ArrayList<Point>(4); 
        srcPoints.add(rect_points[2]); 
        srcPoints.add(rect_points[3]); 
        srcPoints.add(rect_points[0]); 
        srcPoints.add(rect_points[1]); 

        corners = Converters.vector_Point_to_Mat(
          srcPoints, CvType.CV_32F); 
       } 

      } 
      Imgproc.erode(thresh, thresh, new Mat(), new Point(-1,-1), 10); 
      Imgproc.dilate(thresh, thresh, new Mat(), new Point(-1,-1), 5); 
      Mat results = new Mat(1000,250,CvType.CV_8UC3); 
      Mat quad = new Mat(1000,250,CvType.CV_8UC1); 

      List<Point> dstPoints = new ArrayList<Point>(4); 
      dstPoints.add(new Point(0, 0)); 
      dstPoints.add(new Point(1000, 0)); 
      dstPoints.add(new Point(1000, 250)); 
      dstPoints.add(new Point(0, 250)); 
      Mat quad_pts = Converters.vector_Point_to_Mat(
        dstPoints, CvType.CV_32F); 

      Mat transmtx = Imgproc.getPerspectiveTransform(corners, quad_pts); 
      Imgproc.warpPerspective(img, results, transmtx, new Size(1000,250)); 
      Imgproc.warpPerspective(thresh, quad, transmtx, new Size(1000,250)); 

      Imgproc.resize(quad,quad,new Size(20,5)); 

      Imgcodecs.imwrite("results.png",quad); 

      //show image 
      showImage(quad); 

      //store image 
      storeImage(quad); 

     } 

    }); 

    return _view; 
} 

public void showImage (Mat img) { 
    ImageView imgView = (ImageView) getActivity().findViewById(R.id.sampleImageView); 
    //Mat mRgba = new Mat(); 

    //mRgba = Utils.loadResource(MainAct.this, R.drawable.your_image,Highgui.CV_LOAD_IMAGE_COLOR); 
    Bitmap img2 = Bitmap.createBitmap(img.cols(), img.rows(),Bitmap.Config.ARGB_8888); 
    Utils.matToBitmap(img, img2); 
    imgView.setImageBitmap(img2); 
} 

public File mediaStorageDir() { 
    File _mediaStorageDir = new File(Environment.getExternalStorageDirectory() 
      + "/Android/data/" 
      + getActivity().getApplicationContext().getPackageName()); 

    return _mediaStorageDir; 
} 

public void storeImage(Mat matImg) { 

    Bitmap bitmapImg = Bitmap.createBitmap(matImg.cols(), matImg.rows(),Bitmap.Config.ARGB_8888); 
    Utils.matToBitmap(matImg, bitmapImg); 
    String timeStamp = new SimpleDateFormat("ddMMyyyy_HHmm").format(new Date()); 
    File mediaFile; 
    String mImageName="IMG_"+ timeStamp +".jpg"; 
    mediaFile = new File(mediaStorageDir().getPath() + File.separator + mImageName); 

    File pictureFile = mediaFile; 

    try { 
     FileOutputStream fos = new FileOutputStream(pictureFile); 
     bitmapImg.compress(Bitmap.CompressFormat.PNG, 90, fos); 
     fos.close(); 
    } catch (FileNotFoundException e) { 
     Log.d("FragmentMain", "File not found: " + e.getMessage()); 
    } catch (IOException e) { 
     Log.d("FragmentMain", "Error accessing file: " + e.getMessage()); 
    } 
} 
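The code above stops after saving the 20x5 image. For completeness, a minimal sketch of how the marks could then be read, mirroring the decoding loop from the C++ answer below; it assumes, as in that answer, that row 0 of `quad` holds the question numbers and rows 1-4 the choices A-D, and it would go right after `storeImage(quad)` inside `onClick` (the improved answer at the end of this thread does the same and also draws the result onto the warped image):

      StringBuilder sheet = new StringBuilder();
      for (int i = 0; i < quad.cols(); i++) {
       String answer = "";
       if (quad.get(1, i)[0] != 0) answer += "A";
       if (quad.get(2, i)[0] != 0) answer += "B";
       if (quad.get(3, i)[0] != 0) answer += "C";
       if (quad.get(4, i)[0] != 0) answer += "D";
       if (answer.length() > 1) answer = "X"; // double mark
       if (answer.isEmpty())    answer = "-"; // blank
       sheet.append(answer);
      }
      Log.d("FragmentMain", "Answers: " + sheet);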

Answers


Here is my trial code as a sample.

I hope it will be helpful. (I will add some explanation of the code later.)

Test image (I edited your image so that it contains one blank question and one invalid double mark):

http://answers.opencv.org/upfiles/14639435664447751.jpg

Result image:

http://answers.opencv.org/upfiles/14639682486508377.jpeg

#include <opencv2/highgui.hpp> 
#include <opencv2/imgproc.hpp> 

using namespace cv; 
using namespace std; 

int main(int argc, const char** argv) 
{ 
    Mat img = imread(argv[1]); 
    if(img.empty()) 
    { 
     return -1; 
    } 

    Size dims(20,5); // 20 columns (questions) x 5 rows (number row + choices A-D); change according to the input 
    Mat gray,thresh; 
    cvtColor(img, gray, COLOR_BGR2GRAY); 
    threshold(gray, thresh, 0, 255, THRESH_BINARY_INV + THRESH_OTSU); 

    Mat quad(img.size(), CV_8UC1); // should be improved 
    Mat results(img.size(), CV_8UC3); 

    vector<Point2f> quad_pts; 
    quad_pts.push_back(cv::Point2f(0, 0)); 
    quad_pts.push_back(cv::Point2f(quad.cols, 0)); 
    quad_pts.push_back(cv::Point2f(quad.cols, quad.rows)); 
    quad_pts.push_back(cv::Point2f(0, quad.rows)); 

    vector<Point2f> corners; 
    vector<vector<Point> > contours; 

    findContours(thresh.clone(), contours, RETR_EXTERNAL, CHAIN_APPROX_SIMPLE); 

    for(size_t i = 0; i< contours.size(); i++) 
    { 
     RotatedRect minRect = minAreaRect(Mat(contours[i])); 

     // rotated rectangle 
     Point2f rect_points[4]; 
     minRect.points(rect_points); 

     if(Rect(minRect.boundingRect()).width > img.cols/2) // should be improved 
      for(int j = 0; j < 4; j++) 
      { 
       // for every corner of the target quad take the nearest corner of the 
       // rotated rectangle, so the source corners come out in the right order 
       Point2f pt = quad_pts[j]; 
       Point2f nearest_pt = rect_points[0]; 
       float dist = norm(pt - nearest_pt); 
       for(int k = 1; k < 4; k++) 
       { 
        if(norm(pt - rect_points[k]) < dist) 
        { 
         dist = norm(pt - rect_points[k]); 
         nearest_pt = rect_points[k]; 
        } 
       } 
       corners.push_back(nearest_pt); 
      } 
    } 

    // erode/dilate to remove thin lines and text so only the filled marks survive 
    erode(thresh,thresh,Mat(),Point(-1,-1), 10); // should be improved 
    dilate(thresh,thresh,Mat(),Point(-1,-1), 5); // should be improved 

    Mat transmtx = getPerspectiveTransform(corners, quad_pts); 
    warpPerspective(img, results, transmtx, img.size()); // Create a Mat To Show results 
    warpPerspective(thresh, quad, transmtx, img.size()); 

    // shrink to 20x5 so every answer cell collapses into a single pixel 
    resize(quad,quad,dims); 

    // one column per question: rows 1..4 of the 20x5 image correspond to choices A..D 
    for(int i = 0; i < quad.cols; i++) 
    { 
     String answer = ""; 

     answer += quad.at<uchar>(1,i) == 0 ? "" : "A"; 
     answer += quad.at<uchar>(2,i) == 0 ? "" : "B"; 
     answer += quad.at<uchar>(3,i) == 0 ? "" : "C"; 
     answer += quad.at<uchar>(4,i) == 0 ? "" : "D"; 

     if(answer.length() > 1) answer = "X"; // Double mark 
     int y = 0; 
     if(answer == "A") y = results.rows/dims.height; 
     if(answer == "B") y = results.rows/dims.height *2; 
     if(answer == "C") y = results.rows/dims.height *3; 
     if(answer == "D") y = results.rows/dims.height *4; 
     if(answer == "") answer = "[-]"; 
     putText(results, answer, Point(50* i + 15, 30 + y), FONT_HERSHEY_PLAIN, 2, Scalar(0,0,255),2); 

    } 
    imshow("results", results); 
    waitKey(0); 

    return 0; 
} 

As a challenge to myself, I tried to implement the main part in Java (beginner copy-paste code):

Mat img = Imgcodecs.imread("test.jpg"); 
Mat gray = new Mat(); 
Mat thresh = new Mat(); 

//convert the image to grayscale 
Imgproc.cvtColor(img, gray, Imgproc.COLOR_BGR2GRAY); 

//binarize with an inverted Otsu threshold (marks become white) 
Imgproc.threshold(gray, thresh, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU); 
Mat temp = thresh.clone(); 
//find the contours 
Mat hierarchy = new Mat(); 

Mat corners = new Mat(4,1,CvType.CV_32FC2); 
List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); 
Imgproc.findContours(temp, contours,hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); 
hierarchy.release(); 

for (int idx = 0; idx < contours.size(); idx++) 
{ 
    MatOfPoint contour = contours.get(idx); 
    MatOfPoint2f contour_points = new MatOfPoint2f(contour.toArray()); 
    RotatedRect minRect = Imgproc.minAreaRect(contour_points); 
    Point[] rect_points = new Point[4]; 
    minRect.points(rect_points); 
    if(minRect.size.height > img.width()/2) 
    { 
     List<Point> srcPoints = new ArrayList<Point>(4); 
     srcPoints.add(rect_points[2]); 
     srcPoints.add(rect_points[3]); 
     srcPoints.add(rect_points[0]); 
     srcPoints.add(rect_points[1]); 

     corners = Converters.vector_Point_to_Mat(
         srcPoints, CvType.CV_32F); 
    } 

} 
Imgproc.erode(thresh, thresh, new Mat(), new Point(-1,-1), 10); 
Imgproc.dilate(thresh, thresh, new Mat(), new Point(-1,-1), 5); 
Mat results = new Mat(1000,250,CvType.CV_8UC3); 
Mat quad = new Mat(1000,250,CvType.CV_8UC1); 

List<Point> dstPoints = new ArrayList<Point>(4); 
dstPoints.add(new Point(0, 0)); 
dstPoints.add(new Point(1000, 0)); 
dstPoints.add(new Point(1000, 250)); 
dstPoints.add(new Point(0, 250)); 
Mat quad_pts = Converters.vector_Point_to_Mat(
        dstPoints, CvType.CV_32F); 

Mat transmtx = Imgproc.getPerspectiveTransform(corners, quad_pts); 
Imgproc.warpPerspective(img, results, transmtx, new Size(1000,250)); 
Imgproc.warpPerspective(thresh, quad, transmtx, new Size(1000,250)); 

Imgproc.resize(quad,quad,new Size(20,5)); 

Imgcodecs.imwrite("results.png",quad); 

Here is the (20x5 px) result image: http://answers.opencv.org/upfiles/14639684166300772.png


Thank you, I will try it as soon as possible. @sturkmen – RedLEON


I updated the C++ code. – sturkmen


I improved @sturkmen's code.

fragment_main.xml

<FrameLayout xmlns:android="http://schemas.android.com/apk/res/android" 
xmlns:tools="http://schemas.android.com/tools" 
android:layout_width="match_parent" 
android:layout_height="match_parent" 
tools:context="{your package name}.FragmentMain"> 

<!-- TODO: Update blank fragment layout --> 

<LinearLayout 
    android:orientation="vertical" 
    android:layout_width="match_parent" 
    android:layout_height="match_parent"> 

    <Button 
     android:id="@+id/btnTest" 
     android:layout_width="match_parent" 
     android:layout_height="80dp" 
     android:text="Test" /> 

    <ImageView 
     android:id="@+id/sampleImageView" 
     android:layout_width="match_parent" 
     android:layout_height="150dp" 
     android:layout_centerHorizontal="true"/> 
</LinearLayout> 
</FrameLayout> 

AndroidManifest.xml

Add this line for the write permission:

<uses-permission android:name="android.permission.WRITE_EXTERNAL_STORAGE" /> 
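Note that on Android 6.0 (API 23) and newer the manifest entry alone is not enough; the storage permission also has to be requested at runtime before the fragment reads or writes the image files. A minimal sketch using the support library (the request code 1 is arbitrary; this is my addition, not part of the original answer):

    import android.Manifest;
    import android.content.pm.PackageManager;
    import android.support.v4.app.ActivityCompat;
    import android.support.v4.content.ContextCompat;

    // e.g. in onCreateView(), before the button is used
    if (ContextCompat.checkSelfPermission(getActivity(),
            Manifest.permission.WRITE_EXTERNAL_STORAGE) != PackageManager.PERMISSION_GRANTED) {
        ActivityCompat.requestPermissions(getActivity(),
                new String[]{Manifest.permission.WRITE_EXTERNAL_STORAGE}, 1);
    }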

FragmentMain.java

IMAGE FILE: put test.JPG into Internal Storage/Android/data/<your package folder>/

public View onCreateView(LayoutInflater inflater, ViewGroup container, 
         Bundle savedInstanceState) { 
    View _view = inflater.inflate(R.layout.fragment_main, container, false); 

    Button btnTest = (Button) _view.findViewById(R.id.btnTest); 
    btnTest.setOnClickListener(new View.OnClickListener() { 
     @Override 
     public void onClick(View v) { 


      Mat img = Imgcodecs.imread(mediaStorageDir().getPath() + "/" + "test.JPG"); 
      if (img.empty()) { 
       Log.d("FragmentMain", "Empty Image"); 
      } 


      Size dims = new Size (20,5); 
      Mat gray = new Mat(); 
      Mat thresh = new Mat(); 

      //convert the image to grayscale 
      Imgproc.cvtColor(img, gray, Imgproc.COLOR_BGR2GRAY); 
      storeImage(gray); 

      //binarize with an inverted Otsu threshold (marks become white) 
      Imgproc.threshold(gray, thresh, 0, 255, Imgproc.THRESH_BINARY_INV + Imgproc.THRESH_OTSU); 

      storeImage(thresh); 

      Mat temp = thresh.clone(); 
      //find the contours 
      Mat hierarchy = new Mat(); 

      Mat corners = new Mat(4,1, CvType.CV_32FC2); 
      List<MatOfPoint> contours = new ArrayList<MatOfPoint>(); 
      Imgproc.findContours(temp, contours,hierarchy, Imgproc.RETR_EXTERNAL, Imgproc.CHAIN_APPROX_SIMPLE); 
      hierarchy.release(); 

      for (int idx = 0; idx < contours.size(); idx++) 
      { 
       MatOfPoint contour = contours.get(idx); 
       MatOfPoint2f contour_points = new MatOfPoint2f(contour.toArray()); 
       RotatedRect minRect = Imgproc.minAreaRect(contour_points); 
       Point[] rect_points = new Point[4]; 
       minRect.points(rect_points); 
       if(minRect.size.height > img.width()/2) 
       { 
        List<Point> srcPoints = new ArrayList<Point>(4); 
        srcPoints.add(rect_points[2]); 
        srcPoints.add(rect_points[3]); 
        srcPoints.add(rect_points[0]); 
        srcPoints.add(rect_points[1]); 

        corners = Converters.vector_Point_to_Mat(
          srcPoints, CvType.CV_32F); 
       } 

      } 
      Imgproc.erode(thresh, thresh, new Mat(), new Point(-1,-1), 10); 

      storeImage(thresh); 
      Imgproc.dilate(thresh, thresh, new Mat(), new Point(-1,-1), 5); 

      storeImage(thresh); 

      Mat results = new Mat(1000,250,CvType.CV_8UC3); 
      Mat quad = new Mat(1000,250,CvType.CV_8UC1); 

      List<Point> dstPoints = new ArrayList<Point>(4); 
      dstPoints.add(new Point(0, 0)); 
      dstPoints.add(new Point(1000, 0)); 
      dstPoints.add(new Point(1000, 250)); 
      dstPoints.add(new Point(0, 250)); 
      Mat quad_pts = Converters.vector_Point_to_Mat(
        dstPoints, CvType.CV_32F); 

      Mat transmtx = Imgproc.getPerspectiveTransform(corners, quad_pts); 
      Imgproc.warpPerspective(img, results, transmtx, new Size(1000,250)); 
      Imgproc.warpPerspective(thresh, quad, transmtx, new Size(1000,250)); 

      // shrink to 20x5 so every answer cell collapses into a single pixel 
      Imgproc.resize(quad, quad, new Size(20,5)); 

      Imgcodecs.imwrite("results.png",quad); 

      //store image 
      storeImage(quad); 

      //show image 
      showImage(quad); 


      System.out.println(quad.dump()); 

      // one column per question: rows 1..4 of the 20x5 image correspond to choices A..D 
      for(int i = 0; i < quad.cols(); i++) 
      { 
       String answer = ""; 
       double[] d = quad.get(1, i); 
       answer += d[0] == 0 ? "" : "A"; 
       d = quad.get(2, i); 
       answer += d[0] == 0 ? "" : "B"; 
       d = quad.get(3, i); 
       answer += d[0] == 0 ? "" : "C"; 
       d = quad.get(4, i); 
       answer += d[0] == 0 ? "" : "D"; 

       if(answer.length() > 1) answer = "X"; // Double mark 
       int y = 0; 
       if(answer.equals("A")) y = results.rows()/(int) dims.height; 
       if(answer.equals("B")) y = results.rows()/(int) dims.height *2; 
       if(answer.equals("C")) y = results.rows()/(int) dims.height *3; 
       if(answer.equals("D")) y = results.rows()/(int) dims.height *4; 
       if(answer.isEmpty()) answer = "[-]"; 
       Imgproc.putText(results, answer, new Point(50* i + 15, 30 + y), Core.FONT_HERSHEY_PLAIN, 2, new Scalar(0,0,255),2); 

      } 


      //store image 
      storeImage(results); 

      //show image 
      showImage(results); 

     } 

    }); 

    return _view; 
} 
public void showImage (Mat img) { 
    ImageView imgView = (ImageView) getActivity().findViewById(R.id.sampleImageView); 
    //Mat mRgba = new Mat(); 

    //mRgba = Utils.loadResource(MainAct.this, R.drawable.your_image,Highgui.CV_LOAD_IMAGE_COLOR); 
    Bitmap img2 = Bitmap.createBitmap(img.cols(), img.rows(),Bitmap.Config.ARGB_8888); 
    Utils.matToBitmap(img, img2); 
    imgView.setImageBitmap(img2); 
} 

public File mediaStorageDir() { 
    File _mediaStorageDir = new File(Environment.getExternalStorageDirectory() 
      + "/Android/data/" 
      + getActivity().getApplicationContext().getPackageName()); 

    return _mediaStorageDir; 
} 

public void storeImage(Mat matImg) { 

    Bitmap bitmapImg = Bitmap.createBitmap(matImg.cols(), matImg.rows(),Bitmap.Config.ARGB_8888); 
    Utils.matToBitmap(matImg, bitmapImg); 
    String timeStamp = new SimpleDateFormat("ddMMyyyy_HHmm").format(new Date()); 
    File mediaFile; 
    String mImageName="IMG_"+ timeStamp +".jpg"; 
    mediaFile = new File(mediaStorageDir().getPath() + File.separator + mImageName); 

    File pictureFile = mediaFile; 

    try { 
     FileOutputStream fos = new FileOutputStream(pictureFile); 
     bitmapImg.compress(Bitmap.CompressFormat.PNG, 90, fos); 
     fos.close(); 
    } catch (FileNotFoundException e) { 
     Log.d("FragmentMain", "File not found: " + e.getMessage()); 
    } catch (IOException e) { 
     Log.d("FragmentMain", "Error accessing file: " + e.getMessage()); 
    } 
} 

Keep in mind that I updated the C++ code but was not able to update the Java code; that code still needs to be updated. – sturkmen
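What still differs from the updated C++ code is the corner ordering: the C++ version picks, for each corner of the target quad, the nearest corner of the detected rotated rectangle, while the Java ports above hard-code the order rect_points[2], [3], [0], [1]. A rough, untested sketch of that matching in Java, meant to replace the srcPoints block inside the `if (minRect.size.height > img.width()/2)` branch (`imgCorners` plays the role of the C++ `quad_pts`, which in that code coincide with the corners of the input image):

    Point[] imgCorners = {
            new Point(0, 0),
            new Point(img.width(), 0),
            new Point(img.width(), img.height()),
            new Point(0, img.height())
    };
    List<Point> srcPoints = new ArrayList<Point>(4);
    for (Point c : imgCorners) {
        // find the rotated-rect corner closest to this image corner
        Point nearest = rect_points[0];
        double best = Math.hypot(c.x - nearest.x, c.y - nearest.y);
        for (int k = 1; k < 4; k++) {
            double d = Math.hypot(c.x - rect_points[k].x, c.y - rect_points[k].y);
            if (d < best) { best = d; nearest = rect_points[k]; }
        }
        srcPoints.add(nearest);
    }
    corners = Converters.vector_Point_to_Mat(srcPoints, CvType.CV_32F);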