ch.zhaw.facerecognitionlibrary.PreProcessor.PreProcessorFactory Java Examples
The following examples show how to use
ch.zhaw.facerecognitionlibrary.PreProcessor.PreProcessorFactory.
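All of the examples share the same basic workflow: create a PreProcessorFactory from an Android Context, hand it an OpenCV Mat, and then query the detected face rectangles. The snippet below is a minimal sketch of that pattern, assembled from the calls used in Example #3; the frame Mat and the Recognition instance rec are assumed to be supplied by the surrounding activity and are not part of the PreProcessorFactory API itself.

// Minimal usage sketch (assumptions: "frame" is the current camera frame as an OpenCV Mat,
// "rec" is a Recognition instance, both provided by the surrounding activity as in Example #3).
PreProcessorFactory ppF = new PreProcessorFactory(getApplicationContext());

// Run the preprocessing chain in recognition mode and fetch the corresponding face rectangles.
List<Mat> images = ppF.getProcessedImage(frame, PreProcessorFactory.PreprocessingMode.RECOGNITION);
Rect[] faces = ppF.getFacesForRecognition();

if (images != null && faces != null && faces.length > 0 && images.size() == faces.length) {
    // Undo the rotation applied during preprocessing before drawing onto the preview frame.
    faces = MatOperation.rotateFaces(frame, faces, ppF.getAngleForRecognition());
    for (int i = 0; i < faces.length; i++) {
        // Label each detected face with the recognition result.
        MatOperation.drawRectangleAndLabelOnPreview(frame, faces[i], rec.recognize(images.get(i), ""), false);
    }
}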
Example #1
Source File: AuthenticationActivity.java From ml-authentication with Apache License 2.0
@Override
public void onResume() {
    super.onResume();

    ppF = new PreProcessorFactory(getApplicationContext());

    numberOfTries = 0;

    animalOverlay = animalOverlayHelper.getAnimalOverlay("");
    if (animalOverlay != null) {
        mediaPlayerAnimalSound = MediaPlayer.create(this, getResources().getIdentifier(animalOverlay.getSoundFile(), AuthenticationInstructionHelper.RESOURCES_RAW_FOLDER, getPackageName()));
    }

    preview.enableView();

    mediaPlayerTabletPlacement = AuthenticationInstructionHelper.getMediaPlayerTabletPlacement(getApplicationContext());
    mediaPlayerTabletPlacement.start();
    mediaPlayerTabletPlacementOverlay = AuthenticationInstructionHelper.getMediaPlayerTabletPlacementOverlay(getApplicationContext());

    tensorFlowLoadingThread.start();

    startTimeFallback = new Date().getTime();
    startTimeAuthenticationAnimation = new Date().getTime();
}
Example #2
Source File: StudentImageCollectionActivity.java From ml-authentication with Apache License 2.0
@Override
public void onResume() {
    super.onResume();

    ppF = new PreProcessorFactory(getApplicationContext());

    animalOverlay = animalOverlayHelper.getAnimalOverlay(animalOverlayName);
    if (animalOverlay != null) {
        mediaPlayerAnimalSound = MediaPlayer.create(this, getResources().getIdentifier(animalOverlay.getSoundFile(), AuthenticationInstructionHelper.RESOURCES_RAW_FOLDER, getPackageName()));
    }

    preview.enableView();

    if (!authenticationAnimationAlreadyPlayed) {
        mediaPlayerTabletPlacement = AuthenticationInstructionHelper.getMediaPlayerTabletPlacement(getApplicationContext());
        mediaPlayerTabletPlacement.start();
    }
    mediaPlayerTabletPlacementOverlay = AuthenticationInstructionHelper.getMediaPlayerTabletPlacementOverlay(getApplicationContext());

    startTimeFallback = new Date().getTime();
    startTimeAuthenticationAnimation = new Date().getTime();

    if (authenticationAnimationAlreadyPlayed) {
        prepareForAuthentication();
    }
}
Example #3
Source File: RecognitionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
public Mat onCameraFrame(CameraBridgeViewBase.CvCameraViewFrame inputFrame) {
    Mat imgRgba = inputFrame.rgba();
    Mat img = new Mat();
    imgRgba.copyTo(img);

    List<Mat> images = ppF.getProcessedImage(img, PreProcessorFactory.PreprocessingMode.RECOGNITION);
    Rect[] faces = ppF.getFacesForRecognition();

    // Selfie / Mirror mode
    if (front_camera) {
        Core.flip(imgRgba, imgRgba, 1);
    }
    if (images == null || images.size() == 0 || faces == null || faces.length == 0 || !(images.size() == faces.length)) {
        // skip
        return imgRgba;
    } else {
        faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
        for (int i = 0; i < faces.length; i++) {
            MatOperation.drawRectangleAndLabelOnPreview(imgRgba, faces[i], rec.recognize(images.get(i), ""), front_camera);
        }
        return imgRgba;
    }
}
Example #4
Source File: MergeThread.java From ml-authentication with Apache License 2.0
public MergeThread(MergeSimilarStudentsJobService mergeService) {
    Context context = mergeService.getApplicationContext();
    trainingThread = new TrainingThread(context);
    ppF = new PreProcessorFactory(context);
    LiteracyApplication literacyApplication = (LiteracyApplication) context.getApplicationContext();
    DaoSession daoSession = literacyApplication.getDaoSession();
    studentDao = daoSession.getStudentDao();
    studentImageCollectionEventDao = daoSession.getStudentImageCollectionEventDao();
    gson = new Gson();
    this.mergeService = mergeService;
}
Example #5
Source File: MergeThread.java From ml-authentication with Apache License 2.0
/**
 * Find similar students
 * Case 2: Student was added regularly but maybe on another tablet or due to some reason the
 * authentication didn't recognize the student correctly in the numberOfTries
 * ---> Use the meanFeatureVector as input for the cosineSimilarityScore calculation
 * @param ppF
 * @param tensorFlow
 */
private synchronized void findSimilarStudentsUsingMeanFeatureVector(PreProcessorFactory ppF, TensorFlow tensorFlow) {
    Log.i(getClass().getName(), "findSimilarStudentsUsingMeanFeatureVector");
    // Iterate through all StudentImageCollectionEvents, where the Student is not null
    List<StudentImageCollectionEvent> studentImageCollectionEvents = studentImageCollectionEventDao.queryBuilder().where(StudentImageCollectionEventDao.Properties.StudentId.notEq(0)).list();
    Log.i(getClass().getName(), "studentImageCollectionEvents.size(): " + studentImageCollectionEvents.size());
    for (StudentImageCollectionEvent studentImageCollectionEvent : studentImageCollectionEvents) {
        Student student = studentImageCollectionEvent.getStudent();
        // Take the meanFeatureVector of the StudentImageCollectionEvent
        List<Float> meanFeatureVectorList = gson.fromJson(studentImageCollectionEvent.getMeanFeatureVector(), new TypeToken<List<Float>>(){}.getType());
        Mat meanFeatureVector = Converters.vector_float_to_Mat(meanFeatureVectorList);
        RecognitionThread recognitionThread = new RecognitionThread(tensorFlow, studentImageCollectionEventDao);
        recognitionThread.setImg(meanFeatureVector);
        recognitionThread.setStudent(student);
        // To indicate, that this Mat object contains the already extracted features and therefore this step can be skipped in the RecognitionThread
        recognitionThread.setFeaturesAlreadyExtracted(true);
        Log.i(getClass().getName(), "findSimilarStudentsUsingMeanFeatureVector: recognitionThread will be started to recognize student: " + student.getUniqueId());
        recognitionThread.start();
        try {
            recognitionThread.join();
            List<Student> recognizedStudents = recognitionThread.getRecognizedStudent();
            if (recognizedStudents.size() > 0) {
                for (Student recognizedStudent : recognizedStudents) {
                    if (recognizedStudent != null) {
                        Log.i(getClass().getName(), "findSimilarStudentsUsingMeanFeatureVector: The student " + student.getUniqueId() + " has been recognized as " + recognizedStudent.getUniqueId());
                        mergeSimilarStudents(student, recognizedStudent);
                    }
                }
            } else {
                Log.i(getClass().getName(), "findSimilarStudentsUsingMeanFeatureVector: The student " + student.getUniqueId() + " was not recognized");
            }
        } catch (InterruptedException e) {
            e.printStackTrace();
        }
    }
}
Example #6
Source File: AddPersonPreviewActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
@Override
public void onResume() {
    super.onResume();
    ppF = new PreProcessorFactory(this);
    mAddPersonView.enableView();
}
Example #7
Source File: RecognitionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
@Override
public void onResume() {
    super.onResume();

    ppF = new PreProcessorFactory(getApplicationContext());

    final android.os.Handler handler = new android.os.Handler(Looper.getMainLooper());
    Thread t = new Thread(new Runnable() {
        public void run() {
            handler.post(new Runnable() {
                @Override
                public void run() {
                    progressBar.setVisibility(View.VISIBLE);
                }
            });
            SharedPreferences sharedPref = PreferenceManager.getDefaultSharedPreferences(getApplicationContext());
            String algorithm = sharedPref.getString("key_classification_method", getResources().getString(R.string.eigenfaces));
            rec = RecognitionFactory.getRecognitionAlgorithm(getApplicationContext(), Recognition.RECOGNITION, algorithm);
            handler.post(new Runnable() {
                @Override
                public void run() {
                    progressBar.setVisibility(View.GONE);
                }
            });
        }
    });
    t.start();

    // Wait until Eigenfaces loading thread has finished
    try {
        t.join();
    } catch (InterruptedException e) {
        e.printStackTrace();
    }

    mRecognitionView.enableView();
}
Example #8
Source File: MergeThread.java From ml-authentication with Apache License 2.0
/**
 * Find similar students
 * Case 1: Student was added during fallback but in the meantime the same person has an existing
 * StudentImageCollectionEvent and a new Student entry
 * ---> Use the avatar image as input for the recognition
 * @param ppF
 * @param tensorFlow
 */
private synchronized void findSimilarStudentsUsingAvatarImages(PreProcessorFactory ppF, TensorFlow tensorFlow) {
    Log.i(getClass().getName(), "findSimilarStudentsUsingAvatarImages");
    // Iterate through all Students
    List<Student> students = studentDao.loadAll();
    for (Student student : students) {
        // Take the avatar image of the Student
        Mat avatarImage = Imgcodecs.imread(student.getAvatar());
        // Search for faces in the avatar image
        List<Mat> faceImages = ppF.getCroppedImage(avatarImage);
        if (faceImages != null && faceImages.size() == 1) {
            // Proceed if exactly one face has been detected
            Mat faceImage = faceImages.get(0);
            if (faceImage != null) {
                // Get detected face rectangles
                Rect[] faces = ppF.getFacesForRecognition();
                if (faces != null && faces.length == 1) {
                    // Proceed if exactly one face rectangle exists
                    RecognitionThread recognitionThread = new RecognitionThread(tensorFlow, studentImageCollectionEventDao);
                    recognitionThread.setImg(faceImage);
                    recognitionThread.setStudent(student);
                    Log.i(getClass().getName(), "findSimilarStudentsUsingAvatarImages: recognitionThread will be started to recognize student: " + student.getUniqueId());
                    recognitionThread.start();
                    try {
                        recognitionThread.join();
                        List<Student> recognizedStudents = recognitionThread.getRecognizedStudent();
                        if (recognizedStudents.size() > 0) {
                            for (Student recognizedStudent : recognizedStudents) {
                                if (recognizedStudent != null) {
                                    Log.i(getClass().getName(), "findSimilarStudentsUsingAvatarImages: The student " + student.getUniqueId() + " has been recognized as " + recognizedStudent.getUniqueId());
                                    mergeSimilarStudents(student, recognizedStudent);
                                }
                            }
                        } else {
                            Log.i(getClass().getName(), "findSimilarStudentsUsingAvatarImages: The student " + student.getUniqueId() + " was not recognized");
                        }
                    } catch (InterruptedException e) {
                        e.printStackTrace();
                    }
                }
            }
        }
    }
}
Example #9
Source File: DetectionTestActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
@Override
protected void onResume() {
    super.onResume();
    final Handler handler = new Handler(Looper.getMainLooper());
    thread = new Thread(new Runnable() {
        public void run() {
            if (!Thread.currentThread().isInterrupted()) {
                PreProcessorFactory ppF = new PreProcessorFactory(getApplicationContext());
                FileHelper fileHelper = new FileHelper();
                File[] detectionFolders = fileHelper.getDetectionTestList();
                if (detectionFolders.length > 0) {
                    // total and matches are used to calculate the accuracy afterwards
                    int total = 0;
                    int matches = 0;
                    List<String> results = new ArrayList<>();
                    results.add("Expected Name;Expected File;Result");
                    Date time_start = new Date();
                    for (File folder : detectionFolders) {
                        File[] files = folder.listFiles();
                        int counter = 1;
                        for (File file : files) {
                            if (FileHelper.isFileAnImage(file)) {
                                Mat imgRgba = Imgcodecs.imread(file.getAbsolutePath());
                                Imgproc.cvtColor(imgRgba, imgRgba, Imgproc.COLOR_BGRA2RGBA);
                                List<Mat> images = ppF.getProcessedImage(imgRgba, PreProcessorFactory.PreprocessingMode.DETECTION);
                                Rect[] faces = ppF.getFacesForRecognition();
                                String result = "";
                                if (faces == null || faces.length == 0) {
                                    result = RESULT_NEGATIVE;
                                } else {
                                    result = RESULT_POSITIVE;
                                    faces = MatOperation.rotateFaces(imgRgba, faces, ppF.getAngleForRecognition());
                                    for (int i = 0; i < faces.length; i++) {
                                        MatOperation.drawRectangleAndLabelOnPreview(images.get(0), faces[i], "", false);
                                    }
                                }
                                // Save images
                                String[] tokens = file.getName().split("\\.");
                                String filename = tokens[0];
                                for (int i = 0; i < images.size(); i++) {
                                    MatName m = new MatName(filename + "_" + (i + 1), images.get(i));
                                    fileHelper.saveMatToImage(m, FileHelper.RESULTS_PATH + "/" + time_start.toString() + "/");
                                }
                                tokens = file.getParent().split("/");
                                final String name = tokens[tokens.length - 1];
                                results.add(name + ";" + file.getName() + ";" + result);
                                total++;
                                if (name.equals(result)) {
                                    matches++;
                                }
                                // Update screen to show the progress
                                final int counterPost = counter;
                                final int filesLength = files.length;
                                progress.post(new Runnable() {
                                    @Override
                                    public void run() {
                                        progress.append("Image " + counterPost + " of " + filesLength + " from " + name + "\n");
                                    }
                                });
                                counter++;
                            }
                        }
                    }
                    Date time_end = new Date();
                    long duration = time_end.getTime() - time_start.getTime();
                    int durationPerImage = (int) duration / total;
                    double accuracy = (double) matches / (double) total;
                    Map<String, ?> printMap = PreferenceManager.getDefaultSharedPreferences(getApplicationContext()).getAll();
                    fileHelper.saveResultsToFile(printMap, accuracy, durationPerImage, results);
                    final Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                    intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    intent.putExtra("accuracy", accuracy);
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            startActivity(intent);
                        }
                    });
                }
            } else {
                Thread.currentThread().interrupt();
            }
        }
    });
    thread.start();
}
Example #10
Source File: TrainingActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
@Override
public void onResume() {
    super.onResume();
    final Handler handler = new Handler(Looper.getMainLooper());
    thread = new Thread(new Runnable() {
        public void run() {
            if (!Thread.currentThread().isInterrupted()) {
                PreProcessorFactory ppF = new PreProcessorFactory(getApplicationContext());
                PreferencesHelper preferencesHelper = new PreferencesHelper(getApplicationContext());
                String algorithm = preferencesHelper.getClassificationMethod();
                FileHelper fileHelper = new FileHelper();
                fileHelper.createDataFolderIfNotExsiting();
                final File[] persons = fileHelper.getTrainingList();
                if (persons.length > 0) {
                    Recognition rec = RecognitionFactory.getRecognitionAlgorithm(getApplicationContext(), Recognition.TRAINING, algorithm);
                    for (File person : persons) {
                        if (person.isDirectory()) {
                            File[] files = person.listFiles();
                            int counter = 1;
                            for (File file : files) {
                                if (FileHelper.isFileAnImage(file)) {
                                    Mat imgRgb = Imgcodecs.imread(file.getAbsolutePath());
                                    Imgproc.cvtColor(imgRgb, imgRgb, Imgproc.COLOR_BGRA2RGBA);
                                    Mat processedImage = new Mat();
                                    imgRgb.copyTo(processedImage);
                                    List<Mat> images = ppF.getProcessedImage(processedImage, PreProcessorFactory.PreprocessingMode.RECOGNITION);
                                    if (images == null || images.size() > 1) {
                                        // More than 1 face detected --> cannot use this file for training
                                        continue;
                                    } else {
                                        processedImage = images.get(0);
                                    }
                                    if (processedImage.empty()) {
                                        continue;
                                    }
                                    // The last token is the name --> Folder name = Person name
                                    String[] tokens = file.getParent().split("/");
                                    final String name = tokens[tokens.length - 1];
                                    MatName m = new MatName("processedImage", processedImage);
                                    fileHelper.saveMatToImage(m, FileHelper.DATA_PATH);
                                    rec.addImage(processedImage, name, false);
                                    // fileHelper.saveCroppedImage(imgRgb, ppF, file, name, counter);
                                    // Update screen to show the progress
                                    final int counterPost = counter;
                                    final int filesLength = files.length;
                                    progress.post(new Runnable() {
                                        @Override
                                        public void run() {
                                            progress.append("Image " + counterPost + " of " + filesLength + " from " + name + " imported.\n");
                                        }
                                    });
                                    counter++;
                                }
                            }
                        }
                    }
                    final Intent intent = new Intent(getApplicationContext(), MainActivity.class);
                    intent.setFlags(Intent.FLAG_ACTIVITY_CLEAR_TOP);
                    if (rec.train()) {
                        intent.putExtra("training", "Training successful");
                    } else {
                        intent.putExtra("training", "Training failed");
                    }
                    handler.post(new Runnable() {
                        @Override
                        public void run() {
                            startActivity(intent);
                        }
                    });
                } else {
                    Thread.currentThread().interrupt();
                }
            }
        }
    });
    thread.start();
}
Example #11
Source File: DetectionActivity.java From Android-Face-Recognition-with-Deep-Learning-Test-Framework with Apache License 2.0
@Override
protected void onResume() {
    super.onResume();
    ppF = new PreProcessorFactory(getApplicationContext());
    mDetectionView.enableView();
}