passing built models for analysis from api

Şefik Serangil 2020-04-17 20:19:32 +03:00
parent f742dfd1f8
commit fa81b4f7fd
3 changed files with 91 additions and 13 deletions
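In short: every model is now built once at service start-up and the already built instances are handed to DeepFace, so repeated API calls skip the expensive loadModel steps. A minimal sketch of the pattern, using only the calls that appear in this commit (the sample images come from the unit tests below):

from deepface import DeepFace
from deepface.basemodels import VGGFace
from deepface.extendedmodels import Age, Gender, Race, Emotion

# build the models once at start-up
vggface_model = VGGFace.loadModel()
facial_attribute_models = {
    "emotion": Emotion.loadModel(),
    "age": Age.loadModel(),
    "gender": Gender.loadModel(),
    "race": Race.loadModel(),
}

# reuse them on every request; verify/analyze pick the passed instances
# instead of rebuilding (see the DeepFace.analyze hunk below)
resp_obj = DeepFace.verify("dataset/img1.jpg", "dataset/img2.jpg",
    model_name="VGG-Face", model=vggface_model)
resp_obj = DeepFace.analyze("dataset/img1.jpg", models=facial_attribute_models)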


@@ -3,37 +3,82 @@ from flask import Flask, jsonify, request, make_response
 import uuid
 import json
 import time
+from tqdm import tqdm
 import tensorflow as tf
 from deepface import DeepFace
 from deepface.basemodels import VGGFace, OpenFace, Facenet, FbDeepFace
+from deepface.extendedmodels import Age, Gender, Race, Emotion
 #import DeepFace
 #from basemodels import VGGFace, OpenFace, Facenet, FbDeepFace
+#from extendedmodels import Age, Gender, Race, Emotion
 #------------------------------
 app = Flask(__name__)
+#------------------------------
 tic = time.time()
+print("Loading Face Recognition Models...")
+pbar = tqdm(range(0,4), desc='Loading Face Recognition Models...')
+for index in pbar:
+    if index == 0:
+        pbar.set_description("Loading VGG-Face")
-vggface_model = VGGFace.loadModel()
-print("VGG-Face model is built.")
+        vggface_model = VGGFace.loadModel()
+    elif index == 1:
+        pbar.set_description("Loading OpenFace")
-openface_model = OpenFace.loadModel()
-print("OpenFace model is built")
+        openface_model = OpenFace.loadModel()
+    elif index == 2:
+        pbar.set_description("Loading Google FaceNet")
-facenet_model = Facenet.loadModel()
-print("FaceNet model is built")
+        facenet_model = Facenet.loadModel()
+    elif index == 3:
+        pbar.set_description("Loading Facebook DeepFace")
-deepface_model = FbDeepFace.loadModel()
-print("DeepFace model is built")
+        deepface_model = FbDeepFace.loadModel()
 toc = time.time()
 print("Face recognition models are built in ", toc-tic," seconds")
+#------------------------------
+tic = time.time()
+print("Loading Facial Attribute Analysis Models...")
+pbar = tqdm(range(0,4), desc='Loading Facial Attribute Analysis Models...')
+for index in pbar:
+    if index == 0:
+        pbar.set_description("Loading emotion analysis model")
+        emotion_model = Emotion.loadModel()
+    elif index == 1:
+        pbar.set_description("Loading age prediction model")
+        age_model = Age.loadModel()
+    elif index == 2:
+        pbar.set_description("Loading gender prediction model")
+        gender_model = Gender.loadModel()
+    elif index == 3:
+        pbar.set_description("Loading race prediction model")
+        race_model = Race.loadModel()
+toc = time.time()
+facial_attribute_models = {}
+facial_attribute_models["emotion"] = emotion_model
+facial_attribute_models["age"] = age_model
+facial_attribute_models["gender"] = gender_model
+facial_attribute_models["race"] = race_model
+print("Facial attribute analysis models are built in ", toc-tic," seconds")
+#------------------------------
 graph = tf.get_default_graph()
 #------------------------------
@@ -76,7 +121,8 @@ def analyze():
     #---------------------------
-    resp_obj = DeepFace.analyze(instances, actions=actions)
+    #resp_obj = DeepFace.analyze(instances, actions=actions)
+    resp_obj = DeepFace.analyze(instances, actions=actions, models=facial_attribute_models)
     #---------------------------
@@ -85,7 +131,7 @@ def analyze():
     resp_obj["trx_id"] = trx_id
     resp_obj["seconds"] = toc-tic
-    return resp_obj
+    return resp_obj, 200
 @app.route('/verify', methods=['POST'])
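For reference, a hypothetical client call against the /analyze route patched above. The "img" and "actions" keys, the base64 payload, and the port are assumptions; the request-parsing code sits outside these hunks:

# hypothetical client call; payload keys, base64 format and port are assumptions,
# since the request parsing of api.py is not shown in this commit
import base64
import requests

with open("dataset/img1.jpg", "rb") as f:
    img_b64 = "data:image/jpeg;base64," + base64.b64encode(f.read()).decode("utf-8")

resp = requests.post(
    "http://127.0.0.1:5000/analyze",
    json={"img": [img_b64], "actions": ["emotion", "age", "gender", "race"]},
)

# the route now returns (resp_obj, 200), so the status code is explicit
print(resp.status_code)
print(resp.json())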


@@ -20,7 +20,6 @@ from deepface.basemodels import VGGFace, OpenFace, Facenet, FbDeepFace
 from deepface.extendedmodels import Age, Gender, Race, Emotion
 from deepface.commons import functions, realtime, distance as dst
-
 def verify(img1_path, img2_path=''
     , model_name ='VGG-Face', distance_metric = 'cosine', model = None):
@@ -171,24 +170,28 @@ def analyze(img_path, actions= [], models= {}):
     if 'emotion' in actions:
         if 'emotion' in models:
+            print("already built emotion model is passed")
             emotion_model = models['emotion']
         else:
             emotion_model = Emotion.loadModel()
     if 'age' in actions:
         if 'age' in models:
+            print("already built age model is passed")
             age_model = models['age']
         else:
             age_model = Age.loadModel()
     if 'gender' in actions:
         if 'gender' in models:
+            print("already built gender model is passed")
             gender_model = models['gender']
         else:
             gender_model = Gender.loadModel()
     if 'race' in actions:
         if 'race' in models:
+            print("already built race model is passed")
             race_model = models['race']
         else:
             race_model = Race.loadModel()
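Note that each action checks only its own key in models, so a partial dictionary also works; anything missing is still loaded on demand inside analyze. A small sketch under that reading of the hunk above:

from deepface import DeepFace
from deepface.extendedmodels import Emotion

# only the emotion model is pre-built; any other requested action
# would fall back to its own loadModel() call inside analyze
models = {"emotion": Emotion.loadModel()}
resp_obj = DeepFace.analyze("dataset/img1.jpg", actions=["emotion"], models=models)
print(resp_obj)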


@@ -125,3 +125,32 @@ if accuracy > 75:
     print("Unit tests are completed successfully. Score: ",accuracy,"%")
 else:
     raise ValueError("Unit test score does not satisfy the minimum required accuracy. Minimum expected score is 80% but this got ",accuracy,"%")
+
+#-----------------------------------
+# api tests - already built models will be passed to the functions
+
+from deepface.basemodels import VGGFace, OpenFace, Facenet, FbDeepFace
+
+#-----------------------------------
+
+vggface_model = VGGFace.loadModel()
+
+resp_obj = DeepFace.verify("dataset/img1.jpg", "dataset/img2.jpg", model_name = "VGG-Face", model = vggface_model)
+print(resp_obj)
+
+#-----------------------------------
+
+from deepface.extendedmodels import Age, Gender, Race, Emotion
+
+emotion_model = Emotion.loadModel()
+age_model = Age.loadModel()
+gender_model = Gender.loadModel()
+race_model = Race.loadModel()
+
+facial_attribute_models = {}
+facial_attribute_models["emotion"] = emotion_model
+facial_attribute_models["age"] = age_model
+facial_attribute_models["gender"] = gender_model
+facial_attribute_models["race"] = race_model
+
+resp_obj = DeepFace.analyze("dataset/img1.jpg", models=facial_attribute_models)
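The payoff of passing facial_attribute_models shows up once the same dictionary is reused across many calls; a hypothetical reuse loop (the extra image name is a placeholder):

# hypothetical reuse: the models dict is built once above and shared here;
# dataset/img3.jpg is a placeholder file name
for img_path in ["dataset/img1.jpg", "dataset/img2.jpg", "dataset/img3.jpg"]:
    resp_obj = DeepFace.analyze(img_path, models=facial_attribute_models)
    print(img_path, resp_obj)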