mirror of https://github.com/serengil/deepface.git (synced 2025-06-06 11:35:21 +00:00)
passing built models for analysis from api

parent f742dfd1f8
commit fa81b4f7fd

api/api.py
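
This commit builds the heavy face recognition and facial attribute models once when the Flask API starts, then passes the already built models into DeepFace.analyze (and DeepFace.verify) on each request instead of reloading the weights per call. A minimal sketch of the resulting usage, assuming the loadModel() helpers shown in this diff (it mirrors the test code at the bottom):

from deepface import DeepFace
from deepface.extendedmodels import Age, Gender, Race, Emotion

# build each attribute model once (slow), then reuse the dict on every call
facial_attribute_models = {
    "emotion": Emotion.loadModel(),
    "age": Age.loadModel(),
    "gender": Gender.loadModel(),
    "race": Race.loadModel(),
}

# analyze() accepts the pre-built models instead of loading its own
resp_obj = DeepFace.analyze("dataset/img1.jpg", models=facial_attribute_models)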
@@ -3,37 +3,82 @@ from flask import Flask, jsonify, request, make_response
import uuid
import json
import time
from tqdm import tqdm

import tensorflow as tf

from deepface import DeepFace
from deepface.basemodels import VGGFace, OpenFace, Facenet, FbDeepFace
from deepface.extendedmodels import Age, Gender, Race, Emotion

#import DeepFace
#from basemodels import VGGFace, OpenFace, Facenet, FbDeepFace
#from extendedmodels import Age, Gender, Race, Emotion

#------------------------------

app = Flask(__name__)

#------------------------------

tic = time.time()

vggface_model = VGGFace.loadModel()
print("VGG-Face model is built.")
print("Loading Face Recognition Models...")

openface_model = OpenFace.loadModel()
print("OpenFace model is built")
pbar = tqdm(range(0,4), desc='Loading Face Recognition Models...')

facenet_model = Facenet.loadModel()
print("FaceNet model is built")

deepface_model = FbDeepFace.loadModel()
print("DeepFace model is built")
for index in pbar:
    if index == 0:
        pbar.set_description("Loading VGG-Face")
        vggface_model = VGGFace.loadModel()
    elif index == 1:
        pbar.set_description("Loading OpenFace")
        openface_model = OpenFace.loadModel()
    elif index == 2:
        pbar.set_description("Loading Google FaceNet")
        facenet_model = Facenet.loadModel()
    elif index == 3:
        pbar.set_description("Loading Facebook DeepFace")
        deepface_model = FbDeepFace.loadModel()

toc = time.time()

print("Face recognition models are built in ", toc-tic," seconds")

#------------------------------

tic = time.time()

print("Loading Facial Attribute Analysis Models...")

pbar = tqdm(range(0,4), desc='Loading Facial Attribute Analysis Models...')

for index in pbar:
    if index == 0:
        pbar.set_description("Loading emotion analysis model")
        emotion_model = Emotion.loadModel()
    elif index == 1:
        pbar.set_description("Loading age prediction model")
        age_model = Age.loadModel()
    elif index == 2:
        pbar.set_description("Loading gender prediction model")
        gender_model = Gender.loadModel()
    elif index == 3:
        pbar.set_description("Loading race prediction model")
        race_model = Race.loadModel()

toc = time.time()

facial_attribute_models = {}
facial_attribute_models["emotion"] = emotion_model
facial_attribute_models["age"] = age_model
facial_attribute_models["gender"] = gender_model
facial_attribute_models["race"] = race_model

print("Facial attribute analysis models are built in ", toc-tic," seconds")

#------------------------------

graph = tf.get_default_graph()

#------------------------------
@@ -76,7 +121,8 @@ def analyze():

    #---------------------------

    resp_obj = DeepFace.analyze(instances, actions=actions)
    #resp_obj = DeepFace.analyze(instances, actions=actions)
    resp_obj = DeepFace.analyze(instances, actions=actions, models=facial_attribute_models)

    #---------------------------

@@ -85,7 +131,7 @@ def analyze():
    resp_obj["trx_id"] = trx_id
    resp_obj["seconds"] = toc-tic

    return resp_obj
    return resp_obj, 200

@app.route('/verify', methods=['POST'])
@@ -20,7 +20,6 @@ from deepface.basemodels import VGGFace, OpenFace, Facenet, FbDeepFace
from deepface.extendedmodels import Age, Gender, Race, Emotion
from deepface.commons import functions, realtime, distance as dst

def verify(img1_path, img2_path=''
    , model_name ='VGG-Face', distance_metric = 'cosine', model = None):

@@ -150,7 +149,7 @@ def verify(img1_path, img2_path=''
    #return resp_objects

def analyze(img_path, actions= [], models= {}):
def analyze(img_path, actions = [], models = {}):

    if type(img_path) == list:
        img_paths = img_path.copy()

@@ -171,24 +170,28 @@ def analyze(img_path, actions= [], models= {}):

    if 'emotion' in actions:
        if 'emotion' in models:
            print("already built emotion model is passed")
            emotion_model = models['emotion']
        else:
            emotion_model = Emotion.loadModel()

    if 'age' in actions:
        if 'age' in models:
            print("already built age model is passed")
            age_model = models['age']
        else:
            age_model = Age.loadModel()

    if 'gender' in actions:
        if 'gender' in models:
            print("already built gender model is passed")
            gender_model = models['gender']
        else:
            gender_model = Gender.loadModel()

    if 'race' in actions:
        if 'race' in models:
            print("already built race model is passed")
            race_model = models['race']
        else:
            race_model = Race.loadModel()
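
In analyze() above, each requested action first checks the models dict for an already built model and only falls back to loadModel() when it is missing, so a partial dict is also valid. A hypothetical sketch under that assumption (only the emotion model is pre-built; the age model would be loaded on demand inside analyze()):

from deepface import DeepFace
from deepface.extendedmodels import Emotion

# only 'emotion' is pre-built here; 'age' falls back to Age.loadModel() internally
models = {"emotion": Emotion.loadModel()}
resp_obj = DeepFace.analyze("dataset/img1.jpg", actions=["emotion", "age"], models=models)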
@@ -125,3 +125,32 @@ if accuracy > 75:
    print("Unit tests are completed successfully. Score: ",accuracy,"%")
else:
    raise ValueError("Unit test score does not satisfy the minimum required accuracy. Minimum expected score is 80% but this got ",accuracy,"%")

#-----------------------------------

# api tests - already built models will be passed to the functions

from deepface.basemodels import VGGFace, OpenFace, Facenet, FbDeepFace

#-----------------------------------

vggface_model = VGGFace.loadModel()
resp_obj = DeepFace.verify("dataset/img1.jpg", "dataset/img2.jpg", model_name = "VGG-Face", model = vggface_model)
print(resp_obj)

#-----------------------------------

from deepface.extendedmodels import Age, Gender, Race, Emotion

emotion_model = Emotion.loadModel()
age_model = Age.loadModel()
gender_model = Gender.loadModel()
race_model = Race.loadModel()

facial_attribute_models = {}
facial_attribute_models["emotion"] = emotion_model
facial_attribute_models["age"] = age_model
facial_attribute_models["gender"] = gender_model
facial_attribute_models["race"] = race_model

resp_obj = DeepFace.analyze("dataset/img1.jpg", models=facial_attribute_models)