Mirror of https://github.com/serengil/deepface.git (synced 2025-06-07 03:55:21 +00:00)

bugfixes

commit 07220feb2d (parent 3f29c6a606)
@@ -418,23 +418,26 @@ def analyze(img_path, actions = ('emotion', 'age', 'gender', 'race') , models =
             resp_obj["age"] = int(apparent_age) #int cast is for the exception - object of type 'float32' is not JSON serializable
 
         elif action == 'gender':
-            if img_224 is None:
-                img_224, region = functions.preprocess_face(img = img_path, target_size = (224, 224), grayscale = False, enforce_detection = enforce_detection, detector_backend = detector_backend, return_region = True)
+            try:
+                if img_224 is None:
+                    img_224, region = functions.preprocess_face(img = img_path, target_size = (224, 224), grayscale = False, enforce_detection = enforce_detection, detector_backend = detector_backend, return_region = True)
 
-            gender_predictions = models['gender'].predict(img_224)[0,:]
+                gender_predictions = models['gender'].predict(img_224)[0,:]
 
-            sum_of_predictions = gender_predictions.sum()
-            gender_labels = ["Woman", "Man"]
-            resp_obj["gender"] = {}
-
-            for i in range(0, len(gender_labels)):
-                gender_label = gender_labels[i]
-                gender_prediction = 100 * gender_predictions[i] / sum_of_predictions
-                resp_obj["gender"][gender_label] = gender_prediction
-
-            resp_obj["dominant_gender"] = gender_labels[np.argmax(gender_predictions)]
+                sum_of_predictions = gender_predictions.sum()
+                gender_labels = ["Woman", "Man"]
+                resp_obj["gender"] = {}
+
+                for i in range(0, len(gender_labels)):
+                    gender_label = gender_labels[i]
+                    gender_prediction = 100 * gender_predictions[i] / sum_of_predictions
+                    resp_obj["gender"][gender_label] = gender_prediction
+
+                resp_obj["dominant_gender"] = gender_labels[np.argmax(gender_predictions)]
+            except Exception as e:
+                resp_obj["dominant_gender"] = None
+                resp_obj["gender"] = None
+                resp_obj["error"] = e
 
         elif action == 'race':
             if img_224 is None:
                 img_224, region = functions.preprocess_face(img = img_path, target_size = (224, 224), grayscale = False, enforce_detection = enforce_detection, detector_backend = detector_backend, return_region = True) #just emotion model expects grayscale images
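Illustrative sketch (not part of the diff): with the gender branch now wrapped in try/except, a failure no longer aborts analyze(); a caller can inspect the None values and the "error" field instead. The image path is a placeholder borrowed from the test dataset, and a single path is assumed so analyze() returns a plain dict.

from deepface import DeepFace

# Hypothetical caller-side handling of the new failure fields.
result = DeepFace.analyze("dataset/img1.jpg", actions=("gender",), enforce_detection=False)

if result.get("dominant_gender") is None:
    # the except branch above stores the caught exception under "error"
    print("gender prediction failed:", result.get("error"))
else:
    print("dominant gender:", result["dominant_gender"])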
@@ -453,7 +456,7 @@ def analyze(img_path, actions = ('emotion', 'age', 'gender', 'race') , models =
 
         #-----------------------------
 
-        if is_region_set != True:
+        if is_region_set != True and region:
             resp_obj["region"] = {}
             is_region_set = True
             for i, parameter in enumerate(region_labels):
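Illustrative sketch (assumed values, not from the diff) of what the added `and region` guard protects against: with enforce_detection=False a face may not be found, leaving region empty, and the old check would still build an empty resp_obj["region"]. The region_labels list and the loop body are assumed; the real assignment lives outside this hunk.

region_labels = ['x', 'y', 'w', 'h']   # assumed labels
region = []                            # e.g. nothing detected with enforce_detection=False

resp_obj = {}
is_region_set = False

# old guard (`if is_region_set != True:`) would enter this block even for an
# empty region; the new guard skips it because an empty list is falsy
if is_region_set != True and region:
    resp_obj["region"] = {}
    is_region_set = True
    for i, parameter in enumerate(region_labels):
        resp_obj["region"][parameter] = region[i]   # assumed assignment

print(resp_obj)   # {} when no face region is available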
@@ -467,14 +470,14 @@ def analyze(img_path, actions = ('emotion', 'age', 'gender', 'race') , models =
 
         return resp_obj
 
     if bulkProcess == True:
-
-        resp_obj = {}
-
-        for i in range(0, len(resp_objects)):
-            resp_item = resp_objects[i]
-            resp_obj["instance_%d" % (i+1)] = resp_item
-
-        return resp_obj
+        return resp_objects
+        # resp_obj = {}
+        #
+        # for i in range(0, len(resp_objects)):
+        #     resp_item = resp_objects[i]
+        #     resp_obj["instance_%d" % (i+1)] = resp_item
+        #
+        # return resp_obj
 
 def find(img_path, db_path, model_name ='VGG-Face', distance_metric = 'cosine', model = None, enforce_detection = True, detector_backend = 'opencv', align = True, prog_bar = True, normalization = 'base', silent=False):
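Illustrative sketch (placeholder paths): since bulk analysis now returns resp_objects directly, callers iterate a plain list instead of looking up "instance_N" keys, matching the updated test further down.

from deepface import DeepFace

imgs = ["dataset/img1.jpg", "dataset/img5.jpg"]   # placeholder paths

# before this commit: a dict keyed "instance_1", "instance_2", ...
# after this commit: the list of per-image result dicts itself
results = DeepFace.analyze(imgs, actions=("gender",), enforce_detection=False)

for result in results:
    print(result["dominant_gender"], result["gender"])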
@@ -83,7 +83,7 @@ def load_image(img):
         img = loadBase64Img(img)
 
     elif url_img:
-        img = np.array(Image.open(requests.get(img, stream=True).raw))
+        img = np.array(Image.open(requests.get(img, stream=True).raw).convert('RGB'))
 
     elif exact_image != True: #image path passed as input
         if os.path.isfile(img) != True:
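Illustrative sketch (placeholder URL) of the failure mode the added .convert('RGB') avoids: an image fetched over HTTP can be palette or RGBA mode, so converting first guarantees a 3-channel array for the downstream detectors.

import numpy as np
import requests
from PIL import Image

url = "https://example.com/avatar.png"   # placeholder, e.g. a PNG with an alpha channel

raw = Image.open(requests.get(url, stream=True).raw)
print(raw.mode)                    # may be "RGBA" or "P" rather than "RGB"

img = np.array(raw.convert('RGB'))
print(img.shape)                   # always (height, width, 3)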
@@ -1,14 +1,17 @@
 from deepface import DeepFace
 
 dataset = [
-    'dataset/img1.jpg',
-    'dataset/img5.jpg',
-    'dataset/img6.jpg',
-    'dataset/img8.jpg',
-    'dataset/img7.jpg',
-    'dataset/img9.jpg',
-    'dataset/img11.jpg',
-    'dataset/img11.jpg',
'https://datasets-626827236627.s3.amazonaws.com/avatars/orly-hamzani-046368aa_06122022.jpg?AWSAccessKeyId=ASIAZD4OQSUJ3IBDFZUN&Signature=pnRMEwKHNA5PfwDgHNoGavUxZvM%3D&x-amz-security-token=IQoJb3JpZ2luX2VjEID%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEaCXVzLWVhc3QtMSJIMEYCIQCfwrDPDOtQ7qval0EdjUQEhah2PvnNeJmO3KqjRmSzzQIhALmpYTK%2BIGxCNwfBBqtqCvme8cAhS6S2LCc6ti4rHC6VKtsECIn%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEQARoMNjI2ODI3MjM2NjI3IgxmYcKEgfQcYn6Ktf0qrwSTcdMXW6jBfrKQn1FtoasmBah2JYCy2X%2Bb07cLFKKNrwH7YLFlCoXiKWP6ntzrm0R2wSsr%2BTmn6RH8WoiXnnaC%2BagFiyUdqPlTdBGy2L%2BO2EKBNA0FnRx%2FnHR0rLG%2Fmcv7cSVcG%2Bsthv5nVSafUgDlQ8dLlcW%2FiGk10eSSAM2tMcUOPyoOLlsSJ%2B0RiSppJEhy3%2F5S63p7RT2fVdlSE1XH%2BnQYvIoUEw0uB3rVywTMknFFi7h8teki%2BGE%2BuONqcHwxSjfnDE53DzZZmmZ%2F5yW2TiK3KbbV974PVgGxA4epRwFCGKqmf7%2FeQCxjmOhPpvXL%2FWiltAIIxtUxlx6IL6IfTsf8i0bVU9cO4Jj14r5%2BdGt96h%2FHGftm%2BdBp1v7JD0vBIxiwFFgv073YjBBmg4gUoryI%aqaaaaaaaa',
'https://datasets-626827236627.s3.amazonaws.com/avatars/%25D7%25A9%25D7%2599-%25D7%259E%25D7%2595%25D7%25A2%25D7%259C%25D7%259D-011174120_06122022.jpg?AWSAccessKeyId=ASIAZD4OQSUJ3IBDFZUN&Signature=QPaXXLUwBvflmxMEcJ98TiSvMTk%3D&x-amz-security-token=IQoJb3JpZ2luX2VjEID%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEaCXVzLWVhc3QtMSJIMEYCIQCfwrDPDOtQ7qval0EdjUQEhah2PvnNeJmO3KqjRmSzzQIhALmpYTK%2BIGxCNwfBBqtqCvme8cAhS6S2LCc6ti4rHC6VKtsECIn%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEQARoMNjI2ODI3MjM2NjI3IgxmYcKEgfQcYn6Ktf0qrwSTcdMXW6jBfrKQn1FtoasmBah2JYCy2X%2Bb07cLFKKNrwH7YLFlCoXiKWP6ntzrm0R2wSsr%2BTmn6RH8WoiXnnaC%2BagFiyUdqPlTdBGy2L%2BO2EKBNA0FnRx%2FnHR0rLG%2Fmcv7cSVcG%2Bsthv5nVSafUgDlQ8dLlcW%2FiGk10eSSAM2tMcUOPyoOLlsSJ%2B0RiSppJEhy3%2F5S63p7RT2fVdlSE1XH%2BnQYvIoUEw0uB3rVywTMknFFi7h8teki%2BGE%2BuONqcHwxSjfnDE53DzZZmmZ%2F5yW2TiK3KbbV974PVgGxA4epRwFCGKqmf7%2FeQCxjmOhPpvXL%2FWiltAIIxtUxlx6IL6IfTsf8i0bVU9cO4Jj14r5%2BdGt96h%2FHGftm%2BdBp1v7JD0vBIxiwFFgv073YjBBmg4gUoryI%2BKaWwf8ISc%2FNtJEg5e2ouslYm4GYYkDFosaYv1WIPztmWybvGeERGlpJg4apsJMEp2McrL7bT1dRPYRSiK9IZYSLXiW3gUnN3KpV62xDD6x5Y1ZZOw92dri5YdHu%2FyPUtn2JaZGDsgKNsSu2QuxFnDK5kiJJjeykTJPGEmqoP7EynzBnD3uCDGj3pH8GBseU8MR2fkHn%2F8ARqpwx%2F8U34XMTUgFX%2BQSCw6qEuFbDCMngyMAngFR2aF6aAqq7Oms1A61bKaGjHCzsEEyEhamLbpvqbGwjN7vrQhuiU6VsKLSnozfUI9eiGFGdko4xmXxbJkuPJHHiAEEtJoKSMPf8oJUGOqgB%2BS48LRDY83OvX5W2mX%2FTqb9UvUdwE9VB2HBQ11mkE36LD9BtOyLhxaFuMYFOUqk1JBKatWSV49UlQUZuRnEFnNmlB4v2zLX3qQ6WjbkZsGIGUynNugAGbtoxqHW9wWklimJKha1nDv3JmF4vDlfWMLfCe42Kth0gxEpXmMkdjY%2BSdJm%2FeELOzTqFrp3yMm%2FziDttBzx3mZnHgz0qaZIEp0BVmn4kSRH5&Expires=1655198312',
'https://datasets-626827236627.s3.amazonaws.com/avatars/orly-hamzani-046368aa_06122022.jpg?AWSAccessKeyId=ASIAZD4OQSUJ3IBDFZUN&Signature=pnRMEwKHNA5PfwDgHNoGavUxZvM%3D&x-amz-security-token=IQoJb3JpZ2luX2VjEID%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEaCXVzLWVhc3QtMSJIMEYCIQCfwrDPDOtQ7qval0EdjUQEhah2PvnNeJmO3KqjRmSzzQIhALmpYTK%2BIGxCNwfBBqtqCvme8cAhS6S2LCc6ti4rHC6VKtsECIn%2F%2F%2F%2F%2F%2F%2F%2F%2F%2FwEQARoMNjI2ODI3MjM2NjI3IgxmYcKEgfQcYn6Ktf0qrwSTcdMXW6jBfrKQn1FtoasmBah2JYCy2X%2Bb07cLFKKNrwH7YLFlCoXiKWP6ntzrm0R2wSsr%2BTmn6RH8WoiXnnaC%2BagFiyUdqPlTdBGy2L%2BO2EKBNA0FnRx%2FnHR0rLG%2Fmcv7cSVcG%2Bsthv5nVSafUgDlQ8dLlcW%2FiGk10eSSAM2tMcUOPyoOLlsSJ%2B0RiSppJEhy3%2F5S63p7RT2fVdlSE1XH%2BnQYvIoUEw0uB3rVywTMknFFi7h8teki%2BGE%2BuONqcHwxSjfnDE53DzZZmmZ%2F5yW2TiK3KbbV974PVgGxA4epRwFCGKqmf7%2FeQCxjmOhPpvXL%2FWiltAIIxtUxlx6IL6IfTsf8i0bVU9cO4Jj14r5%2BdGt96h%2FHGftm%2BdBp1v7JD0vBIxiwFFgv073YjBBmg4gUoryI%2BKaWwf8ISc%2FNtJEg5e2ouslYm4GYYkDFosaYv1WIPztmWybvGeERGlpJg4apsJMEp2McrL7bT1dRPYRSiK9IZYSLXiW3gUnN3KpV62xDD6x5Y1ZZOw92dri5YdHu%2FyPUtn2JaZGDsgKNsSu2QuxFnDK5kiJJjeykTJPGEmqoP7EynzBnD3uCDGj3pH8GBseU8MR2fkHn%2F8ARqpwx%2F8U34XMTUgFX%2BQSCw6qEuFbDCMngyMAngFR2aF6aAqq7Oms1A61bKaGjHCzsEEyEhamLbpvqbGwjN7vrQhuiU6VsKLSnozfUI9eiGFGdko4xmXxbJkuPJHHiAEEtJoKSMPf8oJUGOqgB%2BS48LRDY83OvX5W2mX%2FTqb9UvUdwE9VB2HBQ11mkE36LD9BtOyLhxaFuMYFOUqk1JBKatWSV49UlQUZuRnEFnNmlB4v2zLX3qQ6WjbkZsGIGUynNugAGbtoxqHW9wWklimJKha1nDv3JmF4vDlfWMLfCe42Kth0gxEpXmMkdjY%2BSdJm%2FeELOzTqFrp3yMm%2FziDttBzx3mZnHgz0qaZIEp0BVmn4kSRH5&Expires=1655199524',
+    # 'dataset/img1.jpg',
+    # 'dataset/img5.jpg',
+    # 'dataset/img6.jpg',
+    # 'dataset/img8.jpg',
+    # 'dataset/img7.jpg',
+    # 'dataset/img9.jpg',
+    # 'dataset/img11.jpg',
+    # 'dataset/img11.jpg',
 ]
 
 detectors = ['opencv', 'ssd', 'retinaface', 'mtcnn'] # dlib not tested
@@ -16,9 +19,8 @@ detectors = ['opencv', 'ssd', 'retinaface', 'mtcnn'] # dlib not tested
 
 def test_gender_prediction():
     for detector in detectors:
-        results = DeepFace.analyze(dataset, actions=('gender',), detector_backend=detector, prog_bar=False)
-        for key in results.keys():
-            result = results[key]
+        results = DeepFace.analyze(dataset, actions=('gender',), detector_backend=detector, prog_bar=False, enforce_detection=False)
+        for result in results:
             assert 'gender' in result.keys()
             assert 'dominant_gender' in result.keys() and result["dominant_gender"] in ["Man", "Woman"]
             if result["dominant_gender"] == "Man":