Mirror of https://github.com/serengil/deepface.git (synced 2025-06-06 11:35:21 +00:00)
logger instead of print
This commit is contained in:
commit 5464ac511c (parent 9c3e548b9f)
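The change is mechanical across every touched module: each file gains a module-level Logger instance from the new deepface/commons/logger.py, and its bare print calls become leveled log calls. A minimal sketch of the pattern this commit applies (messages taken from the diff below):

    from deepface.commons.logger import Logger

    logger = Logger()

    # before: print("facenet_weights.h5 will be downloaded...")
    logger.info("facenet_weights.h5 will be downloaded...")

    # warnings and errors now carry a level instead of a "WARNING:" prefix in the text
    logger.warn("Representations were previously stored; delete the pickle file to refresh them")
    logger.error("Unexpected exception thrown")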
Makefile (new file, +5)
@@ -0,0 +1,5 @@
+test:
+	cd tests && python -m pytest unit_tests.py -s --disable-warnings
+
+lint:
+	python -m pylint deepface/ --fail-under=10
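In other words, "make test" runs the pytest suite under tests/ with warnings disabled, and "make lint" fails unless the deepface/ package scores a full 10 from pylint.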
@@ -28,6 +28,9 @@ from deepface.basemodels import (
 )
 from deepface.extendedmodels import Age, Gender, Race, Emotion
 from deepface.commons import functions, realtime, distance as dst
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # -----------------------------------
 # configurations for dependencies
@@ -461,8 +464,8 @@ def find(
 if path.exists(db_path + "/" + file_name):
 
 if not silent:
-print(
-f"WARNING: Representations for images in {db_path} folder were previously stored"
+logger.warn(
+f"Representations for images in {db_path} folder were previously stored"
 + f" in {file_name}. If you added new instances after the creation, then please "
 + "delete this file and call find function again. It will create it again."
 )
@@ -471,7 +474,7 @@ def find(
 representations = pickle.load(f)
 
 if not silent:
-print("There are ", len(representations), " representations found in ", file_name)
+logger.info(f"There are {len(representations)} representations found in {file_name}")
 
 else: # create representation.pkl from scratch
 employees = []
@@ -539,7 +542,7 @@ def find(
 pickle.dump(representations, f)
 
 if not silent:
-print(
+logger.info(
 f"Representations stored in {db_path}/{file_name} file."
 + "Please delete this file when you add new identities in your database."
 )
@@ -614,7 +617,7 @@ def find(
 toc = time.time()
 
 if not silent:
-print("find function lasts ", toc - tic, " seconds")
+logger.info(f"find function lasts {toc - tic} seconds")
 
 return resp_obj
 
@@ -869,7 +872,7 @@ def detectFace(
 detected and aligned face as numpy array
 
 """
-print("⚠️ Function detectFace is deprecated. Use extract_faces instead.")
+logger.info("⚠️ Function detectFace is deprecated. Use extract_faces instead.")
 face_objs = extract_faces(
 img_path=img_path,
 target_size=target_size,
@@ -2,6 +2,9 @@ import os
 import gdown
 import tensorflow as tf
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # pylint: disable=unsubscriptable-object
 
@@ -71,7 +74,7 @@ def loadModel(
 
 if os.path.isfile(output) != True:
 
-print(file_name, " will be downloaded to ", output)
+logger.info(f"{file_name} will be downloaded to {output}")
 gdown.download(url, output, quiet=False)
 
 # ---------------------------------------
@@ -2,6 +2,9 @@ import os
 import gdown
 import tensorflow as tf
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 tf_version = int(tf.__version__.split(".", maxsplit=1)[0])
 
@@ -71,7 +74,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/deepid_keras_weights.h5") != True:
-print("deepid_keras_weights.h5 will be downloaded...")
+logger.info("deepid_keras_weights.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/deepid_keras_weights.h5"
 gdown.download(url, output, quiet=False)
@@ -3,6 +3,9 @@ import bz2
 import gdown
 import numpy as np
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # pylint: disable=too-few-public-methods
 
@@ -24,7 +27,7 @@ class DlibResNet:
 
 # download pre-trained model if it does not exist
 if os.path.isfile(weight_file) != True:
-print("dlib_face_recognition_resnet_model_v1.dat is going to be downloaded")
+logger.info("dlib_face_recognition_resnet_model_v1.dat is going to be downloaded")
 
 file_name = "dlib_face_recognition_resnet_model_v1.dat.bz2"
 url = f"http://dlib.net/files/{file_name}"
@@ -2,6 +2,9 @@ import os
 import gdown
 import tensorflow as tf
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # --------------------------------
 # dependency configuration
@@ -1628,7 +1631,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/facenet_weights.h5") != True:
-print("facenet_weights.h5 will be downloaded...")
+logger.info("facenet_weights.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/facenet_weights.h5"
 gdown.download(url, output, quiet=False)
@@ -2,7 +2,9 @@ import os
 import gdown
 from deepface.basemodels import Facenet
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 def loadModel(
 url="https://github.com/serengil/deepface_models/releases/download/v1.0/facenet512_weights.h5",
@@ -15,7 +17,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/facenet512_weights.h5") != True:
-print("facenet512_weights.h5 will be downloaded...")
+logger.info("facenet512_weights.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/facenet512_weights.h5"
 gdown.download(url, output, quiet=False)
@@ -3,6 +3,9 @@ import zipfile
 import gdown
 import tensorflow as tf
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # --------------------------------
 # dependency configuration
@@ -57,7 +60,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/VGGFace2_DeepFace_weights_val-0.9034.h5") != True:
-print("VGGFace2_DeepFace_weights_val-0.9034.h5 will be downloaded...")
+logger.info("VGGFace2_DeepFace_weights_val-0.9034.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/VGGFace2_DeepFace_weights_val-0.9034.h5.zip"
 
@@ -2,6 +2,9 @@ import os
 import gdown
 import tensorflow as tf
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 tf_version = int(tf.__version__.split(".", maxsplit=1)[0])
 if tf_version == 1:
@@ -362,7 +365,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/openface_weights.h5") != True:
-print("openface_weights.h5 will be downloaded...")
+logger.info("openface_weights.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/openface_weights.h5"
 gdown.download(url, output, quiet=False)
@@ -4,6 +4,9 @@ import cv2 as cv
 import gdown
 
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # pylint: disable=line-too-long, too-few-public-methods
 
@@ -44,7 +47,7 @@ def load_model(
 
 if not os.path.isfile(file_name):
 
-print("sface weights will be downloaded...")
+logger.info("sface weights will be downloaded...")
 
 gdown.download(url, file_name, quiet=False)
 
@@ -2,6 +2,9 @@ import os
 import gdown
 import tensorflow as tf
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # ---------------------------------------
 
@@ -95,7 +98,7 @@ def loadModel(
 output = home + "/.deepface/weights/vgg_face_weights.h5"
 
 if os.path.isfile(output) != True:
-print("vgg_face_weights.h5 will be downloaded...")
+logger.info("vgg_face_weights.h5 will be downloaded...")
 gdown.download(url, output, quiet=False)
 
 # -----------------------------------
@@ -12,7 +12,9 @@ from deprecated import deprecated
 
 # package dependencies
 from deepface.detectors import FaceDetector
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # --------------------------------------------------
 # configurations of dependencies
@@ -41,11 +43,11 @@ def initialize_folder():
 
 if not os.path.exists(deepFaceHomePath):
 os.makedirs(deepFaceHomePath, exist_ok=True)
-print("Directory ", home, "/.deepface created")
+logger.info(f"Directory {home}/.deepface created")
 
 if not os.path.exists(weightsPath):
 os.makedirs(weightsPath, exist_ok=True)
-print("Directory ", home, "/.deepface/weights created")
+logger.info(f"Directory {home}/.deepface/weights created")
 
 
 def get_deepface_home():
@@ -115,6 +117,7 @@ def load_image(img):
 # This causes troubles when reading files with non english names
 # return cv2.imread(img)
 
 
 # --------------------------------------------------
 
@@ -357,7 +360,7 @@ def preprocess_face(
 Deprecated:
 0.0.78: Use extract_faces instead of preprocess_face.
 """
-print("⚠️ Function preprocess_face is deprecated. Use extract_faces instead.")
+logger.info("⚠️ Function preprocess_face is deprecated. Use extract_faces instead.")
 result = None
 img_objs = extract_faces(
 img=img,
deepface/commons/logger.py (new file, +38)
@@ -0,0 +1,38 @@
+import os
+import logging
+
+# pylint: disable=broad-except
+class Logger:
+    def __init__(self):
+        log_level = os.environ.get("DEEPFACE_LOG_LEVEL", str(logging.INFO))
+        try:
+            self.log_level = int(log_level)
+        except Exception as err:
+            self.dump_log(
+                f"Exception while parsing $DEEPFACE_LOG_LEVEL."
+                f"Expected int but it is {log_level} ({str(err)})"
+            )
+            self.log_level = logging.INFO
+
+    def info(self, message):
+        if self.log_level <= logging.INFO:
+            self.dump_log(message)
+
+    def debug(self, message):
+        if self.log_level <= logging.DEBUG:
+            self.dump_log(f"🕷️ {message}")
+
+    def warn(self, message):
+        if self.log_level <= logging.WARNING:
+            self.dump_log(f"⚠️ {message}")
+
+    def error(self, message):
+        if self.log_level <= logging.ERROR:
+            self.dump_log(f"🔴 {message}")
+
+    def critical(self, message):
+        if self.log_level <= logging.CRITICAL:
+            self.dump_log(f"💥 {message}")
+
+    def dump_log(self, message):
+        print(message)
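As a usage sketch (the environment variable and the method names come from the file above; the numeric level values are the standard constants from Python's logging module):

    import logging
    import os

    # choose verbosity before constructing a Logger; the constructor reads
    # DEEPFACE_LOG_LEVEL once and expects an integer such as logging.DEBUG (10)
    os.environ["DEEPFACE_LOG_LEVEL"] = str(logging.DEBUG)

    from deepface.commons.logger import Logger

    logger = Logger()
    logger.debug("emitted only when the level is DEBUG or lower")
    logger.info("general progress message")
    logger.warn("something looks suspicious")
    logger.error("something failed")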
@@ -5,6 +5,9 @@ import pandas as pd
 import cv2
 from deepface import DeepFace
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # dependency configuration
 os.environ["TF_CPP_MIN_LOG_LEVEL"] = "2"
@@ -35,15 +38,15 @@ def analysis(
 # build models once to store them in the memory
 # otherwise, they will be built after cam started and this will cause delays
 DeepFace.build_model(model_name=model_name)
-print(f"facial recognition model {model_name} is just built")
+logger.info(f"facial recognition model {model_name} is just built")
 
 if enable_face_analysis:
 DeepFace.build_model(model_name="Age")
-print("Age model is just built")
+logger.info("Age model is just built")
 DeepFace.build_model(model_name="Gender")
-print("Gender model is just built")
+logger.info("Gender model is just built")
 DeepFace.build_model(model_name="Emotion")
-print("Emotion model is just built")
+logger.info("Emotion model is just built")
 # -----------------------
 # call a dummy find function for db_path once to create embeddings in the initialization
 DeepFace.find(
@@ -300,7 +303,7 @@ def analysis(
 apparent_age = demography["age"]
 dominant_gender = demography["dominant_gender"]
 gender = "M" if dominant_gender == "Man" else "W"
-# print(f"{apparent_age} years old {dominant_emotion}")
+logger.debug(f"{apparent_age} years old {dominant_gender}")
 analysis_report = str(int(apparent_age)) + " " + gender
 
 # -------------------------------
@@ -675,7 +678,7 @@ def analysis(
 1,
 )
 except Exception as err: # pylint: disable=broad-except
-print(str(err))
+logger.error(str(err))
 
 tic = time.time() # in this way, freezed image can show 5 seconds
 
@@ -2,7 +2,9 @@ import os
 import bz2
 import gdown
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 def build_model():
 
@@ -14,7 +16,7 @@ def build_model():
 if os.path.isfile(home + "/.deepface/weights/shape_predictor_5_face_landmarks.dat") != True:
 
 file_name = "shape_predictor_5_face_landmarks.dat.bz2"
-print(f"{file_name} is going to be downloaded")
+logger.info(f"{file_name} is going to be downloaded")
 
 url = f"http://dlib.net/files/{file_name}"
 output = f"{home}/.deepface/weights/{file_name}"
@@ -4,6 +4,9 @@ import cv2
 import pandas as pd
 from deepface.detectors import OpenCvWrapper
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # pylint: disable=line-too-long
 
@@ -15,7 +18,7 @@ def build_model():
 # model structure
 if os.path.isfile(home + "/.deepface/weights/deploy.prototxt") != True:
 
-print("deploy.prototxt will be downloaded...")
+logger.info("deploy.prototxt will be downloaded...")
 
 url = "https://github.com/opencv/opencv/raw/3.4.0/samples/dnn/face_detector/deploy.prototxt"
 
@@ -26,7 +29,7 @@ def build_model():
 # pre-trained weights
 if os.path.isfile(home + "/.deepface/weights/res10_300x300_ssd_iter_140000.caffemodel") != True:
 
-print("res10_300x300_ssd_iter_140000.caffemodel will be downloaded...")
+logger.info("res10_300x300_ssd_iter_140000.caffemodel will be downloaded...")
 
 url = "https://github.com/opencv/opencv_3rdparty/raw/dnn_samples_face_detector_20170830/res10_300x300_ssd_iter_140000.caffemodel"
 
@@ -1,4 +1,7 @@
 from deepface.detectors import FaceDetector
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # Model's weights paths
 PATH = "/.deepface/weights/yolov8n-face.pt"
@@ -25,7 +28,7 @@ def build_model():
 # Download the model's weights if they don't exist
 if not os.path.isfile(weight_path):
 gdown.download(WEIGHT_URL, weight_path, quiet=False)
-print(f"Downloaded YOLO model {os.path.basename(weight_path)}")
+logger.info(f"Downloaded YOLO model {os.path.basename(weight_path)}")
 
 # Return face_detector
 return YOLO(weight_path)
@@ -3,7 +3,9 @@ import cv2
 import gdown
 from deepface.detectors import FaceDetector
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 def build_model():
 # pylint: disable=C0301
@@ -11,7 +13,7 @@ def build_model():
 file_name = "face_detection_yunet_2023mar.onnx"
 home = functions.get_deepface_home()
 if os.path.isfile(home + f"/.deepface/weights/{file_name}") is False:
-print(f"{file_name} will be downloaded...")
+logger.info(f"{file_name} will be downloaded...")
 output = home + f"/.deepface/weights/{file_name}"
 gdown.download(url, output, quiet=False)
 face_detector = cv2.FaceDetectorYN_create(home + f"/.deepface/weights/{file_name}", "", (0, 0))
@@ -4,6 +4,9 @@ import numpy as np
 import tensorflow as tf
 from deepface.basemodels import VGGFace
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # ----------------------------------------
 # dependency configurations
@@ -45,7 +48,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/age_model_weights.h5") != True:
-print("age_model_weights.h5 will be downloaded...")
+logger.info("age_model_weights.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/age_model_weights.h5"
 gdown.download(url, output, quiet=False)
@@ -2,6 +2,9 @@ import os
 import gdown
 import tensorflow as tf
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # -------------------------------------------
 # pylint: disable=line-too-long
@@ -65,7 +68,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/facial_expression_model_weights.h5") != True:
-print("facial_expression_model_weights.h5 will be downloaded...")
+logger.info("facial_expression_model_weights.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/facial_expression_model_weights.h5"
 gdown.download(url, output, quiet=False)
@@ -3,6 +3,9 @@ import gdown
 import tensorflow as tf
 from deepface.basemodels import VGGFace
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # -------------------------------------
 # pylint: disable=line-too-long
@@ -48,7 +51,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/gender_model_weights.h5") != True:
-print("gender_model_weights.h5 will be downloaded...")
+logger.info("gender_model_weights.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/gender_model_weights.h5"
 gdown.download(url, output, quiet=False)
@@ -3,6 +3,9 @@ import gdown
 import tensorflow as tf
 from deepface.basemodels import VGGFace
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # --------------------------
 # pylint: disable=line-too-long
@@ -46,7 +49,7 @@ def loadModel(
 home = functions.get_deepface_home()
 
 if os.path.isfile(home + "/.deepface/weights/race_model_single_batch.h5") != True:
-print("race_model_single_batch.h5 will be downloaded...")
+logger.info("race_model_single_batch.h5 will be downloaded...")
 
 output = home + "/.deepface/weights/race_model_single_batch.h5"
 gdown.download(url, output, quiet=False)
@@ -2,6 +2,9 @@ import matplotlib.pyplot as plt
 import numpy as np
 from deepface import DeepFace
 from deepface.commons import functions
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # ----------------------------------------------
 # build face recognition model
@@ -12,7 +15,7 @@ model = DeepFace.build_model(model_name=model_name)
 
 target_size = functions.find_target_size(model_name)
 
-print(f"target_size: {target_size}")
+logger.info(f"target_size: {target_size}")
 
 # ----------------------------------------------
 # load images and find embeddings
@@ -29,10 +32,10 @@ img2_representation = model.predict(img2)[0, :]
 # distance between two images
 
 distance_vector = np.square(img1_representation - img2_representation)
-# print(distance_vector)
+logger.debug(distance_vector)
 
 distance = np.sqrt(distance_vector.sum())
-print("Euclidean distance: ", distance)
+logger.info(f"Euclidean distance: {distance}")
 
 # ----------------------------------------------
 # expand vectors to be shown better in graph
@@ -5,10 +5,13 @@ import numpy as np
 import pandas as pd
 import cv2
 from deepface import DeepFace
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 # pylint: disable=consider-iterating-dictionary
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
 warnings.filterwarnings("ignore")
 os.environ["TF_CPP_MIN_LOG_LEVEL"] = "3"
@@ -20,9 +23,9 @@ if tf_major_version == 2:
 
 tf.get_logger().setLevel(logging.ERROR)
 
-print("Running unit tests for TF ", tf.__version__)
+logger.info("Running unit tests for TF " + tf.__version__)
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
 expected_coverage = 97
 num_cases = 0
@@ -58,12 +61,12 @@ dataset = [
 ["dataset/img6.jpg", "dataset/img9.jpg", False],
 ]
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
 
 def test_cases():
 
-print("Enforce detection test")
+logger.info("Enforce detection test")
 black_img = np.zeros([224, 224, 3])
 
 # enforce detection on for represent
@@ -96,7 +99,7 @@ def test_cases():
 assert isinstance(objs[0]["embedding"], list)
 assert len(objs[0]["embedding"]) == 2622 # embedding of VGG-Face
 except Exception as err:
-print(f"Unexpected exception thrown: {str(err)}")
+logger.error(f"Unexpected exception thrown: {str(err)}")
 exception_thrown = True
 
 assert exception_thrown is False
@@ -118,15 +121,15 @@ def test_cases():
 assert isinstance(obj, dict)
 exception_thrown = False
 except Exception as err:
-print(f"Unexpected exception thrown: {str(err)}")
+logger.error(f"Unexpected exception thrown: {str(err)}")
 exception_thrown = True
 
 assert exception_thrown is False
 # -------------------------------------------
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
-print("Extract faces test")
+logger.info("Extract faces test")
 
 for detector in detectors:
 img_objs = DeepFace.extract_faces(img_path="dataset/img11.jpg", detector_backend=detector)
@@ -142,23 +145,23 @@ def test_cases():
 
 img = img_obj["face"]
 evaluate(img.shape[0] > 0 and img.shape[1] > 0)
-print(detector, " test is done")
+logger.info(f"{detector} test is done")
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
 img_path = "dataset/img1.jpg"
 embedding_objs = DeepFace.represent(img_path)
 for embedding_obj in embedding_objs:
 embedding = embedding_obj["embedding"]
-print("Function returned ", len(embedding), "dimensional vector")
+logger.info(f"Function returned {len(embedding)} dimensional vector")
 evaluate(len(embedding) == 2622)
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
-print("Different face detectors on verification test")
+logger.info("Different face detectors on verification test")
 
 for detector in detectors:
-print(detector + " detector")
+logger.info(detector + " detector")
 res = DeepFace.verify(dataset[0][0], dataset[0][1], detector_backend=detector)
 
 assert isinstance(res, dict)
@@ -181,67 +184,72 @@ def test_cases():
 assert "w" in res["facial_areas"]["img2"].keys()
 assert "h" in res["facial_areas"]["img2"].keys()
 
-print(res)
+logger.info(res)
 evaluate(res["verified"] == dataset[0][2])
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
-print("Find function test")
+logger.info("Find function test")
 
 dfs = DeepFace.find(img_path="dataset/img1.jpg", db_path="dataset")
 for df in dfs:
 assert isinstance(df, pd.DataFrame)
-print(df.head())
+logger.info(df.head())
 evaluate(df.shape[0] > 0)
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
-print("Facial analysis test. Passing nothing as an action")
+logger.info("Facial analysis test. Passing nothing as an action")
 
 img = "dataset/img4.jpg"
 demography_objs = DeepFace.analyze(img)
 for demography in demography_objs:
-print(demography)
+logger.info(demography)
 evaluate(demography["age"] > 20 and demography["age"] < 40)
 evaluate(demography["dominant_gender"] == "Woman")
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
-print("Facial analysis test. Passing all to the action")
+logger.info("Facial analysis test. Passing all to the action")
 demography_objs = DeepFace.analyze(img, ["age", "gender", "race", "emotion"])
 
 for demography in demography_objs:
-# print(f"Demography: {demography}")
+logger.debug(f"Demography: {demography}")
 # check response is a valid json
-print("Age: ", demography["age"])
-print("Gender: ", demography["dominant_gender"])
-print("Race: ", demography["dominant_race"])
-print("Emotion: ", demography["dominant_emotion"])
+age = demography["age"]
+gender = demography["dominant_gender"]
+race = demography["dominant_race"]
+emotion = demography["dominant_emotion"]
+logger.info(f"Age: {age}")
+logger.info(f"Gender: {gender}")
+logger.info(f"Race: {race}")
+logger.info(f"Emotion: {emotion}")
 
 evaluate(demography.get("age") is not None)
 evaluate(demography.get("dominant_gender") is not None)
 evaluate(demography.get("dominant_race") is not None)
 evaluate(demography.get("dominant_emotion") is not None)
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
-print("Facial analysis test 2. Remove some actions and check they are not computed")
+logger.info("Facial analysis test 2. Remove some actions and check they are not computed")
 demography_objs = DeepFace.analyze(img, ["age", "gender"])
 
 for demography in demography_objs:
-print("Age: ", demography.get("age"))
-print("Gender: ", demography.get("dominant_gender"))
-print("Race: ", demography.get("dominant_race"))
-print("Emotion: ", demography.get("dominant_emotion"))
+age = demography["age"]
+gender = demography["dominant_gender"]
+
+logger.info(f"Age: { age }")
+logger.info(f"Gender: {gender}")
 
 evaluate(demography.get("age") is not None)
 evaluate(demography.get("dominant_gender") is not None)
 evaluate(demography.get("dominant_race") is None)
 evaluate(demography.get("dominant_emotion") is None)
 
-print("-----------------------------------------")
+logger.info("-----------------------------------------")
 
-print("Facial recognition tests")
+logger.info("Facial recognition tests")
 
 for model in models:
 for metric in metrics:
@@ -270,64 +278,55 @@ def test_cases():
 else:
 classified_label = "unverified"
 
-print(
-img1.split("/", maxsplit=1)[-1],
-"-",
-img2.split("/", maxsplit=1)[-1],
-classified_label,
-"as same person based on",
-model,
-"and",
-metric,
-". Distance:",
-distance,
-", Threshold:",
-threshold,
-"(",
-test_result_label,
-")",
+img1_alias = img1.split("/", maxsplit=1)[-1]
+img2_alias = img2.split("/", maxsplit=1)[-1]
+
+logger.info(
+f"{img1_alias} - {img2_alias}"
+f". {classified_label} as same person based on {model} and {metric}"
+f". Distance: {distance}, Threshold:{threshold} ({test_result_label})",
 )
 
-print("--------------------------")
+logger.info("--------------------------")
 
 # -----------------------------------------
 
-print("Passing numpy array to analyze function")
+logger.info("Passing numpy array to analyze function")
 
 img = cv2.imread("dataset/img1.jpg")
 resp_objs = DeepFace.analyze(img)
 
 for resp_obj in resp_objs:
-print(resp_obj)
+logger.info(resp_obj)
 evaluate(resp_obj["age"] > 20 and resp_obj["age"] < 40)
 evaluate(resp_obj["gender"] == "Woman")
 
-print("--------------------------")
+logger.info("--------------------------")
 
-print("Passing numpy array to verify function")
+logger.info("Passing numpy array to verify function")
 
 img1 = cv2.imread("dataset/img1.jpg")
 img2 = cv2.imread("dataset/img2.jpg")
 
 res = DeepFace.verify(img1, img2)
-print(res)
+logger.info(res)
 evaluate(res["verified"] == True)
 
-print("--------------------------")
+logger.info("--------------------------")
 
-print("Passing numpy array to find function")
+logger.info("Passing numpy array to find function")
 
 img1 = cv2.imread("dataset/img1.jpg")
 
 dfs = DeepFace.find(img1, db_path="dataset")
 
 for df in dfs:
-print(df.head())
+logger.info(df.head())
 evaluate(df.shape[0] > 0)
 
-print("--------------------------")
+logger.info("--------------------------")
 
-print("non-binary gender tests")
+logger.info("non-binary gender tests")
 
 # interface validation - no need to call evaluate here
 
@@ -338,7 +337,7 @@ def test_cases():
 )
 
 for result in results:
-print(result)
+logger.info(result)
 
 assert "gender" in result.keys()
 assert "dominant_gender" in result.keys() and result["dominant_gender"] in [
@@ -356,16 +355,16 @@ def test_cases():
 
 test_cases()
 
-print("num of test cases run: " + str(num_cases))
-print("succeeded test cases: " + str(succeed_cases))
+logger.info("num of test cases run: " + str(num_cases))
+logger.info("succeeded test cases: " + str(succeed_cases))
 
 test_score = (100 * succeed_cases) / num_cases
 
-print("test coverage: " + str(test_score))
+logger.info("test coverage: " + str(test_score))
 
 if test_score > expected_coverage:
-print("well done! min required test coverage is satisfied")
+logger.info("well done! min required test coverage is satisfied")
 else:
-print("min required test coverage is NOT satisfied")
+logger.info("min required test coverage is NOT satisfied")
 
 assert test_score > expected_coverage
@@ -1,5 +1,8 @@
 import matplotlib.pyplot as plt
 from deepface import DeepFace
+from deepface.commons.logger import Logger
+
+logger = Logger()
 
 model_names = [
 "VGG-Face",
@@ -19,22 +22,22 @@ for model_name in model_names:
 obj = DeepFace.verify(
 img1_path="dataset/img1.jpg", img2_path="dataset/img2.jpg", model_name=model_name
 )
-print(obj)
-print("---------------------")
+logger.info(obj)
+logger.info("---------------------")
 
 # represent
 for model_name in model_names:
 embedding_objs = DeepFace.represent(img_path="dataset/img1.jpg", model_name=model_name)
 for embedding_obj in embedding_objs:
 embedding = embedding_obj["embedding"]
-print(f"{model_name} produced {len(embedding)}D vector")
+logger.info(f"{model_name} produced {len(embedding)}D vector")
 
 # find
 dfs = DeepFace.find(
 img_path="dataset/img1.jpg", db_path="dataset", model_name="Facenet", detector_backend="mtcnn"
 )
 for df in dfs:
-print(df)
+logger.info(df)
 
 # extract faces
 for detector_backend in detector_backends:
@@ -43,8 +46,8 @@ for detector_backend in detector_backends:
 )
 for face_obj in face_objs:
 face = face_obj["face"]
-print(detector_backend)
+logger.info(detector_backend)
 plt.imshow(face)
 plt.axis("off")
 plt.show()
-print("-----------")
+logger.info("-----------")