Mirror of https://github.com/serengil/deepface.git
creating image utils

commit 821cb6b895
parent c75ee244e0
@@ -10,7 +10,6 @@ os.environ["TF_USE_LEGACY_KERAS"] = "1"
 # pylint: disable=wrong-import-position
 
 # 3rd party dependencies
-import cv2
 import numpy as np
 import pandas as pd
 import tensorflow as tf
@@ -26,6 +25,7 @@ from deepface.modules import (
     demography,
     detection,
     streaming,
+    preprocessing,
 )
 from deepface import __version__
 
@@ -548,5 +548,5 @@ def detectFace(
     extracted_face = None
     if len(face_objs) > 0:
         extracted_face = face_objs[0]["face"]
-        extracted_face = cv2.resize(extracted_face, target_size)
+        extracted_face = preprocessing.resize_image(img=extracted_face, target_size=target_size)
     return extracted_face
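
The deprecated detectFace wrapper now delegates resizing to the new preprocessing module instead of calling cv2.resize directly, which is why import cv2 could be dropped from this file. The helper itself is not part of this diff; the sketch below is only an assumption of what preprocessing.resize_image might do (an aspect-ratio-preserving resize followed by zero padding, assuming target_size is (height, width) and a 3-channel image), not the code that actually ships.

    # Hypothetical sketch; the real preprocessing.resize_image may differ.
    import cv2
    import numpy as np

    def resize_image(img: np.ndarray, target_size: tuple) -> np.ndarray:
        # Scale so the image fits inside target_size without distortion.
        factor = min(target_size[0] / img.shape[0], target_size[1] / img.shape[1])
        resized = cv2.resize(img, (int(img.shape[1] * factor), int(img.shape[0] * factor)))
        # Pad with black pixels to reach the exact target shape.
        pad_h = target_size[0] - resized.shape[0]
        pad_w = target_size[1] - resized.shape[1]
        return np.pad(
            resized,
            ((pad_h // 2, pad_h - pad_h // 2), (pad_w // 2, pad_w - pad_w // 2), (0, 0)),
            mode="constant",
        )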
deepface/commons/file_utils.py (new file, 55 lines)
@@ -0,0 +1,55 @@
+# built-in dependencies
+import os
+from typing import List
+import hashlib
+
+# 3rd party dependencies
+from PIL import Image
+
+
+def list_images(path: str) -> List[str]:
+    """
+    List images in a given path
+    Args:
+        path (str): path's location
+    Returns:
+        images (list): list of exact image paths
+    """
+    images = []
+    for r, _, f in os.walk(path):
+        for file in f:
+            exact_path = os.path.join(r, file)
+
+            _, ext = os.path.splitext(exact_path)
+            ext_lower = ext.lower()
+
+            if ext_lower not in {".jpg", ".jpeg", ".png"}:
+                continue
+
+            with Image.open(exact_path) as img:  # lazy
+                if img.format.lower() in ["jpeg", "png"]:
+                    images.append(exact_path)
+    return images
+
+
+def find_hash_of_file(file_path: str) -> str:
+    """
+    Find the hash of given image file with its properties
+    finding the hash of image content is costly operation
+    Args:
+        file_path (str): exact image path
+    Returns:
+        hash (str): digest with sha1 algorithm
+    """
+    file_stats = os.stat(file_path)
+
+    # some properties
+    file_size = file_stats.st_size
+    creation_time = file_stats.st_ctime
+    modification_time = file_stats.st_mtime
+
+    properties = f"{file_size}-{creation_time}-{modification_time}"
+
+    hasher = hashlib.sha1()
+    hasher.update(properties.encode("utf-8"))
+    return hasher.hexdigest()
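
The new module gathers the image-related helpers in one place: list_images walks a directory tree and keeps only files whose extension is .jpg/.jpeg/.png and whose content Pillow confirms to be JPEG or PNG, while find_hash_of_file builds a SHA-1 digest from file size, creation time, and modification time rather than from pixel data, so the check stays cheap even for large databases. A minimal usage sketch (the db/ folder name is made up for illustration):

    from deepface.commons import file_utils

    # Collect every verified JPEG/PNG under db/ and hash its metadata.
    for image_path in file_utils.list_images(path="db"):
        digest = file_utils.find_hash_of_file(file_path=image_path)
        print(image_path, digest)

One consequence of hashing metadata instead of content is that the digest changes whenever a file is rewritten or its timestamps change, even if the pixels are identical; that is what the recognition hunks below appear to rely on when deciding whether a cached embedding is stale.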
@@ -1,6 +1,5 @@
 # built-in dependencies
 import os
-import hashlib
 
 # 3rd party dependencies
 import tensorflow as tf
@@ -29,29 +28,6 @@ def get_tf_minor_version() -> int:
     return int(tf.__version__.split(".", maxsplit=-1)[1])
 
 
-def find_hash_of_file(file_path: str) -> str:
-    """
-    Find the hash of given image file with its properties
-    finding the hash of image content is costly operation
-    Args:
-        file_path (str): exact image path
-    Returns:
-        hash (str): digest with sha1 algorithm
-    """
-    file_stats = os.stat(file_path)
-
-    # some properties
-    file_size = file_stats.st_size
-    creation_time = file_stats.st_ctime
-    modification_time = file_stats.st_mtime
-
-    properties = f"{file_size}-{creation_time}-{modification_time}"
-
-    hasher = hashlib.sha1()
-    hasher.update(properties.encode("utf-8"))
-    return hasher.hexdigest()
-
-
 def validate_for_keras3():
     tf_major = get_tf_major_version()
     tf_minor = get_tf_minor_version()
@@ -8,10 +8,9 @@ import time
 import numpy as np
 import pandas as pd
 from tqdm import tqdm
-from PIL import Image
 
 # project dependencies
-from deepface.commons import package_utils
+from deepface.commons import package_utils, file_utils
 from deepface.modules import representation, detection, verification
 from deepface.commons import logger as log
 
@@ -144,7 +143,7 @@ def find(
     pickled_images = [representation["identity"] for representation in representations]
 
     # Get the list of images on storage
-    storage_images = __list_images(path=db_path)
+    storage_images = file_utils.list_images(path=db_path)
 
     if len(storage_images) == 0:
         raise ValueError(f"No item found in {db_path}")
@@ -161,7 +160,7 @@ def find(
         if identity in old_images:
             continue
         alpha_hash = current_representation["hash"]
-        beta_hash = package_utils.find_hash_of_file(identity)
+        beta_hash = file_utils.find_hash_of_file(identity)
         if alpha_hash != beta_hash:
             logger.debug(f"Even though {identity} represented before, it's replaced later.")
             replaced_images.append(identity)
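
This hunk only moves the helper call from package_utils to file_utils; the surrounding staleness check is unchanged. To see why a metadata hash is enough to detect a replaced image, consider the hypothetical scenario below (the paths and the shutil-based replacement are made up for illustration): overwriting a file updates its size and modification time, so find_hash_of_file returns a different digest and find() treats the cached embedding as replaced.

    import shutil
    from deepface.commons import file_utils

    before = file_utils.find_hash_of_file("db/alice/1.jpg")
    # Replace the image on disk with a different photo.
    shutil.copyfile("new_photo.jpg", "db/alice/1.jpg")
    after = file_utils.find_hash_of_file("db/alice/1.jpg")
    print(before != after)  # expected True: size/mtime changed, so the stored hash no longer matches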
@@ -292,31 +291,6 @@ def find(
     return resp_obj
 
 
-def __list_images(path: str) -> List[str]:
-    """
-    List images in a given path
-    Args:
-        path (str): path's location
-    Returns:
-        images (list): list of exact image paths
-    """
-    images = []
-    for r, _, f in os.walk(path):
-        for file in f:
-            exact_path = os.path.join(r, file)
-
-            _, ext = os.path.splitext(exact_path)
-            ext_lower = ext.lower()
-
-            if ext_lower not in {".jpg", ".jpeg", ".png"}:
-                continue
-
-            with Image.open(exact_path) as img:  # lazy
-                if img.format.lower() in ["jpeg", "png"]:
-                    images.append(exact_path)
-    return images
-
-
 def __find_bulk_embeddings(
     employees: List[str],
     model_name: str = "VGG-Face",
@@ -360,7 +334,7 @@ def __find_bulk_embeddings(
        desc="Finding representations",
        disable=silent,
    ):
-        file_hash = package_utils.find_hash_of_file(employee)
+        file_hash = file_utils.find_hash_of_file(employee)
 
        try:
            img_objs = detection.extract_faces(