Compare commits
16 commits: fe6c4b3f9d ... 666f129739
| Author | SHA1 | Date |
|---|---|---|
| | 666f129739 | |
| | 5845b9c480 | |
| | 71cf39fd98 | |
| | 92db20eba4 | |
| | f1e365799e | |
| | 6d1238212a | |
| | 92a0994f01 | |
| | cad40b25dc | |
| | 1b4c0ce43e | |
| | fd4e3f546d | |
| | 5bcd6dabde | |
| | 3e1f333e5e | |
| | 1f71d274b5 | |
| | bbfdf83267 | |
| | 88254c3952 | |
| | 069e9b46e6 | |
```diff
@@ -1,5 +1,6 @@
 ## Deep Live Cam
```
```diff
@@ -44,7 +45,7 @@ Track and change faces on the fly.
 
 ## Quick Start (Windows / Nvidia)
 
-[Download pre-built version with CUDA support](https://hacksider.gumroad.com/l/vccdmm)
+[Download latest pre-built version with CUDA support](https://hacksider.gumroad.com/l/vccdmm) - No Manual Installation/Downloading required.
 
 ## Installation (Manual)
 
```
```diff
@@ -69,7 +70,7 @@ https://github.com/hacksider/Deep-Live-Cam.git
 **3. Download Models**
 
 1. [GFPGANv1.4](https://huggingface.co/hacksider/deep-live-cam/resolve/main/GFPGANv1.4.pth)
-2. [inswapper_128_fp16.onnx](https://huggingface.co/hacksider/deep-live-cam/resolve/main/inswapper_128_fp16.onnx) (Note: Use this [replacement version](https://github.com/facefusion/facefusion-assets/releases/download/models/inswapper_128_fp16.onnx) if you encounter issues)
+2. [inswapper_128_fp16.onnx](https://huggingface.co/hacksider/deep-live-cam/resolve/main/inswapper_128.onnx) (Note: Use this [replacement version](https://github.com/facefusion/facefusion-assets/releases/download/models/inswapper_128.onnx) if you encounter issues)
 
 Place these files in the "**models**" folder.
 
```
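The README hunk above repoints the second download at the full-precision inswapper_128.onnx. For anyone scripting this step, a minimal sketch follows; it assumes the Hugging Face URLs from the README stay valid and that a local `models/` folder sits next to the script.

```python
# Sketch: fetch both model files into ./models, skipping files that already exist.
import os
import urllib.request

MODELS = {
    "GFPGANv1.4.pth": "https://huggingface.co/hacksider/deep-live-cam/resolve/main/GFPGANv1.4.pth",
    "inswapper_128.onnx": "https://huggingface.co/hacksider/deep-live-cam/resolve/main/inswapper_128.onnx",
}

os.makedirs("models", exist_ok=True)
for filename, url in MODELS.items():
    destination = os.path.join("models", filename)
    if not os.path.exists(destination):
        print(f"Downloading {filename} ...")
        urllib.request.urlretrieve(url, destination)
```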
```diff
@@ -1,3 +1,3 @@
 name = 'Deep Live Cam'
-version = '1.4.0'
+version = '1.5.0'
 edition = 'Portable'
```
```diff
@@ -18,7 +18,7 @@ NAME = 'DLC.FACE-SWAPPER'
 
 def pre_check() -> bool:
     download_directory_path = resolve_relative_path('../models')
-    conditional_download(download_directory_path, ['https://huggingface.co/hacksider/deep-live-cam/blob/main/inswapper_128_fp16.onnx'])
+    conditional_download(download_directory_path, ['https://huggingface.co/hacksider/deep-live-cam/blob/main/inswapper_128.onnx'])
     return True
 
 
```
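`pre_check()` delegates to `conditional_download` from `modules.utilities`, which is not part of this diff. As a rough mental model only — a hypothetical re-implementation, not the project's actual helper — it fetches each URL only when the corresponding file is missing from the target directory:

```python
# Hypothetical sketch of a conditional download helper (not the project's code):
# download each URL only if the file is not already present locally.
import os
import urllib.request


def conditional_download(download_directory_path: str, urls: list[str]) -> None:
    os.makedirs(download_directory_path, exist_ok=True)
    for url in urls:
        destination = os.path.join(download_directory_path, os.path.basename(url))
        if not os.path.exists(destination):
            urllib.request.urlretrieve(url, destination)
```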
```diff
@@ -40,7 +40,7 @@ def get_face_swapper() -> Any:
 
     with THREAD_LOCK:
         if FACE_SWAPPER is None:
-            model_path = resolve_relative_path('../models/inswapper_128_fp16.onnx')
+            model_path = resolve_relative_path('../models/inswapper_128.onnx')
             FACE_SWAPPER = insightface.model_zoo.get_model(model_path, providers=modules.globals.execution_providers)
     return FACE_SWAPPER
 
```
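`get_face_swapper()` caches the model behind `THREAD_LOCK`, so every frame-processing thread shares one inswapper_128.onnx session. The sketch below shows how such a swapper is typically driven through the public insightface API; the file paths, the `buffalo_l` analyser pack, and the CPU provider are illustrative assumptions, not the project's exact wiring.

```python
# Sketch: swap a detected face in a single frame with a cached inswapper model.
import cv2
import insightface
from insightface.app import FaceAnalysis

# Load the swapper and a face analyser directly (the project resolves these paths itself).
swapper = insightface.model_zoo.get_model(
    "models/inswapper_128.onnx", providers=["CPUExecutionProvider"]
)
analyser = FaceAnalysis(name="buffalo_l")
analyser.prepare(ctx_id=0)

source = cv2.imread("source.jpg")   # face to copy (assumed sample path)
frame = cv2.imread("target.jpg")    # frame to modify (assumed sample path)
source_face = analyser.get(source)[0]
target_face = analyser.get(frame)[0]

# paste_back=True blends the swapped face back into the original frame.
result = swapper.get(frame, target_face, source_face, paste_back=True)
cv2.imwrite("swapped.jpg", result)
```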
```diff
@@ -0,0 +1,158 @@
+{
+  "CTk": {
+    "fg_color": ["gray95", "gray10"]
+  },
+  "CTkToplevel": {
+    "fg_color": ["gray95", "gray10"]
+  },
+  "CTkFrame": {
+    "corner_radius": 0,
+    "border_width": 0,
+    "fg_color": ["gray90", "gray13"],
+    "top_fg_color": ["gray85", "gray16"],
+    "border_color": ["gray65", "gray28"]
+  },
+  "CTkButton": {
+    "corner_radius": 0,
+    "border_width": 0,
+    "fg_color": ["#2aa666", "#1f538d"],
+    "hover_color": ["#3cb666", "#14375e"],
+    "border_color": ["#3e4a40", "#949A9F"],
+    "text_color": ["#f3faf6", "#f3faf6"],
+    "text_color_disabled": ["gray74", "gray60"]
+  },
+  "CTkLabel": {
+    "corner_radius": 0,
+    "fg_color": "transparent",
+    "text_color": ["gray14", "gray84"]
+  },
+  "CTkEntry": {
+    "corner_radius": 0,
+    "border_width": 2,
+    "fg_color": ["#F9F9FA", "#343638"],
+    "border_color": ["#979DA2", "#565B5E"],
+    "text_color": ["gray14", "gray84"],
+    "placeholder_text_color": ["gray52", "gray62"]
+  },
+  "CTkCheckbox": {
+    "corner_radius": 0,
+    "border_width": 3,
+    "fg_color": ["#2aa666", "#1f538d"],
+    "border_color": ["#3e4a40", "#949A9F"],
+    "hover_color": ["#3cb666", "#14375e"],
+    "checkmark_color": ["#f3faf6", "gray90"],
+    "text_color": ["gray14", "gray84"],
+    "text_color_disabled": ["gray60", "gray45"]
+  },
+  "CTkSwitch": {
+    "corner_radius": 1000,
+    "border_width": 3,
+    "button_length": 0,
+    "fg_color": ["#939BA2", "#4A4D50"],
+    "progress_color": ["#2aa666", "#1f538d"],
+    "button_color": ["gray36", "#D5D9DE"],
+    "button_hover_color": ["gray20", "gray100"],
+    "text_color": ["gray14", "gray84"],
+    "text_color_disabled": ["gray60", "gray45"]
+  },
+  "CTkRadiobutton": {
+    "corner_radius": 1000,
+    "border_width_checked": 6,
+    "border_width_unchecked": 3,
+    "fg_color": ["#2aa666", "#1f538d"],
+    "border_color": ["#3e4a40", "#949A9F"],
+    "hover_color": ["#3cb666", "#14375e"],
+    "text_color": ["gray14", "gray84"],
+    "text_color_disabled": ["gray60", "gray45"]
+  },
+  "CTkProgressBar": {
+    "corner_radius": 1000,
+    "border_width": 0,
+    "fg_color": ["#939BA2", "#4A4D50"],
+    "progress_color": ["#2aa666", "#1f538d"],
+    "border_color": ["gray", "gray"]
+  },
+  "CTkSlider": {
+    "corner_radius": 1000,
+    "button_corner_radius": 1000,
+    "border_width": 6,
+    "button_length": 0,
+    "fg_color": ["#939BA2", "#4A4D50"],
+    "progress_color": ["gray40", "#AAB0B5"],
+    "button_color": ["#2aa666", "#1f538d"],
+    "button_hover_color": ["#3cb666", "#14375e"]
+  },
+  "CTkOptionMenu": {
+    "corner_radius": 0,
+    "fg_color": ["#2aa666", "#1f538d"],
+    "button_color": ["#3cb666", "#14375e"],
+    "button_hover_color": ["#234567", "#1e2c40"],
+    "text_color": ["#f3faf6", "#f3faf6"],
+    "text_color_disabled": ["gray74", "gray60"]
+  },
+  "CTkComboBox": {
+    "corner_radius": 0,
+    "border_width": 2,
+    "fg_color": ["#F9F9FA", "#343638"],
+    "border_color": ["#979DA2", "#565B5E"],
+    "button_color": ["#979DA2", "#565B5E"],
+    "button_hover_color": ["#6E7174", "#7A848D"],
+    "text_color": ["gray14", "gray84"],
+    "text_color_disabled": ["gray50", "gray45"]
+  },
+  "CTkScrollbar": {
+    "corner_radius": 1000,
+    "border_spacing": 4,
+    "fg_color": "transparent",
+    "button_color": ["gray55", "gray41"],
+    "button_hover_color": ["gray40", "gray53"]
+  },
+  "CTkSegmentedButton": {
+    "corner_radius": 0,
+    "border_width": 2,
+    "fg_color": ["#979DA2", "gray29"],
+    "selected_color": ["#2aa666", "#1f538d"],
+    "selected_hover_color": ["#3cb666", "#14375e"],
+    "unselected_color": ["#979DA2", "gray29"],
+    "unselected_hover_color": ["gray70", "gray41"],
+    "text_color": ["#f3faf6", "#f3faf6"],
+    "text_color_disabled": ["gray74", "gray60"]
+  },
+  "CTkTextbox": {
+    "corner_radius": 0,
+    "border_width": 0,
+    "fg_color": ["gray100", "gray20"],
+    "border_color": ["#979DA2", "#565B5E"],
+    "text_color": ["gray14", "gray84"],
+    "scrollbar_button_color": ["gray55", "gray41"],
+    "scrollbar_button_hover_color": ["gray40", "gray53"]
+  },
+  "CTkScrollableFrame": {
+    "label_fg_color": ["gray80", "gray21"]
+  },
+  "DropdownMenu": {
+    "fg_color": ["gray90", "gray20"],
+    "hover_color": ["gray75", "gray28"],
+    "text_color": ["gray14", "gray84"]
+  },
+  "CTkFont": {
+    "macOS": {
+      "family": "Avenir",
+      "size": 18,
+      "weight": "normal"
+    },
+    "Windows": {
+      "family": "Corbel",
+      "size": 18,
+      "weight": "normal"
+    },
+    "Linux": {
+      "family": "Montserrat",
+      "size": 18,
+      "weight": "normal"
+    }
+  },
+  "URL": {
+    "text_color": ["gray74", "gray60"]
+  }
+}
```
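The 158 added lines above are a CustomTkinter color theme, with each color given as a `[light, dark]` pair. A theme file like this is normally activated with `customtkinter.set_default_color_theme()` before any widgets are created; the sketch below assumes the file is saved as `theme.json`, since the diff does not show its path.

```python
# Sketch: apply the JSON color theme above to a CustomTkinter window.
import customtkinter as ctk

ctk.set_appearance_mode("system")          # picks the light or dark entry of each color pair
ctk.set_default_color_theme("theme.json")  # assumed file name for the theme shown above

root = ctk.CTk()
root.title("Deep Live Cam")
ctk.CTkButton(root, text="Select a face").pack(padx=20, pady=20)  # inherits the themed colors
root.mainloop()
```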
```diff
@@ -25,6 +25,10 @@ from modules.utilities import (
     has_image_extension,
 )
 
+os.environ["QT_AUTO_SCREEN_SCALE_FACTOR"] = "1"
+os.environ["QT_SCREEN_SCALE_FACTORS"] = "1"
+os.environ["QT_SCALE_FACTOR"] = "1"
+
 ROOT = None
 POPUP = None
 POPUP_LIVE = None
```
```diff
@@ -219,6 +223,7 @@ def create_root(
     root.configure(bg="#1a1a1a")
     root.protocol("WM_DELETE_WINDOW", lambda: destroy())
     root.resizable(True, True)
+    root.attributes("-alpha", 1.0)  # Set window opacity to fully opaque
 
     main_frame = ctk.CTkFrame(root, fg_color="#1a1a1a")
     main_frame.pack(fill="both", expand=True, padx=20, pady=20)
```
```diff
@@ -879,15 +884,43 @@ def init_preview() -> None:
 def update_preview(frame_number: int = 0) -> None:
     if modules.globals.source_path and modules.globals.target_path:
         update_status("Processing...")
-        temp_frame = get_video_frame(modules.globals.target_path, frame_number)
+
+        # Debug: Print the target path and frame number
+        print(
+            f"Target path: {modules.globals.target_path}, Frame number: {frame_number}"
+        )
+
+        temp_frame = None
+        if is_video(modules.globals.target_path):
+            temp_frame = get_video_frame(modules.globals.target_path, frame_number)
+        elif is_image(modules.globals.target_path):
+            temp_frame = cv2.imread(modules.globals.target_path)
+
+        # Debug: Check if temp_frame is None
+        if temp_frame is None:
+            print("Error: temp_frame is None")
+            update_status("Error: Could not read frame from video or image.")
+            return
+
         if modules.globals.nsfw_filter and check_and_ignore_nsfw(temp_frame):
             return
+
         for frame_processor in get_frame_processors_modules(
             modules.globals.frame_processors
         ):
+            # Debug: Print the type of frame_processor
+            print(f"Processing frame with: {type(frame_processor).__name__}")
+
             temp_frame = frame_processor.process_frame(
                 get_one_face(cv2.imread(modules.globals.source_path)), temp_frame
             )
+
+            # Debug: Check if temp_frame is None after processing
+            if temp_frame is None:
+                print("Error: temp_frame is None after processing")
+                update_status("Error: Frame processing failed.")
+                return
+
         image = Image.fromarray(cv2.cvtColor(temp_frame, cv2.COLOR_BGR2RGB))
         image = ImageOps.contain(
             image, (PREVIEW_MAX_WIDTH, PREVIEW_MAX_HEIGHT), Image.LANCZOS
```
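The reworked `update_preview` chooses between `get_video_frame` and `cv2.imread` based on `is_video` / `is_image`, which live in `modules.utilities` and are not shown in this diff. A plausible MIME-type-based sketch of such helpers — hypothetical, not the project's actual implementation — looks like this:

```python
# Hypothetical sketch of target-type checks based on MIME type guessing.
import mimetypes
import os


def is_image(path: str) -> bool:
    if path and os.path.isfile(path):
        mimetype, _ = mimetypes.guess_type(path)
        return bool(mimetype and mimetype.startswith("image/"))
    return False


def is_video(path: str) -> bool:
    if path and os.path.isfile(path):
        mimetype, _ = mimetypes.guess_type(path)
        return bool(mimetype and mimetype.startswith("video/"))
    return False
```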
```diff
@@ -12,10 +12,8 @@ torch==2.0.1+cu118; sys_platform != 'darwin'
 torch==2.0.1; sys_platform == 'darwin'
 torchvision==0.15.2+cu118; sys_platform != 'darwin'
 torchvision==0.15.2; sys_platform == 'darwin'
-onnxruntime==1.18.0; sys_platform == 'darwin' and platform_machine != 'arm64'
 onnxruntime-silicon==1.16.3; sys_platform == 'darwin' and platform_machine == 'arm64'
-onnxruntime-gpu==1.18.0; sys_platform != 'darwin'
-tensorflow==2.13.0rc1; sys_platform == 'darwin'
+onnxruntime-gpu==1.16.3; sys_platform != 'darwin'
 tensorflow==2.12.1; sys_platform != 'darwin'
 opennsfw2==0.10.2
 protobuf==4.23.2
```
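This hunk rolls `onnxruntime-gpu` back from 1.18.0 to 1.16.3 on non-macOS platforms. After installing the requirements, a quick diagnostic — an optional check, not part of the repo — confirms that the installed build actually exposes the CUDA provider:

```python
# Diagnostic sketch: confirm the installed onnxruntime build exposes the CUDA provider.
import onnxruntime as ort

print("onnxruntime version:", ort.__version__)
providers = ort.get_available_providers()
print("available providers:", providers)

if "CUDAExecutionProvider" not in providers:
    print("CUDA provider not found - inference will fall back to CPU.")
```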