jallenjia committed · Commit a44d19a · 1 Parent(s): b48214e

update workflow, use Realistic_Vision and 4x_NMKD-Superscale

Files changed (1): app.py (+13 -14)
app.py CHANGED
@@ -10,11 +10,14 @@ import subprocess
 
 from huggingface_hub import hf_hub_download
 from huggingface_hub import snapshot_download
-#hf_hub_download(repo_id="black-forest-labs/FLUX.1-Redux-dev", filename="flux1-redux-dev.safetensors", local_dir="models/style_models")
 
-#https://huggingface.co/autismanon/modeldump/blob/main/dreamshaper_8.safetensors
-print("dreamshaper_8.safetensors")
-hf_hub_download(repo_id="autismanon/modeldump", filename="dreamshaper_8.safetensors", local_dir="models/checkpoints/SD1.5/")
+#https://huggingface.co/SG161222/Realistic_Vision_V6.0_B1_noVAE/blob/main/Realistic_Vision_V6.0_NV_B1_fp16.safetensors
+print("Realistic_Vision_V6.0_NV_B1_fp16.safetensors")
+hf_hub_download(repo_id="SG161222/Realistic_Vision_V6.0_B1_noVAE", filename="Realistic_Vision_V6.0_NV_B1_fp16.safetensors", local_dir="models/checkpoints/SD1.5/")
+
+#https://huggingface.co/gemasai/4x_NMKD-Superscale-SP_178000_G/blob/main/4x_NMKD-Superscale-SP_178000_G.pth
+print("4x_NMKD-Superscale-SP_178000_G.pth")
+hf_hub_download(repo_id="gemasai/4x_NMKD-Superscale-SP_178000_G", filename="4x_NMKD-Superscale-SP_178000_G.pth", local_dir="models/upscale_models/")
 
 #https://huggingface.co/lllyasviel/ControlNet-v1-1/blob/main/control_v11p_sd15_openpose.pth
 print("control_v11p_sd15_openpose.pth")
@@ -36,10 +39,6 @@ hf_hub_download(repo_id="naonovn/Lora", filename="add_detail.safetensors", local
 print("BaldifierW2.safetensors")
 hf_hub_download(repo_id="Dreamspire/BaldifierW2", filename="BaldifierW2.safetensors", local_dir="models/loras/")
 
-#https://huggingface.co/lokCX/4x-Ultrasharp/blob/main/4x-UltraSharp.pth
-print("UltraSharp.pth")
-hf_hub_download(repo_id="lokCX/4x-Ultrasharp", filename="4x-UltraSharp.pth", local_dir="models/upscale_models/")
-
 #./clip_vision/CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors
 #https://huggingface.co/h94/IP-Adapter/blob/main/models/image_encoder/model.safetensors
 print("CLIP-ViT-H-14-laion2B-s32B-b79K.safetensors")
@@ -109,7 +108,7 @@ print("ipadapter done")
 
 #./annotator/yzd-v/DWPose/yolox_l.onnx
 
-print("UltraSharp.pth")
+
 
 def get_value_at_index(obj: Union[Sequence, Mapping], index: int) -> Any:
     """Returns the value at the given index of a sequence or mapping.
@@ -277,13 +276,13 @@ if "Florence2ModelLoader" in NODE_CLASS_MAPPINGS:
 # 5. Other models that need to be loaded ahead of time; load them at the top level
 
 checkpointloadersimple_50 = checkpointloadersimple.load_checkpoint(
-    ckpt_name="SD1.5/dreamshaper_8.safetensors"
+    ckpt_name="SD1.5/Realistic_Vision_V6.0_NV_B1_fp16.safetensors"
 )
 
 
 loraloader_841 = loraloader.load_lora(
     lora_name="add_detail.safetensors",
-    strength_model=2,
+    strength_model=1,
     strength_clip=1,
     model=get_value_at_index(checkpointloadersimple_50, 0),
     clip=get_value_at_index(checkpointloadersimple_50, 1),
@@ -328,7 +327,7 @@ faceanalysismodels_506 = faceanalysismodels.load_models(
 )
 
 upscalemodelloader_835 = upscalemodelloader.load_model(
-    model_name="4x-UltraSharp.pth"
+    model_name="4x_NMKD-Superscale-SP_178000_G.pth"
 )
 
 faceanalysismodels_840 = faceanalysismodels.load_models(
@@ -596,9 +595,9 @@ def generate_image(model_image, hairstyle_template_image):
         seed=random.randint(1, 2**64),
         steps=8,
         cfg=1,
-        sampler_name="dpmpp_2m",
+        sampler_name="dpmpp_2m_sde",
         scheduler="karras",
-        denoise=1,
+        denoise=0.9,
         model=get_value_at_index(ipadapteradvanced_85, 0),
         positive=get_value_at_index(controlnetapplyadvanced_189, 0),
         negative=get_value_at_index(controlnetapplyadvanced_189, 1),
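Taken together, the download-side change in this commit is just two swapped hf_hub_download calls. A minimal standalone sketch of the new downloads, using the same repo IDs, filenames, and target directories as the diff (the models/ layout is the ComfyUI tree the app already assumes):

from huggingface_hub import hf_hub_download

# SD1.5 checkpoint: Realistic_Vision V6.0 (fp16, no baked-in VAE) replaces dreamshaper_8
hf_hub_download(
    repo_id="SG161222/Realistic_Vision_V6.0_B1_noVAE",
    filename="Realistic_Vision_V6.0_NV_B1_fp16.safetensors",
    local_dir="models/checkpoints/SD1.5/",
)

# Upscale model: 4x_NMKD-Superscale replaces 4x-UltraSharp
hf_hub_download(
    repo_id="gemasai/4x_NMKD-Superscale-SP_178000_G",
    filename="4x_NMKD-Superscale-SP_178000_G.pth",
    local_dir="models/upscale_models/",
)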
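On the sampling side, the commit halves the add_detail LoRA weight (strength_model 2 → 1), switches the sampler from dpmpp_2m to dpmpp_2m_sde, and lowers denoise from 1 to 0.9; in ComfyUI a denoise below 1 re-noises the input latent only partially, so the result stays closer to the source image. The leftover print("UltraSharp.pth") from the removed upscaler download is cleaned up as well.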