@@ -507,7 +507,7 @@ def _load_diffusers_model(self, mconfig):
         if vae := self._load_vae(mconfig["vae"]):
             pipeline_args.update(vae=vae)
         if not isinstance(name_or_path, Path):
-            pipeline_args.update(cache_dir=global_cache_dir("diffusers"))
+            pipeline_args.update(cache_dir=global_cache_dir("hub"))
         if using_fp16:
             pipeline_args.update(torch_dtype=torch.float16)
             fp_args_list = [{"revision": "fp16"}, {}]
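The hunk above switches the diffusers pipeline cache from a private "diffusers" folder to the shared Hugging Face hub cache. As a rough standalone illustration (not InvokeAI's own code; the model id and root path are placeholders), passing `cache_dir` to `from_pretrained` is what directs downloads into that folder:

```python
# Illustrative only: download/cache a pipeline under <models>/hub, mirroring
# the cache_dir and torch_dtype arguments assembled in the hunk above.
from pathlib import Path

import torch
from diffusers import StableDiffusionPipeline

models_root = Path("~/invokeai/models").expanduser()  # placeholder root

pipe = StableDiffusionPipeline.from_pretrained(
    "runwayml/stable-diffusion-v1-5",   # placeholder model id
    cache_dir=models_root / "hub",      # the new shared hub cache location
    torch_dtype=torch.float16,          # as in the using_fp16 branch above
)
```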
@@ -1093,27 +1093,39 @@ def migrate_models(cls):
         to the 2.3.0 "diffusers" version. This should be a one-time operation, called at
         script startup time.
         """
-        # Three transformer models to check: bert, clip and safety checker
+        # Three transformer models to check: bert, clip and safety checker, and
+        # the diffusers as well
+        models_dir = Path(Globals.root, "models")
         legacy_locations = [
             Path(
+                models_dir,
                 "CompVis/stable-diffusion-safety-checker/models--CompVis--stable-diffusion-safety-checker"
             ),
             Path("bert-base-uncased/models--bert-base-uncased"),
             Path(
                 "openai/clip-vit-large-patch14/models--openai--clip-vit-large-patch14"
             ),
         ]
-        models_dir = Path(Globals.root, "models")
+        legacy_locations.extend(list(global_cache_dir("diffusers").glob('*')))
         legacy_layout = False
         for model in legacy_locations:
-            legacy_layout = legacy_layout or Path(models_dir, model).exists()
+            legacy_layout = legacy_layout or model.exists()
         if not legacy_layout:
             return

         print(
-            "** Legacy version <= 2.2.5 model directory layout detected. Reorganizing."
+            """
+>> ALERT:
+>> The location of your previously-installed diffusers models needs to move from
+>> invokeai/models/diffusers to invokeai/models/hub due to a change introduced by
+>> diffusers version 0.14. InvokeAI will now move all models from the "diffusers" directory
+>> into "hub" and then remove the diffusers directory. This is a quick, safe, one-time
+>> operation. However if you have customized either of these directories and need to
+>> make adjustments, please press ctrl-C now to abort and relaunch InvokeAI when you are ready.
+>> Otherwise press <enter> to continue."""
         )
         print("** This is a quick one-time operation.")
+        input("continue> ")

         # transformer files get moved into the hub directory
         if cls._is_huggingface_hub_directory_present():
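For readers reconstructing what the migration does outside of InvokeAI, a rough standalone sketch follows. It assumes both directories sit under a single models root (the path is a placeholder) and mirrors the behavior this and the next hunk implement: move each legacy entry into "hub", skip symlinked destinations, and drop legacy copies that were already migrated.

```python
# Standalone sketch of the diffusers -> hub migration (placeholder paths;
# not InvokeAI's exact code, which also handles the transformer models).
from pathlib import Path
from shutil import move, rmtree

models_root = Path("~/invokeai/models").expanduser()  # placeholder root
source_dir = models_root / "diffusers"
dest_dir = models_root / "hub"
dest_dir.mkdir(parents=True, exist_ok=True)

for source in source_dir.glob("*"):
    dest = dest_dir / source.name
    if dest.is_symlink():
        print(f"** Found symlink at {dest.name}. Not migrating.")
        continue
    if dest.exists():
        # Destination already populated: discard the legacy copy.
        if source.is_dir():
            rmtree(source)
        else:
            source.unlink()
        continue
    print(f"** {source} => {dest}")
    move(str(source), str(dest))
```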
@@ -1125,33 +1137,20 @@ def migrate_models(cls):
         for model in legacy_locations:
             source = models_dir / model
             dest = hub / model.stem
+            if dest.exists() and not source.exists():
+                continue
             print(f"** {source} => {dest}")
             if source.exists():
-                if dest.exists():
-                    rmtree(source)
+                if dest.is_symlink():
+                    print(f"** Found symlink at {dest.name}. Not migrating.")
+                elif dest.exists():
+                    if source.is_dir():
+                        rmtree(source)
+                    else:
+                        source.unlink()
                 else:
                     move(source, dest)
-
-        # anything else gets moved into the diffusers directory
-        if cls._is_huggingface_hub_directory_present():
-            diffusers = global_cache_dir("diffusers")
-        else:
-            diffusers = models_dir / "diffusers"
-
-        os.makedirs(diffusers, exist_ok=True)
-        for root, dirs, _ in os.walk(models_dir, topdown=False):
-            for dir in dirs:
-                full_path = Path(root, dir)
-                if full_path.is_relative_to(hub) or full_path.is_relative_to(diffusers):
-                    continue
-                if Path(dir).match("models--*--*"):
-                    dest = diffusers / dir
-                    print(f"** {full_path} => {dest}")
-                    if dest.exists():
-                        rmtree(full_path)
-                    else:
-                        move(full_path, dest)
-
+
         # now clean up by removing any empty directories
         empty = [
             root
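The cleanup that begins at the end of this hunk is truncated in the view above; presumably it collects directories left with no children after the moves and removes them. A self-contained sketch of that kind of bottom-up cleanup (placeholder path; not necessarily the exact continuation of the original code):

```python
# Sketch: remove directories left empty after the migration, walking
# bottom-up so children are visited before their parents.
import os
from pathlib import Path

models_root = Path("~/invokeai/models").expanduser()  # placeholder root

empty = [
    root
    for root, dirs, files in os.walk(models_root, topdown=False)
    if not dirs and not files
]
for directory in empty:
    os.rmdir(directory)
```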
@@ -1249,7 +1248,7 @@ def _diffuser_sha256(
             path = name_or_path
         else:
             owner, repo = name_or_path.split("/")
-            path = Path(global_cache_dir("diffusers") / f"models--{owner}--{repo}")
+            path = Path(global_cache_dir("hub") / f"models--{owner}--{repo}")
         if not path.exists():
             return None
         hashpath = path / "checksum.sha256"
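The repo-id-to-folder mapping used here follows the Hugging Face hub cache convention (`models--<owner>--<repo>`). Below is a hedged sketch of resolving that path and caching a directory checksum next to it; the repo id and paths are placeholders and the hashing scheme shown is illustrative, not necessarily InvokeAI's:

```python
# Illustrative sketch: map a repo id to its hub cache folder and cache a
# sha256 of the model files there. Assumes the model is already cached.
import hashlib
from pathlib import Path

cache_root = Path("~/invokeai/models/hub").expanduser()   # placeholder root
owner, repo = "stabilityai/sd-vae-ft-mse".split("/")      # placeholder repo id
path = cache_root / f"models--{owner}--{repo}"

hashpath = path / "checksum.sha256"
if hashpath.exists():
    digest = hashpath.read_text().strip()
else:
    sha = hashlib.sha256()
    for f in sorted(path.rglob("*")):
        if f.is_file() and f.name != "checksum.sha256":
            sha.update(f.read_bytes())
    digest = sha.hexdigest()
    hashpath.write_text(digest)
print(digest)
```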
@@ -1310,7 +1309,7 @@ def _load_vae(self, vae_config) -> AutoencoderKL:
         using_fp16 = self.precision == "float16"

         vae_args.update(
-            cache_dir=global_cache_dir("diffusers"),
+            cache_dir=global_cache_dir("hub"),
             local_files_only=not Globals.internet_available,
         )

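Like the pipeline load earlier, the standalone VAE is now resolved from the shared hub cache. A minimal sketch of the equivalent call (placeholder model id and path; InvokeAI derives `local_files_only` from `Globals.internet_available`):

```python
# Sketch: load a standalone VAE from the shared hub cache, offline-capable.
from pathlib import Path

from diffusers import AutoencoderKL

vae = AutoencoderKL.from_pretrained(
    "stabilityai/sd-vae-ft-mse",                            # placeholder model id
    cache_dir=Path("~/invokeai/models/hub").expanduser(),   # placeholder root
    local_files_only=False,  # InvokeAI sets this from Globals.internet_available
)
```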