Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
|
@@ -297,7 +297,6 @@ def crea_list_task(id_session,list_param,list_models_to_gen,nb_images_by_prompt)
|
|
| 297 |
for i in range(nb_images_by_prompt):
|
| 298 |
dict_temp["task"].append({"txt_input":p[0],"neg_input":p[1],"width":p[2],"height":p[3],"steps":p[4],"cfg":p[5],"seed":p[6]})
|
| 299 |
cache_list_task[f"{id_session}"].append(dict_temp)
|
| 300 |
-
print(cache_list_task[f"{id_session}"])###########################################
|
| 301 |
|
| 302 |
|
| 303 |
def cutStrg(longStrg,start,end):
|
|
@@ -323,6 +322,50 @@ def aff_models_perso(txt_list_perso,models=models):
|
|
| 323 |
if start==-1:
|
| 324 |
t1=False
|
| 325 |
return gr.Dropdown(choices=[["",list_perso]], value=list_perso )
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 326 |
|
| 327 |
def make_me():
|
| 328 |
with gr.Column():
|
|
@@ -394,7 +437,7 @@ def make_me():
|
|
| 394 |
load_model_custom=gr.Button("Load Models Custom")
|
| 395 |
load_model_custom.click(fonc_load_info_custom,[nb_of_models_to_gen,list_model_custom,index_first_model_custom],[nb_of_models_to_gen,disp_info,list_models_to_gen])
|
| 396 |
|
| 397 |
-
list_models_to_gen.change(crea_list_task,[id_session,list_param,list_models_to_gen,nb_images_by_prompt],[])
|
| 398 |
|
| 399 |
|
| 400 |
|
|
@@ -405,17 +448,24 @@ def make_me():
|
|
| 405 |
|
| 406 |
with gr.Row():
|
| 407 |
outputs=[]
|
| 408 |
-
|
| 409 |
for i in range(nb_req_simult.value):
|
| 410 |
outputs.append(gr.Image(None,interactive=False,render=False))
|
| 411 |
-
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 412 |
|
| 413 |
-
|
| 414 |
-
|
| 415 |
-
|
| 416 |
-
|
| 417 |
-
|
| 418 |
-
|
| 419 |
|
| 420 |
|
| 421 |
|
|
|
|
| 297 |
for i in range(nb_images_by_prompt):
|
| 298 |
dict_temp["task"].append({"txt_input":p[0],"neg_input":p[1],"width":p[2],"height":p[3],"steps":p[4],"cfg":p[5],"seed":p[6]})
|
| 299 |
cache_list_task[f"{id_session}"].append(dict_temp)
|
|
|
|
| 300 |
|
| 301 |
|
| 302 |
def cutStrg(longStrg,start,end):
|
|
|
|
| 322 |
if start==-1:
|
| 323 |
t1=False
|
| 324 |
return gr.Dropdown(choices=[["",list_perso]], value=list_perso )
|
| 325 |
+
|
| 326 |
+
def add_gallery(image, model_str, gallery):
    """Return *gallery* with ``(image, model_str)`` appended.

    A ``None`` gallery is treated as a fresh empty one; a ``None`` image
    is silently ignored (used to "touch" the gallery without adding).
    """
    items = [] if gallery is None else gallery
    #with lock:
    if image is not None:
        items.append((image, model_str))
    return items
|
| 331 |
+
|
| 332 |
+
def reset_gallery(gallery):
    """Return a fresh, empty gallery.

    The *gallery* argument is kept for interface compatibility with existing
    callers but is intentionally ignored: a reset always starts from an empty
    list.  (The original round-tripped through ``add_gallery(None, "", [])``,
    which always yields ``[]`` anyway.)
    """
    return []
|
| 334 |
+
|
| 335 |
+
def load_gallery(gallery,id):
    """Rebuild the gallery from the cached images of session *id*.

    NOTE(review): ``id`` shadows the builtin and is presumably a session key
    present in ``cache_image`` whose entries are ``(image, label)`` pairs --
    confirm against callers.
    """
    refreshed = reset_gallery(gallery)
    for entry in cache_image[f"{id}"]:
        refreshed = add_gallery(entry[0], entry[1], refreshed)
    return refreshed
|
| 340 |
+
|
| 341 |
+
def fonc_start(id_session,id_module):
    """Run the next pending generation task for module *id_module* in
    session *id_session* and cache the produced image.

    Picks the task list assigned to this module (falling back to the first
    unassigned list, ``id_module == -1``), calls ``gen_fn`` on its first
    task, and on success records the image in ``cache_image`` /
    ``cache_id_image`` and prunes the finished task list.

    Returns a ``gr.Image`` on success, or ``None`` when no task matched or
    generation failed.
    """
    task_actu = {}
    model_actu = ""
    # Prefer a task list explicitly assigned to this module.
    # NOTE(review): no break, so the LAST matching entry wins -- kept as-is.
    for model_plus_tasks in cache_list_task[f"{id_session}"]:
        if model_plus_tasks["id_module"] == id_module:
            model_actu = model_plus_tasks["model"]
            task_actu = model_plus_tasks["task"][0]
    # Fall back to the first still-unassigned task list.
    if model_actu == "":
        for model_plus_tasks in cache_list_task[f"{id_session}"]:
            if model_plus_tasks["id_module"] == -1 and model_actu == "":
                model_actu = model_plus_tasks["model"]
                task_actu = model_plus_tasks["task"][0]
    if model_actu == "":
        # Nothing left for this module.  BUG FIX: was `return None,None`,
        # but every other path (and the single-output `o.change` wiring)
        # expects exactly one value.
        return None
    # BUG FIX: tasks are created with "txt_input"/"neg_input" keys (see
    # crea_list_task), not "prompt"/"nprompt" -- the old keys raised KeyError.
    result = gen_fn(model_actu, task_actu["txt_input"], task_actu["neg_input"],
                    task_actu["height"], task_actu["width"], task_actu["steps"],
                    task_actu["cfg"], task_actu["seed"])
    if result is not None:
        result = gr.Image(result)
        id_image = len(cache_image[f"{id_session}"])
        # Iterate over a copy: removing entries from the list being iterated
        # would silently skip elements.
        for model_plus_tasks in list(cache_list_task[f"{id_session}"]):
            if model_plus_tasks["id_module"] == id_module:
                model_plus_tasks["task"].remove(task_actu)
                if len(model_plus_tasks["task"]) == 0:
                    cache_list_task[f"{id_session}"].remove(model_plus_tasks)
        # NOTE(review): when the task came from the id_module == -1 fallback,
        # the removal loop above never matches it, so the task is re-run --
        # confirm whether the fallback entry's id_module should be claimed.
        task_actu["id_image"] = id_image
        cache_image[f"{id_session}"].append(result.value)
        cache_id_image[f"{id_session}"].append(task_actu)
    return result
|
| 369 |
|
| 370 |
def make_me():
|
| 371 |
with gr.Column():
|
|
|
|
| 437 |
load_model_custom=gr.Button("Load Models Custom")
|
| 438 |
load_model_custom.click(fonc_load_info_custom,[nb_of_models_to_gen,list_model_custom,index_first_model_custom],[nb_of_models_to_gen,disp_info,list_models_to_gen])
|
| 439 |
|
| 440 |
+
list_models_to_gen.change(crea_list_task,[id_session,list_param,list_models_to_gen,nb_images_by_prompt],[])
|
| 441 |
|
| 442 |
|
| 443 |
|
|
|
|
| 448 |
|
| 449 |
with gr.Row():
|
| 450 |
outputs=[]
|
| 451 |
+
id_modules=[]
|
| 452 |
for i in range(nb_req_simult.value):
|
| 453 |
outputs.append(gr.Image(None,interactive=False,render=False))
|
| 454 |
+
id_modules.append(gr.Number(i,interactive=False,render=False))
|
| 455 |
+
for o,i in zip(outputs,id_modules):
|
| 456 |
+
o.change(fonc_start,[id_session,i],[o])
|
| 457 |
+
# Gallery of generated images plus a button to (re)load it from the session cache.
gallery = gr.Gallery(label="Output", show_download_button=True, elem_classes="gallery",
                     interactive=False, show_share_button=True, container=True, format="png",
                     preview=True, object_fit="cover", columns=4, rows=4)
# BUG FIX: `("Load Gallery")` is just a parenthesized string, not a component,
# so `.click()` raised AttributeError -- it must be a gr.Button.
button_load_gallery = gr.Button("Load Gallery")
# BUG FIX: fonc_load_gallery returns the rebuilt gallery, so the event must
# route its return value to the gallery component (outputs was []).
button_load_gallery.click(fonc_load_gallery, [id_session, gallery], [gallery])
|
| 462 |
|
| 463 |
+
def fonc_load_gallery(id_session,gallery):
    """Reset *gallery* and refill it from the session's cached images.

    NOTE(review): each cached entry is passed both as the image and indexed
    with ``["model"]`` for its label, yet fonc_start appends bare image
    values (``result.value``) to ``cache_image`` -- the expected entry
    schema is inconsistent here; confirm what ``cache_image`` really holds.
    """
    gallery = reset_gallery(gallery)
    for i in range(len(cache_image[f"{id_session}"])):
        gallery=add_gallery(cache_image[f"{id_session}"][i],cache_image[f"{id_session}"][i]["model"],gallery)
    return gallery
|
| 468 |
+
|
| 469 |
|
| 470 |
|
| 471 |
|