Faysal4200 committed
Commit 15db47d · verified · 1 Parent(s): 75ac152

Lets hope final code update app.py

Files changed (1)
app.py +23 -1
app.py CHANGED
@@ -451,7 +451,29 @@ if st.button("Generate VLM Explanation"):
     if not st.session_state.get("vlm_response", False):
         try:
             with st.spinner("Loading VLM Model. First time load will take time. Please be patient..."):
-
+                try:
+                    vlm_info = load_vlm_model()
+                except Exception as e:
+                    st.error("VLM load failed. See logs above.")
+                    vlm_info = None
+
+                if vlm_info is not None:
+                    try:
+                        img_for_vlm = overlay_pil.convert("RGB").resize((224, 224), Image.BILINEAR)
+                    except Exception:
+                        st.warning("Overlay image not available for VLM input; using original image.")
+                        img_for_vlm = pil_img.convert("RGB").resize((224, 224), Image.BILINEAR)
+
+                    with st.spinner("Generating Explanation...."):
+                        response = generate_vlm_response(
+                            vlm_info["processor"],
+                            vlm_info["model"],
+                            vlm_info["device"],
+                            img_for_vlm,
+                            pred_label,
+                            max_new_tokens=128
+                        )
+
                 response = "Debugging VLM response."
                 if response is None:
                     st.error("VLM did not return a response.")
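
The hunk calls two helpers, load_vlm_model() and generate_vlm_response(...), whose definitions are not part of this diff. Below is a minimal sketch of what such helpers could look like, assuming a BLIP-style captioning model from Hugging Face transformers; the model name, prompt wording, and @st.cache_resource caching are assumptions, and app.py's actual implementations may differ. Only the return shape ({"processor", "model", "device"}) and the max_new_tokens parameter are taken from the call sites in the added code.

# Hypothetical sketch only: these helpers are not shown in the diff.
import torch
import streamlit as st
from transformers import BlipProcessor, BlipForConditionalGeneration

@st.cache_resource  # load once and reuse across reruns
def load_vlm_model():
    device = "cuda" if torch.cuda.is_available() else "cpu"
    # Model choice is an assumption; swap in whichever VLM app.py actually uses.
    processor = BlipProcessor.from_pretrained("Salesforce/blip-image-captioning-base")
    model = BlipForConditionalGeneration.from_pretrained(
        "Salesforce/blip-image-captioning-base"
    ).to(device)
    # Dict keys match how the new hunk indexes vlm_info.
    return {"processor": processor, "model": model, "device": device}

def generate_vlm_response(processor, model, device, image, pred_label, max_new_tokens=128):
    # Condition the caption on the classifier's predicted label (prompt wording is an assumption).
    prompt = f"an image showing {pred_label},"
    inputs = processor(images=image, text=prompt, return_tensors="pt").to(device)
    with torch.no_grad():
        output_ids = model.generate(**inputs, max_new_tokens=max_new_tokens)
    return processor.decode(output_ids[0], skip_special_tokens=True)

Caching the load with st.cache_resource keeps the model in memory across Streamlit reruns, which is consistent with the "First time load will take time" spinner message in the diff.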