Update app.py
app.py CHANGED
@@ -189,41 +189,42 @@ entities_data = {}
 if text_input and model is not None:
     try:
         if selected_language in ["German", "English - spaCy"]:
-            # Process the text with error handling
+            # Process the text with error handling for spaCy
             doc = model(text_input)
-
-            # Fixed the syntax error: ent._.kb_qid instead of ent..kb_qid
             entities = []
             for ent in doc.ents:
                 try:
-
-
-                    url_wikidata = getattr(ent._, 'url_wikidata', None) if hasattr(ent, '_') else None
+                    kb_qid = getattr(ent._, 'kb_qid', None)
+                    url_wikidata = getattr(ent._, 'url_wikidata', None)
                     entities.append((ent.text, ent.label_, kb_qid, url_wikidata))
-                except AttributeError
-                    # If the entityfishing attributes don't exist, use basic entity info
+                except AttributeError:
                     entities.append((ent.text, ent.label_, None, None))

-            for
-                entity_string, entity_type, wikidata_id, wikidata_url = entity
+            for entity_string, entity_type, wikidata_id, wikidata_url in entities:
                 if wikidata_url:
-
-
-
-                    entity_data = get_entity_data(formatted_wikidata_url)
-
-                    if entity_data is not None:
+                    entities_map[entity_string] = {"id": wikidata_id, "link": wikidata_url}
+                    entity_data = get_entity_data(wikidata_url)
+                    if entity_data:
                         entities_data[entity_string] = entity_data
+
         else:
+            # === CORRECTED ReFinED PROCESSING LOGIC ===
             entities = model.process_text(text_input)

+            # Iterate through the entity objects directly and safely
             for entity in entities:
-
-                if
-
-
+                # Check if the entity has a wikidata_id before processing
+                if entity.wikidata_id:
+                    entity_text = entity.text
+                    entity_id = entity.wikidata_id
+                    entity_link = f"http://www.wikidata.org/entity/{entity_id}"
+
+                    # Populate your dictionaries
+                    entities_map[entity_text] = {"id": entity_id, "link": entity_link}
+                    entity_data = get_entity_data(entity_link)
                     if entity_data is not None:
-                        entities_data[
+                        entities_data[entity_text] = entity_data
+

     except Exception as e:
         st.error(f"Error processing text: {e}")
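
The new spaCy branch reads ent._.kb_qid and ent._.url_wikidata through getattr with a None default, which only makes sense if some pipeline component has registered those extension attributes. The diff does not show the pipeline setup, so the following is only a minimal sketch, assuming the German and English pipelines attach the attributes via the spacyfishing "entityfishing" component; the model name and example sentence are likewise assumptions.

import spacy

# Assumption: the app builds its pipeline roughly like this; spacyfishing's
# "entityfishing" component registers ent._.kb_qid and ent._.url_wikidata.
nlp = spacy.load("de_core_news_sm")
nlp.add_pipe("entityfishing")

doc = nlp("Die Humboldt-Universität liegt in Berlin.")
for ent in doc.ents:
    # getattr with a default keeps access safe if the extensions were never registered
    kb_qid = getattr(ent._, "kb_qid", None)
    url_wikidata = getattr(ent._, "url_wikidata", None)
    print(ent.text, ent.label_, kb_qid, url_wikidata)

The except AttributeError fallback in the diff is an extra guard for the same situation: the entity is still recorded, just with empty Wikidata fields.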
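Both branches then hand a Wikidata entity URL to get_entity_data, which is defined elsewhere in app.py and is not part of this diff. Purely to illustrate the call shape, the hypothetical helper below fetches an entity's label and description from Wikidata's public Special:EntityData JSON endpoint; the function name, return shape, and default language are assumptions, not the app's code.

from typing import Optional

import requests

def fetch_wikidata_entity(wikidata_url: str, lang: str = "en") -> Optional[dict]:
    # Hypothetical stand-in for get_entity_data(); assumes URLs look like
    # http://www.wikidata.org/entity/Q64 and end with the QID.
    qid = wikidata_url.rstrip("/").rsplit("/", 1)[-1]
    try:
        resp = requests.get(
            f"https://www.wikidata.org/wiki/Special:EntityData/{qid}.json",
            timeout=10,
        )
        resp.raise_for_status()
        entity = resp.json()["entities"][qid]
        return {
            "label": entity.get("labels", {}).get(lang, {}).get("value"),
            "description": entity.get("descriptions", {}).get(lang, {}).get("value"),
        }
    except (requests.RequestException, KeyError, ValueError):
        # Return None on any failure so callers can skip the entity,
        # matching the diff's "if entity_data is not None" guard.
        return None

A caller would then check the return value for None exactly as the diff does before writing into entities_data.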