diff --git a/components/EntityLinker.py b/components/EntityLinker.py index 17c9195..bb7b180 100644 --- a/components/EntityLinker.py +++ b/components/EntityLinker.py @@ -3,15 +3,20 @@ from components import Db from lib.EntityLinked import EntityLinked from lib.Entity import Entity -from fuzzywuzzy import fuzz +from fuzzywuzzy import process -async def entitylinkerFunc(entities: List[Entity], db_path: str, threshold:int=80): + +async def entitylinkerFunc( + entities: List[Entity], db_path: str, threshold: int = 80 +): iri_dict = {} linked_entities = [] + for entity in entities: if entity.type == "Literal": - linked_entities.append(EntityLinked(entity, "")) + linked_entities.append(EntityLinked(entity, "")) continue + # Use the Read function to get all entities starting with the same name potential_matches = await Db.Read( db_path, "EntityIndex", searchPred=entity.name @@ -19,13 +24,25 @@ async def entitylinkerFunc(entities: List[Entity], db_path: str, threshold:int=8 if potential_matches: names_only = [match[1] for match in potential_matches] - # Sort the potential matches by length difference and select the first one - best_candidate_name = min( - names_only, - key=lambda x: abs(len(x[0]) - len(entity.name)), + + # Use fuzzy matching to find the best candidate + best_candidate_name, similarity = process.extractOne( + entity.name, names_only ) - iri = best_candidate_name.replace(" ", "_") - iri_dict[entity] = EntityLinked(entity, iri) + + # Check if the similarity is above the threshold + if similarity >= threshold: + iri = best_candidate_name.replace(" ", "_") + iri_dict[entity] = EntityLinked(entity, iri) + else: + # If no match above the threshold, add to the result and update the database + iri = entity.name.replace(" ", "_") + iri_dict[entity] = EntityLinked(entity, iri) + await Db.Insert( + db_path, + "EntityIndex", + queryInformation={"entity": entity.name}, + ) else: # If not found in the database, add to the result and update the database iri = 
entity.name.replace(" ", "_") diff --git a/docs/entitylinker.md b/docs/entitylinker.md new file mode 100644 index 0000000..cd9c0d0 --- /dev/null +++ b/docs/entitylinker.md @@ -0,0 +1,81 @@ +# Entity Linking + +Entity linking in the KNOX project is performed using a string comparison algorithm to determine the closest comparable entity. + +## How it is linked + +Linking entities to each other happens through IRIs. An entity is given a unique IRI. Whenever an entity is identified as being the same entity as another, the entity is linked to the same IRI. + +## Comparison Algorithm + +Currently, KNOX utilizes the FuzzyWuzzy library for Python to determine candidates to link an entity to. FuzzyWuzzy is built upon the Levenshtein algorithm, which works by looking at how many modifications are needed to change one string to another. The fewer modifications needed to alter the string to be equal to the other, the closer the string is. Using FuzzyWuzzy we naively determine entities to link to. It is therefore not the optimal solution, and this should be changed later on. 
+ +## Performing entity linking on an input + +```PYTHON +async def entitylinkerFunc( + entities: List[Entity], db_path: str, threshold: int = 80 +): + iri_dict = {} + linked_entities = [] + + for entity in entities: + if entity.type == "Literal": + linked_entities.append(EntityLinked(entity, "")) + continue + + # Use the Read function to get all entities starting with the same name + potential_matches = await Db.Read( + db_path, "EntityIndex", searchPred=entity.name + ) + + if potential_matches: + names_only = [match[1] for match in potential_matches] + + # Use fuzzy matching to find the best candidate + best_candidate_name, similarity = process.extractOne( + entity.name, names_only + ) + + # Check if the similarity is above the threshold + if similarity >= threshold: + iri = best_candidate_name.replace(" ", "_") + iri_dict[entity] = EntityLinked(entity, iri) + else: + # If no match above the threshold, add to the result and update the database + iri = entity.name.replace(" ", "_") + iri_dict[entity] = EntityLinked(entity, iri) + await Db.Insert( + db_path, + "EntityIndex", + queryInformation={"entity": entity.name}, + ) + else: + # If not found in the database, add to the result and update the database + iri = entity.name.replace(" ", "_") + iri_dict[entity] = EntityLinked(entity, iri) + await Db.Insert( + db_path, + "EntityIndex", + queryInformation={"entity": entity.name}, + ) + + # Convert the result to an array of EntityLinked + for linked_entity in iri_dict.values(): + linked_entities.append(linked_entity) + + return linked_entities +``` + +Entity linking is performed using the above function. The function takes in a list of entities which would be found in a new article processed in the KNOX pipeline. It then iterates through all found entities and sorts out all that are of type LITERAL. + +After this, a list of potential matches is then found from the database that all start with the string of the entity to be linked. 
+ +FuzzyWuzzy is then used to find the best candidate. + +
+ Up next: +
+ Entity Linker + +
diff --git a/docs/entityrecognition.md b/docs/entityrecognition.md index bd7c6c6..11c5354 100644 --- a/docs/entityrecognition.md +++ b/docs/entityrecognition.md @@ -1,25 +1,30 @@ # Entity Recognition -The entity recognition part is performed by using danish and english pre-trained models published by SpaCy. + +The entity recognition part is performed by using danish and english pre-trained models published by SpaCy. ## Model Links + - Danish model: [https://spacy.io/models/da#da_core_news_lg](https://spacy.io/models/da#da_core_news_lg) - English model: [https://spacy.io/models/en#en_core_web_lg](https://spacy.io/models/en#en_core_web_lg) ## Custom Danish Model + The danish model has been trained on top of the danish pre-trained SpaCy model to improve its accuracy and be able to recognize literals. See [Pypi Repository](https://github.com/Knox-AAU/PreProcessingLayer_EntityRecognitionAndLinking/blob/main/docs/pypi.md) for more information on where to find the custom model. ## Loading a SpaCy Model + ```python import en_core_web_lg -import da_core_news_lg +import da_core_news_knox_lg nlp_en = en_core_web_lg.load() -nlp_da = da_core_news_lg.load() +nlp_da = da_core_news_knox_lg.load() ``` > Full code available [here](https://github.com/Knox-AAU/PreProcessingLayer_EntityRecognitionAndLinking/blob/5fcd59bac0fbd91b2543d7d78a893f16da49f25f/components/GetSpacyData.py#L17#L18). ## Performing Entity Recognition on Input + The entity recognition is performed using either the `nlp_en` or `nlp_da` variable defined in [Loading a SpaCy Model](https://github.com/Knox-AAU/PreProcessingLayer_EntityRecognitionAndLinking/blob/main/docs/entityrecognition.md#loading-a-spacy-model). ```python @@ -37,10 +42,11 @@ def GetTokens(text: str): The return type of this function is a [Doc](https://spacy.io/api/doc) containing information such as the entity's start and end index, the entity's belonging sentence, and so on. ------------ +--- +
Up next:
Entity Linker -
\ No newline at end of file + diff --git a/tests/unit/test_EntityLinker.py b/tests/unit/test_EntityLinker.py index 64cd991..eb38e52 100644 --- a/tests/unit/test_EntityLinker.py +++ b/tests/unit/test_EntityLinker.py @@ -17,7 +17,7 @@ async def mock_read(db_path, table, searchPred): return [("1", "Entity1"), ("2", "Entity2")] return [] - async def mock_insert(db_path, table, entity_name): + async def mock_insert(db_path, table, queryInformation): return None # Patch the Db.Read and Db.Insert functions with the mock functions @@ -28,7 +28,9 @@ async def mock_insert(db_path, table, entity_name): # Create some Entity instances entMentions = [ Entity("Entity1", 0, 6, "Sentence1", 0, 9, "PERSON", "Entity"), - Entity("newEntity3", 0, 6, "Sentence2", 0, 9, "PERSON", "Entity"), + Entity( + "newEntity3", 0, 6, "Sentence2", 0, 9, "PERSON", "Entity" + ), ] # Call the entitylinkerFunc @@ -40,7 +42,7 @@ async def mock_insert(db_path, table, entity_name): assert entLinks[0].iri == "Entity1" # Ensure the second mention creates a new entity - assert entLinks[1].iri == "Entity1" + assert entLinks[1].iri == "newEntity3" # Define a test case with a mock database and Entity instances @@ -52,7 +54,7 @@ async def mock_read(db_path, table, searchPred): return [("1", "Entity1")] return [] - async def mock_insert(db_path, table, entity_name): + async def mock_insert(db_path, table, queryInformation): return None # Patch the Db.Read and Db.Insert functions with the mock functions @@ -84,7 +86,7 @@ async def mock_read(db_path, table, searchPred): return [("1", "Entity 1")] return [] - async def mock_insert(db_path, table, entity_name): + async def mock_insert(db_path, table, queryInformation): return None # Patch the Db.Read and Db.Insert functions with the mock functions @@ -167,7 +169,9 @@ async def mock_insert(db_path, table, queryInformation): } # Call the entitylinkerFunc - entLinks = await entitylinkerFunc(TestingDataset["test"], db_path="DB_PATH") + entLinks = await entitylinkerFunc( + 
TestingDataset["test"], db_path="DB_PATH" + ) for index, link in enumerate(entLinks): assert link.name == TestingDataset["GoldStandardNames"][index] assert link.iri == TestingDataset["GoldStandardIRIs"][index]