[ { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/provenance", "@graph": [ { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/assertion", "http://www.w3.org/ns/prov#wasAttributedTo": [ { "@id": "https://neverblink.eu/ontologies/llm-kg/agent" } ], "http://www.w3.org/ns/prov#wasDerivedFrom": [ { "@id": "https://doi.org/10.48550/arXiv.2512.10440" } ] } ] }, { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/assertion", "@graph": [ { "@id": "https://doi.org/10.48550/arXiv.2512.10440", "http://purl.org/dc/terms/title": [ { "@value": "Enhancing Next-Generation Language Models with Knowledge Graphs: Extending Claude, Mistral IA, and GPT-4 via KG-BERT" } ], "http://purl.org/spar/cito/describes": [ { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#kgBertTailoredLlmIntegrationStrategy" } ], "http://purl.org/spar/cito/discusses": [ { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#jiEtAlKgIntegrationIntoTransformers" }, { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#kgBert" }, { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#kgGat" }, { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#xuEtAlScalingKgIntegration" } ], "@type": [ "http://www.w3.org/ns/prov#Entity" ] }, { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#kgBertTailoredLlmIntegrationStrategy", "http://purl.org/dc/terms/subject": [ { "@id": "https://neverblink.eu/ontologies/llm-kg/categories#KGEnhancedLLMInference" } ], "@type": [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#comment": [ { "@value": "This method proposes a specific architectural integration strategy for incorporating KG-BERT into diverse pre-trained LLMs (Claude, Mistral IA, GPT-4). It involves adding dedicated components such as a KG-dedicated attention layer, modularized cross-layers with lightweight aggregation, or a dedicated attention head. \nThe goal is to enhance the LLMs' factual accuracy, reasoning, and consistency in knowledge-intensive tasks like question answering and entity linking during their inference phase." } ],
"http://www.w3.org/2000/01/rdf-schema#label": [ { "@value": "KG-BERT-Tailored LLM Integration Strategy" } ], "https://neverblink.eu/ontologies/llm-kg/hasTopCategory": [ { "@id": "https://neverblink.eu/ontologies/llm-kg/top-categories#KGEnhancedLLM" } ] }, { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#jiEtAlKgIntegrationIntoTransformers", "@type": [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#label": [ { "@value": "Ji et al. KGs Integration into Transformers" } ] }, { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#kgBert", "@type": [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#label": [ { "@value": "KG-BERT" } ] }, { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#kgGat", "@type": [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#label": [ { "@value": "K-GAT" } ] }, { "@id": "https://neverblink.eu/ontologies/llm-kg/methods#xuEtAlScalingKgIntegration", "@type": [ "http://purl.org/spar/fabio/Workflow" ], "http://www.w3.org/2000/01/rdf-schema#label": [ { "@value": "Xu et al. \nScaling KG Integration" } ] } ] },
{ "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/pubinfo", "@graph": [ { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0", "http://purl.org/dc/terms/created": [ { "@value": "2026-02-26T16:07:05.720Z", "@type": "http://www.w3.org/2001/XMLSchema#dateTime" } ], "http://purl.org/dc/terms/creator": [ { "@id": "https://neverblink.eu/ontologies/llm-kg/agent" } ], "http://purl.org/nanopub/x/hasNanopubType": [ { "@id": "https://neverblink.eu/ontologies/llm-kg/PaperAssessmentResult" } ], "http://www.w3.org/2000/01/rdf-schema#label": [ { "@value": "LLM-KG assessment for paper 10.48550/arXiv.2512.10440" } ] }, { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/sig", "http://purl.org/nanopub/x/hasAlgorithm": [ { "@value": "RSA" } ], "http://purl.org/nanopub/x/hasPublicKey": [ { "@value": "MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAwNz2QK3SEifno78S7+48zUB0xpTex3mAzW73ZimHqNcdEMU5/apslrGrTHGFAt/Chocgo++r6JQp5ygY7NyJHGWdaIqnt85pjX4PbNfLAvapyUO00qZP34fY61w4eZ9UMtleWEsmZKRtQPyJ8ODl46i/rfPuZlcJGpM9Nmy5mpGWuepqIEvF4a/t7pLVeCEDFSYXT+yaiygt6ynIK5f7TtEDhZpeUf/Q74WhMPJXm4yTU/hqOX4IW+50kWHNArGGZwUaXwzyG6M3Zd6UMModryGkLqS4H/MSE3ZA1Ylnms7BfWLEXhMWlaKi6HRV4nGRDLhxVSi9LSRi3LWKLhNIIQIDAQAB" } ], "http://purl.org/nanopub/x/hasSignature": [ { "@value": "e3pi4u4gwlQlwvXc/DRsajklSe4Sm8HCaMnTcy3varzcOlRxKo7f3dN5jEMXgq5pECHKusNL1iT62VFt33KGJAkfxZTTN2kYcFX5qmKWgX8msXBnU77NbMcuZt8Ocbi5q6SaKduaeS10HaLS5DaN18HBiWeao5E38NLjq6rfionngnMZBZ4/gyz3GsZgndSVZa2Joao+kUDbnvZOb3Bj6pO9bkCdY4CVXKd769+ZaA8MEUOKmjeMdRxq4HHQaytxbYHxi5aK36DD+DMBOxTZwG7nxetQO7nWHXEgcEBAItqgQpz3CZkkK59Oio5MQ6XxRSqVq8wSCXhqRadg/e8EyQ==" } ], "http://purl.org/nanopub/x/hasSignatureTarget": [ { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0" } ], "http://purl.org/nanopub/x/signedBy": [ { "@id": "https://neverblink.eu/ontologies/llm-kg/agent" } ] } ] }, { "@id": 
"https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/Head", "@graph": [ { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0", "http://www.nanopub.org/nschema#hasAssertion": [ { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/assertion" } ], "http://www.nanopub.org/nschema#hasProvenance": [ { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/provenance" } ], "http://www.nanopub.org/nschema#hasPublicationInfo": [ { "@id": "https://w3id.org/np/RAGWWU9GP3oQaMbf6-BftQobentkRz2FwGFYFzFd8bOn0/pubinfo" } ], "@type": [ "http://www.nanopub.org/nschema#Nanopublication" ] } ] } ]