@inproceedings{flugel2024fuzzy,
  author        = {Flügel, Simon and Glauer, Martin and Mossakowski, Till and Neuhaus, Fabian},
  title         = {A Fuzzy Loss for Ontology Classification},
  booktitle     = {NeSy 2024: The 18th International Conference on Neural-symbolic Learning and Reasoning},
  editor        = {Besold, Tarek R. and d'Avila Garcez, Artur and Jimenez-Ruiz, Ernesto and Confalonieri, Roberto and Wagner, Benedikt and Madhyastha, Pranava},
  series        = {Springer lecture notes},
  year          = {2024},
  note          = {to appear},
  abstract      = {Deep learning models are often unaware of the inherent constraints of the task they are applied to. However, many downstream tasks require logical consistency. For ontology classification tasks, such constraints include subsumption and disjointness relations between classes. In order to increase the consistency of deep learning models, we propose a fuzzy loss that combines label-based loss with terms penalising subsumption- or disjointness-violations. Our evaluation on the ChEBI ontology shows that the fuzzy loss is able to decrease the number of consistency violations by several orders of magnitude without decreasing the classification performance. In addition, we use the fuzzy loss for unsupervised learning. We show that this can further improve consistency on data from a distribution outside the scope of the supervised training.},
  keywords      = {myown},
  internal-note = {NOTE(review): series name looks informal -- presumably Lecture Notes in Computer Science; confirm and add volume/pages/doi once published, then drop the "to appear" note},
  added-at      = {2024-07-03T19:07:29.000+0200},
  timestamp     = {2024-07-03T19:07:29.000+0200},
  biburl        = {https://www.bibsonomy.org/bibtex/2b1f877997e3b2d4e68fe3ed982c5b5c7/tillmo},
  interhash     = {99ecf50069d6bfec5d90fd1dbec94631},
  intrahash     = {b1f877997e3b2d4e68fe3ed982c5b5c7},
}