From ae14a1d7c0c54aa24cc39738be8287c67fed4252 Mon Sep 17 00:00:00 2001
From: Cian Hughes
Date: Wed, 15 May 2024 19:01:35 +0100
Subject: [PATCH] Added a garbage semantic function

Added a semantic function where the injected "knowledge" is just random
garbage. This function was written to isolate the "knowledge" component of
the other semantic functions, i.e. to ensure it is the matrices, and not the
rest of the process, that makes the difference in training.
---
 symbolic_nn_tests/__main__.py      | 1 +
 symbolic_nn_tests/semantic_loss.py | 8 ++++++++
 2 files changed, 9 insertions(+)

diff --git a/symbolic_nn_tests/__main__.py b/symbolic_nn_tests/__main__.py
index ac6d79b..3be6a0b 100644
--- a/symbolic_nn_tests/__main__.py
+++ b/symbolic_nn_tests/__main__.py
@@ -31,6 +31,7 @@ def main():
     run_test(semantic_loss.hasline_cross_entropy, "hasline_cross_entropy")
     run_test(semantic_loss.hasloop_cross_entropy, "hasloop_cross_entropy")
     run_test(semantic_loss.multisemantic_cross_entropy, "multisemantic_cross_entropy")
+    run_test(semantic_loss.garbage_cross_entropy, "garbage_cross_entropy")
 
 
 if __name__ == "__main__":
diff --git a/symbolic_nn_tests/semantic_loss.py b/symbolic_nn_tests/semantic_loss.py
index 7634e4b..ade67ed 100644
--- a/symbolic_nn_tests/semantic_loss.py
+++ b/symbolic_nn_tests/semantic_loss.py
@@ -73,3 +73,11 @@ MULTISEMANTIC_MATRIX = SIMILARITY_MATRIX * HASLINE_MATRIX * HASLOOP_MATRIX
 MULTISEMANTIC_MATRIX /= MULTISEMANTIC_MATRIX.sum()
 
 multisemantic_cross_entropy = create_semantic_cross_entropy(MULTISEMANTIC_MATRIX)
+
+# NOTE: As a final test, let's make something similar to these but where there's no knowledge,
+# just random data. This will create a benchmark for the effects of this process without the
+# "knowledge" component.
+GARBAGE_MATRIX = torch.rand(10, 10).to("cuda")
+GARBAGE_MATRIX /= GARBAGE_MATRIX.sum()
+
+garbage_cross_entropy = create_semantic_cross_entropy(GARBAGE_MATRIX)
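
For reference, create_semantic_cross_entropy is not defined in this patch. The listing below is a minimal, self-contained sketch of the idea, assuming the factory wraps standard cross entropy and re-weights it with the supplied prior matrix; the weighting scheme, signatures, and the CPU-only setup are illustrative assumptions, not the repository's actual implementation.

import torch
import torch.nn.functional as F


def create_semantic_cross_entropy(matrix: torch.Tensor):
    # Hypothetical sketch: re-weight per-sample cross entropy using the
    # (predicted, true) entry of the normalised semantic matrix as a prior.
    def loss(logits: torch.Tensor, target: torch.Tensor) -> torch.Tensor:
        ce = F.cross_entropy(logits, target, reduction="none")
        pred = logits.argmax(dim=-1)
        prior = matrix[pred, target]  # one prior value per sample
        return (ce * (1.0 + prior)).mean()

    return loss


# The "garbage" baseline: a random prior carrying no knowledge at all.
# (Kept on CPU here so the sketch runs anywhere; the patch moves it to CUDA.)
GARBAGE_MATRIX = torch.rand(10, 10)
GARBAGE_MATRIX /= GARBAGE_MATRIX.sum()
garbage_cross_entropy = create_semantic_cross_entropy(GARBAGE_MATRIX)

# Quick smoke test on dummy data.
logits = torch.randn(4, 10)
targets = torch.randint(0, 10, (4,))
print(garbage_cross_entropy(logits, targets))

Because the garbage matrix is drawn from torch.rand and then normalised exactly like the knowledge-based matrices, any training difference it produces can be attributed to the mechanics of the weighting itself rather than to the injected knowledge.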