diff --git a/Makefile b/Makefile index 80141b8..f6d3af9 100644 --- a/Makefile +++ b/Makefile @@ -53,7 +53,15 @@ bazel: @bash ./src/scripts/bazel.sh $(filter-out $@, $(MAKECMDGOALS)) test-all: build-image - $(MAKE) bazel test //... + @$(MAKE) bazel test //... + +lint-all: + @$(MAKE) bazel lint \ + "//... --fix --report --diff" \ + | grep -vE "(Lint results|All checks passed|^[[:blank:]]*$$)" + +format-all: + @$(MAKE) bazel run format # Catch-all pattern to prevent make from complaining about unknown targets %: diff --git a/src/.aspect/cli/config.yaml b/src/.aspect/cli/config.yaml new file mode 100644 index 0000000..642f7cc --- /dev/null +++ b/src/.aspect/cli/config.yaml @@ -0,0 +1,3 @@ +lint: + aspects: + - //tools/lint:linters.bzl%ruff diff --git a/src/.bazeliskrc b/src/.bazeliskrc new file mode 100644 index 0000000..9b9ec9c --- /dev/null +++ b/src/.bazeliskrc @@ -0,0 +1,2 @@ +BAZELISK_BASE_URL=https://github.com/aspect-build/aspect-cli/releases/download +USE_BAZEL_VERSION=aspect/2024.49.18 diff --git a/src/.bazelrc b/src/.bazelrc index 965b7c1..6dc6b0d 100644 --- a/src/.bazelrc +++ b/src/.bazelrc @@ -20,10 +20,16 @@ common --enable_bzlmod # localy build --spawn_strategy=local +# Define versions +build --define=ATOMDB_VERSION=0.8.11 +build --define=DAS_VERSION=0.9.17 +build --define=DAS_NODE_VERSION=0.0.1 + # Enable debugging symbols for development build:debug --compilation_mode=dbg build:debug --strip=never + # Optimeze for speed in production build:release --compilation_mode=opt build:release --strip=always @@ -40,6 +46,7 @@ test --flaky_test_attempts=1 test --cache_test_results=auto test --test_env=BAZEL_TEST_ENV=1 test --test_tag_filters=-skip +test --notest_keep_going #################################### RUN ###################################### diff --git a/src/.ruff.toml b/src/.ruff.toml new file mode 100644 index 0000000..ee63492 --- /dev/null +++ b/src/.ruff.toml @@ -0,0 +1,36 @@ +line-length = 100 +indent-width = 4 + +target-version = "py310" + +[lint] 
+ +select = ["E4", "E7", "E9", "F", "B"] +ignore = ["E501", "B905"] +fixable = ["ALL"] +unfixable = ["B"] + + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + + +# Ignore `E402` (import violations) in all `__init__.py` files. +[lint.per-file-ignores] +"__init__.py" = ["E402", "F403"] +"**/{tests,docs,tools}/*" = ["B006", "B011", "B017", "B018"] +# Auto generated code +"**/grpc/*.py" = ["ALL"] + +[format] +quote-style = "double" + +indent-style = "space" + +skip-magic-trailing-comma = false + +line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings +docstring-code-format = true +docstring-code-line-length = "dynamic" diff --git a/src/BUILD b/src/BUILD index d9254e0..cd8de03 100644 --- a/src/BUILD +++ b/src/BUILD @@ -1,5 +1,22 @@ package(default_visibility = ["//visibility:public"]) +exports_files( + [ + ".ruff.toml", + ], + visibility = ["//visibility:public"], +) + +alias( + name = "format.check", + actual = "//tools/format:format.check" +) + +alias( + name = "format", + actual = "//tools/format" +) + cc_binary( name = "attention_broker_service", srcs = [], diff --git a/src/MODULE.bazel b/src/MODULE.bazel index 460f001..16ff9f1 100644 --- a/src/MODULE.bazel +++ b/src/MODULE.bazel @@ -16,6 +16,8 @@ Additionally, this file configures: module(name = "das") +# Bazel to run linters and formatters +bazel_dep(name = "aspect_rules_lint", version = "1.2.0") # C++ diff --git a/src/MODULE.bazel.lock b/src/MODULE.bazel.lock index 14eb628..e3f3c94 100644 --- a/src/MODULE.bazel.lock +++ b/src/MODULE.bazel.lock @@ -19,6 +19,18 @@ "https://bcr.bazel.build/modules/apple_support/1.15.1/MODULE.bazel": "a0556fefca0b1bb2de8567b8827518f94db6a6e7e7d632b4c48dc5f865bc7c85", "https://bcr.bazel.build/modules/apple_support/1.17.1/MODULE.bazel": "655c922ab1209978a94ef6ca7d9d43e940cd97d9c172fb55f94d91ac53f8610b", "https://bcr.bazel.build/modules/apple_support/1.17.1/source.json": 
"6b2b8c74d14e8d485528a938e44bdb72a5ba17632b9e14ef6e68a5ee96c8347f", + "https://bcr.bazel.build/modules/aspect_bazel_lib/1.31.2/MODULE.bazel": "7bee702b4862612f29333590f4b658a5832d433d6f8e4395f090e8f4e85d442f", + "https://bcr.bazel.build/modules/aspect_bazel_lib/1.38.0/MODULE.bazel": "6307fec451ba9962c1c969eb516ebfe1e46528f7fa92e1c9ac8646bef4cdaa3f", + "https://bcr.bazel.build/modules/aspect_bazel_lib/1.42.2/MODULE.bazel": "2e0d8ab25c57a14f56ace1c8e881b69050417ff91b2fb7718dc00d201f3c3478", + "https://bcr.bazel.build/modules/aspect_bazel_lib/2.7.7/MODULE.bazel": "491f8681205e31bb57892d67442ce448cda4f472a8e6b3dc062865e29a64f89c", + "https://bcr.bazel.build/modules/aspect_bazel_lib/2.7.7/source.json": "87f12b449cd1d27d3e83840a59a6966d557e7c3c5f19e7b2e0361da5edc6b397", + "https://bcr.bazel.build/modules/aspect_rules_js/1.33.1/MODULE.bazel": "db3e7f16e471cf6827059d03af7c21859e7a0d2bc65429a3a11f005d46fc501b", + "https://bcr.bazel.build/modules/aspect_rules_js/1.40.0/MODULE.bazel": "01a1014e95e6816b68ecee2584ae929c7d6a1b72e4333ab1ff2d2c6c30babdf1", + "https://bcr.bazel.build/modules/aspect_rules_js/1.40.0/source.json": "b6fd491369e9ef888fdef64b839023a2360caaea8eb370d2cfbfdd2a96721311", + "https://bcr.bazel.build/modules/aspect_rules_lint/0.12.0/MODULE.bazel": "e767c5dbfeb254ec03275a7701b5cfde2c4d2873676804bc7cb27ddff3728fed", + "https://bcr.bazel.build/modules/aspect_rules_lint/1.2.0/MODULE.bazel": "4a7f65caa2578065cb2264a45ef7a12e1f153a3b6fa2a0c80c5ba56e270d7252", + "https://bcr.bazel.build/modules/aspect_rules_lint/1.2.0/source.json": "8031f086c4ee4e7e6d55bd5b33d60822efc72c677cc908047cbc568db95985e8", + "https://bcr.bazel.build/modules/bazel_features/0.1.0/MODULE.bazel": "47011d645b0f949f42ee67f2e8775188a9cf4a0a1528aa2fa4952f2fd00906fd", "https://bcr.bazel.build/modules/bazel_features/1.1.0/MODULE.bazel": "cfd42ff3b815a5f39554d97182657f8c4b9719568eb7fded2b9135f084bf760b", "https://bcr.bazel.build/modules/bazel_features/1.1.1/MODULE.bazel": 
"27b8c79ef57efe08efccbd9dd6ef70d61b4798320b8d3c134fd571f78963dbcd", "https://bcr.bazel.build/modules/bazel_features/1.10.0/MODULE.bazel": "f75e8807570484a99be90abcd52b5e1f390362c258bcb73106f4544957a48101", @@ -27,6 +39,7 @@ "https://bcr.bazel.build/modules/bazel_features/1.17.0/MODULE.bazel": "039de32d21b816b47bd42c778e0454217e9c9caac4a3cf8e15c7231ee3ddee4d", "https://bcr.bazel.build/modules/bazel_features/1.18.0/MODULE.bazel": "1be0ae2557ab3a72a57aeb31b29be347bcdc5d2b1eb1e70f39e3851a7e97041a", "https://bcr.bazel.build/modules/bazel_features/1.19.0/MODULE.bazel": "59adcdf28230d220f0067b1f435b8537dd033bfff8db21335ef9217919c7fb58", + "https://bcr.bazel.build/modules/bazel_features/1.2.0/MODULE.bazel": "122b2b606622afbaa498913d54f52d9bcd2d19a5edd1bd6d6c5aa17441c4d5f9", "https://bcr.bazel.build/modules/bazel_features/1.21.0/MODULE.bazel": "675642261665d8eea09989aa3b8afb5c37627f1be178382c320d1b46afba5e3b", "https://bcr.bazel.build/modules/bazel_features/1.21.0/source.json": "3e8379efaaef53ce35b7b8ba419df829315a880cb0a030e5bb45c96d6d5ecb5f", "https://bcr.bazel.build/modules/bazel_features/1.3.0/MODULE.bazel": "cdcafe83ec318cda34e02948e81d790aab8df7a929cec6f6969f13a489ccecd9", @@ -51,6 +64,8 @@ "https://bcr.bazel.build/modules/boringssl/0.20240913.0/MODULE.bazel": "fcaa7503a5213290831a91ed1eb538551cf11ac0bc3a6ad92d0fef92c5bd25fb", "https://bcr.bazel.build/modules/boringssl/0.20241024.0/MODULE.bazel": "b540cff73d948cb79cb0bc108d7cef391d2098a25adabfda5043e4ef548dbc87", "https://bcr.bazel.build/modules/boringssl/0.20241024.0/source.json": "d843092e682b84188c043ac742965d7f96e04c846c7e338187e03238674909a9", + "https://bcr.bazel.build/modules/buildifier_prebuilt/6.1.2/MODULE.bazel": "2ef4962c8b0b6d8d21928a89190755619254459bc67f870dc0ccb9ba9952d444", + "https://bcr.bazel.build/modules/buildifier_prebuilt/6.1.2/source.json": "19fb45ed3f0d55cbff94e402c39512940833ae3a68f9cbfd9518a1926b609c7c", "https://bcr.bazel.build/modules/buildozer/7.1.2/MODULE.bazel": 
"2e8dd40ede9c454042645fd8d8d0cd1527966aa5c919de86661e62953cd73d84", "https://bcr.bazel.build/modules/buildozer/7.1.2/source.json": "c9028a501d2db85793a6996205c8de120944f50a0d570438fcae0457a5f9d1f8", "https://bcr.bazel.build/modules/c-ares/1.15.0/MODULE.bazel": "ba0a78360fdc83f02f437a9e7df0532ad1fbaa59b722f6e715c11effebaa0166", @@ -170,6 +185,8 @@ "https://bcr.bazel.build/modules/rules_apple/3.17.1/MODULE.bazel": "1f8f5b16da1861db86ad3f474354c292b664c92008950407e2391fa36d9aab32", "https://bcr.bazel.build/modules/rules_apple/3.17.1/source.json": "795b05da73beb218d6125277a300c295b8a546ab41a0d80beafadb733cb14310", "https://bcr.bazel.build/modules/rules_apple/3.5.1/MODULE.bazel": "3d1bbf65ad3692003d36d8a29eff54d4e5c1c5f4bfb60f79e28646a924d9101c", + "https://bcr.bazel.build/modules/rules_buf/0.1.1/MODULE.bazel": "6189aec18a4f7caff599ad41b851ab7645d4f1e114aa6431acf9b0666eb92162", + "https://bcr.bazel.build/modules/rules_buf/0.1.1/source.json": "021363d254f7438f3f10725355969c974bb2c67e0c28667782ade31a9cdb747f", "https://bcr.bazel.build/modules/rules_cc/0.0.1/MODULE.bazel": "cb2aa0747f84c6c3a78dad4e2049c154f08ab9d166b1273835a8174940365647", "https://bcr.bazel.build/modules/rules_cc/0.0.10/MODULE.bazel": "ec1705118f7eaedd6e118508d3d26deba2a4e76476ada7e0e3965211be012002", "https://bcr.bazel.build/modules/rules_cc/0.0.13/MODULE.bazel": "0e8529ed7b323dad0775ff924d2ae5af7640b23553dfcd4d34344c7e7a867191", @@ -233,12 +250,19 @@ "https://bcr.bazel.build/modules/rules_license/0.0.7/MODULE.bazel": "088fbeb0b6a419005b89cf93fe62d9517c0a2b8bb56af3244af65ecfe37e7d5d", "https://bcr.bazel.build/modules/rules_license/1.0.0/MODULE.bazel": "a7fda60eefdf3d8c827262ba499957e4df06f659330bbe6cdbdb975b768bb65c", "https://bcr.bazel.build/modules/rules_license/1.0.0/source.json": "a52c89e54cc311196e478f8382df91c15f7a2bfdf4c6cd0e2675cc2ff0b56efb", + "https://bcr.bazel.build/modules/rules_multirun/0.9.0/MODULE.bazel": "32d628ef586b5b23f67e55886b7bc38913ea4160420d66ae90521dda2ff37df0", + 
"https://bcr.bazel.build/modules/rules_multirun/0.9.0/source.json": "e882ba77962fa6c5fe68619e5c7d0374ec9a219fb8d03c42eadaf6d0243771bd", + "https://bcr.bazel.build/modules/rules_multitool/0.4.0/MODULE.bazel": "15517987d5c00c9e7faab41fbe22ee67a350b6eabcc1e08baded5c6d9025897f", + "https://bcr.bazel.build/modules/rules_multitool/0.4.0/source.json": "d73b450b7c6d9683e400d6cebc463fbc2b870cc5d8e2e75080d6278805aaab08", + "https://bcr.bazel.build/modules/rules_nodejs/5.8.2/MODULE.bazel": "6bc03c8f37f69401b888023bf511cb6ee4781433b0cb56236b2e55a21e3a026a", + "https://bcr.bazel.build/modules/rules_nodejs/5.8.2/source.json": "6e82cf5753d835ea18308200bc79b9c2e782efe2e2a4edc004a9162ca93382ca", "https://bcr.bazel.build/modules/rules_pkg/0.7.0/MODULE.bazel": "df99f03fc7934a4737122518bb87e667e62d780b610910f0447665a7e2be62dc", "https://bcr.bazel.build/modules/rules_pkg/1.0.1/MODULE.bazel": "5b1df97dbc29623bccdf2b0dcd0f5cb08e2f2c9050aab1092fd39a41e82686ff", "https://bcr.bazel.build/modules/rules_pkg/1.0.1/source.json": "bd82e5d7b9ce2d31e380dd9f50c111d678c3bdaca190cb76b0e1c71b05e1ba8a", "https://bcr.bazel.build/modules/rules_proto/4.0.0/MODULE.bazel": "a7a7b6ce9bee418c1a760b3d84f83a299ad6952f9903c67f19e4edd964894e06", "https://bcr.bazel.build/modules/rules_proto/5.3.0-21.7/MODULE.bazel": "e8dff86b0971688790ae75528fe1813f71809b5afd57facb44dad9e8eca631b7", "https://bcr.bazel.build/modules/rules_proto/6.0.0-rc1/MODULE.bazel": "1e5b502e2e1a9e825eef74476a5a1ee524a92297085015a052510b09a1a09483", + "https://bcr.bazel.build/modules/rules_proto/6.0.0-rc2/MODULE.bazel": "e17f94f8a347e2c808517b65d74988839d2d62daceb50073e44060193b785eb1", "https://bcr.bazel.build/modules/rules_proto/6.0.0/MODULE.bazel": "b531d7f09f58dce456cd61b4579ce8c86b38544da75184eadaf0a7cb7966453f", "https://bcr.bazel.build/modules/rules_proto/6.0.2/MODULE.bazel": "ce916b775a62b90b61888052a416ccdda405212b6aaeb39522f7dc53431a5e73", "https://bcr.bazel.build/modules/rules_proto/7.0.2/MODULE.bazel": 
"bf81793bd6d2ad89a37a40693e56c61b0ee30f7a7fdbaf3eabbf5f39de47dea2", @@ -249,6 +273,7 @@ "https://bcr.bazel.build/modules/rules_python/0.22.1/MODULE.bazel": "26114f0c0b5e93018c0c066d6673f1a2c3737c7e90af95eff30cfee38d0bbac7", "https://bcr.bazel.build/modules/rules_python/0.23.1/MODULE.bazel": "49ffccf0511cb8414de28321f5fcf2a31312b47c40cc21577144b7447f2bf300", "https://bcr.bazel.build/modules/rules_python/0.25.0/MODULE.bazel": "72f1506841c920a1afec76975b35312410eea3aa7b63267436bfb1dd91d2d382", + "https://bcr.bazel.build/modules/rules_python/0.27.1/MODULE.bazel": "65dc875cc1a06c30d5bbdba7ab021fd9e551a6579e408a3943a61303e2228a53", "https://bcr.bazel.build/modules/rules_python/0.28.0/MODULE.bazel": "cba2573d870babc976664a912539b320cbaa7114cd3e8f053c720171cde331ed", "https://bcr.bazel.build/modules/rules_python/0.29.0/MODULE.bazel": "2ac8cd70524b4b9ec49a0b8284c79e4cd86199296f82f6e0d5da3f783d660c82", "https://bcr.bazel.build/modules/rules_python/0.31.0/MODULE.bazel": "93a43dc47ee570e6ec9f5779b2e64c1476a6ce921c48cc9a1678a91dd5f8fd58", @@ -266,8 +291,10 @@ "https://bcr.bazel.build/modules/rules_swift/1.18.0/MODULE.bazel": "a6aba73625d0dc64c7b4a1e831549b6e375fbddb9d2dde9d80c9de6ec45b24c9", "https://bcr.bazel.build/modules/rules_swift/2.1.1/MODULE.bazel": "494900a80f944fc7aa61500c2073d9729dff0b764f0e89b824eb746959bc1046", "https://bcr.bazel.build/modules/rules_swift/2.1.1/source.json": "40fc69dfaac64deddbb75bd99cdac55f4427d9ca0afbe408576a65428427a186", + "https://bcr.bazel.build/modules/stardoc/0.5.0/MODULE.bazel": "f9f1f46ba8d9c3362648eea571c6f9100680efc44913618811b58cc9c02cd678", "https://bcr.bazel.build/modules/stardoc/0.5.1/MODULE.bazel": "1a05d92974d0c122f5ccf09291442580317cdd859f07a8655f1db9a60374f9f8", "https://bcr.bazel.build/modules/stardoc/0.5.3/MODULE.bazel": "c7f6948dae6999bf0db32c1858ae345f112cacf98f174c7a8bb707e41b974f1c", + "https://bcr.bazel.build/modules/stardoc/0.5.4/MODULE.bazel": "6569966df04610b8520957cb8e97cf2e9faac2c0309657c537ab51c16c18a2a4", 
"https://bcr.bazel.build/modules/stardoc/0.5.6/MODULE.bazel": "c43dabc564990eeab55e25ed61c07a1aadafe9ece96a4efabb3f8bf9063b71ef", "https://bcr.bazel.build/modules/stardoc/0.6.2/MODULE.bazel": "7060193196395f5dd668eda046ccbeacebfd98efc77fed418dbe2b82ffaa39fd", "https://bcr.bazel.build/modules/stardoc/0.7.0/MODULE.bazel": "05e3d6d30c099b6770e97da986c53bd31844d7f13d41412480ea265ac9e8079c", @@ -276,6 +303,8 @@ "https://bcr.bazel.build/modules/stardoc/0.7.2/source.json": "58b029e5e901d6802967754adf0a9056747e8176f017cfe3607c0851f4d42216", "https://bcr.bazel.build/modules/swift_argument_parser/1.3.1.1/MODULE.bazel": "5e463fbfba7b1701d957555ed45097d7f984211330106ccd1352c6e0af0dcf91", "https://bcr.bazel.build/modules/swift_argument_parser/1.3.1.1/source.json": "32bd87e5f4d7acc57c5b2ff7c325ae3061d5e242c0c4c214ae87e0f1c13e54cb", + "https://bcr.bazel.build/modules/toolchains_protoc/0.2.1/MODULE.bazel": "2f08433ff5e659069b3a1abfee2377d68f510f2de1da50678ed992c455b4ff91", + "https://bcr.bazel.build/modules/toolchains_protoc/0.2.1/source.json": "4ee6b007b62e1b9e493b00ccc60e61a258633f304b74813b6e7f7234927be94c", "https://bcr.bazel.build/modules/upb/0.0.0-20211020-160625a/MODULE.bazel": "6cced416be2dc5b9c05efd5b997049ba795e5e4e6fafbe1624f4587767638928", "https://bcr.bazel.build/modules/upb/0.0.0-20220923-a547704/MODULE.bazel": "7298990c00040a0e2f121f6c32544bab27d4452f80d9ce51349b1a28f3005c43", "https://bcr.bazel.build/modules/upb/0.0.0-20230516-61a97ef/MODULE.bazel": "c0df5e35ad55e264160417fd0875932ee3c9dda63d9fccace35ac62f45e1b6f9", @@ -324,6 +353,554 @@ ] } }, + "@@aspect_bazel_lib+//lib:extensions.bzl%toolchains": { + "general": { + "bzlTransitiveDigest": "K4T+rws7BXvHaN8LlVgJVDiNUotXshrjIU3oeUAhhqw=", + "usagesDigest": "W4Mi+ZzXTwLz6fnBG/tlLCL9H28ui8uzVk0kEoey/Uw=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "copy_directory_darwin_amd64": { + "repoRuleId": 
"@@aspect_bazel_lib+//lib/private:copy_directory_toolchain.bzl%copy_directory_platform_repo", + "attributes": { + "platform": "darwin_amd64" + } + }, + "copy_directory_darwin_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_directory_toolchain.bzl%copy_directory_platform_repo", + "attributes": { + "platform": "darwin_arm64" + } + }, + "copy_directory_freebsd_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_directory_toolchain.bzl%copy_directory_platform_repo", + "attributes": { + "platform": "freebsd_amd64" + } + }, + "copy_directory_linux_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_directory_toolchain.bzl%copy_directory_platform_repo", + "attributes": { + "platform": "linux_amd64" + } + }, + "copy_directory_linux_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_directory_toolchain.bzl%copy_directory_platform_repo", + "attributes": { + "platform": "linux_arm64" + } + }, + "copy_directory_windows_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_directory_toolchain.bzl%copy_directory_platform_repo", + "attributes": { + "platform": "windows_amd64" + } + }, + "copy_directory_toolchains": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_directory_toolchain.bzl%copy_directory_toolchains_repo", + "attributes": { + "user_repository_name": "copy_directory" + } + }, + "copy_to_directory_darwin_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_to_directory_toolchain.bzl%copy_to_directory_platform_repo", + "attributes": { + "platform": "darwin_amd64" + } + }, + "copy_to_directory_darwin_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_to_directory_toolchain.bzl%copy_to_directory_platform_repo", + "attributes": { + "platform": "darwin_arm64" + } + }, + "copy_to_directory_freebsd_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_to_directory_toolchain.bzl%copy_to_directory_platform_repo", + "attributes": { + "platform": "freebsd_amd64" + } + 
}, + "copy_to_directory_linux_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_to_directory_toolchain.bzl%copy_to_directory_platform_repo", + "attributes": { + "platform": "linux_amd64" + } + }, + "copy_to_directory_linux_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_to_directory_toolchain.bzl%copy_to_directory_platform_repo", + "attributes": { + "platform": "linux_arm64" + } + }, + "copy_to_directory_windows_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_to_directory_toolchain.bzl%copy_to_directory_platform_repo", + "attributes": { + "platform": "windows_amd64" + } + }, + "copy_to_directory_toolchains": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:copy_to_directory_toolchain.bzl%copy_to_directory_toolchains_repo", + "attributes": { + "user_repository_name": "copy_to_directory" + } + }, + "jq_darwin_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:jq_toolchain.bzl%jq_platform_repo", + "attributes": { + "platform": "darwin_amd64", + "version": "1.7" + } + }, + "jq_darwin_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:jq_toolchain.bzl%jq_platform_repo", + "attributes": { + "platform": "darwin_arm64", + "version": "1.7" + } + }, + "jq_linux_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:jq_toolchain.bzl%jq_platform_repo", + "attributes": { + "platform": "linux_amd64", + "version": "1.7" + } + }, + "jq_linux_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:jq_toolchain.bzl%jq_platform_repo", + "attributes": { + "platform": "linux_arm64", + "version": "1.7" + } + }, + "jq_windows_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:jq_toolchain.bzl%jq_platform_repo", + "attributes": { + "platform": "windows_amd64", + "version": "1.7" + } + }, + "jq": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:jq_toolchain.bzl%jq_host_alias_repo", + "attributes": {} + }, + "jq_toolchains": { + "repoRuleId": 
"@@aspect_bazel_lib+//lib/private:jq_toolchain.bzl%jq_toolchains_repo", + "attributes": { + "user_repository_name": "jq" + } + }, + "yq_darwin_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_platform_repo", + "attributes": { + "platform": "darwin_amd64", + "version": "4.25.2" + } + }, + "yq_darwin_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_platform_repo", + "attributes": { + "platform": "darwin_arm64", + "version": "4.25.2" + } + }, + "yq_linux_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_platform_repo", + "attributes": { + "platform": "linux_amd64", + "version": "4.25.2" + } + }, + "yq_linux_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_platform_repo", + "attributes": { + "platform": "linux_arm64", + "version": "4.25.2" + } + }, + "yq_linux_s390x": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_platform_repo", + "attributes": { + "platform": "linux_s390x", + "version": "4.25.2" + } + }, + "yq_linux_ppc64le": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_platform_repo", + "attributes": { + "platform": "linux_ppc64le", + "version": "4.25.2" + } + }, + "yq_windows_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_platform_repo", + "attributes": { + "platform": "windows_amd64", + "version": "4.25.2" + } + }, + "yq": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_host_alias_repo", + "attributes": {} + }, + "yq_toolchains": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:yq_toolchain.bzl%yq_toolchains_repo", + "attributes": { + "user_repository_name": "yq" + } + }, + "coreutils_darwin_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:coreutils_toolchain.bzl%coreutils_platform_repo", + "attributes": { + "platform": "darwin_amd64", + "version": "0.0.26" + } + }, + "coreutils_darwin_arm64": { + "repoRuleId": 
"@@aspect_bazel_lib+//lib/private:coreutils_toolchain.bzl%coreutils_platform_repo", + "attributes": { + "platform": "darwin_arm64", + "version": "0.0.26" + } + }, + "coreutils_linux_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:coreutils_toolchain.bzl%coreutils_platform_repo", + "attributes": { + "platform": "linux_amd64", + "version": "0.0.26" + } + }, + "coreutils_linux_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:coreutils_toolchain.bzl%coreutils_platform_repo", + "attributes": { + "platform": "linux_arm64", + "version": "0.0.26" + } + }, + "coreutils_windows_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:coreutils_toolchain.bzl%coreutils_platform_repo", + "attributes": { + "platform": "windows_amd64", + "version": "0.0.26" + } + }, + "coreutils_toolchains": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:coreutils_toolchain.bzl%coreutils_toolchains_repo", + "attributes": { + "user_repository_name": "coreutils" + } + }, + "bsd_tar_darwin_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:tar_toolchain.bzl%bsdtar_binary_repo", + "attributes": { + "platform": "darwin_amd64" + } + }, + "bsd_tar_darwin_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:tar_toolchain.bzl%bsdtar_binary_repo", + "attributes": { + "platform": "darwin_arm64" + } + }, + "bsd_tar_linux_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:tar_toolchain.bzl%bsdtar_binary_repo", + "attributes": { + "platform": "linux_amd64" + } + }, + "bsd_tar_linux_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:tar_toolchain.bzl%bsdtar_binary_repo", + "attributes": { + "platform": "linux_arm64" + } + }, + "bsd_tar_windows_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:tar_toolchain.bzl%bsdtar_binary_repo", + "attributes": { + "platform": "windows_amd64" + } + }, + "bsd_tar_toolchains": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:tar_toolchain.bzl%tar_toolchains_repo", + "attributes": { + 
"user_repository_name": "bsd_tar" + } + }, + "zstd_darwin_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:zstd_toolchain.bzl%zstd_binary_repo", + "attributes": { + "platform": "darwin_amd64" + } + }, + "zstd_darwin_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:zstd_toolchain.bzl%zstd_binary_repo", + "attributes": { + "platform": "darwin_arm64" + } + }, + "zstd_linux_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:zstd_toolchain.bzl%zstd_binary_repo", + "attributes": { + "platform": "linux_amd64" + } + }, + "zstd_linux_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:zstd_toolchain.bzl%zstd_binary_repo", + "attributes": { + "platform": "linux_arm64" + } + }, + "zstd_toolchains": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:zstd_toolchain.bzl%zstd_toolchains_repo", + "attributes": { + "user_repository_name": "zstd" + } + }, + "expand_template_darwin_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:expand_template_toolchain.bzl%expand_template_platform_repo", + "attributes": { + "platform": "darwin_amd64" + } + }, + "expand_template_darwin_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:expand_template_toolchain.bzl%expand_template_platform_repo", + "attributes": { + "platform": "darwin_arm64" + } + }, + "expand_template_freebsd_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:expand_template_toolchain.bzl%expand_template_platform_repo", + "attributes": { + "platform": "freebsd_amd64" + } + }, + "expand_template_linux_amd64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:expand_template_toolchain.bzl%expand_template_platform_repo", + "attributes": { + "platform": "linux_amd64" + } + }, + "expand_template_linux_arm64": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:expand_template_toolchain.bzl%expand_template_platform_repo", + "attributes": { + "platform": "linux_arm64" + } + }, + "expand_template_windows_amd64": { + "repoRuleId": 
"@@aspect_bazel_lib+//lib/private:expand_template_toolchain.bzl%expand_template_platform_repo", + "attributes": { + "platform": "windows_amd64" + } + }, + "expand_template_toolchains": { + "repoRuleId": "@@aspect_bazel_lib+//lib/private:expand_template_toolchain.bzl%expand_template_toolchains_repo", + "attributes": { + "user_repository_name": "expand_template" + } + }, + "bats_support": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "sha256": "7815237aafeb42ddcc1b8c698fc5808026d33317d8701d5ec2396e9634e2918f", + "urls": [ + "https://github.com/bats-core/bats-support/archive/v0.3.0.tar.gz" + ], + "strip_prefix": "bats-support-0.3.0", + "build_file_content": "load(\"@aspect_bazel_lib//lib:copy_to_directory.bzl\", \"copy_to_directory\")\n\ncopy_to_directory(\n name = \"support\",\n hardlink = \"on\",\n srcs = glob([\n \"src/**\",\n \"load.bash\",\n ]),\n out = \"bats-support\",\n visibility = [\"//visibility:public\"]\n)\n" + } + }, + "bats_assert": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "sha256": "98ca3b685f8b8993e48ec057565e6e2abcc541034ed5b0e81f191505682037fd", + "urls": [ + "https://github.com/bats-core/bats-assert/archive/v2.1.0.tar.gz" + ], + "strip_prefix": "bats-assert-2.1.0", + "build_file_content": "load(\"@aspect_bazel_lib//lib:copy_to_directory.bzl\", \"copy_to_directory\")\n\ncopy_to_directory(\n name = \"assert\",\n hardlink = \"on\",\n srcs = glob([\n \"src/**\",\n \"load.bash\",\n ]),\n out = \"bats-assert\",\n visibility = [\"//visibility:public\"]\n)\n" + } + }, + "bats_file": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "sha256": "9b69043241f3af1c2d251f89b4fcafa5df3f05e97b89db18d7c9bdf5731bb27a", + "urls": [ + "https://github.com/bats-core/bats-file/archive/v0.4.0.tar.gz" + ], + "strip_prefix": "bats-file-0.4.0", + "build_file_content": 
"load(\"@aspect_bazel_lib//lib:copy_to_directory.bzl\", \"copy_to_directory\")\n\ncopy_to_directory(\n name = \"file\",\n hardlink = \"on\",\n srcs = glob([\n \"src/**\",\n \"load.bash\",\n ]),\n out = \"bats-file\",\n visibility = [\"//visibility:public\"]\n)\n" + } + }, + "bats_toolchains": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_archive", + "attributes": { + "sha256": "a1a9f7875aa4b6a9480ca384d5865f1ccf1b0b1faead6b47aa47d79709a5c5fd", + "urls": [ + "https://github.com/bats-core/bats-core/archive/v1.10.0.tar.gz" + ], + "strip_prefix": "bats-core-1.10.0", + "build_file_content": "load(\"@local_config_platform//:constraints.bzl\", \"HOST_CONSTRAINTS\")\nload(\"@aspect_bazel_lib//lib/private:bats_toolchain.bzl\", \"bats_toolchain\")\nload(\"@aspect_bazel_lib//lib:copy_to_directory.bzl\", \"copy_to_directory\")\n\ncopy_to_directory(\n name = \"core\",\n hardlink = \"on\",\n srcs = glob([\n \"lib/**\",\n \"libexec/**\"\n ]) + [\"bin/bats\"],\n out = \"bats-core\",\n)\n\nbats_toolchain(\n name = \"toolchain\",\n core = \":core\",\n libraries = [\"@bats_support//:support\", \"@bats_assert//:assert\", \"@bats_file//:file\"]\n)\n\ntoolchain(\n name = \"bats_toolchain\",\n exec_compatible_with = HOST_CONSTRAINTS,\n toolchain = \":toolchain\",\n toolchain_type = \"@aspect_bazel_lib//lib:bats_toolchain_type\",\n)\n" + } + } + }, + "recordedRepoMappingEntries": [ + [ + "aspect_bazel_lib+", + "aspect_bazel_lib", + "aspect_bazel_lib+" + ], + [ + "aspect_bazel_lib+", + "bazel_skylib", + "bazel_skylib+" + ], + [ + "aspect_bazel_lib+", + "bazel_tools", + "bazel_tools" + ] + ] + } + }, + "@@buildifier_prebuilt+//:defs.bzl%buildifier_prebuilt_deps_extension": { + "general": { + "bzlTransitiveDigest": "BQ67MS38sDZxeQEfUs4vghLhs3+m4IXU/i7XC50fl9s=", + "usagesDigest": "JCqhJg+TeFVLBlrKVGI0Npi9RChNqkZQAh9TYfbAobs=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "buildifier_darwin_amd64": { + 
"repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildifier-darwin-amd64" + ], + "downloaded_file_path": "buildifier", + "executable": true, + "sha256": "e2f4a67691c5f55634fbfb3850eb97dd91be0edd059d947b6c83d120682e0216" + } + }, + "buildifier_darwin_arm64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildifier-darwin-arm64" + ], + "downloaded_file_path": "buildifier", + "executable": true, + "sha256": "7549b5f535219ac957aa2a6069d46fbfc9ea3f74abd85fd3d460af4b1a2099a6" + } + }, + "buildifier_linux_amd64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildifier-linux-amd64" + ], + "downloaded_file_path": "buildifier", + "executable": true, + "sha256": "51bc947dabb7b14ec6fb1224464fbcf7a7cb138f1a10a3b328f00835f72852ce" + } + }, + "buildifier_linux_arm64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildifier-linux-arm64" + ], + "downloaded_file_path": "buildifier", + "executable": true, + "sha256": "0ba6e8e3208b5a029164e542ddb5509e618f87b639ffe8cc2f54770022853080" + } + }, + "buildifier_windows_amd64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildifier-windows-amd64.exe" + ], + "downloaded_file_path": "buildifier.exe", + "executable": true, + "sha256": "92bdd284fbc6766fc3e300b434ff9e68ac4d76a06cb29d1bdefe79a102a8d135" + } + }, + "buildozer_darwin_amd64": { + "repoRuleId": 
"@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildozer-darwin-amd64" + ], + "downloaded_file_path": "buildozer", + "executable": true, + "sha256": "4014751a4cc5e91a7dc4b64be7b30c565bd9014ae6d1879818dc624562a1d431" + } + }, + "buildozer_darwin_arm64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildozer-darwin-arm64" + ], + "downloaded_file_path": "buildozer", + "executable": true, + "sha256": "e78bd5357f2356067d4b0d49ec4e4143dd9b1308746afc6ff11b55b952f462d7" + } + }, + "buildozer_linux_amd64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildozer-linux-amd64" + ], + "downloaded_file_path": "buildozer", + "executable": true, + "sha256": "2aef0f1ef80a0140b8fe6e6a8eb822e14827d8855bfc6681532c7530339ea23b" + } + }, + "buildozer_linux_arm64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildozer-linux-arm64" + ], + "downloaded_file_path": "buildozer", + "executable": true, + "sha256": "586e27630cbc242e8bd6fe8e24485eca8dcadea6410cc13cbe059202655980ac" + } + }, + "buildozer_windows_amd64": { + "repoRuleId": "@@bazel_tools//tools/build_defs/repo:http.bzl%http_file", + "attributes": { + "urls": [ + "https://github.com/bazelbuild/buildtools/releases/download/v6.1.2/buildozer-windows-amd64.exe" + ], + "downloaded_file_path": "buildozer.exe", + "executable": true, + "sha256": "07664d5d08ee099f069cd654070cabf2708efaae9f52dc83921fa400c67a868b" + } + }, + "buildifier_prebuilt_toolchains": { + "repoRuleId": "@@buildifier_prebuilt+//:defs.bzl%_buildifier_toolchain_setup", + 
"attributes": { + "assets_json": "[{\"arch\":\"amd64\",\"name\":\"buildifier\",\"platform\":\"darwin\",\"sha256\":\"e2f4a67691c5f55634fbfb3850eb97dd91be0edd059d947b6c83d120682e0216\",\"version\":\"v6.1.2\"},{\"arch\":\"arm64\",\"name\":\"buildifier\",\"platform\":\"darwin\",\"sha256\":\"7549b5f535219ac957aa2a6069d46fbfc9ea3f74abd85fd3d460af4b1a2099a6\",\"version\":\"v6.1.2\"},{\"arch\":\"amd64\",\"name\":\"buildifier\",\"platform\":\"linux\",\"sha256\":\"51bc947dabb7b14ec6fb1224464fbcf7a7cb138f1a10a3b328f00835f72852ce\",\"version\":\"v6.1.2\"},{\"arch\":\"arm64\",\"name\":\"buildifier\",\"platform\":\"linux\",\"sha256\":\"0ba6e8e3208b5a029164e542ddb5509e618f87b639ffe8cc2f54770022853080\",\"version\":\"v6.1.2\"},{\"arch\":\"amd64\",\"name\":\"buildifier\",\"platform\":\"windows\",\"sha256\":\"92bdd284fbc6766fc3e300b434ff9e68ac4d76a06cb29d1bdefe79a102a8d135\",\"version\":\"v6.1.2\"},{\"arch\":\"amd64\",\"name\":\"buildozer\",\"platform\":\"darwin\",\"sha256\":\"4014751a4cc5e91a7dc4b64be7b30c565bd9014ae6d1879818dc624562a1d431\",\"version\":\"v6.1.2\"},{\"arch\":\"arm64\",\"name\":\"buildozer\",\"platform\":\"darwin\",\"sha256\":\"e78bd5357f2356067d4b0d49ec4e4143dd9b1308746afc6ff11b55b952f462d7\",\"version\":\"v6.1.2\"},{\"arch\":\"amd64\",\"name\":\"buildozer\",\"platform\":\"linux\",\"sha256\":\"2aef0f1ef80a0140b8fe6e6a8eb822e14827d8855bfc6681532c7530339ea23b\",\"version\":\"v6.1.2\"},{\"arch\":\"arm64\",\"name\":\"buildozer\",\"platform\":\"linux\",\"sha256\":\"586e27630cbc242e8bd6fe8e24485eca8dcadea6410cc13cbe059202655980ac\",\"version\":\"v6.1.2\"},{\"arch\":\"amd64\",\"name\":\"buildozer\",\"platform\":\"windows\",\"sha256\":\"07664d5d08ee099f069cd654070cabf2708efaae9f52dc83921fa400c67a868b\",\"version\":\"v6.1.2\"}]" + } + } + }, + "recordedRepoMappingEntries": [ + [ + "buildifier_prebuilt+", + "bazel_skylib", + "bazel_skylib+" + ], + [ + "buildifier_prebuilt+", + "bazel_tools", + "bazel_tools" + ] + ] + } + }, "@@googleapis+//:extensions.bzl%switched_rules": { 
"general": { "bzlTransitiveDigest": "vG6fuTzXD8MMvHWZEQud0MMH7eoC4GXY0va7VrFFh04=", @@ -558,6 +1135,30 @@ "recordedRepoMappingEntries": [] } }, + "@@rules_buf+//buf:extensions.bzl%ext": { + "general": { + "bzlTransitiveDigest": "3jGepUu1j86kWsTP3Fgogw/XfktHd4UIQt8zj494n/Y=", + "usagesDigest": "RTc2BMQ2b0wGU8CRvN3EoPz34m3LMe+K/oSkFkN83+M=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "rules_buf_toolchains": { + "repoRuleId": "@@rules_buf+//buf/internal:toolchain.bzl%buf_download_releases", + "attributes": { + "version": "v1.27.0" + } + } + }, + "recordedRepoMappingEntries": [ + [ + "rules_buf+", + "bazel_tools", + "bazel_tools" + ] + ] + } + }, "@@rules_foreign_cc+//foreign_cc:extensions.bzl%tools": { "general": { "bzlTransitiveDigest": "FApcIcVN43WOEs7g8eg7Cy1hrfRbVNEoUu8IiF+8WOc=", @@ -958,6 +1559,161 @@ ] } }, + "@@rules_multitool+//multitool:extension.bzl%multitool": { + "general": { + "bzlTransitiveDigest": "AtvPzG/SAawYMKVVHcMoJq4EXkVPTIhS3AeNwENXp9E=", + "usagesDigest": "e8LhmhBwk31LQl101CsIrkGyTSXLKajJj61scb7SRLY=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "multitool.linux_arm64": { + "repoRuleId": "@@rules_multitool+//multitool/private:multitool.bzl%_env_specific_tools", + "attributes": { + "lockfiles": [ + "@@aspect_rules_lint+//format:multitool.lock.json", + "@@aspect_rules_lint+//lint:multitool.lock.json" + ], + "os": "linux", + "cpu": "arm64" + } + }, + "multitool.linux_x86_64": { + "repoRuleId": "@@rules_multitool+//multitool/private:multitool.bzl%_env_specific_tools", + "attributes": { + "lockfiles": [ + "@@aspect_rules_lint+//format:multitool.lock.json", + "@@aspect_rules_lint+//lint:multitool.lock.json" + ], + "os": "linux", + "cpu": "x86_64" + } + }, + "multitool.macos_arm64": { + "repoRuleId": "@@rules_multitool+//multitool/private:multitool.bzl%_env_specific_tools", + "attributes": { + "lockfiles": [ + 
"@@aspect_rules_lint+//format:multitool.lock.json", + "@@aspect_rules_lint+//lint:multitool.lock.json" + ], + "os": "macos", + "cpu": "arm64" + } + }, + "multitool.macos_x86_64": { + "repoRuleId": "@@rules_multitool+//multitool/private:multitool.bzl%_env_specific_tools", + "attributes": { + "lockfiles": [ + "@@aspect_rules_lint+//format:multitool.lock.json", + "@@aspect_rules_lint+//lint:multitool.lock.json" + ], + "os": "macos", + "cpu": "x86_64" + } + }, + "multitool": { + "repoRuleId": "@@rules_multitool+//multitool/private:multitool.bzl%_multitool_hub", + "attributes": { + "lockfiles": [ + "@@aspect_rules_lint+//format:multitool.lock.json", + "@@aspect_rules_lint+//lint:multitool.lock.json" + ] + } + } + }, + "recordedRepoMappingEntries": [] + } + }, + "@@rules_nodejs+//nodejs:extensions.bzl%node": { + "general": { + "bzlTransitiveDigest": "btnelILPo3ngQN9vWtsQMclvJZPf3X2vcGTjmW7Owy8=", + "usagesDigest": "8OoyQ05AfTDe1T/jKkylUFidWxQge7e3HN2eOIIA6xM=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "nodejs_linux_amd64": { + "repoRuleId": "@@rules_nodejs+//nodejs:repositories.bzl%node_repositories", + "attributes": { + "platform": "linux_amd64", + "node_version": "16.19.0" + } + }, + "nodejs_linux_arm64": { + "repoRuleId": "@@rules_nodejs+//nodejs:repositories.bzl%node_repositories", + "attributes": { + "platform": "linux_arm64", + "node_version": "16.19.0" + } + }, + "nodejs_linux_s390x": { + "repoRuleId": "@@rules_nodejs+//nodejs:repositories.bzl%node_repositories", + "attributes": { + "platform": "linux_s390x", + "node_version": "16.19.0" + } + }, + "nodejs_linux_ppc64le": { + "repoRuleId": "@@rules_nodejs+//nodejs:repositories.bzl%node_repositories", + "attributes": { + "platform": "linux_ppc64le", + "node_version": "16.19.0" + } + }, + "nodejs_darwin_amd64": { + "repoRuleId": "@@rules_nodejs+//nodejs:repositories.bzl%node_repositories", + "attributes": { + "platform": "darwin_amd64", + 
"node_version": "16.19.0" + } + }, + "nodejs_darwin_arm64": { + "repoRuleId": "@@rules_nodejs+//nodejs:repositories.bzl%node_repositories", + "attributes": { + "platform": "darwin_arm64", + "node_version": "16.19.0" + } + }, + "nodejs_windows_amd64": { + "repoRuleId": "@@rules_nodejs+//nodejs:repositories.bzl%node_repositories", + "attributes": { + "platform": "windows_amd64", + "node_version": "16.19.0" + } + }, + "nodejs": { + "repoRuleId": "@@rules_nodejs+//nodejs/private:nodejs_repo_host_os_alias.bzl%nodejs_repo_host_os_alias", + "attributes": { + "user_node_repository_name": "nodejs" + } + }, + "nodejs_host": { + "repoRuleId": "@@rules_nodejs+//nodejs/private:nodejs_repo_host_os_alias.bzl%nodejs_repo_host_os_alias", + "attributes": { + "user_node_repository_name": "nodejs" + } + }, + "nodejs_toolchains": { + "repoRuleId": "@@rules_nodejs+//nodejs/private:toolchains_repo.bzl%toolchains_repo", + "attributes": { + "user_node_repository_name": "nodejs" + } + } + }, + "recordedRepoMappingEntries": [ + [ + "rules_nodejs+", + "bazel_skylib", + "bazel_skylib+" + ], + [ + "rules_nodejs+", + "bazel_tools", + "bazel_tools" + ] + ] + } + }, "@@rules_python+//python/extensions:pip.bzl%pip": { "general": { "bzlTransitiveDigest": "Zx6phrxzOJPzNT1TtUMPWUJiDB7sNn8TqEmUFD3Wc2Q=", @@ -6259,6 +7015,79 @@ ] ] } + }, + "@@toolchains_protoc+//protoc:extensions.bzl%protoc": { + "general": { + "bzlTransitiveDigest": "HnmcD4ia7/1ZuQnymt4OGHXrW62MmIgwCtHByGQ7LQs=", + "usagesDigest": "8nmQyO6LoaF/+HM3ni78Za6MQ5BVffJheAFJgl6hvoY=", + "recordedFileInputs": {}, + "recordedDirentsInputs": {}, + "envVariables": {}, + "generatedRepoSpecs": { + "toolchains_protoc_hub.linux_aarch_64": { + "repoRuleId": "@@toolchains_protoc+//protoc/private:prebuilt_protoc_toolchain.bzl%prebuilt_protoc_repo", + "attributes": { + "platform": "linux-aarch_64", + "version": "v25.3" + } + }, + "toolchains_protoc_hub.linux_ppcle_64": { + "repoRuleId": 
"@@toolchains_protoc+//protoc/private:prebuilt_protoc_toolchain.bzl%prebuilt_protoc_repo", + "attributes": { + "platform": "linux-ppcle_64", + "version": "v25.3" + } + }, + "toolchains_protoc_hub.linux_s390_64": { + "repoRuleId": "@@toolchains_protoc+//protoc/private:prebuilt_protoc_toolchain.bzl%prebuilt_protoc_repo", + "attributes": { + "platform": "linux-s390_64", + "version": "v25.3" + } + }, + "toolchains_protoc_hub.linux_x86_64": { + "repoRuleId": "@@toolchains_protoc+//protoc/private:prebuilt_protoc_toolchain.bzl%prebuilt_protoc_repo", + "attributes": { + "platform": "linux-x86_64", + "version": "v25.3" + } + }, + "toolchains_protoc_hub.osx_aarch_64": { + "repoRuleId": "@@toolchains_protoc+//protoc/private:prebuilt_protoc_toolchain.bzl%prebuilt_protoc_repo", + "attributes": { + "platform": "osx-aarch_64", + "version": "v25.3" + } + }, + "toolchains_protoc_hub.osx_x86_64": { + "repoRuleId": "@@toolchains_protoc+//protoc/private:prebuilt_protoc_toolchain.bzl%prebuilt_protoc_repo", + "attributes": { + "platform": "osx-x86_64", + "version": "v25.3" + } + }, + "toolchains_protoc_hub.win64": { + "repoRuleId": "@@toolchains_protoc+//protoc/private:prebuilt_protoc_toolchain.bzl%prebuilt_protoc_repo", + "attributes": { + "platform": "win64", + "version": "v25.3" + } + }, + "toolchains_protoc_hub": { + "repoRuleId": "@@toolchains_protoc+//protoc/private:protoc_toolchains.bzl%protoc_toolchains_repo", + "attributes": { + "user_repository_name": "toolchains_protoc_hub" + } + }, + "com_google_protobuf": { + "repoRuleId": "@@toolchains_protoc+//protoc:toolchain.bzl%_google_protobuf_alias_repo", + "attributes": { + "alias_to": "toolchains_protoc_hub.osx_aarch_64" + } + } + }, + "recordedRepoMappingEntries": [] + } } } } diff --git a/src/hyperon_das/__init__.py b/src/hyperon_das/__init__.py index 0faf7bd..90569f7 100644 --- a/src/hyperon_das/__init__.py +++ b/src/hyperon_das/__init__.py @@ -1,10 +1,10 @@ import sys if sys.version_info < (3, 10): - raise 
RuntimeError('hyperon_das requires Python 3.10 or higher') + raise RuntimeError("hyperon_das requires Python 3.10 or higher") from hyperon_das.das import DistributedAtomSpace -__all__ = ['DistributedAtomSpace'] +__all__ = ["DistributedAtomSpace"] -__version__ = '0.9.14' +__version__ = "0.9.14" diff --git a/src/hyperon_das/cache/attention_broker_gateway.py b/src/hyperon_das/cache/attention_broker_gateway.py index 9dc8333..1ad2d5a 100644 --- a/src/hyperon_das/cache/attention_broker_gateway.py +++ b/src/hyperon_das/cache/attention_broker_gateway.py @@ -19,11 +19,11 @@ def __init__(self, system_parameters: Dict[str, Any]): f"Invalid system parameters. server_hostname: '{self.server_hostname}' server_port: {self.server_port}" ) ) - self.server_url = f'{self.server_hostname}:{self.server_port}' + self.server_url = f"{self.server_hostname}:{self.server_port}" self.ping() def ping(self) -> Optional[str]: - logger().info(f'Pinging AttentionBroker at {self.server_url}') + logger().info(f"Pinging AttentionBroker at {self.server_url}") with grpc.insecure_channel(self.server_url) as channel: stub = AttentionBrokerStub(channel) response = stub.ping(grpc_types.Empty()) @@ -33,9 +33,9 @@ def ping(self) -> Optional[str]: def stimulate(self, handle_count: Set[str]) -> Optional[str]: if handle_count is None: - das_error(ValueError(f'Invalid handle_count {handle_count}')) + das_error(ValueError(f"Invalid handle_count {handle_count}")) logger().info( - f'Requesting AttentionBroker at {self.server_url} to stimulate {len(handle_count)} atoms' + f"Requesting AttentionBroker at {self.server_url} to stimulate {len(handle_count)} atoms" ) message = grpc_types.HandleCount(handle_count=handle_count) with grpc.insecure_channel(self.server_url) as channel: @@ -47,9 +47,9 @@ def stimulate(self, handle_count: Set[str]) -> Optional[str]: def correlate(self, handle_set: Set[str]) -> Optional[str]: if handle_set is None: - das_error(ValueError(f'Invalid handle_set {handle_set}')) + 
das_error(ValueError(f"Invalid handle_set {handle_set}")) logger().info( - f'Requesting AttentionBroker at {self.server_url} to correlate {len(handle_set)} atoms' + f"Requesting AttentionBroker at {self.server_url} to correlate {len(handle_set)} atoms" ) message = grpc_types.HandleList(handle_list=handle_set) sleep(0.05) diff --git a/src/hyperon_das/cache/iterators.py b/src/hyperon_das/cache/iterators.py index 7ca29d3..df28283 100644 --- a/src/hyperon_das/cache/iterators.py +++ b/src/hyperon_das/cache/iterators.py @@ -89,7 +89,10 @@ def __next__(self): class LazyQueryEvaluator(ProductIterator): def __init__( - self, link_type: str, source: List[QueryAnswerIterator], query_engine: QueryEngine + self, + link_type: str, + source: List[QueryAnswerIterator], + query_engine: QueryEngine, ): super().__init__(source) self.link_type = link_type @@ -161,10 +164,10 @@ class BaseLinksIterator(QueryAnswerIterator, ABC): def __init__(self, source: ListIterator, **kwargs) -> None: super().__init__(source) if not self.source.is_empty(): - if not hasattr(self, 'backend'): - self.backend = kwargs.get('backend') - self.chunk_size = kwargs.get('chunk_size', 1000) - self.cursor = kwargs.get('cursor', 0) + if not hasattr(self, "backend"): + self.backend = kwargs.get("backend") + self.chunk_size = kwargs.get("chunk_size", 1000) + self.cursor = kwargs.get("cursor", 0) self.buffer_queue = deque() self.iterator = self.source self.current_value = self.get_current_value() @@ -236,8 +239,8 @@ def get_fetch_data(self, **kwargs) -> tuple: class LocalIncomingLinks(BaseLinksIterator): def __init__(self, source: ListIterator, **kwargs) -> None: - self.atom_handle = kwargs.get('atom_handle') - self.targets_document = kwargs.get('targets_document', False) + self.atom_handle = kwargs.get("atom_handle") + self.targets_document = kwargs.get("targets_document", False) super().__init__(source, **kwargs) def get_next_value(self) -> Any: @@ -259,7 +262,11 @@ def get_current_value(self) -> Any: return None 
def get_fetch_data_kwargs(self) -> Dict[str, Any]: - return {'handles_only': True, 'cursor': self.cursor, 'chunk_size': self.chunk_size} + return { + "handles_only": True, + "cursor": self.cursor, + "chunk_size": self.chunk_size, + } def get_fetch_data(self, **kwargs) -> tuple: if self.backend: @@ -268,8 +275,8 @@ def get_fetch_data(self, **kwargs) -> tuple: class RemoteIncomingLinks(BaseLinksIterator): def __init__(self, source: ListIterator, **kwargs) -> None: - self.atom_handle = kwargs.get('atom_handle') - self.targets_document = kwargs.get('targets_document', False) + self.atom_handle = kwargs.get("atom_handle") + self.targets_document = kwargs.get("targets_document", False) self.returned_handles = set() super().__init__(source, **kwargs) @@ -278,9 +285,9 @@ def get_next_value(self) -> Any: while True: link_document = next(self.iterator) if isinstance(link_document, tuple) or isinstance(link_document, list): - handle = link_document[0]['handle'] + handle = link_document[0]["handle"] elif isinstance(link_document, dict): - handle = link_document['handle'] + handle = link_document["handle"] elif isinstance(link_document, str): handle = link_document else: @@ -299,9 +306,9 @@ def get_current_value(self) -> Any: def get_fetch_data_kwargs(self) -> Dict[str, Any]: return { - 'cursor': self.cursor, - 'chunk_size': self.chunk_size, - 'targets_document': self.targets_document, + "cursor": self.cursor, + "chunk_size": self.chunk_size, + "targets_document": self.targets_document, } def get_fetch_data(self, **kwargs) -> tuple: @@ -311,9 +318,9 @@ def get_fetch_data(self, **kwargs) -> tuple: class CustomQuery(BaseLinksIterator): def __init__(self, source: ListIterator, **kwargs) -> None: - self.index_id = kwargs.pop('index_id', None) - self.backend = kwargs.pop('backend', None) - self.is_remote = kwargs.pop('is_remote', False) + self.index_id = kwargs.pop("index_id", None) + self.backend = kwargs.pop("backend", None) + self.is_remote = kwargs.pop("is_remote", False) 
self.kwargs = kwargs super().__init__(source, **kwargs) @@ -330,18 +337,18 @@ def get_current_value(self) -> Any: def get_fetch_data_kwargs(self) -> Dict[str, Any]: kwargs = self.kwargs - kwargs.update({'cursor': self.cursor, 'chunk_size': self.chunk_size}) + kwargs.update({"cursor": self.cursor, "chunk_size": self.chunk_size}) return kwargs def get_fetch_data(self, **kwargs) -> tuple: if self.backend: if self.is_remote: return self.backend.custom_query( - self.index_id, query=kwargs.get('query', []), **kwargs + self.index_id, query=kwargs.get("query", []), **kwargs ) else: return self.backend.get_atoms_by_index( - self.index_id, query=kwargs.get('query', []), **kwargs + self.index_id, query=kwargs.get("query", []), **kwargs ) @@ -350,13 +357,13 @@ def __init__( self, source: LocalIncomingLinks | RemoteIncomingLinks | Iterator, **kwargs ) -> None: super().__init__(source) - self.cursor = kwargs.get('cursor') - self.targets_only = kwargs.get('targets_only', False) + self.cursor = kwargs.get("cursor") + self.targets_only = kwargs.get("targets_only", False) self.buffer = None - self.link_type = kwargs.get('link_type') - self.cursor_position = kwargs.get('cursor_position') - self.target_type = kwargs.get('target_type') - self.custom_filter = kwargs.get('filter') + self.link_type = kwargs.get("link_type") + self.cursor_position = kwargs.get("cursor_position") + self.target_type = kwargs.get("target_type") + self.custom_filter = kwargs.get("filter") if not self.source.is_empty(): self.iterator = self.source self.current_value = self._find_first_valid_element() @@ -371,7 +378,7 @@ def __next__(self): if isinstance(link, tuple): link, targets = link elif isinstance(link, dict): - targets = link.pop('targets_document', []) + targets = link.pop("targets_document", []) else: raise ValueError(f"Invalid link document: {link}") if ( @@ -390,20 +397,20 @@ def _find_first_valid_element(self): if isinstance(link, tuple): link, targets = link elif isinstance(link, dict): - targets = 
link.get('targets_document', []) + targets = link.get("targets_document", []) else: raise ValueError(f"Invalid link document: {link}") if self._filter(link, targets): return targets if self.targets_only else link def _filter(self, link: Dict[str, Any], targets: list[dict[str, Any]]) -> bool: - if self.link_type and self.link_type != link['named_type']: + if self.link_type and self.link_type != link["named_type"]: return False try: if ( self.cursor_position is not None - and self.cursor != link['targets'][self.cursor_position] + and self.cursor != link["targets"][self.cursor_position] ): return False except IndexError: @@ -412,12 +419,12 @@ def _filter(self, link: Dict[str, Any], targets: list[dict[str, Any]]) -> bool: raise e if self.target_type: - if not any(target['named_type'] == self.target_type for target in targets): + if not any(target["named_type"] == self.target_type for target in targets): return False if self.custom_filter: deep_link = link.copy() - deep_link['targets'] = targets + deep_link["targets"] = targets if self._apply_custom_filter(deep_link) is False: return False @@ -426,15 +433,15 @@ def _filter(self, link: Dict[str, Any], targets: list[dict[str, Any]]) -> bool: def _apply_custom_filter(self, atom: Dict[str, Any], F=None) -> bool: custom_filter = F if F else self.custom_filter - assert callable( - custom_filter - ), "The custom_filter must be a function with this signature 'def func(atom: dict) -> bool: ...'" + assert callable(custom_filter), ( + "The custom_filter must be a function with this signature 'def func(atom: dict) -> bool: ...'" + ) try: if not custom_filter(atom): return False except Exception as e: - raise Exception(f"Error while applying the custom filter: {e}") + raise Exception(f"Error while applying the custom filter: {e}") from e def is_empty(self) -> bool: return not self.current_value @@ -447,7 +454,7 @@ def __init__(self, source: TraverseLinksIterator, **kwargs) -> None: self.cursor = self.source.cursor self.target_type = 
self.source.target_type self.visited_neighbors = [] - self.custom_filter = kwargs.get('filter') + self.custom_filter = kwargs.get("filter") if not self.source.is_empty(): self.iterator = source self.current_value = self._find_first_valid_element() @@ -480,16 +487,16 @@ def _process_targets(self, targets: list) -> tuple: for target in targets: if self._filter(target): match_found = True - self.visited_neighbors.append(target['handle']) + self.visited_neighbors.append(target["handle"]) answer.append(target) return (answer, match_found) def _filter(self, target: Dict[str, Any]) -> bool: - handle = target['handle'] + handle = target["handle"] if not ( self.cursor != handle and handle not in self.visited_neighbors - and (self.target_type == target['named_type'] or not self.target_type) + and (self.target_type == target["named_type"] or not self.target_type) ): return False diff --git a/src/hyperon_das/client.py b/src/hyperon_das/client.py index 58f3347..514c8c5 100644 --- a/src/hyperon_das/client.py +++ b/src/hyperon_das/client.py @@ -22,23 +22,23 @@ class FunctionsClient: def __init__(self, host: str, port: int, name: Optional[str] = None) -> None: if not host and not port: das_error(ValueError("'host' and 'port' are mandatory parameters")) - self.name = name if name else f'client_{host}:{port}' + self.name = name if name else f"client_{host}:{port}" self.status_code, self.url = connect_to_server(host, port) def _send_request(self, payload) -> Any: try: - if payload.get('input'): - normalized_input = {k: v for k, v in payload['input'].items() if v is not None} - payload['input'] = normalized_input + if payload.get("input"): + normalized_input = {k: v for k, v in payload["input"].items() if v is not None} + payload["input"] = normalized_input payload_serialized = serialize(payload) with sessions.Session() as session: response = session.request( - method='POST', + method="POST", url=self.url, data=payload_serialized, - headers={'Content-Type': 
'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) response.raise_for_status() @@ -52,7 +52,7 @@ def _send_request(self, payload) -> Any: return response_data else: return response_data.get( - 'error', f'Unknown error with status code {response.status_code}' + "error", f"Unknown error with status code {response.status_code}" ) except exceptions.ConnectionError as e: das_error( @@ -88,20 +88,20 @@ def _send_request(self, payload) -> Any: def get_atom(self, handle: str, **kwargs) -> Union[str, Dict]: payload = { - 'action': 'get_atom', - 'input': {'handle': handle}, + "action": "get_atom", + "input": {"handle": handle}, } try: return self._send_request(payload) except HTTPError as e: if e.status_code == 404: - raise AtomDoesNotExist('Nonexistent atom') + raise AtomDoesNotExist("Nonexistent atom") from e else: raise e def get_links(self, link_filter: LinkFilter) -> Union[List[str], List[Dict]]: payload = { - 'action': 'get_links', + "action": "get_links", "input": { "link_filter": { "filter_type": link_filter.filter_type, @@ -116,9 +116,9 @@ def get_links(self, link_filter: LinkFilter) -> Union[List[str], List[Dict]]: return self._send_request(payload) except HTTPError as e: if e.status_code == 404: - raise AtomDoesNotExist('Nonexistent atom') + raise AtomDoesNotExist("Nonexistent atom") from e elif e.status_code == 400: - raise ValueError(str(e)) + raise ValueError(str(e)) from e else: raise e @@ -129,8 +129,8 @@ def query( ) -> List[AtomT]: try: payload = { - 'action': 'query', - 'input': {'query': query, 'parameters': parameters}, + "action": "query", + "input": {"query": query, "parameters": parameters}, } return self._send_request(payload) except HTTPError as e: @@ -139,40 +139,42 @@ def query( "Your query couldn't be processed due to an invalid format. 
Review the way the query " "is written and try again.", str(e), - ) + ) from e elif e.status_code == 404: - raise Exception("Your query couldn't be processed because Atom nonexistent", str(e)) + raise Exception( + "Your query couldn't be processed because Atom nonexistent", str(e) + ) from e raise e def count_atoms(self, parameters: Optional[Dict[str, Any]] = None) -> Dict[str, int]: payload = { - 'action': 'count_atoms', - 'input': {'parameters': parameters}, + "action": "count_atoms", + "input": {"parameters": parameters}, } return self._send_request(payload) def commit_changes(self, **kwargs) -> Tuple[int, int]: payload = { - 'action': 'commit_changes', - 'input': {'kwargs': kwargs}, + "action": "commit_changes", + "input": {"kwargs": kwargs}, } try: return self._send_request(payload) except HTTPError as e: if e.status_code == 403: - raise ValueError(str(e)) + raise ValueError(str(e)) from e else: raise e def get_incoming_links(self, atom_handle: str, **kwargs) -> IncomingLinksT | Iterator: payload = { - 'action': 'get_incoming_links', - 'input': {'atom_handle': atom_handle, 'kwargs': kwargs}, + "action": "get_incoming_links", + "input": {"atom_handle": atom_handle, "kwargs": kwargs}, } try: return self._send_request(payload) except HTTPError as e: - logger().debug(f'Error during `get_incoming_links` request on remote Das: {str(e)}') + logger().debug(f"Error during `get_incoming_links` request on remote Das: {str(e)}") return [] def create_field_index( @@ -184,30 +186,30 @@ def create_field_index( index_type: Optional[str] = None, ) -> str: payload = { - 'action': 'create_field_index', - 'input': { - 'atom_type': atom_type, - 'fields': fields, - 'named_type': named_type, - 'composite_type': composite_type, - 'index_type': index_type, + "action": "create_field_index", + "input": { + "atom_type": atom_type, + "fields": fields, + "named_type": named_type, + "composite_type": composite_type, + "index_type": index_type, }, } try: return self._send_request(payload) 
except HTTPError as e: if e.status_code == 400: - raise ValueError(str(e)) + raise ValueError(str(e)) from e else: raise e def custom_query(self, index_id: str, query: Query, **kwargs) -> List[AtomT]: payload = { - 'action': 'custom_query', - 'input': { - 'index_id': index_id, - 'query': {v['field']: v['value'] for v in query}, - 'kwargs': kwargs, + "action": "custom_query", + "input": { + "index_id": index_id, + "query": {v["field"]: v["value"] for v in query}, + "kwargs": kwargs, }, } try: @@ -223,8 +225,8 @@ def fetch( **kwargs, ) -> Any: payload = { - 'action': 'fetch', - 'input': {'query': query, 'host': host, 'port': port, 'kwargs': kwargs}, + "action": "fetch", + "input": {"query": query, "host": host, "port": port, "kwargs": kwargs}, } try: return self._send_request(payload) @@ -233,59 +235,66 @@ def fetch( def create_context(self, name: str, queries: Optional[List[Query]]) -> Any: payload = { - 'action': 'create_context', - 'input': {'name': name, 'queries': queries}, + "action": "create_context", + "input": {"name": name, "queries": queries}, } try: return self._send_request(payload) except HTTPError as e: if e.status_code == 404: - raise AtomDoesNotExist('nonexistent atom') + raise AtomDoesNotExist("nonexistent atom") from e elif e.status_code == 400: - raise ValueError(str(e)) + raise ValueError(str(e)) from e else: raise e def get_atoms_by_field(self, query: Query) -> List[str]: payload = { - 'action': 'get_atoms_by_field', - 'input': {'query': {v['field']: v['value'] for v in query}}, + "action": "get_atoms_by_field", + "input": {"query": {v["field"]: v["value"] for v in query}}, } try: return self._send_request(payload) except HTTPError as e: if e.status_code == 400: - raise ValueError(str(e)) + raise ValueError(str(e)) from e else: raise e def get_atoms_by_text_field( - self, text_value: str, field: Optional[str] = None, text_index_id: Optional[str] = None + self, + text_value: str, + field: Optional[str] = None, + text_index_id: Optional[str] = 
None, ) -> List[str]: payload = { - 'action': 'get_atoms_by_text_field', - 'input': {'text_value': text_value, 'field': field, 'text_index_id': text_index_id}, + "action": "get_atoms_by_text_field", + "input": { + "text_value": text_value, + "field": field, + "text_index_id": text_index_id, + }, } try: return self._send_request(payload) except HTTPError as e: if e.status_code == 400: - raise ValueError(str(e)) + raise ValueError(str(e)) from e else: raise e def get_node_by_name_starting_with(self, node_type: str, startswith: str) -> List[str]: payload = { - 'action': 'get_node_by_name_starting_with', - 'input': { - 'node_type': node_type, - 'startswith': startswith, + "action": "get_node_by_name_starting_with", + "input": { + "node_type": node_type, + "startswith": startswith, }, } try: return self._send_request(payload) except HTTPError as e: if e.status_code == 400: - raise ValueError(str(e)) + raise ValueError(str(e)) from e else: raise e diff --git a/src/hyperon_das/das.py b/src/hyperon_das/das.py index 2e3b025..3db2812 100644 --- a/src/hyperon_das/das.py +++ b/src/hyperon_das/das.py @@ -33,7 +33,8 @@ class DistributedAtomSpace: backend: AtomDB - def __init__(self, system_parameters: Dict[str, Any] = {}, **kwargs) -> None: + # FIXME: B006 Do not use mutable data structures for argumnet defaults + def __init__(self, system_parameters: Dict[str, Any] = {}, **kwargs) -> None: # noqa: B006 """ Creates a new DAS object. A DAS client can run locally or locally and remote, connecting to remote DAS instances to query remote atoms. If there are different @@ -83,8 +84,8 @@ def __init__(self, system_parameters: Dict[str, Any] = {}, **kwargs) -> None: redis_ssl (bool, optional): Set Redis to encrypt the connection. Defaults to True. 
""" self.system_parameters = system_parameters - self.atomdb = kwargs.get('atomdb', 'ram') - self.query_engine_type = kwargs.get('query_engine', 'local') + self.atomdb = kwargs.get("atomdb", "ram") + self.query_engine_type = kwargs.get("query_engine", "local") self._set_default_system_parameters() self._set_backend(**kwargs) self.cache_controller = CacheController(self.system_parameters) @@ -92,15 +93,15 @@ def __init__(self, system_parameters: Dict[str, Any] = {}, **kwargs) -> None: def _set_default_system_parameters(self) -> None: # Internals - if not self.system_parameters.get('running_on_server'): - self.system_parameters['running_on_server'] = False + if not self.system_parameters.get("running_on_server"): + self.system_parameters["running_on_server"] = False # Attention Broker - if not self.system_parameters.get('cache_enabled'): - self.system_parameters['cache_enabled'] = False - if not self.system_parameters.get('attention_broker_hostname'): - self.system_parameters['attention_broker_hostname'] = 'localhost' - if not self.system_parameters.get('attention_broker_port'): - self.system_parameters['attention_broker_port'] = 27000 + if not self.system_parameters.get("cache_enabled"): + self.system_parameters["cache_enabled"] = False + if not self.system_parameters.get("attention_broker_hostname"): + self.system_parameters["attention_broker_hostname"] = "localhost" + if not self.system_parameters.get("attention_broker_port"): + self.system_parameters["attention_broker_port"] = 27000 def _set_backend(self, **kwargs) -> None: if self.atomdb == "ram": @@ -115,7 +116,7 @@ def _set_backend(self, **kwargs) -> None: raise InvalidAtomDB("Invalid AtomDB type. 
Choose either 'ram' or 'redis_mongo'") def _set_query_engine(self, **kwargs) -> None: - if self.query_engine_type == 'local': + if self.query_engine_type == "local": das_type = DasType.LOCAL_RAM_ONLY if self.atomdb == "ram" else DasType.LOCAL_REDIS_MONGO self._start_query_engine(LocalQueryEngine, das_type, **kwargs) elif self.query_engine_type == "remote": @@ -138,13 +139,14 @@ def _start_query_engine( ) logger().info(f"Started {das_type} DAS") + # FIXME: B006 Do not use mutable data structures for argumnet defaults def _create_context( self, name: str, - queries: List[Query] = [], + queries: List[Query] = [], # noqa: B006 ) -> Context: context_node = self.add_node(NodeT(type=Context.CONTEXT_NODE_TYPE, name=name)) - query_answer = [self.query(query, {'no_iterator': True}) for query in queries] + query_answer = [self.query(query, {"no_iterator": True}) for query in queries] context = Context(context_node, query_answer) self.cache_controller.add_context(context) return context @@ -152,15 +154,15 @@ def _create_context( @staticmethod def about() -> dict: return { - 'das': { - 'name': 'hyperon-das', - 'version': get_package_version('hyperon_das'), - 'summary': 'Query Engine API for Distributed AtomSpace', + "das": { + "name": "hyperon-das", + "version": get_package_version("hyperon_das"), + "summary": "Query Engine API for Distributed AtomSpace", }, - 'atom_db': { - 'name': 'hyperon-das-atomdb', - 'version': get_package_version('hyperon_das_atomdb'), - 'summary': 'Persistence layer for Distributed AtomSpace', + "atom_db": { + "name": "hyperon-das-atomdb", + "version": get_package_version("hyperon_das_atomdb"), + "summary": "Persistence layer for Distributed AtomSpace", }, } @@ -184,7 +186,7 @@ def compute_node_handle(node_type: str, node_name: str) -> str: Examples: >>> das = DistributedAtomSpace() - >>> result = das.compute_node_handle(node_type='Concept', node_name='human') + >>> result = das.compute_node_handle(node_type="Concept", node_name="human") >>> 
print(result) "af12f10f9ae2002a1607ba0b47ba8407" """ @@ -210,9 +212,11 @@ def compute_link_handle(link_type: str, link_targets: HandleListT) -> str: Examples: >>> das = DistributedAtomSpace() - >>> human_handle = das.compute_node_handle(node_type='Concept', node_name='human') - >>> monkey_handle = das.compute_node_handle(node_type='Concept', node_name='monkey') - >>> result = das.compute_link_handle(link_type='Similarity', targets=[human_handle, monkey_handle]) + >>> human_handle = das.compute_node_handle(node_type="Concept", node_name="human") + >>> monkey_handle = das.compute_node_handle(node_type="Concept", node_name="monkey") + >>> result = das.compute_link_handle( + ... link_type="Similarity", targets=[human_handle, monkey_handle] + ... ) >>> print(result) "bad7472f41a0e7d601ca294eb4607c3a" @@ -237,7 +241,7 @@ def get_atom(self, handle: HandleT) -> AtomT: Examples: >>> das = DistributedAtomSpace() - >>> human_handle = das.compute_node_handle(node_type='Concept', node_name='human') + >>> human_handle = das.compute_node_handle(node_type="Concept", node_name="human") >>> node = das.get_atom(human_handle) >>> print(node) Node(_id: 'af12f10f9ae2002a1607ba0b47ba8407', handle: 'af12f10f9ae2002a1607ba0b47ba8407', @@ -272,8 +276,8 @@ def get_atoms(self, handles: HandleListT) -> list[AtomT]: Examples: >>> das = DistributedAtomSpace() - >>> human_handle = das.compute_node_handle(node_type='Concept', node_name='human') - >>> animal_handle = das.compute_node_handle(node_type='Concept', node_name='monkey') + >>> human_handle = das.compute_node_handle(node_type="Concept", node_name="human") + >>> animal_handle = das.compute_node_handle(node_type="Concept", node_name="monkey") >>> result = das.get_atoms([human_handle, animal_handle]) >>> print(result[0]) Node(_id: 'af12f10f9ae2002a1607ba0b47ba8407', handle: 'af12f10f9ae2002a1607ba0b47ba8407', @@ -302,7 +306,7 @@ def get_node(self, node_type: str, node_name: str) -> NodeT: Examples: >>> das = DistributedAtomSpace() - >>> node 
= das.get_node(node_type='Concept', node_name='human') + >>> node = das.get_node(node_type="Concept", node_name="human") >>> print(node) Node(_id: 'af12f10f9ae2002a1607ba0b47ba8407', handle: 'af12f10f9ae2002a1607ba0b47ba8407', composite_type_hash: 'd99a604c79ce3c2e76a2f43488d5d4c3', named_type: 'Concept', @@ -332,8 +336,8 @@ def get_link(self, link_type: str, link_targets: HandleListT) -> LinkT: Examples: >>> das = DistributedAtomSpace() - >>> human_handle = das.compute_node_handle('Concept', 'human') - >>> monkey_handle = das.compute_node_handle('Concept', 'monkey') + >>> human_handle = das.compute_node_handle("Concept", "human") + >>> monkey_handle = das.compute_node_handle("Concept", "monkey") >>> result = das.get_link( link_type='Similarity', link_targets=[human_handle, monkey_handle], @@ -391,7 +395,7 @@ def get_incoming_links(self, atom_handle: HandleT, **kwargs) -> IncomingLinksT: Examples: >>> das = DistributedAtomSpace() - >>> rhino = das.compute_node_handle('Concept', 'rhino') + >>> rhino = das.compute_node_handle("Concept", "rhino") >>> links = das.get_incoming_links(rhino) >>> for link in links: >>> print(link.type, link.targets) @@ -401,7 +405,8 @@ def get_incoming_links(self, atom_handle: HandleT, **kwargs) -> IncomingLinksT: """ return self.query_engine.get_incoming_links(atom_handle, **kwargs) - def count_atoms(self, parameters: Dict[str, Any] = {}) -> Dict[str, int]: + # FIXME: B006 Do not use mutable data structures for argument defaults + def count_atoms(self, parameters: Dict[str, Any] = {}) -> Dict[str, int]: # noqa: B006 """ Count atoms, nodes and links in DAS. 
@@ -424,10 +429,11 @@ def count_atoms(self, parameters: Dict[str, Any] = {}) -> Dict[str, int]: """ return self.query_engine.count_atoms(parameters) + # FIXME: B006 Do not use mutable data structures for argument defaults def query( self, query: Query, - parameters: Dict[str, Any] = {}, + parameters: Dict[str, Any] = {}, # noqa: B006 ) -> Union[Iterator[QueryAnswer], List[QueryAnswer]]: """ Perform a query on the knowledge base using a dict as input and return an @@ -553,14 +559,14 @@ def custom_query(self, index_id: str, query: Query, **kwargs) -> Union[Iterator, Iterator | List[AtomT]: An iterator or a list of Atom instances (Nodes or Links). Examples: - >>> das.custom_query(index_id='index_123', query={'tag': 'DAS'}) - >>> das.custom_query(index_id='index_123', query={'tag': 'DAS'}, no_iterator=True) + >>> das.custom_query(index_id="index_123", query={"tag": "DAS"}) + >>> das.custom_query(index_id="index_123", query={"tag": "DAS"}, no_iterator=True) """ if isinstance(self.query_engine, LocalQueryEngine) and isinstance(self.backend, InMemoryDB): raise NotImplementedError("custom_query() is not implemented for Local DAS in RAM only") return self.query_engine.custom_query( - index_id, [{'field': k, 'value': v} for k, v in query.items()], **kwargs + index_id, [{"field": k, "value": v} for k, v in query.items()], **kwargs ) def get_atoms_by_field(self, query: Query) -> HandleListT: @@ -578,11 +584,14 @@ def get_atoms_by_field(self, query: Query) -> HandleListT: """ return self.query_engine.get_atoms_by_field( - [{'field': k, 'value': v} for k, v in query.items()] + [{"field": k, "value": v} for k, v in query.items()] ) def get_atoms_by_text_field( - self, text_value: str, field: Optional[str] = None, text_index_id: Optional[str] = None + self, + text_value: str, + field: Optional[str] = None, + text_index_id: Optional[str] = None, ) -> HandleListT: """ Performs a text search, if a text index is previously created performance a token index search, @@ -788,7 +797,7 @@ 
def clear(self) -> None: Delete all atoms and custom indexes. """ self.backend.clear_database() - logger().debug('The database has been cleaned.') + logger().debug("The database has been cleaned.") def get_traversal_cursor(self, handle: str, **kwargs) -> TraverseEngine: """ @@ -814,8 +823,10 @@ def get_traversal_cursor(self, handle: str, **kwargs) -> TraverseEngine: """ try: return TraverseEngine(handle, das=self, **kwargs) - except AtomDoesNotExist: - raise GetTraversalCursorException(message="Cannot start Traversal. Atom does not exist") + except AtomDoesNotExist as e: + raise GetTraversalCursorException( + message="Cannot start Traversal. Atom does not exist" + ) from e def create_field_index( self, @@ -853,8 +864,17 @@ def create_field_index( newly created index. Examples: - >>> index_id = das.create_field_index('link', ['tag'], type='Expression') - >>> index_id = das.create_field_index('link', ['tag'], composite_type=['Expression', 'Symbol', 'Symbol', ['Expression', 'Symbol', 'Symbol', 'Symbol']]) + >>> index_id = das.create_field_index("link", ["tag"], type="Expression") + >>> index_id = das.create_field_index( + ... "link", + ... ["tag"], + ... composite_type=[ + ... "Expression", + ... "Symbol", + ... "Symbol", + ... ["Expression", "Symbol", "Symbol", "Symbol"], + ... ], + ... 
) """ if named_type and composite_type: raise ValueError("'type' and 'composite_type' can't be specified simultaneously") @@ -924,7 +944,7 @@ def fetch( das.fetch(query, host='123.4.5.6', port=8080) """ - if not self.system_parameters.get('running_on_server'): + if not self.system_parameters.get("running_on_server"): if self._das_type != DasType.REMOTE and (not host or not port): raise ValueError("'host' and 'port' are mandatory parameters to local DAS") @@ -932,12 +952,13 @@ def fetch( self.backend.bulk_insert(documents) return documents + # FIXME: B006 Do not use mutable data structures for argument defaults def create_context( self, name: str, - queries: List[Query] = [], + queries: List[Query] = [], # noqa: B006 ) -> Context: - if self.query_engine_type == 'local': + if self.query_engine_type == "local": return self._create_context(name, queries) else: return self.query_engine.create_context(name, queries) diff --git a/src/hyperon_das/decorators.py b/src/hyperon_das/decorators.py index ec74a8e..9a2e7d4 100644 --- a/src/hyperon_das/decorators.py +++ b/src/hyperon_das/decorators.py @@ -22,24 +22,24 @@ def wrapper(*args, **kwargs): end_time = time.time() if status == HTTPStatus.OK: logger().debug( - f'{retry_count + 1} successful connection attempt at [host={args[1]}]' + f"{retry_count + 1} successful connection attempt at [host={args[1]}]" ) return status, response except Exception as e: raise RetryConnectionError( message="An error occurs while connecting to the server", details=str(e), - ) + ) from e else: - logger().debug(f'{retry_count + 1} unsuccessful connection attempt') + logger().debug(f"{retry_count + 1} unsuccessful connection attempt") time.sleep(waiting_time_seconds) retry_count += 1 timer_count += end_time - start_time - port = f':{args[1]}' if len(args) > 1 else '' + port = f":{args[1]}" if len(args) > 1 else "" message = ( - f'Failed to connect to remote Das {args[0]}' + f"Failed to connect to remote Das {args[0]}" + port - + f' - 
attempts:{retry_count} - time_attempted: {timer_count}' + + f" - attempts:{retry_count} - time_attempted: {timer_count}" ) logger().info(message) raise RetryConnectionError(message) diff --git a/src/hyperon_das/grpc/attention_broker_pb2.py b/src/hyperon_das/grpc/attention_broker_pb2.py index 3c93d32..0c98e26 100644 --- a/src/hyperon_das/grpc/attention_broker_pb2.py +++ b/src/hyperon_das/grpc/attention_broker_pb2.py @@ -3,6 +3,7 @@ # source: attention_broker.proto # Protobuf Python Version: 4.25.1 """Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database @@ -16,14 +17,14 @@ import hyperon_das.grpc.common_pb2 as common__pb2 DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x16\x61ttention_broker.proto\x12\x03\x64\x61s\x1a\x0c\x63ommon.proto2\x86\x01\n\x0f\x41ttentionBroker\x12\x1e\n\x04ping\x12\n.das.Empty\x1a\x08.das.Ack\"\x00\x12)\n\tstimulate\x12\x10.das.HandleCount\x1a\x08.das.Ack\"\x00\x12(\n\tcorrelate\x12\x0f.das.HandleList\x1a\x08.das.Ack\"\x00\x62\x06proto3' + b'\n\x16\x61ttention_broker.proto\x12\x03\x64\x61s\x1a\x0c\x63ommon.proto2\x86\x01\n\x0f\x41ttentionBroker\x12\x1e\n\x04ping\x12\n.das.Empty\x1a\x08.das.Ack"\x00\x12)\n\tstimulate\x12\x10.das.HandleCount\x1a\x08.das.Ack"\x00\x12(\n\tcorrelate\x12\x0f.das.HandleList\x1a\x08.das.Ack"\x00\x62\x06proto3' ) _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'attention_broker_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "attention_broker_pb2", _globals) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - _globals['_ATTENTIONBROKER']._serialized_start = 46 - _globals['_ATTENTIONBROKER']._serialized_end = 180 + _globals["_ATTENTIONBROKER"]._serialized_start = 46 + 
_globals["_ATTENTIONBROKER"]._serialized_end = 180 # @@protoc_insertion_point(module_scope) diff --git a/src/hyperon_das/grpc/attention_broker_pb2_grpc.py b/src/hyperon_das/grpc/attention_broker_pb2_grpc.py index c425cb9..27a7a49 100644 --- a/src/hyperon_das/grpc/attention_broker_pb2_grpc.py +++ b/src/hyperon_das/grpc/attention_broker_pb2_grpc.py @@ -1,5 +1,6 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! """Client and server classes corresponding to protobuf-defined services.""" + import grpc import hyperon_das.grpc.common_pb2 as common__pb2 @@ -15,17 +16,17 @@ def __init__(self, channel): channel: A grpc.Channel. """ self.ping = channel.unary_unary( - '/das.AttentionBroker/ping', + "/das.AttentionBroker/ping", request_serializer=common__pb2.Empty.SerializeToString, response_deserializer=common__pb2.Ack.FromString, ) self.stimulate = channel.unary_unary( - '/das.AttentionBroker/stimulate', + "/das.AttentionBroker/stimulate", request_serializer=common__pb2.HandleCount.SerializeToString, response_deserializer=common__pb2.Ack.FromString, ) self.correlate = channel.unary_unary( - '/das.AttentionBroker/correlate', + "/das.AttentionBroker/correlate", request_serializer=common__pb2.HandleList.SerializeToString, response_deserializer=common__pb2.Ack.FromString, ) @@ -37,42 +38,42 @@ class AttentionBrokerServicer(object): def ping(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def stimulate(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method 
not implemented!") + raise NotImplementedError("Method not implemented!") def correlate(self, request, context): """Missing associated documentation comment in .proto file.""" context.set_code(grpc.StatusCode.UNIMPLEMENTED) - context.set_details('Method not implemented!') - raise NotImplementedError('Method not implemented!') + context.set_details("Method not implemented!") + raise NotImplementedError("Method not implemented!") def add_AttentionBrokerServicer_to_server(servicer, server): rpc_method_handlers = { - 'ping': grpc.unary_unary_rpc_method_handler( + "ping": grpc.unary_unary_rpc_method_handler( servicer.ping, request_deserializer=common__pb2.Empty.FromString, response_serializer=common__pb2.Ack.SerializeToString, ), - 'stimulate': grpc.unary_unary_rpc_method_handler( + "stimulate": grpc.unary_unary_rpc_method_handler( servicer.stimulate, request_deserializer=common__pb2.HandleCount.FromString, response_serializer=common__pb2.Ack.SerializeToString, ), - 'correlate': grpc.unary_unary_rpc_method_handler( + "correlate": grpc.unary_unary_rpc_method_handler( servicer.correlate, request_deserializer=common__pb2.HandleList.FromString, response_serializer=common__pb2.Ack.SerializeToString, ), } generic_handler = grpc.method_handlers_generic_handler( - 'das.AttentionBroker', rpc_method_handlers + "das.AttentionBroker", rpc_method_handlers ) server.add_generic_rpc_handlers((generic_handler,)) @@ -97,7 +98,7 @@ def ping( return grpc.experimental.unary_unary( request, target, - '/das.AttentionBroker/ping', + "/das.AttentionBroker/ping", common__pb2.Empty.SerializeToString, common__pb2.Ack.FromString, options, @@ -126,7 +127,7 @@ def stimulate( return grpc.experimental.unary_unary( request, target, - '/das.AttentionBroker/stimulate', + "/das.AttentionBroker/stimulate", common__pb2.HandleCount.SerializeToString, common__pb2.Ack.FromString, options, @@ -155,7 +156,7 @@ def correlate( return grpc.experimental.unary_unary( request, target, - 
'/das.AttentionBroker/correlate', + "/das.AttentionBroker/correlate", common__pb2.HandleList.SerializeToString, common__pb2.Ack.FromString, options, diff --git a/src/hyperon_das/grpc/common_pb2.py b/src/hyperon_das/grpc/common_pb2.py index 53dfdf8..ef01372 100644 --- a/src/hyperon_das/grpc/common_pb2.py +++ b/src/hyperon_das/grpc/common_pb2.py @@ -3,6 +3,7 @@ # source: common.proto # Protobuf Python Version: 4.25.1 """Generated protocol buffer code.""" + from google.protobuf import descriptor as _descriptor from google.protobuf import descriptor_pool as _descriptor_pool from google.protobuf import symbol_database as _symbol_database @@ -14,24 +15,24 @@ DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile( - b'\n\x0c\x63ommon.proto\x12\x03\x64\x61s\"\x07\n\x05\x45mpty\"!\n\x03\x41\x63k\x12\r\n\x05\x65rror\x18\x01 \x01(\x08\x12\x0b\n\x03msg\x18\x02 \x01(\t\"!\n\nHandleList\x12\x13\n\x0bhandle_list\x18\x01 \x03(\t\"z\n\x0bHandleCount\x12\x37\n\x0chandle_count\x18\x01 \x03(\x0b\x32!.das.HandleCount.HandleCountEntry\x1a\x32\n\x10HandleCountEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x62\x06proto3' + b'\n\x0c\x63ommon.proto\x12\x03\x64\x61s"\x07\n\x05\x45mpty"!\n\x03\x41\x63k\x12\r\n\x05\x65rror\x18\x01 \x01(\x08\x12\x0b\n\x03msg\x18\x02 \x01(\t"!\n\nHandleList\x12\x13\n\x0bhandle_list\x18\x01 \x03(\t"z\n\x0bHandleCount\x12\x37\n\x0chandle_count\x18\x01 \x03(\x0b\x32!.das.HandleCount.HandleCountEntry\x1a\x32\n\x10HandleCountEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\r:\x02\x38\x01\x62\x06proto3' ) _globals = globals() _builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'common_pb2', _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, "common_pb2", _globals) if _descriptor._USE_C_DESCRIPTORS == False: DESCRIPTOR._options = None - _globals['_HANDLECOUNT_HANDLECOUNTENTRY']._options = None - 
_globals['_HANDLECOUNT_HANDLECOUNTENTRY']._serialized_options = b'8\001' - _globals['_EMPTY']._serialized_start = 21 - _globals['_EMPTY']._serialized_end = 28 - _globals['_ACK']._serialized_start = 30 - _globals['_ACK']._serialized_end = 63 - _globals['_HANDLELIST']._serialized_start = 65 - _globals['_HANDLELIST']._serialized_end = 98 - _globals['_HANDLECOUNT']._serialized_start = 100 - _globals['_HANDLECOUNT']._serialized_end = 222 - _globals['_HANDLECOUNT_HANDLECOUNTENTRY']._serialized_start = 172 - _globals['_HANDLECOUNT_HANDLECOUNTENTRY']._serialized_end = 222 + _globals["_HANDLECOUNT_HANDLECOUNTENTRY"]._options = None + _globals["_HANDLECOUNT_HANDLECOUNTENTRY"]._serialized_options = b"8\001" + _globals["_EMPTY"]._serialized_start = 21 + _globals["_EMPTY"]._serialized_end = 28 + _globals["_ACK"]._serialized_start = 30 + _globals["_ACK"]._serialized_end = 63 + _globals["_HANDLELIST"]._serialized_start = 65 + _globals["_HANDLELIST"]._serialized_end = 98 + _globals["_HANDLECOUNT"]._serialized_start = 100 + _globals["_HANDLECOUNT"]._serialized_end = 222 + _globals["_HANDLECOUNT_HANDLECOUNTENTRY"]._serialized_start = 172 + _globals["_HANDLECOUNT_HANDLECOUNTENTRY"]._serialized_end = 222 # @@protoc_insertion_point(module_scope) diff --git a/src/hyperon_das/grpc/common_pb2_grpc.py b/src/hyperon_das/grpc/common_pb2_grpc.py index 8a93939..bf94705 100644 --- a/src/hyperon_das/grpc/common_pb2_grpc.py +++ b/src/hyperon_das/grpc/common_pb2_grpc.py @@ -1,3 +1,4 @@ # Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! 
"""Client and server classes corresponding to protobuf-defined services.""" + import grpc diff --git a/src/hyperon_das/link_filters.py b/src/hyperon_das/link_filters.py index 0611fa5..9efe8a6 100644 --- a/src/hyperon_das/link_filters.py +++ b/src/hyperon_das/link_filters.py @@ -39,7 +39,10 @@ class FlatTypeTemplate(LinkFilter): """ def __init__( - self, target_types: list[str], link_type: str = WILDCARD, toplevel_only: bool = False + self, + target_types: list[str], + link_type: str = WILDCARD, + toplevel_only: bool = False, ): self.filter_type = LinkFilterType.FLAT_TYPE_TEMPLATE self.link_type = link_type diff --git a/src/hyperon_das/logger.py b/src/hyperon_das/logger.py index 5772f05..098310d 100644 --- a/src/hyperon_das/logger.py +++ b/src/hyperon_das/logger.py @@ -1,6 +1,6 @@ import logging -LOG_FILE_NAME = '/tmp/das.log' +LOG_FILE_NAME = "/tmp/das.log" LOGGING_LEVEL = logging.INFO @@ -20,8 +20,8 @@ def __init__(self): logging.basicConfig( filename=LOG_FILE_NAME, level=LOGGING_LEVEL, - format='%(asctime)s %(levelname)-8s %(message)s', - datefmt='%Y-%m-%d %H:%M:%S', + format="%(asctime)s %(levelname)-8s %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", ) Logger.__instance = self diff --git a/src/hyperon_das/query_engines/local_query_engine.py b/src/hyperon_das/query_engines/local_query_engine.py index e5e704f..e4582c9 100644 --- a/src/hyperon_das/query_engines/local_query_engine.py +++ b/src/hyperon_das/query_engines/local_query_engine.py @@ -75,7 +75,7 @@ def _recursive_query( das_error( UnexpectedQueryFormat( message="Query processing reached an unexpected state", - details=f'link: {str(query)} link target: {str(query)}', + details=f"link: {str(query)} link target: {str(query)}", ) ) return LazyQueryEvaluator( @@ -85,7 +85,7 @@ def _recursive_query( das_error( UnexpectedQueryFormat( message="Query processing reached an unexpected state", - details=f'query: {str(query)}', + details=f"query: {str(query)}", ) ) @@ -122,7 +122,7 @@ def _process_node(self, query: dict) 
-> List[NodeT]: return [] def _process_link(self, query: dict) -> List[LinkT]: - target_handles = self._generate_target_handles(query['targets']) + target_handles = self._generate_target_handles(query["targets"]) matched_links = self.local_backend.get_matched_links( link_type=query["type"], target_handles=target_handles ) @@ -205,7 +205,9 @@ def get_link_handles(self, link_filter: LinkFilter) -> HandleSetT: ) elif link_filter.filter_type == LinkFilterType.TARGETS: return self.local_backend.get_matched_links( - link_filter.link_type, link_filter.targets, toplevel_only=link_filter.toplevel_only + link_filter.link_type, + link_filter.targets, + toplevel_only=link_filter.toplevel_only, ) elif link_filter.filter_type == LinkFilterType.NAMED_TYPE: return self.local_backend.get_all_links( @@ -235,8 +237,8 @@ def query( if no_iterator: logger().debug( { - 'message': '[DistributedAtomSpace][query] - Start', - 'data': {'query': query, 'parameters': parameters}, + "message": "[DistributedAtomSpace][query] - Start", + "data": {"query": query, "parameters": parameters}, } ) query_results = self._recursive_query(query, parameters) @@ -249,25 +251,25 @@ def query( def custom_query( self, index_id: str, query: list[OrderedDict[str, str]], **kwargs ) -> Iterator | tuple[int, list[AtomT]]: - if kwargs.pop('no_iterator', True): + if kwargs.pop("no_iterator", True): return self.local_backend.get_atoms_by_index(index_id, query=query, **kwargs) else: - if kwargs.get('cursor') is None: - kwargs['cursor'] = 0 + if kwargs.get("cursor") is None: + kwargs["cursor"] = 0 cursor, answer = self.local_backend.get_atoms_by_index(index_id, query=query, **kwargs) - kwargs['backend'] = self.local_backend - kwargs['index_id'] = index_id - kwargs['cursor'] = cursor + kwargs["backend"] = self.local_backend + kwargs["index_id"] = index_id + kwargs["cursor"] = cursor return CustomQuery(ListIterator(answer), **kwargs) def count_atoms(self, parameters: Optional[Dict[str, Any]] = None) -> Dict[str, int]: - 
if parameters and parameters.get('context') == 'remote': + if parameters and parameters.get("context") == "remote": return {} return self.local_backend.count_atoms(parameters) def commit(self, **kwargs) -> None: - if kwargs.get('buffer'): - self.local_backend.commit(buffer=kwargs['buffer']) + if kwargs.get("buffer"): + self.local_backend.commit(buffer=kwargs["buffer"]) self.local_backend.commit() def reindex(self, pattern_index_templates: Optional[Dict[str, Dict[str, Any]]] = None): @@ -292,7 +294,7 @@ def fetch( port: Optional[int] = None, **kwargs, ) -> Any: - if not self.system_parameters.get('running_on_server'): # Local + if not self.system_parameters.get("running_on_server"): # Local if host is not None and port is not None: server = FunctionsClient(host, port) else: @@ -305,14 +307,14 @@ def fetch( except Exception as e: das_error(e) else: - if 'atom_type' not in query: - das_error(ValueError('Invalid query: missing atom_type')) + if "atom_type" not in query: + das_error(ValueError("Invalid query: missing atom_type")) - atom_type = query['atom_type'] + atom_type = query["atom_type"] - if atom_type == 'node': + if atom_type == "node": return self._process_node(query) - elif atom_type == 'link': + elif atom_type == "link": return self._process_link(query) else: das_error( @@ -330,7 +332,10 @@ def get_atoms_by_field(self, query: list[OrderedDict[str, str]]) -> HandleListT: return self.local_backend.get_atoms_by_field(query) def get_atoms_by_text_field( - self, text_value: str, field: Optional[str] = None, text_index_id: Optional[str] = None + self, + text_value: str, + field: Optional[str] = None, + text_index_id: Optional[str] = None, ) -> HandleListT: return self.local_backend.get_atoms_by_text_field(text_value, field, text_index_id) diff --git a/src/hyperon_das/query_engines/query_engine_protocol.py b/src/hyperon_das/query_engines/query_engine_protocol.py index 02a4e7f..ff3f4c6 100644 --- a/src/hyperon_das/query_engines/query_engine_protocol.py +++ 
b/src/hyperon_das/query_engines/query_engine_protocol.py @@ -314,7 +314,10 @@ def get_atoms_by_field(self, query: Query) -> HandleListT: @abstractmethod def get_atoms_by_text_field( - self, text_value: str, field: Optional[str] = None, text_index_id: Optional[str] = None + self, + text_value: str, + field: Optional[str] = None, + text_index_id: Optional[str] = None, ) -> HandleListT: """ Retrieves a list of atom handles based on a text field value, with optional field and index ID. diff --git a/src/hyperon_das/query_engines/remote_query_engine.py b/src/hyperon_das/query_engines/remote_query_engine.py index 1cb447a..84c790f 100644 --- a/src/hyperon_das/query_engines/remote_query_engine.py +++ b/src/hyperon_das/query_engines/remote_query_engine.py @@ -24,10 +24,10 @@ class QueryScopes(Enum): - REMOTE_ONLY = 'remote_only' - SYNCHRONOUS_UPDATE = 'synchronous_update' - LOCAL_ONLY = 'local_only' - LOCAL_AND_REMOTE = 'local_and_remote' + REMOTE_ONLY = "remote_only" + SYNCHRONOUS_UPDATE = "synchronous_update" + LOCAL_ONLY = "local_only" + LOCAL_AND_REMOTE = "local_and_remote" class RemoteQueryEngine(QueryEngine): @@ -41,9 +41,9 @@ def __init__( self.system_parameters = system_parameters self.local_query_engine = LocalQueryEngine(backend, cache_controller, kwargs) self.cache_controller = cache_controller - self.__mode = kwargs.get('mode', 'read-only') - self.host = kwargs.get('host') - self.port = kwargs.get('port') + self.__mode = kwargs.get("mode", "read-only") + self.host = kwargs.get("host") + self.port = kwargs.get("port") if not self.host or not self.port: das_error(InvalidDASParameters(message="'host' and 'port' are mandatory parameters")) self.remote_das = FunctionsClient(self.host, self.port) @@ -76,7 +76,7 @@ def get_links(self, link_filter: LinkFilter) -> List[LinkT]: def get_link_handles(self, link_filter: LinkFilter) -> HandleSetT: # TODO Implement get_link_handles() in faas client - return {link['handle'] for link in self.get_links(link_filter)} + return 
{link["handle"] for link in self.get_links(link_filter)} def get_incoming_links(self, atom_handle: HandleT, **kwargs) -> IncomingLinksT: links = self.local_query_engine.get_incoming_links(atom_handle, **kwargs) @@ -85,13 +85,13 @@ def get_incoming_links(self, atom_handle: HandleT, **kwargs) -> IncomingLinksT: return links def custom_query(self, index_id: str, query: Query, **kwargs) -> Iterator: - kwargs.pop('no_iterator', None) - if kwargs.get('cursor') is None: - kwargs['cursor'] = 0 + kwargs.pop("no_iterator", None) + if kwargs.get("cursor") is None: + kwargs["cursor"] = 0 answer = self.remote_das.custom_query(index_id, query=query, **kwargs) - kwargs['backend'] = self.remote_das - kwargs['index_id'] = index_id - kwargs['is_remote'] = True + kwargs["backend"] = self.remote_das + kwargs["index_id"] = index_id + kwargs["is_remote"] = True return CustomQuery(ListIterator(answer), **kwargs) def query( @@ -100,7 +100,7 @@ def query( parameters: Dict[str, Any] | None = None, ) -> Iterator[QueryAnswer] | list[dict[str, Any]]: parameters = parameters or {} - query_scope = parameters.get('query_scope', 'remote_only') + query_scope = parameters.get("query_scope", "remote_only") try: query_scope = QueryScopes(query_scope) except ValueError: @@ -123,31 +123,31 @@ def query( if query_scope in {QueryScopes.REMOTE_ONLY, QueryScopes.SYNCHRONOUS_UPDATE}: if query_scope == QueryScopes.SYNCHRONOUS_UPDATE: self.commit() - parameters['no_iterator'] = True + parameters["no_iterator"] = True return self.remote_das.query(query, parameters) return self.local_query_engine.query(query, parameters) def count_atoms(self, parameters: Optional[Dict[str, Any]] = None) -> Dict[str, int]: - if (context := parameters.get('context') if parameters else None) == 'local': + if (context := parameters.get("context") if parameters else None) == "local": return self.local_query_engine.count_atoms(parameters) - if context == 'remote': + if context == "remote": return 
self.remote_das.count_atoms(parameters) local_answer = self.local_query_engine.count_atoms(parameters) remote_answer = self.remote_das.count_atoms(parameters) return { k: (local_answer.get(k, 0) + remote_answer.get(k, 0)) - for k in ['node_count', 'link_count', 'atom_count'] + for k in ["node_count", "link_count", "atom_count"] } # def commit(self, **kwargs) -> None: - if self.__mode == 'read-write': + if self.__mode == "read-write": if self.local_query_engine.has_buffer(): self.remote_das.commit_changes(buffer=self.local_query_engine.buffer) self.remote_das.commit_changes() - elif self.__mode == 'read-only': + elif self.__mode == "read-only": das_error(PermissionError("Commit can't be executed in read mode")) else: das_error(ValueError("Invalid mode: '{self.__mode}'. Use 'read-only' or 'read-write'")) @@ -191,7 +191,10 @@ def get_atoms_by_field(self, query: Query) -> HandleListT: return self.remote_das.get_atoms_by_field(query) def get_atoms_by_text_field( - self, text_value: str, field: Optional[str] = None, text_index_id: Optional[str] = None + self, + text_value: str, + field: Optional[str] = None, + text_index_id: Optional[str] = None, ) -> HandleListT: return self.remote_das.get_atoms_by_text_field(text_value, field, text_index_id) diff --git a/src/hyperon_das/tokenizers/dict_query_tokenizer.py b/src/hyperon_das/tokenizers/dict_query_tokenizer.py index 8c3dee3..f0a5430 100644 --- a/src/hyperon_das/tokenizers/dict_query_tokenizer.py +++ b/src/hyperon_das/tokenizers/dict_query_tokenizer.py @@ -156,11 +156,16 @@ def _tokenize( raise ValueError(f"Unsupported query: {_query}") match query: - case {"or": list()} | {"and": list()} | {"not": dict()} | { - "atom_type": "link", - "type": str(), - "targets": list(), - }: + case ( + {"or": list()} + | {"and": list()} + | {"not": dict()} + | { + "atom_type": "link", + "type": str(), + "targets": list(), + } + ): return TOKENS_DELIMITER.join(_tokenize(query).to_tokens()) case _: raise ValueError( diff --git 
a/src/hyperon_das/tokenizers/elements.py b/src/hyperon_das/tokenizers/elements.py index 1c5a289..f68341e 100644 --- a/src/hyperon_das/tokenizers/elements.py +++ b/src/hyperon_das/tokenizers/elements.py @@ -196,7 +196,9 @@ class LinkTemplate(Link): Inherits from the Link class and is used to denote links that are templates. """ - def __init__(self, type: str, targets: list[Any] = []) -> None: + def __init__(self, type: str, targets: list[Any]) -> None: + if not targets: + targets = [] super().__init__(type, targets, is_template=True) diff --git a/src/hyperon_das/traverse_engines.py b/src/hyperon_das/traverse_engines.py index 3580c59..dbb75c6 100644 --- a/src/hyperon_das/traverse_engines.py +++ b/src/hyperon_das/traverse_engines.py @@ -11,7 +11,7 @@ class TraverseEngine: def __init__(self, handle: str, **kwargs) -> None: - self.das: DistributedAtomSpace = kwargs['das'] + self.das: DistributedAtomSpace = kwargs["das"] try: atom = self.das.get_atom(handle) @@ -55,15 +55,15 @@ def get_links(self, **kwargs) -> TraverseLinksIterator: >>> next(links) """ cursor, incoming_links = self.das.get_incoming_links( - atom_handle=self._cursor['handle'], + atom_handle=self._cursor["handle"], no_iterator=False, targets_document=True, cursor=0, - chunk_size=kwargs.get('chunk_size', 500), + chunk_size=kwargs.get("chunk_size", 500), ) assert cursor == 0 assert isinstance(incoming_links, (LocalIncomingLinks, RemoteIncomingLinks)) - return TraverseLinksIterator(source=incoming_links, cursor=self._cursor['handle'], **kwargs) + return TraverseLinksIterator(source=incoming_links, cursor=self._cursor["handle"], **kwargs) def get_neighbors(self, **kwargs) -> TraverseNeighborsIterator: """Get all of "neighbors" that pointing to current cursor. 
@@ -97,7 +97,7 @@ def get_neighbors(self, **kwargs) -> TraverseNeighborsIterator: ) >>> next(neighbors) """ - custom_filter = kwargs.pop('filters', None) + custom_filter = kwargs.pop("filters", None) filter_link = filter_target = None if custom_filter is not None and not isinstance(custom_filter, tuple): @@ -107,7 +107,7 @@ def get_neighbors(self, **kwargs) -> TraverseNeighborsIterator: filter_link, filter_target = custom_filter if filter_link is not None: - kwargs['filter'] = filter_link + kwargs["filter"] = filter_link filtered_links = self.get_links(targets_only=True, **kwargs) return TraverseNeighborsIterator(source=filtered_links, filter=filter_target) @@ -152,7 +152,7 @@ def goto(self, handle: str) -> Dict[str, Any]: Dict[str, Any]: The current cursor. A Python dict with all atom data. Examples: - >>> traverse_engine.goto('asd1234567890') + >>> traverse_engine.goto("asd1234567890") >>> { 'handle': 'asd1234567890', 'type': 'AI, diff --git a/src/hyperon_das/utils.py b/src/hyperon_das/utils.py index 1b87a46..3fcf8bf 100644 --- a/src/hyperon_das/utils.py +++ b/src/hyperon_das/utils.py @@ -230,31 +230,29 @@ def check_versions(response_body: bytes) -> None: if not remote_das_version or not remote_atomdb_version: raise ValueError("Invalid response from server, missing version info.") - + # local packages versions - das_version = get_package_version('hyperon_das') - atom_db_version = get_package_version('hyperon_das_atomdb') + das_version = get_package_version("hyperon_das") + atom_db_version = get_package_version("hyperon_das_atomdb") if not das_version or not atom_db_version: raise ValueError("Missing version info in the local packages.") is_atomdb_compatible = compare_minor_versions( - remote_atomdb_version, - atom_db_version, - ) + remote_atomdb_version, + atom_db_version, + ) is_das_compatible = compare_minor_versions( - remote_das_version, - das_version, - ) + remote_das_version, + das_version, + ) if not is_atomdb_compatible or not is_das_compatible: 
local_versions = f"hyperon-das: {das_version}, hyperon-das-atomdb: {atom_db_version}" remote_versions = ( - f"hyperon-das: {remote_das_version}, hyperon-das-atomdb: {remote_atomdb_version}" - ) - error_message = ( - f"Version mismatch. Local: {local_versions}. " f"Remote: {remote_versions}." - ) + f"hyperon-das: {remote_das_version}, hyperon-das-atomdb: {remote_atomdb_version}" + ) + error_message = f"Version mismatch. Local: {local_versions}. Remote: {remote_versions}." logger().error(error_message) raise Exception(error_message) diff --git a/src/hyperon_das_atomdb/__init__.py b/src/hyperon_das_atomdb/__init__.py index 9fb9b3b..0b656f3 100644 --- a/src/hyperon_das_atomdb/__init__.py +++ b/src/hyperon_das_atomdb/__init__.py @@ -16,4 +16,4 @@ "AtomDoesNotExist", ] -__version__ = '0.8.10' +__version__ = "0.8.10" diff --git a/src/hyperon_das_atomdb/adapters/redis_mongo_db.py b/src/hyperon_das_atomdb/adapters/redis_mongo_db.py index da56c10..9bfddad 100644 --- a/src/hyperon_das_atomdb/adapters/redis_mongo_db.py +++ b/src/hyperon_das_atomdb/adapters/redis_mongo_db.py @@ -327,7 +327,9 @@ def _connection_mongo_db( return self.mongo_db except ValueError as e: logger().error(f"An error occurred while creating a MongoDB client - Details: {str(e)}") - raise ConnectionMongoDBException(message="error creating a MongoClient", details=str(e)) + raise ConnectionMongoDBException( + message="error creating a MongoClient", details=str(e) + ) from e @staticmethod def _connection_redis( @@ -357,7 +359,7 @@ def _connection_redis( message = ( f"Connecting to {redis_type} at " + ( - f"{redis_username}:{len(redis_password)*'*'}@" + f"{redis_username}:{len(redis_password) * '*'}@" if redis_username and redis_password else "" ) @@ -780,7 +782,7 @@ def get_matched_type_template(self, template: list[Any], **kwargs) -> HandleSetT return templates_matched except Exception as exception: logger().error(f"Failed to get matched type template - Details: {str(exception)}") - raise 
ValueError(str(exception)) + raise ValueError(str(exception)) from exception def get_matched_type(self, link_type: str, **kwargs) -> HandleSetT: named_type_hash = ExpressionHasher.named_type_hash(link_type) @@ -1400,7 +1402,7 @@ def _retrieve_documents_by_index( def reindex(self, pattern_index_templates: dict[str, list[DocumentT]] | None = None) -> None: if isinstance(pattern_index_templates, list): self._save_pattern_index(deepcopy(pattern_index_templates)) - self._setup_indexes({'pattern_index_templates': pattern_index_templates}) + self._setup_indexes({"pattern_index_templates": pattern_index_templates}) self.redis.flushall() self._update_atom_indexes(self.mongo_atoms_collection.find({})) @@ -1472,7 +1474,8 @@ def create_field_index( logger().error(f"Error: {str(e)}") finally: if not index_id: - return ( # pylint: disable=lost-exception + # B012: `return` inside `finally` blocks cause exceptions to be silenced + return ( # noqa: B012 f"Index creation failed, Details: {str(exc)}" if exc else "Index creation failed" diff --git a/src/hyperon_das_atomdb/utils/expression_hasher.py b/src/hyperon_das_atomdb/utils/expression_hasher.py index 918ed69..724c4cf 100644 --- a/src/hyperon_das_atomdb/utils/expression_hasher.py +++ b/src/hyperon_das_atomdb/utils/expression_hasher.py @@ -116,5 +116,5 @@ def composite_hash(hash_base: str | list[Any]) -> str: # TODO unreachable else: raise ValueError( - "Invalid base to compute composite hash: " f"{type(hash_base)}: {hash_base}" + f"Invalid base to compute composite hash: {type(hash_base)}: {hash_base}" ) diff --git a/src/link_creation_agent/compose.yaml b/src/link_creation_agent/compose.yaml index 7d296f7..ba9c2ae 100644 --- a/src/link_creation_agent/compose.yaml +++ b/src/link_creation_agent/compose.yaml @@ -73,7 +73,7 @@ services: ports: - 37007:37007 volumes: - - ../../../src:/opt + - ../../src:/opt depends_on: - mongodb - redis @@ -97,7 +97,7 @@ services: ports: - 35700:35700 volumes: - - ../../../src:/opt + - ../../src:/opt 
restart: on-failure depends_on: - mongodb @@ -121,7 +121,7 @@ services: command: ./bin/link_creation_server --type server --config_file /tmp/config volumes: - ./data:/tmp - - ../../../src:/opt + - ../../src:/opt ports: - 9090:9090 restart: on-failure diff --git a/src/link_creation_agent/das_link_creation_node.h b/src/link_creation_agent/das_link_creation_node.h index 25fec81..1af9b04 100644 --- a/src/link_creation_agent/das_link_creation_node.h +++ b/src/link_creation_agent/das_link_creation_node.h @@ -57,6 +57,7 @@ class LinkCreationNode : public StarNode { private: Queue> shared_queue; const string CREATE_LINK = "create_link"; // DAS Node command + const string CREATE_LINK_PROCESSOR = "create_link_processor"; bool shutting_down = false; bool is_server = true; }; diff --git a/src/link_creation_agent/doc/DaC/das_link_creation_diagram.py b/src/link_creation_agent/doc/DaC/das_link_creation_diagram.py index 6ca6fd3..d2bdddb 100644 --- a/src/link_creation_agent/doc/DaC/das_link_creation_diagram.py +++ b/src/link_creation_agent/doc/DaC/das_link_creation_diagram.py @@ -34,17 +34,17 @@ """ node_attributes = { - "label": "", - "direction": "RL", - "shape": "rect", - "width": "1", - "height": "1", - "fixedsize": "false", - "labeljust": "l", - "style": "filled", - "fillcolor": "dodgerblue3", - "fontcolor": "white", - } + "label": "", + "direction": "RL", + "shape": "rect", + "width": "1", + "height": "1", + "fixedsize": "false", + "labeljust": "l", + "style": "filled", + "fillcolor": "dodgerblue3", + "fontcolor": "white", +} # with Diagram("DAS Link Creation", filename="doc/assets/das_link_creation_diagram.png", show=False): @@ -65,29 +65,48 @@ # das_link_creation_node >> edges[0] >> client with Diagram("DAS Link Creation", filename="doc/assets/das_link_creation_hla", show=False): - client = Person( - name="DAS Server", description="Sends a Link creation request" - ) + client = Person(name="DAS Server", description="Sends a Link creation request") with SystemBoundary("DAS 
Link Creation Agent"): - das_node_server = Container(name="DAS Node Server", technology="DAS NODE", description="Receives a link creation request") - link_creation_agent = Container(name="DAS LCA", technology="C++ Core", description="Process a link creation request") - link_creation_agent_service = Container(name="DAS LCA", technology="C++ Service", description="Process multiple queries") - das_node_client = Container(name="DAS Node Client", technology="DAS NODE", description="Sends atoms creation requests") - das_node_client2 = Container(name="DAS Node Client", technology="DAS NODE", description="Sends query requests") - - - das_query_agent = Container(name="DAS Query Agent", technology="DAS NODE", description="Process Template Queries") + das_node_server = Container( + name="DAS Node Server", + technology="DAS NODE", + description="Receives a link creation request", + ) + link_creation_agent = Container( + name="DAS LCA", + technology="C++ Core", + description="Process a link creation request", + ) + link_creation_agent_service = Container( + name="DAS LCA", + technology="C++ Service", + description="Process multiple queries", + ) + das_node_client = Container( + name="DAS Node Client", + technology="DAS NODE", + description="Sends atoms creation requests", + ) + das_node_client2 = Container( + name="DAS Node Client", + technology="DAS NODE", + description="Sends query requests", + ) + + das_query_agent = Container( + name="DAS Query Agent", + technology="DAS NODE", + description="Process Template Queries", + ) das_node_client >> Relationship("Create links and nodes") >> client client >> Relationship("Request Link Creation") >> das_node_server das_node_server >> Relationship("Start Link Creation") >> link_creation_agent link_creation_agent >> das_node_client2 << link_creation_agent - das_node_client2 >> Relationship("Request Query template results") >> das_query_agent - - das_query_agent >> Relationship("Returns a Query Iterator") >> das_node_client2 - 
link_creation_agent >> Relationship("Spawn a LCA service") >> link_creation_agent_service - link_creation_agent_service >> Relationship("Send create atom requests") >> das_node_client - + (das_node_client2 >> Relationship("Request Query template results") >> das_query_agent) + das_query_agent >> Relationship("Returns a Query Iterator") >> das_node_client2 + (link_creation_agent >> Relationship("Spawn a LCA service") >> link_creation_agent_service) + (link_creation_agent_service >> Relationship("Send create atom requests") >> das_node_client) diff --git a/src/link_creation_agent/link.cc b/src/link_creation_agent/link.cc index e3c62ca..839a5f0 100644 --- a/src/link_creation_agent/link.cc +++ b/src/link_creation_agent/link.cc @@ -1,73 +1,83 @@ #include "link.h" -#include +#include using namespace link_creation_agent; using namespace std; using namespace query_engine; -Link::Link(string type, vector targets) -{ - this->type = type; - this->targets = targets; -} -Link::Link(QueryAnswer *query_answer, vector link_template) -{ - string query_tokens = query_answer->tokenize(); - string token = ""; - for(char token_char : query_tokens){ - if(token_char == ' '){ - this->targets.push_back(token); - token = ""; - }else{ - token += token_char; +Link::Link(QueryAnswer* query_answer, vector link_template) { + LinkCreateTemplate link_create_template(link_template); + HandlesAnswer* handles_answer = dynamic_cast(query_answer); + + this->type = link_create_template.get_link_type(); + vector targets = link_create_template.get_targets(); + for (LinkCreateTemplateTypes target : targets) { + if (holds_alternative(target)) { + string token = get(target).name; + this->targets.push_back(handles_answer->assignment.get(token.c_str())); + } + if (holds_alternative>(target)) { + shared_ptr sub_link = get>(target); + shared_ptr sub_link_obj = make_shared(query_answer, sub_link); + this->targets.push_back(sub_link_obj); } } + this->custom_fields = link_create_template.get_custom_fields(); } - 
-Link::Link() -{ -} - -Link::~Link() -{ +Link::Link(QueryAnswer* query_answer, shared_ptr link_create_template) { + HandlesAnswer* handles_answer = dynamic_cast(query_answer); + this->type = link_create_template->get_link_type(); + vector targets = link_create_template->get_targets(); + for (LinkCreateTemplateTypes target : targets) { + if (holds_alternative(target)) { + string token = get(target).name; + this->targets.push_back(handles_answer->assignment.get(token.c_str())); + } + if (holds_alternative>(target)) { + shared_ptr sub_link = get>(target); + shared_ptr sub_link_obj = make_shared(query_answer, sub_link); + this->targets.push_back(sub_link_obj); + } + } + this->custom_fields = link_create_template->get_custom_fields(); } -string Link::get_type() -{ - return this->type; -} +Link::Link() {} -vector Link::get_targets() -{ - return this->targets; -} +Link::~Link() {} -void Link::set_type(string type) -{ - this->type = type; -} +string Link::get_type() { return this->type; } -void Link::set_targets(vector targets) -{ - this->targets = targets; -} +vector Link::get_targets() { return this->targets; } -void Link::add_target(string target) -{ - this->targets.push_back(target); -} +void Link::set_type(string type) { this->type = type; } -vector Link::tokenize() -{ - return targets; -} +void Link::add_target(LinkTargetTypes target) { this->targets.push_back(target); } -Link Link::untokenize(string link) -{ - return Link(); +vector Link::tokenize() { + vector tokens; + tokens.push_back("LINK"); + tokens.push_back(this->type); + for (LinkTargetTypes target : this->targets) { + if (holds_alternative(target)) { + tokens.push_back("HANDLE"); + tokens.push_back(get(target)); + } + if (holds_alternative>(target)) { + for (string token : get>(target)->tokenize()) { + tokens.push_back(token); + } + } + } + for (CustomField custom_field : this->custom_fields) { + for (string token : custom_field.tokenize()) { + tokens.push_back(token); + } + } + return tokens; } diff --git 
a/src/link_creation_agent/link.h b/src/link_creation_agent/link.h index 9aab472..46902c2 100644 --- a/src/link_creation_agent/link.h +++ b/src/link_creation_agent/link.h @@ -5,7 +5,10 @@ #pragma once #include #include +#include #include "QueryAnswer.h" +#include "HandlesAnswer.h" +#include "link_create_template.h" using namespace std; using namespace query_engine; @@ -13,11 +16,15 @@ using namespace query_engine; namespace link_creation_agent { + class Link; // forward declaration + + using LinkTargetTypes = std::variant>; + class Link { public: - Link(string type, vector targets); Link(QueryAnswer *query_answer, vector link_template); + Link(QueryAnswer *query_answer, shared_ptr link_create_template); Link(); ~Link(); /** @@ -29,29 +36,27 @@ namespace link_creation_agent * @brief Get the targets of the link * @returns Returns the targets of the link */ - vector get_targets(); + vector get_targets(); /** * @brief Set the type of the link */ void set_type(string type); - /** - * @brief Set the targets of the link - */ - void set_targets(vector targets); /** * @brief Add a target to the link * @param target Target to be added */ - void add_target(string target); + void add_target(LinkTargetTypes target); /** * @brief Tokenize the link * @returns Returns the tokenized link */ vector tokenize(); - Link untokenize(string link); private: string type; - vector targets; + vector targets; + vector custom_fields; + + }; } \ No newline at end of file diff --git a/src/link_creation_agent/link_create_template.cc b/src/link_creation_agent/link_create_template.cc index ddb1395..95d01bd 100644 --- a/src/link_creation_agent/link_create_template.cc +++ b/src/link_creation_agent/link_create_template.cc @@ -2,6 +2,7 @@ #include #include +#include #include using namespace link_creation_agent; @@ -25,61 +26,19 @@ static std::string get_token(std::vector& link_template, int cursor } return link_template[cursor]; } - -CustomField::CustomField(std::vector& custom_fields) { - if 
(get_token(custom_fields, 0) != "CUSTOM_FIELD") - throw std::invalid_argument("Can not create Custom Field: Invalid arguments"); - - int cursor = 0; - std::string custom_field_name = get_token(custom_fields, 1); - this->name = custom_field_name; - cursor += 3; - while (cursor < custom_fields.size()) { - if (get_token(custom_fields, cursor) == "CUSTOM_FIELD") { - std::vector custom_field_args; - int sub_custom_field_size = string_to_int(get_token(custom_fields, cursor + 2)); - std::string sub_custom_field_name = get_token(custom_fields, cursor + 1); - custom_field_args.push_back(get_token(custom_fields, cursor)); // CUSTOM_FIELD - custom_field_args.push_back(get_token(custom_fields, cursor + 1)); // field name - custom_field_args.push_back(get_token(custom_fields, cursor + 2)); // field size - cursor += 3; - while (cursor < custom_fields.size()) { - if (sub_custom_field_size == 0) { - break; - } - - custom_field_args.push_back(get_token(custom_fields, cursor)); - if (get_token(custom_fields, cursor) == "CUSTOM_FIELD") { - sub_custom_field_size += string_to_int(get_token(custom_fields, cursor + 2)); - custom_field_args.push_back(get_token(custom_fields, cursor + 1)); // field name - custom_field_args.push_back(get_token(custom_fields, cursor + 2)); // field size - cursor += 3; - sub_custom_field_size--; - } else { - custom_field_args.push_back(get_token(custom_fields, cursor + 1)); - cursor += 2; - sub_custom_field_size--; - } - } - CustomField custom_field = CustomField(custom_field_args); - this->values.push_back( - std::make_tuple(sub_custom_field_name, std::make_shared(custom_field))); - } else { - this->values.push_back( - std::make_tuple(get_token(custom_fields, cursor), get_token(custom_fields, cursor + 1))); - cursor += 2; - } +// TODO move this to a utils file +static std::vector split(const std::string& s, char delimiter) { + std::vector tokens; + std::string token; + std::istringstream tokenStream(s); + while (std::getline(tokenStream, token, delimiter)) { 
+ tokens.push_back(token); } + return tokens; } -CustomField::~CustomField() {} - -std::string CustomField::get_name() { return this->name; } - -std::vector> CustomField::get_values() { return this->values; } - -std::vector parse_sub_custom_field(std::vector& link_template, - size_t& cursor) { +static std::vector parse_sub_custom_field(std::vector& link_template, + size_t& cursor) { if (get_token(link_template, cursor) != "CUSTOM_FIELD" || link_template.size() < cursor + 3) throw std::invalid_argument("Can not create Custom Field: Invalid arguments"); std::vector custom_field_args; @@ -108,8 +67,8 @@ std::vector parse_sub_custom_field(std::vector& link_t return custom_field_args; } -std::vector parse_sub_link_template(std::vector& link_template, - size_t& cursor) { +static std::vector parse_sub_link_template(std::vector& link_template, + size_t& cursor) { if (get_token(link_template, cursor) != "LINK_CREATE" || link_template.size() < cursor + 4) throw std::invalid_argument("Can not create Link Template: Invalid arguments"); int sub_link_template_size = string_to_int(get_token(link_template, cursor + 2)); @@ -226,6 +185,60 @@ std::string LinkCreateTemplate::to_string() { return link_template; } +std::vector LinkCreateTemplate::tokenize() { return split(this->to_string(), ' '); } + +CustomField::CustomField(std::vector& custom_fields) { + if (get_token(custom_fields, 0) != "CUSTOM_FIELD") + throw std::invalid_argument("Can not create Custom Field: Invalid arguments"); + + int cursor = 0; + std::string custom_field_name = get_token(custom_fields, 1); + this->name = custom_field_name; + cursor += 3; + while (cursor < custom_fields.size()) { + if (get_token(custom_fields, cursor) == "CUSTOM_FIELD") { + std::vector custom_field_args; + int sub_custom_field_size = string_to_int(get_token(custom_fields, cursor + 2)); + std::string sub_custom_field_name = get_token(custom_fields, cursor + 1); + custom_field_args.push_back(get_token(custom_fields, cursor)); // CUSTOM_FIELD + 
custom_field_args.push_back(get_token(custom_fields, cursor + 1)); // field name + custom_field_args.push_back(get_token(custom_fields, cursor + 2)); // field size + cursor += 3; + while (cursor < custom_fields.size()) { + if (sub_custom_field_size == 0) { + break; + } + + custom_field_args.push_back(get_token(custom_fields, cursor)); + if (get_token(custom_fields, cursor) == "CUSTOM_FIELD") { + sub_custom_field_size += string_to_int(get_token(custom_fields, cursor + 2)); + custom_field_args.push_back(get_token(custom_fields, cursor + 1)); // field name + custom_field_args.push_back(get_token(custom_fields, cursor + 2)); // field size + cursor += 3; + sub_custom_field_size--; + } else { + custom_field_args.push_back(get_token(custom_fields, cursor + 1)); + cursor += 2; + sub_custom_field_size--; + } + } + CustomField custom_field = CustomField(custom_field_args); + this->values.push_back( + std::make_tuple(sub_custom_field_name, std::make_shared(custom_field))); + } else { + this->values.push_back( + std::make_tuple(get_token(custom_fields, cursor), get_token(custom_fields, cursor + 1))); + cursor += 2; + } + } +} + +CustomField::~CustomField() {} + +std::string CustomField::get_name() { return this->name; } + +std::vector> CustomField::get_values() { return this->values; } + std::string CustomField::to_string() { std::string custom_field = "CUSTOM_FIELD " + this->name + " " + std::to_string(this->values.size()) + " "; @@ -241,4 +254,6 @@ std::string CustomField::to_string() { } } return custom_field; -} \ No newline at end of file +} + +std::vector CustomField::tokenize() { return split(this->to_string(), ' '); } \ No newline at end of file diff --git a/src/link_creation_agent/link_create_template.h b/src/link_creation_agent/link_create_template.h index e55d98e..30d7d5a 100644 --- a/src/link_creation_agent/link_create_template.h +++ b/src/link_creation_agent/link_create_template.h @@ -82,6 +82,11 @@ class CustomField { * @return A string representation of the 
custom field. */ std::string to_string(); + /** + * @brief Tokenizes the custom field. + * @return A vector of strings representing the tokenized custom field. + */ + std::vector tokenize(); private: std::string name; @@ -123,6 +128,12 @@ class LinkCreateTemplate { */ std::string to_string(); + /** + * @brief Tokenizes the link creation template. + * @return A vector of strings representing the tokenized link creation template. + */ + std::vector tokenize(); + private: std::string link_type; std::vector targets; diff --git a/src/link_creation_agent/agent.cc b/src/link_creation_agent/link_creation_agent.cc similarity index 99% rename from src/link_creation_agent/agent.cc rename to src/link_creation_agent/link_creation_agent.cc index 927944b..ec78017 100644 --- a/src/link_creation_agent/agent.cc +++ b/src/link_creation_agent/link_creation_agent.cc @@ -1,4 +1,4 @@ -#include "agent.h" +#include "link_creation_agent.h" #include #include diff --git a/src/link_creation_agent/agent.h b/src/link_creation_agent/link_creation_agent.h similarity index 99% rename from src/link_creation_agent/agent.h rename to src/link_creation_agent/link_creation_agent.h index bd1c36e..da23bf5 100644 --- a/src/link_creation_agent/agent.h +++ b/src/link_creation_agent/link_creation_agent.h @@ -1,5 +1,5 @@ /** - * @file agent.h + * @file link_creation_agent.h * @brief Agent class to handle link creation requests * * This file contains the definition of the LinkCreationAgent class and the diff --git a/src/main/link_creation_agent_main.cc b/src/main/link_creation_agent_main.cc index d0c9018..8504889 100644 --- a/src/main/link_creation_agent_main.cc +++ b/src/main/link_creation_agent_main.cc @@ -3,7 +3,7 @@ #include #include -#include "agent.h" +#include "link_creation_agent.h" using namespace link_creation_agent; using namespace std; diff --git a/src/scripts/bazel.sh b/src/scripts/bazel.sh index c467346..03e306a 100755 --- a/src/scripts/bazel.sh +++ b/src/scripts/bazel.sh @@ -9,12 +9,14 @@ 
BAZEL_CMD="/opt/bazel/bazelisk" # local paths LOCAL_WORKDIR=$(pwd) LOCAL_BIN_DIR=$LOCAL_WORKDIR/src/bin +LOCAL_ASPECT_CACHE="$HOME/.cache/das/aspect" LOCAL_BAZEL_CACHE="$HOME/.cache/das/bazel" LOCAL_BAZELISK_CACHE="$HOME/.cache/das/bazelisk" LOCAL_PIPTOOLS_CACHE="$HOME/.cache/das/pip-tools" LOCAL_PIP_CACHE="$HOME/.cache/das/pip" mkdir -p \ + "$LOCAL_ASPECT_CACHE" \ "$LOCAL_BIN_DIR" \ "$LOCAL_BAZEL_CACHE" \ "$LOCAL_BAZELISK_CACHE" \ @@ -25,6 +27,7 @@ mkdir -p \ CONTAINER_WORKDIR=/opt/das CONTAINER_WORKSPACE_DIR=/opt/das/src CONTAINER_BIN_DIR=$CONTAINER_WORKSPACE_DIR/bin +CONTAINER_ASPECT_CACHE=/home/"${USER}"/.cache/aspect CONTAINER_BAZEL_CACHE=/home/"${USER}"/.cache/bazel CONTAINER_PIP_CACHE=/home/"${USER}"/.cache/pip CONTAINER_PIPTOOLS_CACHE=/home/"${USER}"/.cache/pip-tools @@ -43,6 +46,7 @@ docker run --rm \ --volume /etc/passwd:/etc/passwd:ro \ --volume "$LOCAL_PIP_CACHE":"$CONTAINER_PIP_CACHE" \ --volume "$LOCAL_PIPTOOLS_CACHE":"$CONTAINER_PIPTOOLS_CACHE" \ + --volume "$LOCAL_ASPECT_CACHE":"$CONTAINER_ASPECT_CACHE" \ --volume "$LOCAL_BAZEL_CACHE":"$CONTAINER_BAZEL_CACHE" \ --volume "$LOCAL_BAZELISK_CACHE":"$CONTAINER_BAZELISK_CACHE" \ --volume "$LOCAL_WORKDIR":"$CONTAINER_WORKDIR" \ diff --git a/src/scripts/bazel_build.sh b/src/scripts/bazel_build.sh index 0032351..dd8ace2 100755 --- a/src/scripts/bazel_build.sh +++ b/src/scripts/bazel_build.sh @@ -45,14 +45,13 @@ fi if [ "$BUILD_WHEELS" = true ]; then $BAZELISK_RUN_CMD //deps:requirements.update $BAZELISK_RUN_CMD //deps:requirements_dev.update - $BAZELISK_BUILD_CMD //hyperon_das_atomdb:hyperon_das_atomdb_wheel --define=ATOMDB_VERSION=0.8.11 - $BAZELISK_BUILD_CMD //hyperon_das:hyperon_das_wheel --define=DAS_VERSION=0.9.17 + $BAZELISK_BUILD_CMD //hyperon_das_atomdb:hyperon_das_atomdb_wheel + $BAZELISK_BUILD_CMD //hyperon_das:hyperon_das_wheel mv bazel-bin/hyperon_das_atomdb/*.whl "$BIN_DIR" mv bazel-bin/hyperon_das/*.whl "$BIN_DIR" - $BAZELISK_BUILD_CMD //hyperon_das_node:hyperon_das_node_wheel 
--define=DAS_NODE_VERSION=0.0.1 + $BAZELISK_BUILD_CMD //hyperon_das_node:hyperon_das_node_wheel mv bazel-bin/hyperon_das_node/*.whl "$BIN_DIR" - $BAZELISK_BUILD_CMD //hyperon_das_atomdb_cpp:hyperon_das_atomdb_cpp_wheel \ - --define=ATOMDB_VERSION=0.8.11 + $BAZELISK_BUILD_CMD //hyperon_das_atomdb_cpp:hyperon_das_atomdb_cpp_wheel mv bazel-bin/hyperon_das_atomdb_cpp/*.whl "$BIN_DIR" fi diff --git a/src/scripts/build.sh b/src/scripts/build.sh index a22105a..86c71e6 100755 --- a/src/scripts/build.sh +++ b/src/scripts/build.sh @@ -8,11 +8,13 @@ CONTAINER_NAME=${IMAGE_NAME}-container # local paths LOCAL_WORKDIR=$(pwd) LOCAL_BIN_DIR=$LOCAL_WORKDIR/src/bin +LOCAL_ASPECT_CACHE="$HOME/.cache/das/aspect" LOCAL_BAZEL_CACHE="$HOME/.cache/das/bazel" LOCAL_BAZELISK_CACHE="$HOME/.cache/das/bazelisk" LOCAL_PIPTOOLS_CACHE="$HOME/.cache/das/pip-tools" LOCAL_PIP_CACHE="$HOME/.cache/das/pip" mkdir -p \ + $LOCAL_ASPECT_CACHE \ $LOCAL_BAZEL_CACHE \ $LOCAL_BAZELISK_CACHE \ $LOCAL_BIN_DIR \ @@ -23,6 +25,7 @@ mkdir -p \ CONTAINER_WORKDIR=/opt/das CONTAINER_WORKSPACE_DIR=/opt/das/src CONTAINER_BIN_DIR=$CONTAINER_WORKSPACE_DIR/bin +CONTAINER_ASPECT_CACHE=/home/${USER}/.cache/aspect CONTAINER_BAZEL_CACHE=/home/${USER}/.cache/bazel CONTAINER_PIP_CACHE=/home/${USER}/.cache/pip CONTAINER_PIPTOOLS_CACHE=/home/${USER}/.cache/pip-tools @@ -40,6 +43,7 @@ docker run --rm \ --volume /etc/passwd:/etc/passwd:ro \ --volume $LOCAL_PIP_CACHE:$CONTAINER_PIP_CACHE \ --volume $LOCAL_PIPTOOLS_CACHE:$CONTAINER_PIPTOOLS_CACHE \ + --volume $LOCAL_ASPECT_CACHE:$CONTAINER_ASPECT_CACHE \ --volume $LOCAL_BAZEL_CACHE:$CONTAINER_BAZEL_CACHE \ --volume $LOCAL_BAZELISK_CACHE:$CONTAINER_BAZELISK_CACHE \ --volume $LOCAL_WORKDIR:$CONTAINER_WORKDIR \ diff --git a/src/tests/cpp/link_creation_agent_test.cc b/src/tests/cpp/link_creation_agent_test.cc index cb48be5..a243905 100644 --- a/src/tests/cpp/link_creation_agent_test.cc +++ b/src/tests/cpp/link_creation_agent_test.cc @@ -4,7 +4,7 @@ #include #include -#include "agent.h" 
+#include "link_creation_agent.h" #include "link_create_template.h" using namespace std; @@ -266,3 +266,17 @@ TEST(LinkCreateTemplate, TestInvalidNode) { auto link_template = split(link_template_str, ' '); EXPECT_THROW(LinkCreateTemplate lct(link_template), invalid_argument); } + + +TEST(Link, TestLink) { + vector link_template = split("LINK_CREATE Similarity 2 0 VARIABLE V1 VARIABLE V2", ' '); + HandlesAnswer* query_answer = new HandlesAnswer(); + query_answer->assignment.assign("V1", "Value1"); + query_answer->assignment.assign("V2", "Value2"); + + Link link(query_answer, link_template); + EXPECT_EQ(link.get_type(), "Similarity"); + EXPECT_EQ(link.get_targets().size(), 2); + EXPECT_EQ(get(link.get_targets()[0]), "Value1"); + EXPECT_EQ(get(link.get_targets()[1]), "Value2"); +} diff --git a/src/tests/python/helpers.py b/src/tests/python/helpers.py index 3c16256..529c6f4 100644 --- a/src/tests/python/helpers.py +++ b/src/tests/python/helpers.py @@ -60,7 +60,7 @@ def dict_to_link_params(link_dict: dict) -> LinkT: try: return LinkT(**params) except TypeError as ex: - raise AssertionError(f"{type(ex)}: {ex} - {params=}") + raise AssertionError(f"{type(ex)}: {ex} - {params=}") from ex class AnimalBaseHandlesCollection: diff --git a/src/tests/python/integration/adapters/animals_kb.py b/src/tests/python/integration/adapters/animals_kb.py index 3bea970..6d64318 100644 --- a/src/tests/python/integration/adapters/animals_kb.py +++ b/src/tests/python/integration/adapters/animals_kb.py @@ -22,7 +22,11 @@ node_docs[human] = {"type": "Concept", "name": "human"} node_docs[monkey] = {"type": "Concept", "name": "monkey"} node_docs[chimp] = {"type": "Concept", "name": "chimp"} -node_docs[mammal] = {"type": "Concept", "name": "mammal", "custom_attributes": {"name": "mammal"}} +node_docs[mammal] = { + "type": "Concept", + "name": "mammal", + "custom_attributes": {"name": "mammal"}, +} node_docs[reptile] = {"type": "Concept", "name": "reptile"} node_docs[snake] = {"type": "Concept", 
"name": "snake"} node_docs[dinosaur] = {"type": "Concept", "name": "dinosaur"} diff --git a/src/tests/python/integration/adapters/test_redis_mongo.py b/src/tests/python/integration/adapters/test_redis_mongo.py index 662c2ef..481b036 100644 --- a/src/tests/python/integration/adapters/test_redis_mongo.py +++ b/src/tests/python/integration/adapters/test_redis_mongo.py @@ -19,7 +19,15 @@ rhino, similarity_docs, ) -from .helpers import Database, PyMongoFindExplain, _db_down, _db_up, cleanup, mongo_port, redis_port +from .helpers import ( + Database, + PyMongoFindExplain, + _db_down, + _db_up, + cleanup, + mongo_port, + redis_port, +) def metta_to_links(input_str): @@ -27,18 +35,18 @@ def parse_tokens(tokens): result = [] while tokens: token = tokens.pop(0) - if token == '(': + if token == "(": nested = parse_tokens(tokens) result.append(nested) - elif token == ')': + elif token == ")": break else: result.append({"type": "Symbol", "name": token}) return {"type": "Expression", "targets": result} - input_str = input_str.replace('(', ' ( ').replace(')', ' ) ') + input_str = input_str.replace("(", " ( ").replace(")", " ) ") tokens = input_str.split() - return parse_tokens(tokens)['targets'][0] + return parse_tokens(tokens)["targets"][0] class TestRedisMongo: @@ -1214,7 +1222,8 @@ def test_add_fields_to_atoms(self, _cleanup, _db: RedisMongoDB): assert db.get_atom(link_handle).custom_attributes["score"] == 0.5 @pytest.mark.parametrize( - "node", [({"type": "A", "name": "type_a", "custom_attributes": {"status": "ready"}})] + "node", + [({"type": "A", "name": "type_a", "custom_attributes": {"status": "ready"}})], ) def test_get_atoms_by_index_custom_att(self, node, _cleanup, _db: RedisMongoDB): node = _db.add_node(NodeT(**node)) @@ -1339,7 +1348,7 @@ def test_commit_with_buffer(self, _cleanup, _db: RedisMongoDB): "arity": 3, } ], - '(synonyms (gene ENSG00000278267) (microRNA_6859-1 hsa-mir-6859-1 HGNC:50039 microRNA_mir-6859-1 MIR6859-1))', + "(synonyms (gene ENSG00000278267) 
(microRNA_6859-1 hsa-mir-6859-1 HGNC:50039 microRNA_mir-6859-1 MIR6859-1))", [ ("synonyms", "*", "*"), ], @@ -1354,7 +1363,7 @@ def test_commit_with_buffer(self, _cleanup, _db: RedisMongoDB): "arity": 3, } ], - '(tf_name (motif ENSG00000156273) BACH1)', + "(tf_name (motif ENSG00000156273) BACH1)", [ ("tf_name", "*", "*"), ], diff --git a/src/tests/python/integration/conftest.py b/src/tests/python/integration/conftest.py index a3a619f..81f9f9e 100644 --- a/src/tests/python/integration/conftest.py +++ b/src/tests/python/integration/conftest.py @@ -5,7 +5,10 @@ def pytest_addoption(parser): parser.addoption( - "--build", action="store_true", default=False, help="Rebuilds container's test image." + "--build", + action="store_true", + default=False, + help="Rebuilds container's test image.", ) parser.addoption( "--no-destroy", diff --git a/src/tests/python/integration/helpers.py b/src/tests/python/integration/helpers.py index 5c7dc0c..10db685 100644 --- a/src/tests/python/integration/helpers.py +++ b/src/tests/python/integration/helpers.py @@ -78,18 +78,18 @@ def enforce_containers_removal(): @pytest.fixture(scope="module") def das_remote_fixture_module(environment_manager): - yield DAS(query_engine='remote', host=remote_das_host, port=remote_das_port) + yield DAS(query_engine="remote", host=remote_das_host, port=remote_das_port) @pytest.fixture(scope="class") def das_local_fixture_class(): _db_up() yield DAS( - query_engine='local', - atomdb='redis_mongo', + query_engine="local", + atomdb="redis_mongo", mongo_port=mongo_port, - mongo_username='dbadmin', - mongo_password='dassecret', + mongo_username="dbadmin", + mongo_password="dassecret", redis_port=redis_port, redis_cluster=False, redis_ssl=False, @@ -101,11 +101,11 @@ def das_local_fixture_class(): def das_local_fixture(): _db_up() yield DAS( - query_engine='local', - atomdb='redis_mongo', + query_engine="local", + atomdb="redis_mongo", mongo_port=mongo_port, - mongo_username='dbadmin', - mongo_password='dassecret', 
+ mongo_username="dbadmin", + mongo_password="dassecret", redis_port=redis_port, redis_cluster=False, redis_ssl=False, diff --git a/src/tests/python/integration/test_attention_broker_gateway.py b/src/tests/python/integration/test_attention_broker_gateway.py index 009ef87..ff3a417 100644 --- a/src/tests/python/integration/test_attention_broker_gateway.py +++ b/src/tests/python/integration/test_attention_broker_gateway.py @@ -8,26 +8,26 @@ import hyperon_das.grpc.common_pb2 as common from hyperon_das.cache.attention_broker_gateway import AttentionBrokerGateway -HOST = 'localhost' +HOST = "localhost" PORT = 27000 -SYSTEM_PARAMETERS = {'attention_broker_hostname': HOST, 'attention_broker_port': PORT} +SYSTEM_PARAMETERS = {"attention_broker_hostname": HOST, "attention_broker_port": PORT} RECEIVED = None class AttentionBrokerMock(ab_grpc.AttentionBrokerServicer): def ping(self, request, context): - return common.Ack(error=0, msg='OK') + return common.Ack(error=0, msg="OK") def correlate(self, request, context): global RECEIVED RECEIVED = request.handle_list - return common.Ack(error=0, msg='OK') + return common.Ack(error=0, msg="OK") def stimulate(self, request, context): global RECEIVED RECEIVED = request.handle_count - return common.Ack(error=0, msg='OK') + return common.Ack(error=0, msg="OK") def server_up(): @@ -48,9 +48,9 @@ def test_creation(self): with pytest.raises(ValueError): AttentionBrokerGateway({}) with pytest.raises(ValueError): - AttentionBrokerGateway({'attention_broker_hostname': 'localhost'}) + AttentionBrokerGateway({"attention_broker_hostname": "localhost"}) with pytest.raises(ValueError): - AttentionBrokerGateway({'attention_broker_port': 27000}) + AttentionBrokerGateway({"attention_broker_port": 27000}) grpc_server = server_up() AttentionBrokerGateway(SYSTEM_PARAMETERS) server_down(grpc_server) @@ -58,18 +58,18 @@ def test_creation(self): def test_ping(self): grpc_server = server_up() gateway = AttentionBrokerGateway(SYSTEM_PARAMETERS) - assert 
gateway.ping() == 'OK' + assert gateway.ping() == "OK" server_down(grpc_server) def _check_correlate(self, gateway, message): global RECEIVED - assert gateway.correlate(message) == 'OK' + assert gateway.correlate(message) == "OK" assert list(RECEIVED) == list(message) def _check_stimulate(self, gateway, stimuli): global RECEIVED message = {handle: count for handle, count in stimuli} - assert gateway.stimulate(message) == 'OK' + assert gateway.stimulate(message) == "OK" assert len(RECEIVED) == len(message) for key in RECEIVED: assert RECEIVED[key] == message[key] @@ -80,11 +80,11 @@ def test_correlate(self): with pytest.raises(ValueError): self._check_correlate(gateway, None) self._check_correlate(gateway, []) - self._check_correlate(gateway, ['h1']) - self._check_correlate(gateway, ['h1', 'h2']) - self._check_correlate(gateway, ['h1', 'h2', 'h2']) - self._check_correlate(gateway, ['h2', 'h1', 'h2']) - self._check_correlate(gateway, set(['h1', 'h2', 'h2'])) + self._check_correlate(gateway, ["h1"]) + self._check_correlate(gateway, ["h1", "h2"]) + self._check_correlate(gateway, ["h1", "h2", "h2"]) + self._check_correlate(gateway, ["h2", "h1", "h2"]) + self._check_correlate(gateway, set(["h1", "h2", "h2"])) server_down(grpc_server) def test_stimulate(self): @@ -93,7 +93,7 @@ def test_stimulate(self): with pytest.raises(ValueError): self._check_correlate(gateway, None) self._check_stimulate(gateway, []) - self._check_stimulate(gateway, [('h1', 1)]) - self._check_stimulate(gateway, [('h1', 1), ('h2', 1)]) - self._check_stimulate(gateway, [('h1', 1), ('h2', 2)]) + self._check_stimulate(gateway, [("h1", 1)]) + self._check_stimulate(gateway, [("h1", 1), ("h2", 1)]) + self._check_stimulate(gateway, [("h1", 1), ("h2", 2)]) server_down(grpc_server) diff --git a/src/tests/python/integration/test_client.py b/src/tests/python/integration/test_client.py index 3902b73..e9d13f6 100644 --- a/src/tests/python/integration/test_client.py +++ b/src/tests/python/integration/test_client.py 
@@ -4,7 +4,7 @@ import hyperon_das.link_filters as link_filter from hyperon_das.client import FunctionsClient -from tests.python.integration.helpers import faas_fixture, metta_animal_base_handles +from tests.python.integration.helpers import metta_animal_base_handles class TestVultrClientIntegration: @@ -15,16 +15,16 @@ def test_get_atom( result = faas_fixture.get_atom(handle=metta_animal_base_handles.human) assert result.handle == metta_animal_base_handles.human assert result.name == '"human"' - assert result.named_type == 'Symbol' + assert result.named_type == "Symbol" result = faas_fixture.get_atom(handle=metta_animal_base_handles.monkey) assert result.handle == metta_animal_base_handles.monkey assert result.name == '"monkey"' - assert result.named_type == 'Symbol' + assert result.named_type == "Symbol" result = faas_fixture.get_atom(handle=metta_animal_base_handles.similarity_human_monkey) assert result.handle == metta_animal_base_handles.similarity_human_monkey - assert result.named_type == 'Expression' + assert result.named_type == "Expression" assert result.targets == [ metta_animal_base_handles.Similarity, metta_animal_base_handles.human, @@ -33,16 +33,16 @@ def test_get_atom( def test_get_links(self, faas_fixture: FunctionsClient): # noqa: F811 links1 = faas_fixture.get_links( - link_filter.FlatTypeTemplate(['Symbol', 'Symbol', 'Symbol'], 'Expression') + link_filter.FlatTypeTemplate(["Symbol", "Symbol", "Symbol"], "Expression") ) - links2 = faas_fixture.get_links(link_filter.NamedType('Expression')) + links2 = faas_fixture.get_links(link_filter.NamedType("Expression")) assert len(links1) == 43 assert len(links2) == 43 def test_count_atoms(self, faas_fixture: FunctionsClient): ret = faas_fixture.count_atoms() print(ret) - assert ret == {'atom_count': 66} + assert ret == {"atom_count": 66} def test_query(self, faas_fixture: FunctionsClient): answer = faas_fixture.query( @@ -138,18 +138,18 @@ def test_get_incoming_links(self, faas_fixture: FunctionsClient): ) 
assert len(response_atoms) == 8 for atom in response_atoms: - assert isinstance( - atom, LinkT - ), f"Each item in body must be a LinkT instance. Received: {atom}" + assert isinstance(atom, LinkT), ( + f"Each item in body must be a LinkT instance. Received: {atom}" + ) assert len(atom.targets) == 3 assert atom.handle in [a.handle for a in expected_atoms] response_atoms = faas_fixture.get_incoming_links(metta_animal_base_handles.human) assert len(response_atoms) == 8 for atom in response_atoms: - assert isinstance( - atom, LinkT - ), f"Each item in body must be a LinkT instance. Received: {atom}" + assert isinstance(atom, LinkT), ( + f"Each item in body must be a LinkT instance. Received: {atom}" + ) assert len(atom.targets) == 3 assert atom.handle in [a.handle for a in expected_atoms] @@ -158,9 +158,9 @@ def test_get_incoming_links(self, faas_fixture: FunctionsClient): ) assert len(response_atoms_targets) == 8 for atom in response_atoms_targets: - assert isinstance( - atom, LinkT - ), f"Each item in body must be a LinkT instance. Received: {atom}" + assert isinstance(atom, LinkT), ( + f"Each item in body must be a LinkT instance. 
Received: {atom}" + ) atom_targets = [a.to_dict() for a in atom.targets_documents] assert len(atom_targets) == 3 assert atom.handle in expected_atoms_targets diff --git a/src/tests/python/integration/test_das_query_api.py b/src/tests/python/integration/test_das_query_api.py index 9d81b96..a388fae 100644 --- a/src/tests/python/integration/test_das_query_api.py +++ b/src/tests/python/integration/test_das_query_api.py @@ -26,15 +26,15 @@ def setup_class(cls): _db_up() remote_up(build=False) - das_instance["local_ram"] = DistributedAtomSpace(query_engine='local', atomdb='ram') + das_instance["local_ram"] = DistributedAtomSpace(query_engine="local", atomdb="ram") load_metta_animals_base(das_instance["local_ram"]) das_instance["local_redis_mongo"] = DistributedAtomSpace( - query_engine='local', - atomdb='redis_mongo', + query_engine="local", + atomdb="redis_mongo", mongo_port=mongo_port, - mongo_username='dbadmin', - mongo_password='dassecret', + mongo_username="dbadmin", + mongo_password="dassecret", redis_port=redis_port, redis_cluster=False, redis_ssl=False, @@ -43,7 +43,7 @@ def setup_class(cls): das_instance["local_redis_mongo"].commit_changes() das_instance["remote"] = DistributedAtomSpace( - query_engine='remote', host=remote_das_host, port=get_remote_das_port() + query_engine="remote", host=remote_das_host, port=get_remote_das_port() ) @classmethod @@ -56,7 +56,7 @@ def _cleanup(self, request): return cleanup(request) def test_count_atoms(self): - for key, das in das_instance.items(): + for _key, das in das_instance.items(): count = das.count_atoms({}) assert count["atom_count"] == 66 diff --git a/src/tests/python/integration/test_iterators.py b/src/tests/python/integration/test_iterators.py index f28f822..d34ded3 100644 --- a/src/tests/python/integration/test_iterators.py +++ b/src/tests/python/integration/test_iterators.py @@ -9,8 +9,6 @@ _db_down, _db_up, cleanup, - das_local_fixture_class, - das_remote_fixture_module, get_remote_das_port, 
load_metta_animals_base, metta_animal_base_handles, @@ -33,7 +31,7 @@ def human_handle(self): return metta_animal_base_handles.human def _is_expression_atom(self, atom: dict) -> bool: - if atom['named_type'] != 'Expression': + if atom["named_type"] != "Expression": return False return True @@ -54,11 +52,11 @@ def _human_incoming_links(self): def _check_asserts(self, das: DistributedAtomSpace, iterator: QueryAnswerIterator): current_value = iterator.get() assert current_value == das.query_engine.get_atom( - iterator.get()['handle'], targets_document=True + iterator.get()["handle"], targets_document=True ) assert isinstance(current_value, dict) assert iterator.is_empty() is False - link_handles = sorted([item['handle'] for item in iterator]) + link_handles = sorted([item["handle"] for item in iterator]) assert len(link_handles) == 8 assert link_handles == self._human_incoming_links() assert iterator.is_empty() is True @@ -77,11 +75,11 @@ def test_traverse_links_with_das_ram_only(self, human_handle): def test_traverse_links_with_das_redis_mongo(self, human_handle, _cleanup): _db_up() das = DistributedAtomSpace( - query_engine='local', - atomdb='redis_mongo', + query_engine="local", + atomdb="redis_mongo", mongo_port=mongo_port, - mongo_username='dbadmin', - mongo_password='dassecret', + mongo_username="dbadmin", + mongo_password="dassecret", redis_port=redis_port, redis_cluster=False, redis_ssl=False, @@ -125,16 +123,16 @@ def _human_neighbors(self): ) def _is_literal_atom(self, atom: dict) -> bool: - if atom['is_literal'] is False: + if atom["is_literal"] is False: return False return True def _check_asserts(self, das: DistributedAtomSpace, iterator: TraverseNeighborsIterator): current_value = iterator.get() - assert current_value == das.get_atom(iterator.get()['handle']) + assert current_value == das.get_atom(iterator.get()["handle"]) assert isinstance(current_value, dict) assert iterator.is_empty() is False - node_handles = sorted([item['handle'] for item in 
iterator]) + node_handles = sorted([item["handle"] for item in iterator]) assert len(node_handles) == 4 assert node_handles == self._human_neighbors() assert iterator.is_empty() is True @@ -153,11 +151,11 @@ def test_traverse_neighbors_with_das_ram_only(self, human_handle): def test_traverse_neighbors_with_das_redis_mongo(self, human_handle, _cleanup): _db_up() das = DistributedAtomSpace( - query_engine='local', - atomdb='redis_mongo', + query_engine="local", + atomdb="redis_mongo", mongo_port=mongo_port, - mongo_username='dbadmin', - mongo_password='dassecret', + mongo_username="dbadmin", + mongo_password="dassecret", redis_port=redis_port, redis_cluster=False, redis_ssl=False, @@ -254,27 +252,27 @@ def _all_links(self): def _asserts(self, das: DistributedAtomSpace): node_index = das.create_field_index( - atom_type='node', fields=['is_literal'], named_type='Symbol' + atom_type="node", fields=["is_literal"], named_type="Symbol" ) link_index_type = das.create_field_index( - atom_type='link', fields=['is_toplevel'], named_type='Expression' + atom_type="link", fields=["is_toplevel"], named_type="Expression" ) link_index_composite_type = das.create_field_index( - atom_type='link', - fields=['is_toplevel'], - composite_type=['Expression', 'Symbol', 'Symbol', 'Symbol'], + atom_type="link", + fields=["is_toplevel"], + composite_type=["Expression", "Symbol", "Symbol", "Symbol"], ) - node_iterator = das.custom_query(node_index, query={'is_literal': True}, no_iterator=False) + node_iterator = das.custom_query(node_index, query={"is_literal": True}, no_iterator=False) link_iterator_type = das.custom_query( link_index_type, - query={'is_toplevel': True}, + query={"is_toplevel": True}, chunk_size=10, no_iterator=False, ) link_iterator_composite_type = das.custom_query( link_index_composite_type, - query={'is_toplevel': True}, + query={"is_toplevel": True}, chunk_size=5, no_iterator=False, ) @@ -288,10 +286,10 @@ def _asserts(self, das: DistributedAtomSpace): def 
_check_asserts(self, das: DistributedAtomSpace, iterator: CustomQuery): current_value = iterator.get() - assert current_value == das.get_atom(iterator.get()['handle']) + assert current_value == das.get_atom(iterator.get()["handle"]) assert isinstance(current_value, dict) assert iterator.is_empty() is False - handles = sorted([item['handle'] for item in iterator]) + handles = sorted([item["handle"] for item in iterator]) assert iterator.is_empty() is True with pytest.raises(StopIteration): iterator.get() @@ -311,7 +309,7 @@ def test_custom_query_with_local_das_redis_mongo(self, _cleanup, das_local_fixtu @pytest.mark.skip("Waiting fix") def test_custom_query_with_remote_das(self): das = DistributedAtomSpace( - query_engine='remote', host=remote_das_host, port=get_remote_das_port() + query_engine="remote", host=remote_das_host, port=get_remote_das_port() ) self._asserts(das) @@ -319,36 +317,36 @@ def test_get_atom_by_field_local(self, das_local_fixture_class): das = das_local_fixture_class load_metta_animals_base(das) das.commit_changes() - atom_field = das.get_atoms_by_field({'name': '"chimp"'}) + atom_field = das.get_atoms_by_field({"name": '"chimp"'}) assert atom_field def test_get_atoms_by_field_remote(self, das_remote_fixture_module): das = das_remote_fixture_module - atom_field = das.get_atoms_by_field({'name': '"chimp"'}) + atom_field = das.get_atoms_by_field({"name": '"chimp"'}) assert atom_field def test_get_atoms_by_text_field_local(self, das_local_fixture_class): das = das_local_fixture_class load_metta_animals_base(das) das.commit_changes() - with pytest.raises(Exception, match=r'text index required for \$text query'): + with pytest.raises(Exception, match=r"text index required for \$text query"): das.get_atoms_by_text_field(text_value='"') - atom_text_field = das.get_atoms_by_text_field(text_value='"chim', field='name') + atom_text_field = das.get_atoms_by_text_field(text_value='"chim', field="name") assert atom_text_field def 
test_get_atoms_by_text_field_remote(self, das_remote_fixture_module): das = das_remote_fixture_module - atom_text_field = das.get_atoms_by_text_field(text_value='"chim', field='name') + atom_text_field = das.get_atoms_by_text_field(text_value='"chim', field="name") assert atom_text_field def test_get_atoms_starting_local(self, das_local_fixture_class): das = das_local_fixture_class load_metta_animals_base(das) das.commit_changes() - atom_starting_with = das.get_node_by_name_starting_with('Symbol', '"mon') + atom_starting_with = das.get_node_by_name_starting_with("Symbol", '"mon') assert atom_starting_with def test_get_atoms_starting_remote(self, das_remote_fixture_module): das = das_remote_fixture_module - atom_starting_with = das.get_node_by_name_starting_with('Symbol', '"mon') + atom_starting_with = das.get_node_by_name_starting_with("Symbol", '"mon') assert atom_starting_with diff --git a/src/tests/python/integration/test_local_das.py b/src/tests/python/integration/test_local_das.py index c28b333..36b863a 100644 --- a/src/tests/python/integration/test_local_das.py +++ b/src/tests/python/integration/test_local_das.py @@ -4,9 +4,6 @@ from hyperon_das import DistributedAtomSpace from tests.python.integration.helpers import ( - das_local_custom_fixture, - das_local_fixture, - das_remote_fixture_module, get_remote_das_port, mongo_port, redis_port, @@ -100,11 +97,11 @@ def test_fetch_atoms_from_remote_server( def test_fetch_atoms(self, das_local_custom_fixture): das = das_local_custom_fixture( - query_engine='local', - atomdb='redis_mongo', + query_engine="local", + atomdb="redis_mongo", mongo_port=mongo_port, - mongo_username='dbadmin', - mongo_password='dassecret', + mongo_username="dbadmin", + mongo_password="dassecret", redis_port=redis_port, redis_cluster=False, redis_ssl=False, diff --git a/src/tests/python/integration/test_remote_das.py b/src/tests/python/integration/test_remote_das.py index 5c4acb0..267b974 100644 --- 
a/src/tests/python/integration/test_remote_das.py +++ b/src/tests/python/integration/test_remote_das.py @@ -10,7 +10,6 @@ from hyperon_das.traverse_engines import TraverseEngine from tests.python.integration.helpers import remote_das_host # noqa F401 from tests.python.integration.helpers import ( - das_remote_fixture_module, get_remote_das_port, metta_animal_base_handles, ) @@ -35,12 +34,12 @@ def test_server_connection(self, das_remote_fixture_module): try: das = das_remote_fixture_module except Exception as e: - pytest.fail(f'Connection with OpenFaaS server fail, Details: {str(e)}') + pytest.fail(f"Connection with OpenFaaS server fail, Details: {str(e)}") if not das.query_engine.remote_das.url: - pytest.fail('Connection with server fail') + pytest.fail("Connection with server fail") assert ( das.query_engine.remote_das.url - == f'http://{remote_das_host}:{get_remote_das_port()}/function/query-engine' + == f"http://{remote_das_host}:{get_remote_das_port()}/function/query-engine" ) def test_cache_controller(self, das_remote_fixture_module: DistributedAtomSpace): @@ -51,13 +50,13 @@ def test_get_atom(self, das_remote_fixture_module: DistributedAtomSpace): result = das_remote_fixture_module.get_atom(handle=metta_animal_base_handles.human) assert result.handle == metta_animal_base_handles.human assert result.name == '"human"' - assert result.named_type == 'Symbol' + assert result.named_type == "Symbol" result = das_remote_fixture_module.get_atom( handle=metta_animal_base_handles.inheritance_dinosaur_reptile ) assert result.handle == metta_animal_base_handles.inheritance_dinosaur_reptile - assert result.named_type == 'Expression' + assert result.named_type == "Expression" assert result.targets == [ metta_animal_base_handles.Inheritance, metta_animal_base_handles.dinosaur, @@ -65,7 +64,7 @@ def test_get_atom(self, das_remote_fixture_module: DistributedAtomSpace): ] with pytest.raises(AtomDoesNotExist): - das_remote_fixture_module.get_atom(handle='fake') + 
das_remote_fixture_module.get_atom(handle="fake") @pytest.mark.skip("Wrong values") def test_get_links(self, das_remote_fixture_module: DistributedAtomSpace): @@ -85,7 +84,7 @@ def test_get_links(self, das_remote_fixture_module: DistributedAtomSpace): metta_animal_base_handles.inheritance_typedef, ] - links = das_remote_fixture_module.get_links(link_filters.NamedType('Expression')) + links = das_remote_fixture_module.get_links(link_filters.NamedType("Expression")) inheritance_links = [] for link in links: if metta_animal_base_handles.Inheritance in link.targets: @@ -94,7 +93,7 @@ def test_get_links(self, das_remote_fixture_module: DistributedAtomSpace): assert sorted(inheritance_links) == sorted(all_inheritance) links = das_remote_fixture_module.get_links( - link_filters.FlatTypeTemplate(['Symbol', 'Symbol', 'Symbol'], 'Expression') + link_filters.FlatTypeTemplate(["Symbol", "Symbol", "Symbol"], "Expression") ) inheritance_links = [] for link in links: @@ -110,7 +109,7 @@ def test_get_links(self, das_remote_fixture_module: DistributedAtomSpace): metta_animal_base_handles.earthworm, metta_animal_base_handles.animal, ], - 'Expression', + "Expression", ) ) assert link[0].handle == metta_animal_base_handles.inheritance_earthworm_animal @@ -137,9 +136,9 @@ def test_get_incoming_links(self, das_remote_fixture_module: DistributedAtomSpac @pytest.mark.skip("Wrong value, review") def test_count_atoms(self, das_remote_fixture_module: DistributedAtomSpace): response = das_remote_fixture_module.count_atoms(parameters={}) - assert response == {'atom_count': 66, 'node_count': 0, 'link_count': 0} - response_local = das_remote_fixture_module.count_atoms({'context': 'local'}) - assert response_local == {'atom_count': 0, 'node_count': 0, 'link_count': 0} + assert response == {"atom_count": 66, "node_count": 0, "link_count": 0} + response_local = das_remote_fixture_module.count_atoms({"context": "local"}) + assert response_local == {"atom_count": 0, "node_count": 0, "link_count": 0} 
def test_query(self, das_remote_fixture_module: DistributedAtomSpace): all_inheritance_mammal = [ @@ -159,7 +158,7 @@ def test_query(self, das_remote_fixture_module: DistributedAtomSpace): {"atom_type": "node", "type": "Symbol", "name": '"mammal"'}, ], }, - {'no_iterator': True}, + {"no_iterator": True}, ) answer = tuple([item.assignment, item.subgraph] for item in query_answer) @@ -173,19 +172,19 @@ def test_query(self, das_remote_fixture_module: DistributedAtomSpace): link.targets_documents, [ { - 'handle': metta_animal_base_handles.Inheritance, - 'named_type': 'Symbol', - 'name': "Inheritance", + "handle": metta_animal_base_handles.Inheritance, + "named_type": "Symbol", + "name": "Inheritance", }, { - 'handle': metta_animal_base_handles.chimp, - 'named_type': 'Symbol', - 'name': '"chimp"', + "handle": metta_animal_base_handles.chimp, + "named_type": "Symbol", + "name": '"chimp"', }, { - 'handle': metta_animal_base_handles.mammal, - 'named_type': 'Symbol', - 'name': '"mammal"', + "handle": metta_animal_base_handles.mammal, + "named_type": "Symbol", + "name": '"mammal"', }, ], ) @@ -194,19 +193,19 @@ def test_query(self, das_remote_fixture_module: DistributedAtomSpace): link.targets_documents, [ { - 'handle': metta_animal_base_handles.Inheritance, - 'named_type': 'Symbol', - 'name': "Inheritance", + "handle": metta_animal_base_handles.Inheritance, + "named_type": "Symbol", + "name": "Inheritance", }, { - 'handle': metta_animal_base_handles.human, - 'named_type': 'Symbol', - 'name': '"human"', + "handle": metta_animal_base_handles.human, + "named_type": "Symbol", + "name": '"human"', }, { - 'handle': metta_animal_base_handles.mammal, - 'named_type': 'Symbol', - 'name': '"mammal"', + "handle": metta_animal_base_handles.mammal, + "named_type": "Symbol", + "name": '"mammal"', }, ], ) @@ -215,19 +214,19 @@ def test_query(self, das_remote_fixture_module: DistributedAtomSpace): link.targets_documents, [ { - 'handle': metta_animal_base_handles.Inheritance, - 'named_type': 
'Symbol', - 'name': "Inheritance", + "handle": metta_animal_base_handles.Inheritance, + "named_type": "Symbol", + "name": "Inheritance", }, { - 'handle': metta_animal_base_handles.monkey, - 'named_type': 'Symbol', - 'name': '"monkey"', + "handle": metta_animal_base_handles.monkey, + "named_type": "Symbol", + "name": '"monkey"', }, { - 'handle': metta_animal_base_handles.mammal, - 'named_type': 'Symbol', - 'name': '"mammal"', + "handle": metta_animal_base_handles.mammal, + "named_type": "Symbol", + "name": '"mammal"', }, ], ) @@ -236,19 +235,19 @@ def test_query(self, das_remote_fixture_module: DistributedAtomSpace): link.targets_documents, [ { - 'handle': metta_animal_base_handles.Inheritance, - 'named_type': 'Symbol', - 'name': "Inheritance", + "handle": metta_animal_base_handles.Inheritance, + "named_type": "Symbol", + "name": "Inheritance", }, { - 'handle': metta_animal_base_handles.rhino, - 'named_type': 'Symbol', - 'name': '"rhino"', + "handle": metta_animal_base_handles.rhino, + "named_type": "Symbol", + "name": '"rhino"', }, { - 'handle': metta_animal_base_handles.mammal, - 'named_type': 'Symbol', - 'name': '"mammal"', + "handle": metta_animal_base_handles.mammal, + "named_type": "Symbol", + "name": '"mammal"', }, ], ) @@ -257,7 +256,7 @@ def test_get_traversal_cursor(self, das_remote_fixture_module: DistributedAtomSp cursor = das_remote_fixture_module.get_traversal_cursor(metta_animal_base_handles.human) assert cursor.get().handle == metta_animal_base_handles.human with pytest.raises(GetTraversalCursorException): - das_remote_fixture_module.get_traversal_cursor('fake_handle') + das_remote_fixture_module.get_traversal_cursor("fake_handle") @pytest.mark.skip(reason="Disabled. 
Waiting for https://github.com/singnet/das/issues/73") def test_traverse_engine_methods(self, das_remote_fixture_module: DistributedAtomSpace): @@ -267,7 +266,7 @@ def test_traverse_engine_methods(self, das_remote_fixture_module: DistributedAto assert cursor.get().handle == metta_animal_base_handles.dinosaur def is_expression_link(link): - return True if link['type'] == 'Expression' else False + return True if link["type"] == "Expression" else False links_iter = cursor.get_links(filter=is_expression_link) @@ -288,23 +287,23 @@ def is_expression_link(link): assert count == 0 def is_literal(atom: dict): - return True if atom['is_literal'] is True else False + return True if atom["is_literal"] is True else False neighbors_iter = cursor.get_neighbors(cursor_position=1, filter=is_literal) - assert neighbors_iter.get()['handle'] == metta_animal_base_handles.reptile + assert neighbors_iter.get()["handle"] == metta_animal_base_handles.reptile atom = cursor.follow_link(cursor_position=2, filter=is_literal) - assert atom['handle'] == metta_animal_base_handles.triceratops + assert atom["handle"] == metta_animal_base_handles.triceratops cursor.goto(metta_animal_base_handles.human) - assert cursor.get()['handle'] == metta_animal_base_handles.human + assert cursor.get()["handle"] == metta_animal_base_handles.human - @pytest.mark.skip('Wrong values') + @pytest.mark.skip("Wrong values") def test_fetch_atoms(self, das_remote_fixture_module): assert das_remote_fixture_module.backend.count_atoms() == { - 'atom_count': 0, - 'node_count': 0, - 'link_count': 0, + "atom_count": 0, + "node_count": 0, + "link_count": 0, } das_remote_fixture_module.fetch( query={ @@ -318,39 +317,39 @@ def test_fetch_atoms(self, das_remote_fixture_module): } ) assert das_remote_fixture_module.backend.count_atoms() == { - 'atom_count': 10, - 'node_count': 6, - 'link_count': 4, + "atom_count": 10, + "node_count": 6, + "link_count": 4, } - @pytest.mark.skip('Wrong values') + @pytest.mark.skip("Wrong values") 
def test_fetch_all_data(self, das_remote_fixture_module): assert das_remote_fixture_module.backend.count_atoms() == { - 'atom_count': 0, - 'node_count': 0, - 'link_count': 0, + "atom_count": 0, + "node_count": 0, + "link_count": 0, } das_remote_fixture_module.fetch() assert das_remote_fixture_module.backend.count_atoms() == { - 'atom_count': 66, - 'node_count': 23, - 'link_count': 43, + "atom_count": 66, + "node_count": 23, + "link_count": 43, } def test_create_context(self, das_remote_fixture_module): - context_name = 'my context' + context_name = "my context" context = das_remote_fixture_module.create_context(context_name) assert context.name == context_name @pytest.mark.skip(reason="Disable. See https://github.com/singnet/das-query-engine/issues/259") def test_commit_changes(self, das_remote_fixture_module: DistributedAtomSpace): node = das_remote_fixture_module.get_atom(handle=metta_animal_base_handles.human) - assert hasattr(node, 'test_key') is False - assert 'test_key' not in node.custom_attributes - das_remote_fixture_module.add_node(NodeT(**{'type': 'Symbol', 'name': '"human"'})) + assert hasattr(node, "test_key") is False + assert "test_key" not in node.custom_attributes + das_remote_fixture_module.add_node(NodeT(**{"type": "Symbol", "name": '"human"'})) das_remote_fixture_module.commit_changes() node = das_remote_fixture_module.get_atom(handle=metta_animal_base_handles.human) - assert node.custom_attributes['test_key'] == 'test_value' + assert node.custom_attributes["test_key"] == "test_value" def test_commit_changes_method_with_mode_parameter(self, das_remote_fixture_module): das = das_remote_fixture_module @@ -361,6 +360,9 @@ def test_commit_changes_method_with_mode_parameter(self, das_remote_fixture_modu with pytest.raises(ValueError): das = DistributedAtomSpace( - mode='blah', query_engine='remote', host=remote_das_host, port=get_remote_das_port() + mode="blah", + query_engine="remote", + host=remote_das_host, + port=get_remote_das_port(), ) 
das.commit_changes() diff --git a/src/tests/python/integration/test_traverse_engine.py b/src/tests/python/integration/test_traverse_engine.py index 103106d..3d3567d 100644 --- a/src/tests/python/integration/test_traverse_engine.py +++ b/src/tests/python/integration/test_traverse_engine.py @@ -39,13 +39,13 @@ def _check_asserts(self, das: DistributedAtomSpace): # Get current_cursor = traverse.get() - assert current_cursor['handle'] == metta_animal_base_handles.human - assert current_cursor['name'] == '"human"' - assert current_cursor['named_type'] == 'Symbol' + assert current_cursor["handle"] == metta_animal_base_handles.human + assert current_cursor["name"] == '"human"' + assert current_cursor["named_type"] == "Symbol" # Get links links = traverse.get_links() - link_handles = sorted([link['handle'] for link in links]) + link_handles = sorted([link["handle"] for link in links]) assert link_handles == sorted( [ metta_animal_base_handles.similarity_human_chimp, @@ -67,8 +67,8 @@ def _check_asserts(self, das: DistributedAtomSpace): ) # Get links with filters - links = traverse.get_links(link_type='Expression', cursor_position=1, target_type='Symbol') - link_handles = sorted([link['handle'] for link in links]) + links = traverse.get_links(link_type="Expression", cursor_position=1, target_type="Symbol") + link_handles = sorted([link["handle"] for link in links]) assert link_handles == sorted( [ metta_animal_base_handles.similarity_human_chimp, @@ -81,7 +81,7 @@ def _check_asserts(self, das: DistributedAtomSpace): # Get neighbors neighbors = traverse.get_neighbors() - neighbors_handles = sorted([neighbor['handle'] for neighbor in neighbors]) + neighbors_handles = sorted([neighbor["handle"] for neighbor in neighbors]) assert neighbors_handles == sorted( [ metta_animal_base_handles.chimp, @@ -97,15 +97,15 @@ def _check_asserts(self, das: DistributedAtomSpace): # Get neighbors with filters def is_literal(atom: dict) -> bool: - return atom['is_literal'] is True + return 
atom["is_literal"] is True neighbors = traverse.get_neighbors( - link_type='Expression', + link_type="Expression", cursor_position=2, - target_type='Symbol', + target_type="Symbol", filters=(None, is_literal), ) - neighbors_handles = sorted([neighbor['handle'] for neighbor in neighbors]) + neighbors_handles = sorted([neighbor["handle"] for neighbor in neighbors]) assert neighbors_handles == sorted( [ metta_animal_base_handles.chimp, @@ -126,33 +126,36 @@ def is_literal(atom: dict) -> bool: metta_animal_base_handles.Inheritance, metta_animal_base_handles.typedef_mark, ] - assert traverse.get()['handle'] in expected_neighbors + assert traverse.get()["handle"] in expected_neighbors # Follow link with filters def is_ent(atom: dict) -> bool: - return atom['name'] == '"ent"' + return atom["name"] == '"ent"' traverse.goto(metta_animal_base_handles.human) traverse.follow_link( - link_type='Expression', cursor_position=2, target_type='Symbol', filters=(None, is_ent) + link_type="Expression", + cursor_position=2, + target_type="Symbol", + filters=(None, is_ent), ) - assert traverse.get()['name'] == '"ent"' + assert traverse.get()["name"] == '"ent"' # Get neighbors with filter as Tuple traverse = das.get_traversal_cursor(handle=metta_animal_base_handles.human) def is_expression_link(atom): - return atom['named_type'] == 'Expression' + return atom["named_type"] == "Expression" def is_mammal(atom): - return atom['name'] == '"mammal"' + return atom["name"] == '"mammal"' neighbors = traverse.get_neighbors(filters=(is_expression_link, is_mammal)) - assert [i['handle'] for i in neighbors] == [metta_animal_base_handles.mammal] + assert [i["handle"] for i in neighbors] == [metta_animal_base_handles.mammal] neighbors = traverse.get_neighbors(filters=(None, is_mammal)) - assert [i['handle'] for i in neighbors] == [metta_animal_base_handles.mammal] + assert [i["handle"] for i in neighbors] == [metta_animal_base_handles.mammal] neighbors = 
traverse.get_neighbors(filters=(is_expression_link, None)) - handles = sorted([i['handle'] for i in neighbors]) + handles = sorted([i["handle"] for i in neighbors]) assert handles == sorted( [ metta_animal_base_handles.chimp, @@ -166,7 +169,7 @@ def is_mammal(atom): ] ) neighbors = traverse.get_neighbors(filters=(is_expression_link, None)) - assert sorted([i['handle'] for i in neighbors]) == sorted( + assert sorted([i["handle"] for i in neighbors]) == sorted( [ metta_animal_base_handles.chimp, metta_animal_base_handles.monkey, @@ -193,11 +196,11 @@ def test_traverse_engine_with_das_ram_only(self): def test_traverse_engine_with_das_redis_mongo(self, _cleanup): _db_up() das = DistributedAtomSpace( - query_engine='local', - atomdb='redis_mongo', + query_engine="local", + atomdb="redis_mongo", mongo_port=mongo_port, - mongo_username='dbadmin', - mongo_password='dassecret', + mongo_username="dbadmin", + mongo_password="dassecret", redis_port=redis_port, redis_cluster=False, redis_ssl=False, @@ -209,6 +212,6 @@ def test_traverse_engine_with_das_redis_mongo(self, _cleanup): def test_traverse_engine_with_remote_das(self): das = DistributedAtomSpace( - query_engine='remote', host=remote_das_host, port=get_remote_das_port() + query_engine="remote", host=remote_das_host, port=get_remote_das_port() ) self._check_asserts(das) diff --git a/src/tests/python/unit/adapters/test_ram_only.py b/src/tests/python/unit/adapters/test_ram_only.py index 7d3a250..726a542 100644 --- a/src/tests/python/unit/adapters/test_ram_only.py +++ b/src/tests/python/unit/adapters/test_ram_only.py @@ -5,7 +5,11 @@ from hyperon_das_atomdb import AtomDB from hyperon_das_atomdb.adapters import InMemoryDB from hyperon_das_atomdb.database import LinkT, NodeT -from hyperon_das_atomdb.exceptions import AddLinkException, AddNodeException, AtomDoesNotExist +from hyperon_das_atomdb.exceptions import ( + AddLinkException, + AddNodeException, + AtomDoesNotExist, +) from hyperon_das_atomdb.utils.expression_hasher 
import ExpressionHasher from tests.python.helpers import dict_to_link_params, dict_to_node_params from tests.python.unit.fixtures import in_memory_db # noqa: F401 @@ -181,8 +185,16 @@ def test_get_link_targets_invalid(self, database: InMemoryDB): "*", {"c93e1e758c53912638438e2a7d7f7b7f"}, ), - ([("Concept", "ent"), ("Concept", "plant")], "*", {"ee1c03e6d1f104ccd811cfbba018451a"}), - ([("Concept", "ent"), ("Concept", "human")], "*", {"a45af31b43ee5ea271214338a5a5bd61"}), + ( + [("Concept", "ent"), ("Concept", "plant")], + "*", + {"ee1c03e6d1f104ccd811cfbba018451a"}, + ), + ( + [("Concept", "ent"), ("Concept", "human")], + "*", + {"a45af31b43ee5ea271214338a5a5bd61"}, + ), ( [("Concept", "human"), ("Concept", "chimp")], "*", @@ -191,7 +203,10 @@ def test_get_link_targets_invalid(self, database: InMemoryDB): ( ["*", ("Concept", "chimp")], "Similarity", - {"b5459e299a5c5e8662c427f7e01b3bf1", "31535ddf214f5b239d3b517823cb8144"}, + { + "b5459e299a5c5e8662c427f7e01b3bf1", + "31535ddf214f5b239d3b517823cb8144", + }, ), ( ["*", ("Concept", "human")], @@ -205,7 +220,10 @@ def test_get_link_targets_invalid(self, database: InMemoryDB): ( [("Concept", "chimp"), "*"], "Similarity", - {"abe6ad743fc81bd1c55ece2e1307a178", "2c927fdc6c0f1272ee439ceb76a6d1a4"}, + { + "abe6ad743fc81bd1c55ece2e1307a178", + "2c927fdc6c0f1272ee439ceb76a6d1a4", + }, ), ( [("Concept", "chimp"), "*"], diff --git a/src/tests/python/unit/adapters/test_redis_mongo_db.py b/src/tests/python/unit/adapters/test_redis_mongo_db.py index 621efc2..e2e9332 100644 --- a/src/tests/python/unit/adapters/test_redis_mongo_db.py +++ b/src/tests/python/unit/adapters/test_redis_mongo_db.py @@ -47,12 +47,15 @@ def database(self, redis_mongo_db: RedisMongoDB): # noqa: F811 @pytest.fixture def database_custom_index(self): - with mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.RedisMongoDB._connection_mongo_db", - return_value=mongo_mock(), - ), mock.patch( - 
"hyperon_das_atomdb.adapters.redis_mongo_db.RedisMongoDB._connection_redis", - return_value=redis_mock(), + with ( + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.RedisMongoDB._connection_mongo_db", + return_value=mongo_mock(), + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.RedisMongoDB._connection_redis", + return_value=redis_mock(), + ), ): yield RedisMongoDB @@ -175,7 +178,12 @@ def test_get_link_targets(self, link_type, targets, expected_count, database: Re assert len(targets) == expected_count @pytest.mark.parametrize( - "handle", ["handle", "2a8a69c01305563932b957de4b3a9ba6", "2a8a69c0130556=z32b957de4b3a9ba6"] + "handle", + [ + "handle", + "2a8a69c01305563932b957de4b3a9ba6", + "2a8a69c0130556=z32b957de4b3a9ba6", + ], ) def test_get_link_targets_invalid(self, handle, database: RedisMongoDB): with pytest.raises(ValueError) as exc_info: @@ -187,7 +195,11 @@ def test_get_link_targets_invalid(self, handle, database: RedisMongoDB): "link_values,expected,expected_count", [ ( - {"link_type": "Evaluation", "target_handles": ["*", "*"], "toplevel_only": True}, + { + "link_type": "Evaluation", + "target_handles": ["*", "*"], + "toplevel_only": True, + }, {"bd2bb6c802a040b00659dfe7954e804d"}, 1, ), @@ -218,7 +230,10 @@ def test_get_link_targets_invalid(self, handle, database: RedisMongoDB): ( { "link_type": "Similarity", - "target_handles": ["*", ExpressionHasher.terminal_hash("Concept", "chimp")], + "target_handles": [ + "*", + ExpressionHasher.terminal_hash("Concept", "chimp"), + ], "toplevel_only": False, }, { @@ -350,7 +365,11 @@ def test_get_node_name(self, node_type, node_name, database: RedisMongoDB): @pytest.mark.parametrize( "handle,", - ["handle", "2a8a69c01305563932b957de4b3a9ba6", "2a8a69c0130556=z32b957de4b3a9ba6"], + [ + "handle", + "2a8a69c01305563932b957de4b3a9ba6", + "2a8a69c0130556=z32b957de4b3a9ba6", + ], ) def test_get_node_name_value_error(self, handle, database: RedisMongoDB): with pytest.raises(ValueError) as 
exc_info: @@ -430,7 +449,10 @@ def test_get_node_by_field(self, database: RedisMongoDB): "link", ["type"], [{"field": "type", "value": "Evaluation"}], - ["bd2bb6c802a040b00659dfe7954e804d", "cadd63b3fd14e34819bca4803925bf2c"], + [ + "bd2bb6c802a040b00659dfe7954e804d", + "cadd63b3fd14e34819bca4803925bf2c", + ], ), ], ) @@ -652,7 +674,10 @@ def test_redis_keys(self, key, database: RedisMongoDB): ("Concept", "triceratops"), ( "Evaluation", - ["d03e59654221c1e8fcda404fd5c8d6cb", "99d18c702e813b07260baf577c60c455"], + [ + "d03e59654221c1e8fcda404fd5c8d6cb", + "99d18c702e813b07260baf577c60c455", + ], ), ], ), @@ -699,17 +724,26 @@ def test_get_incoming_links_by_links(self, link_type, link_targets, database: Re ("Similarity", ["af12f10f9ae2002a1607ba0b47ba8407", "*"], 3), ( "Inheritance", - ["c1db9b517073e51eb7ef6fed608ec204", "b99ae727c787f1b13b452fd4c9ce1b9a"], + [ + "c1db9b517073e51eb7ef6fed608ec204", + "b99ae727c787f1b13b452fd4c9ce1b9a", + ], 1, ), ( "Evaluation", - ["d03e59654221c1e8fcda404fd5c8d6cb", "99d18c702e813b07260baf577c60c455"], + [ + "d03e59654221c1e8fcda404fd5c8d6cb", + "99d18c702e813b07260baf577c60c455", + ], 1, ), ( "Evaluation", - ["d03e59654221c1e8fcda404fd5c8d6cb", "99d18c702e813b07260baf577c60c455"], + [ + "d03e59654221c1e8fcda404fd5c8d6cb", + "99d18c702e813b07260baf577c60c455", + ], 1, ), ("Evaluation", ["*", "99d18c702e813b07260baf577c60c455"], 1), @@ -742,11 +776,26 @@ def test_redis_patterns(self, link_type, link_targets, expected_count, database: ], 8, ), - ([{"field": "named_type", "value": "Similarity", "positions": [], "arity": 0}], 0), + ( + [ + { + "field": "named_type", + "value": "Similarity", + "positions": [], + "arity": 0, + } + ], + 0, + ), ([{"field": "named_type", "value": "*", "positions": [], "arity": 0}], 1), ( [ - {"field": "named_type", "value": "*", "positions": [0, 1, 2], "arity": 3}, + { + "field": "named_type", + "value": "*", + "positions": [0, 1, 2], + "arity": 3, + }, ], 15, ), @@ -919,13 +968,23 @@ def 
test_custom_index_templates_load_error( ), ( [ - {"field": "named_type", "value": "*", "positions": [1, "a"], "arity": 3}, + { + "field": "named_type", + "value": "*", + "positions": [1, "a"], + "arity": 3, + }, ], "Value '[1, 'a']' is not supported in 'positions'.", ), ( [ - {"field": "named_type", "value": "*", "positions": [1, 2], "arity": "a"}, + { + "field": "named_type", + "value": "*", + "positions": [1, 2], + "arity": "a", + }, ], "Value 'a' is not supported in 'arity'.", ), @@ -1077,42 +1136,72 @@ def test_custom_index_templates_reindex_find( ), ( [ - {"field": "named_type", "value": "Similarity", "positions": [1], "arity": 2}, + { + "field": "named_type", + "value": "Similarity", + "positions": [1], + "arity": 2, + }, ], ["*", ["*", "*"]], 0, ), ( [ - {"field": "named_type", "value": "Similarity", "positions": [1], "arity": 2}, + { + "field": "named_type", + "value": "Similarity", + "positions": [1], + "arity": 2, + }, ], ["Similarity", ["bb34ce95f161a6b37ff54b3d4c817857", "*"]], 1, ), ( [ - {"field": "named_type", "value": "Similarity", "positions": [1], "arity": 2}, + { + "field": "named_type", + "value": "Similarity", + "positions": [1], + "arity": 2, + }, ], ["Similarity", ["*", "bb34ce95f161a6b37ff54b3d4c817857"]], 0, ), ( [ - {"field": "named_type", "value": "Evaluation", "positions": [0, 1], "arity": 2}, + { + "field": "named_type", + "value": "Evaluation", + "positions": [0, 1], + "arity": 2, + }, ], ["Evaluation", ["*", "*"]], 2, ), ( [ - {"field": "named_type", "value": "*", "positions": [0, 1], "arity": 2}, + { + "field": "named_type", + "value": "*", + "positions": [0, 1], + "arity": 2, + }, ], ["Evaluation", ["*", "*"]], 2, ), ( [ - {"field": "named_type", "value": "Evaluation", "positions": [0, 1], "arity": 2}, + { + "field": "named_type", + "value": "Evaluation", + "positions": [0, 1], + "arity": 2, + }, ], ["*", ["*", "*"]], 0, @@ -1138,7 +1227,12 @@ def test_custom_index_templates_reindex_find( ), ( [ - {"field": "named_type", "value": 
"*", "positions": [0, 1], "arity": 2}, + { + "field": "named_type", + "value": "*", + "positions": [0, 1], + "arity": 2, + }, ], ["*", ["af12f10f9ae2002a1607ba0b47ba8407", "*"]], 4, @@ -1176,7 +1270,12 @@ def test_custom_index_templates_reindex_find( ), ( [ - {"field": "named_type", "value": "*", "positions": [0, 1, 2, 3], "arity": 5}, + { + "field": "named_type", + "value": "*", + "positions": [0, 1, 2, 3], + "arity": 5, + }, ], ["*", ["*", "*", "*", "*", "af12f10f9ae2002a1607ba0b47ba8407"]], 1, @@ -1207,7 +1306,12 @@ def test_custom_index_templates_reindex_find( ), ( [ - {"field": "named_type", "value": "*", "positions": [1, 2, 3, 4], "arity": 5}, + { + "field": "named_type", + "value": "*", + "positions": [1, 2, 3, 4], + "arity": 5, + }, ], ["*", ["0a32b476852eeb954979b87f5f6cb7af", "*", "*", "*", "*"]], 1, @@ -1289,12 +1393,15 @@ def test_get_atom_type(self, database: RedisMongoDB): def test_create_field_index_node_collection(self, database: RedisMongoDB): database.mongo_atoms_collection = mock.Mock() database.mongo_atoms_collection.create_index.return_value = "name_index_asc" - with mock.patch( - "hyperon_das_atomdb.index.Index.generate_index_id", - return_value="name_index_asc", - ), mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", - return_value=False, + with ( + mock.patch( + "hyperon_das_atomdb.index.Index.generate_index_id", + return_value="name_index_asc", + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", + return_value=False, + ), ): result = database.create_field_index("node", ["name"], "Type") @@ -1308,12 +1415,15 @@ def test_create_field_index_node_collection(self, database: RedisMongoDB): def test_create_field_index_link_collection(self, database: RedisMongoDB): database.mongo_atoms_collection = mock.Mock() database.mongo_atoms_collection.create_index.return_value = "field_index_asc" - with mock.patch( - "hyperon_das_atomdb.index.Index.generate_index_id", - 
return_value="field_index_asc", - ), mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", - return_value=False, + with ( + mock.patch( + "hyperon_das_atomdb.index.Index.generate_index_id", + return_value="field_index_asc", + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", + return_value=False, + ), ): result = database.create_field_index("link", ["field"], "Type") @@ -1327,12 +1437,15 @@ def test_create_field_index_link_collection(self, database: RedisMongoDB): def test_create_text_index(self, database: RedisMongoDB): database.mongo_atoms_collection = mock.Mock() database.mongo_atoms_collection.create_index.return_value = "field_index_asc" - with mock.patch( - "hyperon_das_atomdb.index.Index.generate_index_id", - return_value="field_index_asc", - ), mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", - return_value=False, + with ( + mock.patch( + "hyperon_das_atomdb.index.Index.generate_index_id", + return_value="field_index_asc", + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", + return_value=False, + ), ): result = database.create_field_index( "link", ["field"], index_type=FieldIndexType.TOKEN_INVERTED_LIST @@ -1346,12 +1459,15 @@ def test_create_text_index(self, database: RedisMongoDB): def test_create_text_index_type(self, database: RedisMongoDB): database.mongo_atoms_collection = mock.Mock() database.mongo_atoms_collection.create_index.return_value = "field_index_asc" - with mock.patch( - "hyperon_das_atomdb.index.Index.generate_index_id", - return_value="field_index_asc", - ), mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", - return_value=False, + with ( + mock.patch( + "hyperon_das_atomdb.index.Index.generate_index_id", + return_value="field_index_asc", + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", + return_value=False, + 
), ): result = database.create_field_index( "link", ["field"], "Type", index_type=FieldIndexType.TOKEN_INVERTED_LIST @@ -1367,12 +1483,15 @@ def test_create_text_index_type(self, database: RedisMongoDB): def test_create_compound_index_type(self, database: RedisMongoDB): database.mongo_atoms_collection = mock.Mock() database.mongo_atoms_collection.create_index.return_value = "field_index_asc" - with mock.patch( - "hyperon_das_atomdb.index.Index.generate_index_id", - return_value="field_index_asc", - ), mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", - return_value=False, + with ( + mock.patch( + "hyperon_das_atomdb.index.Index.generate_index_id", + return_value="field_index_asc", + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", + return_value=False, + ), ): result = database.create_field_index("link", fields=["field", "name"]) @@ -1385,12 +1504,15 @@ def test_create_compound_index_type(self, database: RedisMongoDB): def test_create_compound_index_type_filter(self, database: RedisMongoDB): database.mongo_atoms_collection = mock.Mock() database.mongo_atoms_collection.create_index.return_value = "field_index_asc" - with mock.patch( - "hyperon_das_atomdb.index.Index.generate_index_id", - return_value="field_index_asc", - ), mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", - return_value=False, + with ( + mock.patch( + "hyperon_das_atomdb.index.Index.generate_index_id", + return_value="field_index_asc", + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", + return_value=False, + ), ): result = database.create_field_index( "link", named_type="Type", fields=["field", "name"] @@ -1425,12 +1547,15 @@ def test_create_field_index_already_exists(self, database: RedisMongoDB): database.mongo_atoms_collection = mock.Mock() database.mongo_atoms_collection.list_indexes.return_value = [] 
database.mongo_atoms_collection.create_index.return_value = "name_index_asc" - with mock.patch( - "hyperon_das_atomdb.index.Index.generate_index_id", - return_value="name_index_asc", - ), mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", - return_value=False, + with ( + mock.patch( + "hyperon_das_atomdb.index.Index.generate_index_id", + return_value="name_index_asc", + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.MongoDBIndex.index_exists", + return_value=False, + ), ): database.create_field_index("node", "name", "Type") assert database.create_field_index("node", ["name"], "Type") == "name_index_asc" diff --git a/src/tests/python/unit/adapters/test_redis_mongo_extra.py b/src/tests/python/unit/adapters/test_redis_mongo_extra.py index 4291ff7..22e48c4 100644 --- a/src/tests/python/unit/adapters/test_redis_mongo_extra.py +++ b/src/tests/python/unit/adapters/test_redis_mongo_extra.py @@ -2,7 +2,11 @@ import pytest -from hyperon_das_atomdb.adapters.redis_mongo_db import MongoDBIndex, RedisMongoDB, _HashableDocument +from hyperon_das_atomdb.adapters.redis_mongo_db import ( + MongoDBIndex, + RedisMongoDB, + _HashableDocument, +) from tests.python.helpers import dict_to_node_params from tests.python.unit.fixtures import redis_mongo_db # noqa: F401 @@ -31,11 +35,16 @@ def test_index_create_exceptions(self, params, request): mi.create(**params) @mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.MongoClient", return_value=mock.MagicMock() + "hyperon_das_atomdb.adapters.redis_mongo_db.MongoClient", + return_value=mock.MagicMock(), ) - @mock.patch("hyperon_das_atomdb.adapters.redis_mongo_db.Redis", return_value=mock.MagicMock()) @mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.RedisCluster", return_value=mock.MagicMock() + "hyperon_das_atomdb.adapters.redis_mongo_db.Redis", + return_value=mock.MagicMock(), + ) + @mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.RedisCluster", + 
return_value=mock.MagicMock(), ) def test_create_db_connection_mongo(self, mock_mongo, mock_redis, mock_redis_cluster): RedisMongoDB(mongo_tls_ca_file="/tmp/mock", redis_password="12", redis_username="A") diff --git a/src/tests/python/unit/fixtures.py b/src/tests/python/unit/fixtures.py index 77adb70..0492e76 100644 --- a/src/tests/python/unit/fixtures.py +++ b/src/tests/python/unit/fixtures.py @@ -39,8 +39,8 @@ def exists(self, key): return 1 return 0 - def cache_overwrite(self, cache=dict()): - self.cache = cache + def cache_overwrite(self, cache): + self.cache = cache or dict() def sadd(self, key, *members): if key not in self.cache: @@ -107,12 +107,15 @@ def redis_mock(): def redis_mongo_db(): mongo_db = mongo_mock() redis_db = redis_mock() - with mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.RedisMongoDB._connection_mongo_db", - return_value=mongo_db, - ), mock.patch( - "hyperon_das_atomdb.adapters.redis_mongo_db.RedisMongoDB._connection_redis", - return_value=redis_db, + with ( + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.RedisMongoDB._connection_mongo_db", + return_value=mongo_db, + ), + mock.patch( + "hyperon_das_atomdb.adapters.redis_mongo_db.RedisMongoDB._connection_redis", + return_value=redis_db, + ), ): db = RedisMongoDB() diff --git a/src/tests/python/unit/helpers.py b/src/tests/python/unit/helpers.py index 9a44ffe..b63b0da 100644 --- a/src/tests/python/unit/helpers.py +++ b/src/tests/python/unit/helpers.py @@ -5,15 +5,15 @@ def load_n_random_links_by_type( - das: DistributedAtomSpace, n: int, type: str = 'Inheritance' + das: DistributedAtomSpace, n: int, type: str = "Inheritance" ) -> None: - for name in [''.join([choice(ascii_lowercase) for c in range(5)]) for i in range(n)]: + for name in ["".join([choice(ascii_lowercase) for c in range(5)]) for i in range(n)]: das.add_link( { - 'type': type, - 'targets': [ - {'type': 'Concept', 'name': 'human'}, - {'type': 'Concept', 'name': name}, + "type": type, + "targets": [ + 
{"type": "Concept", "name": "human"}, + {"type": "Concept", "name": name}, ], } ) diff --git a/src/tests/python/unit/mock.py b/src/tests/python/unit/mock.py index d469a67..bddeae3 100644 --- a/src/tests/python/unit/mock.py +++ b/src/tests/python/unit/mock.py @@ -19,26 +19,26 @@ def _build_node_handle(node_type: str, node_name: str) -> str: - return f'<{node_type}: {node_name}>' + return f"<{node_type}: {node_name}>" def _split_node_handle(node_handle: str) -> Tuple[str, str]: - v = re.split('[<: >]', node_handle) + v = re.split("[<: >]", node_handle) return v[1], v[3] def _build_link_handle(link_type: str, target_handles: List[str]) -> str: - if link_type == 'Similarity' or link_type == 'Set': + if link_type == "Similarity" or link_type == "Set": target_handles.sort() - return f'<{link_type}: {target_handles}>' + return f"<{link_type}: {target_handles}>" class DistributedAtomSpaceMock(DistributedAtomSpace): - def __init__(self, query_engine: Optional[str] = 'local', **kwargs) -> None: + def __init__(self, query_engine: Optional[str] = "local", **kwargs) -> None: self.backend = DatabaseAnimals() self.cache_controller = CacheController({}) - if query_engine == 'remote': - with patch('hyperon_das.client.connect_to_server', return_value=(200, 'OK')): + if query_engine == "remote": + with patch("hyperon_das.client.connect_to_server", return_value=(200, "OK")): self.query_engine = RemoteQueryEngine( self.backend, self.cache_controller, {}, **kwargs ) @@ -74,60 +74,60 @@ def __init__(self): for name in self.node_names } - self.human = _build_node_handle(self.node_type, 'human') - self.monkey = _build_node_handle(self.node_type, 'monkey') - self.chimp = _build_node_handle(self.node_type, 'chimp') - self.snake = _build_node_handle(self.node_type, 'snake') - self.earthworm = _build_node_handle(self.node_type, 'earthworm') - self.rhino = _build_node_handle(self.node_type, 'rhino') - self.triceratops = _build_node_handle(self.node_type, 'triceratops') - self.vine = 
_build_node_handle(self.node_type, 'vine') - self.ent = _build_node_handle(self.node_type, 'ent') - self.mammal = _build_node_handle(self.node_type, 'mammal') - self.animal = _build_node_handle(self.node_type, 'animal') - self.reptile = _build_node_handle(self.node_type, 'reptile') - self.dinosaur = _build_node_handle(self.node_type, 'dinosaur') - self.plant = _build_node_handle(self.node_type, 'plant') + self.human = _build_node_handle(self.node_type, "human") + self.monkey = _build_node_handle(self.node_type, "monkey") + self.chimp = _build_node_handle(self.node_type, "chimp") + self.snake = _build_node_handle(self.node_type, "snake") + self.earthworm = _build_node_handle(self.node_type, "earthworm") + self.rhino = _build_node_handle(self.node_type, "rhino") + self.triceratops = _build_node_handle(self.node_type, "triceratops") + self.vine = _build_node_handle(self.node_type, "vine") + self.ent = _build_node_handle(self.node_type, "ent") + self.mammal = _build_node_handle(self.node_type, "mammal") + self.animal = _build_node_handle(self.node_type, "animal") + self.reptile = _build_node_handle(self.node_type, "reptile") + self.dinosaur = _build_node_handle(self.node_type, "dinosaur") + self.plant = _build_node_handle(self.node_type, "plant") self.all_links: list[list[str]] = [ - ['Similarity', self.human, self.monkey], - ['Similarity', self.human, self.chimp], - ['Similarity', self.chimp, self.monkey], - ['Similarity', self.snake, self.earthworm], - ['Similarity', self.rhino, self.triceratops], - ['Similarity', self.snake, self.vine], - ['Similarity', self.human, self.ent], - ['Inheritance', self.human, self.mammal], - ['Inheritance', self.monkey, self.mammal], - ['Inheritance', self.chimp, self.mammal], - ['Inheritance', self.mammal, self.animal], - ['Inheritance', self.reptile, self.animal], - ['Inheritance', self.snake, self.reptile], - ['Inheritance', self.dinosaur, self.reptile], - ['Inheritance', self.triceratops, self.dinosaur], - ['Inheritance', 
self.earthworm, self.animal], - ['Inheritance', self.rhino, self.mammal], - ['Inheritance', self.vine, self.plant], - ['Inheritance', self.ent, self.plant], + ["Similarity", self.human, self.monkey], + ["Similarity", self.human, self.chimp], + ["Similarity", self.chimp, self.monkey], + ["Similarity", self.snake, self.earthworm], + ["Similarity", self.rhino, self.triceratops], + ["Similarity", self.snake, self.vine], + ["Similarity", self.human, self.ent], + ["Inheritance", self.human, self.mammal], + ["Inheritance", self.monkey, self.mammal], + ["Inheritance", self.chimp, self.mammal], + ["Inheritance", self.mammal, self.animal], + ["Inheritance", self.reptile, self.animal], + ["Inheritance", self.snake, self.reptile], + ["Inheritance", self.dinosaur, self.reptile], + ["Inheritance", self.triceratops, self.dinosaur], + ["Inheritance", self.earthworm, self.animal], + ["Inheritance", self.rhino, self.mammal], + ["Inheritance", self.vine, self.plant], + ["Inheritance", self.ent, self.plant], [ - 'List', - _build_link_handle('Inheritance', [self.dinosaur, self.reptile]), - _build_link_handle('Inheritance', [self.triceratops, self.dinosaur]), + "List", + _build_link_handle("Inheritance", [self.dinosaur, self.reptile]), + _build_link_handle("Inheritance", [self.triceratops, self.dinosaur]), ], [ - 'Set', - _build_link_handle('Inheritance', [self.dinosaur, self.reptile]), - _build_link_handle('Inheritance', [self.triceratops, self.dinosaur]), + "Set", + _build_link_handle("Inheritance", [self.dinosaur, self.reptile]), + _build_link_handle("Inheritance", [self.triceratops, self.dinosaur]), ], - ['List', self.human, self.ent, self.monkey, self.chimp], - ['List', self.human, self.mammal, self.triceratops, self.vine], - ['List', self.human, self.monkey, self.chimp], - ['List', self.triceratops, self.ent, self.monkey, self.snake], - ['Set', self.triceratops, self.vine, self.monkey, self.snake], - ['Set', self.triceratops, self.ent, self.monkey, self.snake], - ['Set', 
self.human, self.ent, self.monkey, self.chimp], - ['Set', self.mammal, self.monkey, self.human, self.chimp], - ['Set', self.human, self.monkey, self.chimp], + ["List", self.human, self.ent, self.monkey, self.chimp], + ["List", self.human, self.mammal, self.triceratops, self.vine], + ["List", self.human, self.monkey, self.chimp], + ["List", self.triceratops, self.ent, self.monkey, self.snake], + ["Set", self.triceratops, self.vine, self.monkey, self.snake], + ["Set", self.triceratops, self.ent, self.monkey, self.snake], + ["Set", self.human, self.ent, self.monkey, self.chimp], + ["Set", self.mammal, self.monkey, self.human, self.chimp], + ["Set", self.human, self.monkey, self.chimp], ] self.template_index: dict[str, list[tuple[str, tuple[str, ...]]]] = {} @@ -145,9 +145,13 @@ def __init__(self): self._add_incoming_set(str(link), link[1:]) nested_link = [ - 'Evaluation', + "Evaluation", self.human, - ['Evaluation', self.human, _build_link_handle('Set', [self.monkey, self.mammal])], + [ + "Evaluation", + self.human, + _build_link_handle("Set", [self.monkey, self.mammal]), + ], ] self.all_links.append(nested_link) # type: ignore @@ -191,16 +195,16 @@ def _get_atom(self, handle: str) -> AtomT | None: def is_ordered(self, link_handle: str) -> bool: for link in self.all_links: if _build_link_handle(link[0], link[1:]) == link_handle: - return link[0] != 'Similarity' and link[0] != 'Set' + return link[0] != "Similarity" and link[0] != "Set" return True def get_link_handle(self, link_type: str, target_handles: list[str]) -> str: for link in self.all_links: if link[0] == link_type and len(target_handles) == (len(link) - 1): - if link_type == 'Similarity': + if link_type == "Similarity": if all(target in target_handles for target in link[1:]): return _build_link_handle(link_type, link[1:]) - elif link_type == 'Inheritance': + elif link_type == "Inheritance": for i in range(0, len(target_handles)): if target_handles[i] != link[i + 1]: break @@ -218,17 +222,17 @@ def 
get_matched_links(self, link_type: str, target_handles: list[str], **kwargs) answer = [] for link in self.all_links: if len(target_handles) == (len(link) - 1) and link[0] == link_type: - if link[0] == 'Similarity' or link[0] == 'Set': + if link[0] == "Similarity" or link[0] == "Set": if all(target == WILDCARD or target in link[1:] for target in target_handles): answer.append(_build_link_handle(link[0], link[1:])) - elif link[0] == 'Inheritance' or link[0] == 'List': + elif link[0] == "Inheritance" or link[0] == "List": for i in range(0, len(target_handles)): if target_handles[i] != WILDCARD and target_handles[i] != link[i + 1]: break else: answer.append(_build_link_handle(link[0], [])) - elif link[0] == 'Evaluation': - answer.append('test') + elif link[0] == "Evaluation": + answer.append("test") else: raise ValueError(f"Invalid link type: {link[0]}") return answer @@ -269,9 +273,9 @@ def get_matched_type(self, link_type: str, **kwargs) -> HandleListT: def get_atom_as_dict(self, handle: str, arity: int | None = 0) -> dict[str, Any]: if handle in self.all_nodes: return { - 'handle': handle, - 'type': handle.split()[0][1:-1], - 'name': handle.split()[1][:-1], + "handle": handle, + "type": handle.split()[0][1:-1], + "name": handle.split()[1][:-1], } match = re.search(r"<([^:]+): (.+)>", handle) _type = match.group(1) @@ -281,10 +285,10 @@ def get_atom_as_dict(self, handle: str, arity: int | None = 0) -> dict[str, Any] template.append(_split_node_handle(target)[0]) if match: return { - 'handle': handle, - 'type': _type, - 'template': template, - 'targets': targets, + "handle": handle, + "type": _type, + "template": template, + "targets": targets, } def get_link_type(self, link_handle: str) -> str | None: @@ -297,9 +301,9 @@ def get_node_type(self, node_handle: str) -> str | None: def count_atoms(self, parameters: dict[str, Any] | None = None) -> dict[str, int]: return { - 'link_count': len(self.all_links), - 'node_count': len(self.all_nodes), - 'atom_count': 
len(self.all_links) + len(self.all_nodes), + "link_count": len(self.all_links), + "node_count": len(self.all_nodes), + "atom_count": len(self.all_links) + len(self.all_nodes), } # return (len(self.all_nodes), len(self.all_links)) @@ -355,7 +359,7 @@ def get_atoms_by_field(self, query: list[OrderedDict[str, str]]) -> list[str]: def _append_atom(atom, named_type, name): for q in query: - if q['field'] == named_type and q['value'] in name: + if q["field"] == named_type and q["value"] in name: return True return False @@ -419,32 +423,32 @@ def __init__(self): super().__init__() self.all_links = [ - ['Similarity', self.human, self.monkey], - ['Similarity', self.human, self.chimp], - ['Similarity', self.chimp, self.monkey], - ['Similarity', self.snake, self.earthworm], - ['Similarity', self.rhino, self.triceratops], - ['Similarity', self.snake, self.vine], - ['Similarity', self.human, self.ent], - ['Inheritance', self.human, self.mammal], - ['Inheritance', self.monkey, self.mammal], - ['Inheritance', self.chimp, self.mammal], - ['Inheritance', self.mammal, self.animal], - ['Inheritance', self.reptile, self.animal], - ['Inheritance', self.snake, self.reptile], - ['Inheritance', self.dinosaur, self.reptile], - ['Inheritance', self.triceratops, self.dinosaur], - ['Inheritance', self.earthworm, self.animal], - ['Inheritance', self.rhino, self.mammal], - ['Inheritance', self.vine, self.plant], - ['Inheritance', self.ent, self.plant], - ['Similarity', self.monkey, self.human], - ['Similarity', self.chimp, self.human], - ['Similarity', self.monkey, self.chimp], - ['Similarity', self.earthworm, self.snake], - ['Similarity', self.triceratops, self.rhino], - ['Similarity', self.vine, self.snake], - ['Similarity', self.ent, self.human], + ["Similarity", self.human, self.monkey], + ["Similarity", self.human, self.chimp], + ["Similarity", self.chimp, self.monkey], + ["Similarity", self.snake, self.earthworm], + ["Similarity", self.rhino, self.triceratops], + ["Similarity", 
self.snake, self.vine], + ["Similarity", self.human, self.ent], + ["Inheritance", self.human, self.mammal], + ["Inheritance", self.monkey, self.mammal], + ["Inheritance", self.chimp, self.mammal], + ["Inheritance", self.mammal, self.animal], + ["Inheritance", self.reptile, self.animal], + ["Inheritance", self.snake, self.reptile], + ["Inheritance", self.dinosaur, self.reptile], + ["Inheritance", self.triceratops, self.dinosaur], + ["Inheritance", self.earthworm, self.animal], + ["Inheritance", self.rhino, self.mammal], + ["Inheritance", self.vine, self.plant], + ["Inheritance", self.ent, self.plant], + ["Similarity", self.monkey, self.human], + ["Similarity", self.chimp, self.human], + ["Similarity", self.monkey, self.chimp], + ["Similarity", self.earthworm, self.snake], + ["Similarity", self.triceratops, self.rhino], + ["Similarity", self.vine, self.snake], + ["Similarity", self.ent, self.human], ] self.incoming_set = {} diff --git a/src/tests/python/unit/test_attention_broker_gateway.py b/src/tests/python/unit/test_attention_broker_gateway.py index 1b76a75..c95f7bf 100644 --- a/src/tests/python/unit/test_attention_broker_gateway.py +++ b/src/tests/python/unit/test_attention_broker_gateway.py @@ -8,8 +8,8 @@ def test_creation(self): with pytest.raises(ValueError): AttentionBrokerGateway({}) with pytest.raises(ValueError): - AttentionBrokerGateway({'attention_broker_hostname': 'localhost'}) + AttentionBrokerGateway({"attention_broker_hostname": "localhost"}) with pytest.raises(ValueError): - AttentionBrokerGateway({'attention_broker_port': 27000}) + AttentionBrokerGateway({"attention_broker_port": 27000}) # successful creation is tested in a integration test because it requires # actual connection to a GRPC server diff --git a/src/tests/python/unit/test_cache.py b/src/tests/python/unit/test_cache.py index 03f5c7e..e4a5131 100644 --- a/src/tests/python/unit/test_cache.py +++ b/src/tests/python/unit/test_cache.py @@ -18,11 +18,11 @@ class TestListIterator: def 
test_list_iterator(self): iterator = ListIterator(None) - for element in iterator: + for _element in iterator: assert False iterator = ListIterator([]) - for element in iterator: + for _element in iterator: assert False iterator = ListIterator( @@ -134,30 +134,30 @@ def test_product_iterator(self): for arg in [[ln, l1], [ln, l1, l2], [ln]]: iterator = ProductIterator([ListIterator(v) for v in arg]) assert iterator.is_empty() - for element in iterator: + for _element in iterator: assert False assert iterator.is_empty() for arg in [[l0, l1], [l0, l1, l2], [l0]]: iterator = ProductIterator([ListIterator(v) for v in arg]) assert iterator.is_empty() - for element in iterator: + for _element in iterator: assert False assert iterator.is_empty() class ConcreteBaseLinksIterator(BaseLinksIterator): def get_current_value(self): - return 'current_value' + return "current_value" def get_fetch_data(self, **kwargs): return 2024, [] def get_fetch_data_kwargs(self): - return {'fetch_data_kwargs': True} + return {"fetch_data_kwargs": True} def get_next_value(self): - return 'next_value' + return "next_value" class TestBaseLinksIterator: @@ -200,13 +200,13 @@ def test_fetch_data(self): def test_refresh_iterator(self): source = ListIterator([1, 2, 3]) iterator = ConcreteBaseLinksIterator(source, cursor=1) - iterator.get_current_value = mock.MagicMock(return_value='current_value') + iterator.get_current_value = mock.MagicMock(return_value="current_value") iterator._refresh_iterator() iterator.get_current_value.assert_called_once() assert iterator.source.source == ListIterator(list(iterator.buffer_queue)).source assert iterator.iterator == iterator.source - assert iterator.current_value == 'current_value' + assert iterator.current_value == "current_value" assert iterator.buffer_queue == deque() def test_is_empty(self): @@ -220,7 +220,7 @@ class TestLocalIncomingLinks: @pytest.fixture def backend(self): backend = mock.MagicMock() - backend.get_atom.side_effect = lambda x, 
targets_document=None: {'handle': x} + backend.get_atom.side_effect = lambda x, targets_document=None: {"handle": x} return backend def test_get_next_value(self, backend): @@ -258,14 +258,14 @@ def test_get_current_value(self, backend): def test_get_fetch_data_kwargs(self, backend): iterator = LocalIncomingLinks(ListIterator([1, 2, 3]), backend=backend) assert iterator.get_fetch_data_kwargs() == { - 'handles_only': True, - 'cursor': iterator.cursor, - 'chunk_size': iterator.chunk_size, + "handles_only": True, + "cursor": iterator.cursor, + "chunk_size": iterator.chunk_size, } def test_get_fetch_data(self, backend): iterator = LocalIncomingLinks(ListIterator([1, 2, 3]), backend=backend) - kwargs = {'param1': 'value1', 'param2': 'value2'} + kwargs = {"param1": "value1", "param2": "value2"} result = iterator.get_fetch_data(**kwargs) assert result == backend.get_incoming_links(iterator.atom_handle, **kwargs) @@ -273,31 +273,36 @@ def test_get_fetch_data(self, backend): class TestRemoteIncomingLinks: def test_get_next_value(self): source = ListIterator( - [{'handle': 'link1'}, {'handle': 'link2'}, {'handle': 'link3'}, {'handle': 'link4'}] + [ + {"handle": "link1"}, + {"handle": "link2"}, + {"handle": "link3"}, + {"handle": "link4"}, + ] ) iterator = RemoteIncomingLinks(source) - iterator.returned_handles = set(['link2', 'link3']) + iterator.returned_handles = set(["link2", "link3"]) iterator.get_next_value() - assert iterator.current_value == {'handle': 'link1'} + assert iterator.current_value == {"handle": "link1"} iterator.get_next_value() - assert iterator.current_value == {'handle': 'link4'} + assert iterator.current_value == {"handle": "link4"} def test_get_current_value(self): source = ListIterator( [ - {'handle': 'link1'}, - {'handle': 'link2'}, + {"handle": "link1"}, + {"handle": "link2"}, ] ) iterator = RemoteIncomingLinks(source) iterator.get_next_value() - assert iterator.get_current_value() == {'handle': 'link1'} + assert iterator.get_current_value() == 
{"handle": "link1"} iterator.get_next_value() - assert iterator.get_current_value() == {'handle': 'link2'} + assert iterator.get_current_value() == {"handle": "link2"} iterator = RemoteIncomingLinks(ListIterator([])) assert iterator.is_empty() is True @@ -305,44 +310,50 @@ def test_get_current_value(self): def test_get_fetch_data_kwargs(self): source = ListIterator( [ - {'handle': 'link1'}, - {'handle': 'link2'}, + {"handle": "link1"}, + {"handle": "link2"}, ] ) - iterator = RemoteIncomingLinks(source, atom_handle='atom1', targets_document=True) + iterator = RemoteIncomingLinks(source, atom_handle="atom1", targets_document=True) kwargs = iterator.get_fetch_data_kwargs() assert kwargs == { - 'cursor': iterator.cursor, - 'chunk_size': iterator.chunk_size, - 'targets_document': iterator.targets_document, + "cursor": iterator.cursor, + "chunk_size": iterator.chunk_size, + "targets_document": iterator.targets_document, } def test_get_fetch_data(self): backend = mock.MagicMock() - backend.get_incoming_links.return_value = (123, [{'handle': 'link1'}, {'handle': 'link2'}]) + backend.get_incoming_links.return_value = ( + 123, + [{"handle": "link1"}, {"handle": "link2"}], + ) source = ListIterator([]) - iterator = RemoteIncomingLinks(source, atom_handle='atom1') + iterator = RemoteIncomingLinks(source, atom_handle="atom1") iterator.backend = backend result = iterator.get_fetch_data(cursor=0, chunk_size=100) - assert result == (123, [{'handle': 'link1'}, {'handle': 'link2'}]) - backend.get_incoming_links.assert_called_once_with('atom1', cursor=0, chunk_size=100) + assert result == (123, [{"handle": "link1"}, {"handle": "link2"}]) + backend.get_incoming_links.assert_called_once_with("atom1", cursor=0, chunk_size=100) class TestTraverseLinksIterator: @pytest.fixture def incoming_links(self): - source = ListIterator(['link1', 'link2', 'link3']) + source = ListIterator(["link1", "link2", "link3"]) backend = mock.Mock() targets_document = True backend.get_atom.side_effect = lambda 
handle, targets_document=targets_document: ( { - 'handle': handle, - 'named_type': f'Type{handle[-1]}', - 'targets': ['node11', f'node{handle[-1]}2'], + "handle": handle, + "named_type": f"Type{handle[-1]}", + "targets": ["node11", f"node{handle[-1]}2"], }, [ - {'handle': 'node11', 'named_type': 'Type2'}, - {'handle': f'node{handle[-1]}2', 'named_type': f'Type{int(handle[-1]) + 1}'}, + {"handle": "node11", "named_type": "Type2"}, + { + "handle": f"node{handle[-1]}2", + "named_type": f"Type{int(handle[-1]) + 1}", + }, ], ) return LocalIncomingLinks(source=source, backend=backend, targets_document=targets_document) @@ -359,19 +370,19 @@ def test_no_filters(self, incoming_links): assert iterator.is_empty() is False assert next(iterator) == { - 'handle': 'link1', - 'named_type': 'Type1', - 'targets': ['node11', 'node12'], + "handle": "link1", + "named_type": "Type1", + "targets": ["node11", "node12"], } assert next(iterator) == { - 'handle': 'link2', - 'named_type': 'Type2', - 'targets': ['node11', 'node22'], + "handle": "link2", + "named_type": "Type2", + "targets": ["node11", "node22"], } assert next(iterator) == { - 'handle': 'link3', - 'named_type': 'Type3', - 'targets': ['node11', 'node32'], + "handle": "link3", + "named_type": "Type3", + "targets": ["node11", "node32"], } with pytest.raises(StopIteration): next(iterator) @@ -379,36 +390,36 @@ def test_no_filters(self, incoming_links): def test_with_filters(self, incoming_links): iterator = TraverseLinksIterator( - source=incoming_links, link_type='Type2', target_type='Type3' + source=incoming_links, link_type="Type2", target_type="Type3" ) assert iterator.is_empty() is False assert next(iterator) == { - 'handle': 'link2', - 'named_type': 'Type2', - 'targets': ['node11', 'node22'], + "handle": "link2", + "named_type": "Type2", + "targets": ["node11", "node22"], } with pytest.raises(StopIteration): next(iterator) assert iterator.is_empty() is True def test_cursor_position(self, incoming_links): - iterator = 
TraverseLinksIterator(incoming_links, cursor_position=0, cursor='node11') + iterator = TraverseLinksIterator(incoming_links, cursor_position=0, cursor="node11") assert iterator.is_empty() is False assert next(iterator) == { - 'handle': 'link1', - 'named_type': 'Type1', - 'targets': ['node11', 'node12'], + "handle": "link1", + "named_type": "Type1", + "targets": ["node11", "node12"], } assert next(iterator) == { - 'handle': 'link2', - 'named_type': 'Type2', - 'targets': ['node11', 'node22'], + "handle": "link2", + "named_type": "Type2", + "targets": ["node11", "node22"], } assert next(iterator) == { - 'handle': 'link3', - 'named_type': 'Type3', - 'targets': ['node11', 'node32'], + "handle": "link3", + "named_type": "Type3", + "targets": ["node11", "node32"], } with pytest.raises(StopIteration): next(iterator) @@ -416,15 +427,15 @@ def test_cursor_position(self, incoming_links): def test_custom_filter(self, incoming_links): def custom_filter(link): - return link['named_type'] == 'Type3' + return link["named_type"] == "Type3" iterator = TraverseLinksIterator(incoming_links, filter=custom_filter) assert iterator.is_empty() is False assert next(iterator) == { - 'handle': 'link3', - 'named_type': 'Type3', - 'targets': ['node11', 'node32'], + "handle": "link3", + "named_type": "Type3", + "targets": ["node11", "node32"], } with pytest.raises(StopIteration): next(iterator) @@ -434,16 +445,16 @@ def test_targets_only(self, incoming_links): iterator = TraverseLinksIterator(incoming_links, targets_only=True) assert iterator.is_empty() is False assert next(iterator) == [ - {'handle': 'node11', 'named_type': 'Type2'}, - {'handle': 'node12', 'named_type': 'Type2'}, + {"handle": "node11", "named_type": "Type2"}, + {"handle": "node12", "named_type": "Type2"}, ] assert next(iterator) == [ - {'handle': 'node11', 'named_type': 'Type2'}, - {'handle': 'node22', 'named_type': 'Type3'}, + {"handle": "node11", "named_type": "Type2"}, + {"handle": "node22", "named_type": "Type3"}, ] assert 
next(iterator) == [ - {'handle': 'node11', 'named_type': 'Type2'}, - {'handle': 'node32', 'named_type': 'Type4'}, + {"handle": "node11", "named_type": "Type2"}, + {"handle": "node32", "named_type": "Type4"}, ] with pytest.raises(StopIteration): next(iterator) @@ -453,25 +464,28 @@ def test_targets_only(self, incoming_links): class TestTraverseNeighborsIterator: @pytest.fixture def traverse_links_iterator(self): - source = ListIterator(['link1', 'link2', 'link3']) + source = ListIterator(["link1", "link2", "link3"]) backend = mock.Mock() targets_document = True backend.get_atom.side_effect = lambda handle, targets_document=targets_document: ( { - 'handle': handle, - 'named_type': f'Type{handle[-1]}', - 'targets': ['node11', f'node{handle[-1]}2'], + "handle": handle, + "named_type": f"Type{handle[-1]}", + "targets": ["node11", f"node{handle[-1]}2"], }, [ - {'handle': 'node11', 'named_type': 'Type2'}, - {'handle': f'node{handle[-1]}2', 'named_type': f'Type{int(handle[-1]) + 1}'}, + {"handle": "node11", "named_type": "Type2"}, + { + "handle": f"node{handle[-1]}2", + "named_type": f"Type{int(handle[-1]) + 1}", + }, ], ) incoming_links = LocalIncomingLinks( source=source, backend=backend, targets_document=targets_document ) - return TraverseLinksIterator(incoming_links, targets_only=True, cursor='node11') + return TraverseLinksIterator(incoming_links, targets_only=True, cursor="node11") def test_init(self, traverse_links_iterator): iterator = TraverseNeighborsIterator(source=traverse_links_iterator) @@ -479,9 +493,9 @@ def test_init(self, traverse_links_iterator): assert iterator.buffered_answer is not None assert iterator.cursor == traverse_links_iterator.cursor assert iterator.target_type == traverse_links_iterator.target_type - assert iterator.visited_neighbors == ['node12'] + assert iterator.visited_neighbors == ["node12"] assert iterator.iterator == traverse_links_iterator - assert iterator.current_value == {'handle': 'node12', 'named_type': 'Type2'} + assert 
iterator.current_value == {"handle": "node12", "named_type": "Type2"} def test_next_with_buffered_answer(self): iterator = TraverseNeighborsIterator(source=mock.Mock()) @@ -492,29 +506,29 @@ def test_next_with_buffered_answer(self): def test_next_without_buffered_answer(self, traverse_links_iterator): iterator = TraverseNeighborsIterator(source=traverse_links_iterator) - assert next(iterator) == {'handle': 'node12', 'named_type': 'Type2'} - assert next(iterator) == {'handle': 'node22', 'named_type': 'Type3'} - assert next(iterator) == {'handle': 'node32', 'named_type': 'Type4'} + assert next(iterator) == {"handle": "node12", "named_type": "Type2"} + assert next(iterator) == {"handle": "node22", "named_type": "Type3"} + assert next(iterator) == {"handle": "node32", "named_type": "Type4"} with pytest.raises(StopIteration): next(iterator) def test_process_targets(self, traverse_links_iterator): iterator = TraverseNeighborsIterator(source=traverse_links_iterator) targets = [ - {'handle': 'node11', 'named_type': 'Type2'}, - {'handle': 'node22', 'named_type': 'Type3'}, + {"handle": "node11", "named_type": "Type2"}, + {"handle": "node22", "named_type": "Type3"}, ] answer, match_found = iterator._process_targets(targets) - assert answer == [{'handle': 'node22', 'named_type': 'Type3'}] + assert answer == [{"handle": "node22", "named_type": "Type3"}] assert match_found is True def test_filter(self, traverse_links_iterator): iterator = TraverseNeighborsIterator(source=traverse_links_iterator) - target = {'handle': 'node11', 'named_type': 'Type2'} + target = {"handle": "node11", "named_type": "Type2"} assert iterator._filter(target) is False - target = {'handle': 'node22', 'named_type': 'Type3'} + target = {"handle": "node22", "named_type": "Type3"} assert iterator._filter(target) is True def test_is_empty(self): @@ -524,5 +538,5 @@ def test_is_empty(self): iterator.current_value = None assert iterator.is_empty() is True - iterator.current_value = {'handle': 1} + 
iterator.current_value = {"handle": 1} assert iterator.is_empty() is False diff --git a/src/tests/python/unit/test_cache_controller.py b/src/tests/python/unit/test_cache_controller.py index 5c2ac44..32e6553 100644 --- a/src/tests/python/unit/test_cache_controller.py +++ b/src/tests/python/unit/test_cache_controller.py @@ -9,9 +9,9 @@ from hyperon_das.utils import QueryAnswer SYSTEM_PARAMETERS = { - 'attention_broker_hostname': None, - 'attention_broker_port': None, - 'cache_enabled': False, + "attention_broker_hostname": None, + "attention_broker_port": None, + "cache_enabled": False, } @@ -31,7 +31,7 @@ class TestCacheController: def _build_controller(self): params = SYSTEM_PARAMETERS.copy() controller = CacheController(params) - params['cache_enabled'] = True + params["cache_enabled"] = True controller.attention_broker = AttentionBrokerGatewayMock() return controller @@ -40,71 +40,77 @@ def test_creation(self): controller = CacheController({}) assert not controller.enabled() # assert default == False with pytest.raises(ValueError): - controller = CacheController({'cache_enabled': True}) - controller = CacheController({'cache_enabled': False}) + controller = CacheController({"cache_enabled": True}) + controller = CacheController({"cache_enabled": False}) assert not controller.enabled() def test_get_atom(self): controller = CacheController({}) - assert controller.get_atom('blah') is None + assert controller.get_atom("blah") is None def test_add_context(self): controller = self._build_controller() - node = NodeT(type='Context', name='blah') - node.handle = node._id = 'h1' + node = NodeT(type="Context", name="blah") + node.handle = node._id = "h1" context = Context( node, [ - [QueryAnswer({'handle': 'h1'}, None), QueryAnswer({'handle': 'h2'}, None)], - [QueryAnswer({'handle': 'h1'}, None), QueryAnswer({'handle': 'h3'}, None)], + [ + QueryAnswer({"handle": "h1"}, None), + QueryAnswer({"handle": "h2"}, None), + ], + [ + QueryAnswer({"handle": "h1"}, None), + 
QueryAnswer({"handle": "h3"}, None), + ], ], ) controller.add_context(context) broker = controller.attention_broker assert len(controller.attention_broker.handle_set_list) == 4 - assert broker.handle_set_list[0] == set(['h1']) - assert broker.handle_set_list[1] == set(['h2']) - assert broker.handle_set_list[2] == set(['h1']) - assert broker.handle_set_list[3] == set(['h3']) + assert broker.handle_set_list[0] == set(["h1"]) + assert broker.handle_set_list[1] == set(["h2"]) + assert broker.handle_set_list[2] == set(["h1"]) + assert broker.handle_set_list[3] == set(["h3"]) assert len(broker.handle_count) == 2 - assert broker.handle_count['h1'] == 1 - assert broker.handle_count['h3'] == 1 + assert broker.handle_count["h1"] == 1 + assert broker.handle_count["h3"] == 1 def test_regard_query_answer(self): controller = self._build_controller() - query_answer_1 = QueryAnswer({'handle': 'h7'}, None) + query_answer_1 = QueryAnswer({"handle": "h7"}, None) query_answer_2 = QueryAnswer( { - 'handle': 'h1', - 'targets': [ - {'handle': 'h8'}, - {'handle': 'h9'}, + "handle": "h1", + "targets": [ + {"handle": "h8"}, + {"handle": "h9"}, ], }, None, ) query_answer_3 = QueryAnswer( { - 'handle': 'h1', - 'targets': [ - {'handle': 'h2'}, + "handle": "h1", + "targets": [ + {"handle": "h2"}, { - 'handle': 'h2', - 'targets': [ - {'handle': 'h4'}, - {'handle': 'h1'}, + "handle": "h2", + "targets": [ + {"handle": "h4"}, + {"handle": "h1"}, ], }, { - 'handle': 'h5', - 'targets': [ - {'handle': 'h1'}, - {'handle': 'h6'}, + "handle": "h5", + "targets": [ + {"handle": "h1"}, + {"handle": "h6"}, ], }, - {'handle': 'h3'}, + {"handle": "h3"}, ], }, None, @@ -114,20 +120,20 @@ def test_regard_query_answer(self): broker = controller.attention_broker assert len(controller.attention_broker.handle_set_list) == 3 - assert broker.handle_set_list[0] == set(['h7']) - assert broker.handle_set_list[1] == set(['h1', 'h8', 'h9']) - assert broker.handle_set_list[2] == set(['h1', 'h2', 'h3', 'h4', 'h5', 'h6']) + 
assert broker.handle_set_list[0] == set(["h7"]) + assert broker.handle_set_list[1] == set(["h1", "h8", "h9"]) + assert broker.handle_set_list[2] == set(["h1", "h2", "h3", "h4", "h5", "h6"]) all_handles = set() for handle_set in broker.handle_set_list: all_handles.update(handle_set) assert len(broker.handle_count) == len(all_handles) - assert broker.handle_count['h1'] == 4 - assert broker.handle_count['h2'] == 2 - assert broker.handle_count['h3'] == 1 - assert broker.handle_count['h4'] == 1 - assert broker.handle_count['h5'] == 1 - assert broker.handle_count['h6'] == 1 - assert broker.handle_count['h7'] == 1 - assert broker.handle_count['h8'] == 1 - assert broker.handle_count['h9'] == 1 + assert broker.handle_count["h1"] == 4 + assert broker.handle_count["h2"] == 2 + assert broker.handle_count["h3"] == 1 + assert broker.handle_count["h4"] == 1 + assert broker.handle_count["h5"] == 1 + assert broker.handle_count["h6"] == 1 + assert broker.handle_count["h7"] == 1 + assert broker.handle_count["h8"] == 1 + assert broker.handle_count["h9"] == 1 diff --git a/src/tests/python/unit/test_client.py b/src/tests/python/unit/test_client.py index b24a602..55170da 100644 --- a/src/tests/python/unit/test_client.py +++ b/src/tests/python/unit/test_client.py @@ -6,20 +6,24 @@ import hyperon_das.link_filters as link_filter from hyperon_das.client import FunctionsClient -from hyperon_das.exceptions import FunctionsConnectionError, FunctionsTimeoutError, RequestError +from hyperon_das.exceptions import ( + FunctionsConnectionError, + FunctionsTimeoutError, + RequestError, +) from hyperon_das.utils import serialize class TestFunctionsClient: @pytest.fixture def mock_request(self): - with patch('requests.sessions.Session.request') as mock_request: + with patch("requests.sessions.Session.request") as mock_request: yield mock_request @pytest.fixture def client(self): - with patch('hyperon_das.utils.check_server_connection', return_value=(200, 'OK')): - return FunctionsClient(host='0.0.0.0', 
port=1000) + with patch("hyperon_das.utils.check_server_connection", return_value=(200, "OK")): + return FunctionsClient(host="0.0.0.0", port=1000) def test_get_atom_success(self, mock_request, client): expected_request_data = {"action": "get_atom", "input": {"handle": "123"}} @@ -33,13 +37,13 @@ def test_get_atom_success(self, mock_request, client): mock_request.return_value.status_code = 200 mock_request.return_value.content = serialize(expected_response) - result = client.get_atom(handle='123') + result = client.get_atom(handle="123") mock_request.assert_called_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response @@ -57,13 +61,13 @@ def test_create_context_success(self, mock_request, client): mock_request.return_value.status_code = 200 mock_request.return_value.content = serialize(expected_response) - result = client.create_context(name='n', queries=[]) + result = client.create_context(name="n", queries=[]) mock_request.assert_called_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response @@ -89,7 +93,10 @@ def test_get_links_success(self, mock_request, client): "handle": "ee1c03e6d1f104ccd811cfbba018451a", "type": "Inheritance", "template": ["Inheritance", "Concept", "Concept"], - "targets": ["4e8e26e3276af8a5c2ac2cc2dc95c6d2", "80aff30094874e75028033a38ce677bb"], + "targets": [ + "4e8e26e3276af8a5c2ac2cc2dc95c6d2", + "80aff30094874e75028033a38ce677bb", + ], } ] @@ -98,16 +105,19 @@ def test_get_links_success(self, 
mock_request, client): result = client.get_links( link_filter.Targets( - ['4e8e26e3276af8a5c2ac2cc2dc95c6d2', '80aff30094874e75028033a38ce677bb'], - 'Inheritance', + [ + "4e8e26e3276af8a5c2ac2cc2dc95c6d2", + "80aff30094874e75028033a38ce677bb", + ], + "Inheritance", ) ) mock_request.assert_called_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response @@ -162,10 +172,10 @@ def test_query_success(self, mock_request, client): result = client.query(query, parameters=[]) mock_request.assert_called_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response @@ -178,9 +188,13 @@ def test_query_success(self, mock_request, client): "query": { "atom_type": "link", "targets": [ - {'atom_type': 'variable', 'name': 'v1'}, - {'atom_type': 'node', 'type': 'Symbol', 'name': '"earthworm"'}, - {'atom_type': 'variable', 'name': 'v2'}, + {"atom_type": "variable", "name": "v1"}, + { + "atom_type": "node", + "type": "Symbol", + "name": '"earthworm"', + }, + {"atom_type": "variable", "name": "v2"}, ], }, "parameters": [], @@ -188,7 +202,7 @@ def test_query_success(self, mock_request, client): ), ( { - "query": {"atom_type": "link", 'type': 'Expression'}, + "query": {"atom_type": "link", "type": "Expression"}, "parameters": [], } ), @@ -196,11 +210,15 @@ def test_query_success(self, mock_request, client): { "query": { "atom_type": "node", - 'type': 'Expression', + "type": "Expression", "targets": [ - {'atom_type': 'variable', 'name': 'v1'}, - 
{'atom_type': 'node', 'type': 'Symbol', 'name': '"earthworm"'}, - {'atom_type': 'variable', 'name': 'v2'}, + {"atom_type": "variable", "name": "v1"}, + { + "atom_type": "node", + "type": "Symbol", + "name": '"earthworm"', + }, + {"atom_type": "variable", "name": "v2"}, ], }, "parameters": [], @@ -230,10 +248,10 @@ def test_query_malformed(self, query, mock_request, client): client.query(query, parameters=[]) mock_request.assert_called_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) def test_count_atoms_success(self, mock_request, client): @@ -245,37 +263,37 @@ def test_count_atoms_success(self, mock_request, client): result = client.count_atoms() mock_request.assert_called_once_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response def test_count_atoms_success_parameters(self, mock_request, client): - values = {'parameters': {'context': 'local'}} + values = {"parameters": {"context": "local"}} expected_request_data = {"action": "count_atoms", "input": values} expected_response = (14, 26) mock_request.return_value.status_code = 200 mock_request.return_value.content = serialize(expected_response) - result = client.count_atoms(values['parameters']) + result = client.count_atoms(values["parameters"]) mock_request.assert_called_once_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 
'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response def test_get_atoms_by_field(self, mock_request, client): - query = [{'field': 'name', 'value': 'test'}] + query = [{"field": "name", "value": "test"}] expected_request_data = { "action": "get_atoms_by_field", - "input": {'query': {k['field']: k['value'] for k in query}}, + "input": {"query": {k["field"]: k["value"] for k in query}}, } expected_response = (14, 26) mock_request.return_value.status_code = 200 @@ -283,35 +301,38 @@ def test_get_atoms_by_field(self, mock_request, client): result = client.get_atoms_by_field(query=query) mock_request.assert_called_once_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response def test_get_atoms_by_text_field(self, mock_request, client): - expected_input = {'text_value': 'value'} - expected_request_data = {"action": "get_atoms_by_text_field", "input": expected_input} + expected_input = {"text_value": "value"} + expected_request_data = { + "action": "get_atoms_by_text_field", + "input": expected_input, + } expected_response = (14, 26) mock_request.return_value.status_code = 200 mock_request.return_value.content = serialize(expected_response) result = client.get_atoms_by_text_field(**expected_input) mock_request.assert_called_once_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response def test_get_node_by_name_starting_with(self, mock_request, client): expected_input = 
{ - 'node_type': 'Concept', - 'startswith': 'nam', + "node_type": "Concept", + "startswith": "nam", } expected_request_data = { "action": "get_node_by_name_starting_with", @@ -323,10 +344,10 @@ def test_get_node_by_name_starting_with(self, mock_request, client): result = client.get_node_by_name_starting_with(**expected_input) mock_request.assert_called_once_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(expected_request_data), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response @@ -346,10 +367,10 @@ def test_send_request_success(self, mock_request, client): result = client._send_request(payload) mock_request.assert_called_with( - method='POST', - url='http://0.0.0.0:1000/function/query-engine', + method="POST", + url="http://0.0.0.0:1000/function/query-engine", data=serialize(payload), - headers={'Content-Type': 'application/octet-stream'}, + headers={"Content-Type": "application/octet-stream"}, ) assert result == expected_response diff --git a/src/tests/python/unit/test_context.py b/src/tests/python/unit/test_context.py index 1e3a4e9..6bf421d 100644 --- a/src/tests/python/unit/test_context.py +++ b/src/tests/python/unit/test_context.py @@ -5,9 +5,9 @@ class TestContext: def test_creation(self): - context_name = 'blah' - context_handle = 'h' - node = NodeT(type='Context', name=context_name) + context_name = "blah" + context_handle = "h" + node = NodeT(type="Context", name=context_name) node.handle = node._id = context_handle context = Context(node, []) assert context.name == context_name diff --git a/src/tests/python/unit/test_das.py b/src/tests/python/unit/test_das.py index ae192ea..86b3d78 100644 --- a/src/tests/python/unit/test_das.py +++ b/src/tests/python/unit/test_das.py @@ -18,46 +18,47 @@ def test_create_das(self): assert isinstance(das.backend, 
InMemoryDB) assert isinstance(das.query_engine, LocalQueryEngine) - with mock.patch('hyperon_das.utils.check_server_connection', return_value=(200, 'OK')): - das = DistributedAtomSpace(query_engine='remote', host='0.0.0.0', port=1234) + with mock.patch("hyperon_das.utils.check_server_connection", return_value=(200, "OK")): + das = DistributedAtomSpace(query_engine="remote", host="0.0.0.0", port=1234) assert isinstance(das.backend, InMemoryDB) assert isinstance(das.query_engine, RemoteQueryEngine) with pytest.raises(InvalidAtomDB): - das = DistributedAtomSpace(atomdb='snet') + das = DistributedAtomSpace(atomdb="snet") with pytest.raises(InvalidQueryEngine) as exc: - das = DistributedAtomSpace(query_engine='snet') + das = DistributedAtomSpace(query_engine="snet") assert exc.value.message == "Use either 'local' or 'remote'" - assert exc.value.details == 'query_engine=snet' + assert exc.value.details == "query_engine=snet" def test_get_incoming_links(self): das = DistributedAtomSpaceMock() - links = das.get_incoming_links('', handles_only=True) + links = das.get_incoming_links("", handles_only=True) assert len(links) == 7 - links = das.get_incoming_links('') + links = das.get_incoming_links("") assert len(links) == 7 - with mock.patch('hyperon_das.utils.check_server_connection', return_value=(200, 'OK')): - das_remote = DistributedAtomSpaceMock('remote', host='test', port=8080) + with mock.patch("hyperon_das.utils.check_server_connection", return_value=(200, "OK")): + das_remote = DistributedAtomSpaceMock("remote", host="test", port=8080) - with mock.patch('hyperon_das.client.FunctionsClient.get_incoming_links', return_value=[]): - links = das_remote.get_incoming_links('') + with mock.patch("hyperon_das.client.FunctionsClient.get_incoming_links", return_value=[]): + links = das_remote.get_incoming_links("") assert len(links) == 7 with mock.patch( - 'hyperon_das.client.FunctionsClient.get_incoming_links', return_value=[1, 2, 3, 4] + 
"hyperon_das.client.FunctionsClient.get_incoming_links", + return_value=[1, 2, 3, 4], ): - links = das_remote.get_incoming_links('') + links = das_remote.get_incoming_links("") assert links == [1, 2, 3, 4] with mock.patch( - 'hyperon_das.client.FunctionsClient.get_incoming_links', + "hyperon_das.client.FunctionsClient.get_incoming_links", return_value=["['Inheritance', '', '']"], ): - links = das_remote.get_incoming_links('', handles_only=True) + links = das_remote.get_incoming_links("", handles_only=True) assert set(links) == { "['Inheritance', '', '']", "['Similarity', '', '']", @@ -67,30 +68,30 @@ def test_get_incoming_links(self): def test_get_traversal_cursor(self): das = DistributedAtomSpace() - das.add_node(NodeT(type='Concept', name='human')) - human = das.compute_node_handle('Concept', 'human') + das.add_node(NodeT(type="Concept", name="human")) + human = das.compute_node_handle("Concept", "human") cursor = das.get_traversal_cursor(human) assert isinstance(cursor, TraverseEngine) with pytest.raises(GetTraversalCursorException) as exc: - das.get_traversal_cursor(handle='snet') + das.get_traversal_cursor(handle="snet") - assert exc.value.message == 'Cannot start Traversal. Atom does not exist' + assert exc.value.message == "Cannot start Traversal. 
Atom does not exist" def test_get_atom(self): das = DistributedAtomSpace() das.add_link( LinkT( - type='expression', + type="expression", targets=[ - NodeT(type='symbol', name='a'), + NodeT(type="symbol", name="a"), LinkT( - type='expression', + type="expression", targets=[ - NodeT(type='symbol', name='b'), - NodeT(type='symbol', name='c'), + NodeT(type="symbol", name="b"), + NodeT(type="symbol", name="c"), ], ), ], @@ -98,43 +99,43 @@ def test_get_atom(self): ) handle = {} - for n in ['a', 'b', 'c']: - handle[n] = das.compute_node_handle('symbol', n) + for n in ["a", "b", "c"]: + handle[n] = das.compute_node_handle("symbol", n) - handle['internal_link'] = das.compute_link_handle('expression', [handle['b'], handle['c']]) - handle['external_link'] = das.compute_link_handle( - 'expression', [handle['a'], handle['internal_link']] + handle["internal_link"] = das.compute_link_handle("expression", [handle["b"], handle["c"]]) + handle["external_link"] = das.compute_link_handle( + "expression", [handle["a"], handle["internal_link"]] ) - for n in ['a', 'b', 'c']: + for n in ["a", "b", "c"]: document = das.get_atom(handle[n]) - assert document.named_type == 'symbol' + assert document.named_type == "symbol" assert document.name == n assert document.handle == handle[n] - document = das.get_atom(handle['internal_link']) - assert document.named_type == 'expression' - assert document.handle == handle['internal_link'] - assert document.targets == [handle['b'], handle['c']] + document = das.get_atom(handle["internal_link"]) + assert document.named_type == "expression" + assert document.handle == handle["internal_link"] + assert document.targets == [handle["b"], handle["c"]] - document = das.get_atom(handle['external_link']) - assert document.named_type == 'expression' - assert document.handle == handle['external_link'] - assert document.targets == [handle['a'], handle['internal_link']] + document = das.get_atom(handle["external_link"]) + assert document.named_type == "expression" + 
assert document.handle == handle["external_link"] + assert document.targets == [handle["a"], handle["internal_link"]] - assert das.get_atoms([handle['a'], handle['external_link'], handle['c']]) == [ - das.get_atom(handle['a']), - das.get_atom(handle['external_link']), - das.get_atom(handle['c']), + assert das.get_atoms([handle["a"], handle["external_link"], handle["c"]]) == [ + das.get_atom(handle["a"]), + das.get_atom(handle["external_link"]), + das.get_atom(handle["c"]), ] def test_about(self): das = DistributedAtomSpace() assert isinstance(das.about(), dict) - assert 'das' in das.about() - assert 'atom_db' in das.about() - assert {'name', 'version', 'summary'} == set(das.about().get('das').keys()) - assert {'name', 'version', 'summary'} == set(das.about().get('atom_db').keys()) + assert "das" in das.about() + assert "atom_db" in das.about() + assert {"name", "version", "summary"} == set(das.about().get("das").keys()) + assert {"name", "version", "summary"} == set(das.about().get("atom_db").keys()) def test_create_context(self): das = DistributedAtomSpace() @@ -143,55 +144,55 @@ def test_create_context(self): def test_get_atoms_by_field(self): das = DistributedAtomSpaceMock() - atom_field = das.get_atoms_by_field({'Concept': 'human'}) + atom_field = das.get_atoms_by_field({"Concept": "human"}) assert atom_field def test_get_atoms_by_text_field(self): das = DistributedAtomSpaceMock() - atom_text_field = das.get_atoms_by_text_field(text_value='human', field='name') + atom_text_field = das.get_atoms_by_text_field(text_value="human", field="name") assert atom_text_field def test_get_node_by_name_starting_with(self): das = DistributedAtomSpaceMock() - atom_starting_with = das.get_node_by_name_starting_with('Concept', 'mon') + atom_starting_with = das.get_node_by_name_starting_with("Concept", "mon") assert atom_starting_with def test_count_atoms(self): das = DistributedAtomSpaceMock() atom_count = das.count_atoms() - assert atom_count == {'link_count': 26, 
'node_count': 14, 'atom_count': 40} + assert atom_count == {"link_count": 26, "node_count": 14, "atom_count": 40} def test_count_atoms_local(self): das = DistributedAtomSpaceMock() - atom_count = das.count_atoms({'context': 'local'}) - assert atom_count == {'link_count': 26, 'node_count': 14, 'atom_count': 40} + atom_count = das.count_atoms({"context": "local"}) + assert atom_count == {"link_count": 26, "node_count": 14, "atom_count": 40} def test_count_atoms_local_remote(self): das = DistributedAtomSpaceMock() - atom_count = das.count_atoms({'context': 'remote'}) + atom_count = das.count_atoms({"context": "remote"}) assert atom_count == {} def test_count_atoms_local_both(self): das = DistributedAtomSpaceMock() - atom_count = das.count_atoms({'context': 'both'}) - assert atom_count == {'link_count': 26, 'node_count': 14, 'atom_count': 40} + atom_count = das.count_atoms({"context": "both"}) + assert atom_count == {"link_count": 26, "node_count": 14, "atom_count": 40} # assert atom_count == (14, 26) def test_count_atoms_remote(self): - das = DistributedAtomSpaceMock('remote', host='localhost', port=123) + das = DistributedAtomSpaceMock("remote", host="localhost", port=123) with mock.patch( - 'hyperon_das.client.FunctionsClient.count_atoms', + "hyperon_das.client.FunctionsClient.count_atoms", return_value=(10, 0), ): - atom_count = das.count_atoms({'context': 'remote'}) + atom_count = das.count_atoms({"context": "remote"}) assert atom_count == (10, 0) def test_count_atoms_both(self): - das = DistributedAtomSpaceMock('remote', host='localhost', port=123) + das = DistributedAtomSpaceMock("remote", host="localhost", port=123) with mock.patch( - 'hyperon_das.client.FunctionsClient.count_atoms', - return_value={'link_count': 0, 'node_count': 10, 'atom_count': 0}, + "hyperon_das.client.FunctionsClient.count_atoms", + return_value={"link_count": 0, "node_count": 10, "atom_count": 0}, ): - atom_count = das.count_atoms({'context': 'both'}) + atom_count = 
das.count_atoms({"context": "both"}) # assert atom_count == (24, 26) - assert atom_count == {'link_count': 26, 'node_count': 24, 'atom_count': 40} + assert atom_count == {"link_count": 26, "node_count": 24, "atom_count": 40} diff --git a/src/tests/python/unit/test_database_private_methods.py b/src/tests/python/unit/test_database_private_methods.py index 42d297a..78efc9c 100644 --- a/src/tests/python/unit/test_database_private_methods.py +++ b/src/tests/python/unit/test_database_private_methods.py @@ -1,7 +1,11 @@ import pytest from hyperon_das_atomdb.database import AtomDB, LinkT, NodeT -from hyperon_das_atomdb.exceptions import AddLinkException, AddNodeException, AtomDoesNotExist +from hyperon_das_atomdb.exceptions import ( + AddLinkException, + AddNodeException, + AtomDoesNotExist, +) from tests.python.helpers import add_link, add_node, check_handle from .fixtures import in_memory_db, redis_mongo_db # noqa: F401 @@ -49,9 +53,9 @@ def test__reformat_document(self, database, kwlist, request): assert len(answer.targets) == 2 assert len(answer.targets_documents) == 2 assert answer.named_type == "Relation" - assert all( - isinstance(t, NodeT) for t in answer.targets_documents - ), answer.targets_documents + assert all(isinstance(t, NodeT) for t in answer.targets_documents), ( + answer.targets_documents + ) @pytest.mark.parametrize( "database,kwlist", diff --git a/src/tests/python/unit/test_database_public_methods.py b/src/tests/python/unit/test_database_public_methods.py index 6d31210..3abc1c5 100644 --- a/src/tests/python/unit/test_database_public_methods.py +++ b/src/tests/python/unit/test_database_public_methods.py @@ -4,7 +4,13 @@ import pytest from hyperon_das_atomdb.database import AtomDB, AtomT, LinkT, NodeT -from tests.python.helpers import add_link, add_node, check_handle, dict_to_link_params, dict_to_node_params +from tests.python.helpers import ( + add_link, + add_node, + check_handle, + dict_to_link_params, + dict_to_node_params, +) from 
tests.python.unit.fixtures import in_memory_db, redis_mongo_db # noqa: F401 diff --git a/src/tests/python/unit/test_decorators.py b/src/tests/python/unit/test_decorators.py index edc5f01..aaf743a 100644 --- a/src/tests/python/unit/test_decorators.py +++ b/src/tests/python/unit/test_decorators.py @@ -8,24 +8,24 @@ logger_mock = Mock() -@patch('hyperon_das.logger') +@patch("hyperon_das.logger") def test_retry_successful_connection(logger_mock): @retry(attempts=3, timeout_seconds=5) def successful_function(self, host, port): return 200, "Success" - result = successful_function({}, 'localhost', 80) + result = successful_function({}, "localhost", 80) - assert result == (200, 'Success') + assert result == (200, "Success") -@patch('hyperon_das.logger') +@patch("hyperon_das.logger") def test_retry_exception_raised(logger_mock): @retry(attempts=3, timeout_seconds=5) def exception_function(): raise ValueError("Simulated exception") with pytest.raises( - RetryConnectionError, match='An error occurs while connecting to the server' + RetryConnectionError, match="An error occurs while connecting to the server" ): exception_function() diff --git a/src/tests/python/unit/test_dict_query_tokenizer.py b/src/tests/python/unit/test_dict_query_tokenizer.py index 41fc0c0..815dca5 100644 --- a/src/tests/python/unit/test_dict_query_tokenizer.py +++ b/src/tests/python/unit/test_dict_query_tokenizer.py @@ -24,14 +24,14 @@ def test_tokenize_link(self): ], } expected_tokens = ( - 'LINK_TEMPLATE Expression 3 NODE Symbol Similarity LINK Expression 2 ' + "LINK_TEMPLATE Expression 3 NODE Symbol Similarity LINK Expression 2 " 'NODE Symbol Concept NODE Symbol "human" VARIABLE v1' ) assert DictQueryTokenizer.tokenize(query) == expected_tokens def test_untokenize_link(self): tokens = ( - 'LINK_TEMPLATE Expression 3 NODE Symbol Similarity LINK Expression 2 ' + "LINK_TEMPLATE Expression 3 NODE Symbol Similarity LINK Expression 2 " 'NODE Symbol Concept NODE Symbol "human" VARIABLE v1' ) expected_query = 
{ @@ -55,7 +55,8 @@ def test_untokenize_link(self): def test_tokenize_invalid_query(self): query = {"atom_type": "unknown", "name": "InvalidQuery"} with pytest.raises( - ValueError, match="Unsupported query, it should start with a link or an operator:" + ValueError, + match="Unsupported query, it should start with a link or an operator:", ): DictQueryTokenizer.tokenize(query) @@ -91,14 +92,16 @@ def test_untokenize_wrong_elements_count(self): def test_tokenize_invalid_start_node(self): query = {"atom_type": "node", "type": "Symbol", "name": "TestNode"} with pytest.raises( - ValueError, match="Unsupported query, it should start with a link or an operator:" + ValueError, + match="Unsupported query, it should start with a link or an operator:", ): DictQueryTokenizer.tokenize(query) def test_tokenize_invalid_start_variable(self): query = {"atom_type": "variable", "name": "TestVariable"} with pytest.raises( - ValueError, match="Unsupported query, it should start with a link or an operator:" + ValueError, + match="Unsupported query, it should start with a link or an operator:", ): DictQueryTokenizer.tokenize(query) diff --git a/src/tests/python/unit/test_queries.py b/src/tests/python/unit/test_queries.py index d570a1a..3183d46 100644 --- a/src/tests/python/unit/test_queries.py +++ b/src/tests/python/unit/test_queries.py @@ -5,16 +5,16 @@ def _name(link, index, typed=False): - named_type = f"{link['targets'][index]['type']}:" if typed else '' + named_type = f"{link['targets'][index]['type']}:" if typed else "" return f"{named_type}{link['targets'][index]['name']}" def _print_query_answer(query_answer, typed=False): if query_answer: for link in query_answer: - if len(link['targets']) == 2: + if len(link["targets"]) == 2: print(f"{link['type']}: {_name(link, 0)} -> {_name(link, 1)}") - elif len(link['targets']) == 3: + elif len(link["targets"]) == 3: print( f"{link['type']}: {_name(link, 0)}({_name(link, 1, typed)}) -> {_name(link, 2, typed)}" ) @@ -391,32 +391,32 @@ def 
test_conjunction(self): answer, [ [ - 'c93e1e758c53912638438e2a7d7f7b7f', - '1c3bf151ea200b2d9e088a1178d060cb', + "c93e1e758c53912638438e2a7d7f7b7f", + "1c3bf151ea200b2d9e088a1178d060cb", ], [ - 'f31dfe97db782e8cec26de18dddf8965', - '1c3bf151ea200b2d9e088a1178d060cb', + "f31dfe97db782e8cec26de18dddf8965", + "1c3bf151ea200b2d9e088a1178d060cb", ], [ - '75756335011dcedb71a0d9a7bd2da9e8', - '1c3bf151ea200b2d9e088a1178d060cb', + "75756335011dcedb71a0d9a7bd2da9e8", + "1c3bf151ea200b2d9e088a1178d060cb", ], [ - '116df61c01859c710d178ba14a483509', - 'b0f428929706d1d991e4d712ad08f9ab', + "116df61c01859c710d178ba14a483509", + "b0f428929706d1d991e4d712ad08f9ab", ], [ - '959924e3aab197af80a84c1ab261fd65', - 'b0f428929706d1d991e4d712ad08f9ab', + "959924e3aab197af80a84c1ab261fd65", + "b0f428929706d1d991e4d712ad08f9ab", ], [ - '906fa505ae3bc6336d80a5f9aaa47b3b', - '959924e3aab197af80a84c1ab261fd65', + "906fa505ae3bc6336d80a5f9aaa47b3b", + "959924e3aab197af80a84c1ab261fd65", ], [ - 'fbf03d17d6a40feff828a3f2c6e86f05', - '1c3bf151ea200b2d9e088a1178d060cb', + "fbf03d17d6a40feff828a3f2c6e86f05", + "1c3bf151ea200b2d9e088a1178d060cb", ], ], ) diff --git a/src/tests/python/unit/test_traverse_engine.py b/src/tests/python/unit/test_traverse_engine.py index 84d2406..b29644c 100644 --- a/src/tests/python/unit/test_traverse_engine.py +++ b/src/tests/python/unit/test_traverse_engine.py @@ -9,8 +9,8 @@ def simplify_links(links: list, das: DistributedAtomSpace) -> set: answers = set() for link in links: targets_name = [] - for target in link['targets']: - targets_name.append(das.get_atom(target)['name']) + for target in link["targets"]: + targets_name.append(das.get_atom(target)["name"]) answers.add(f"{link['named_type']} : {targets_name}") return answers @@ -18,7 +18,7 @@ def simplify_links(links: list, das: DistributedAtomSpace) -> set: def get_names(handles: str, das: DistributedAtomSpace) -> set: answer = set() for handle in handles: - answer.add(das.get_atom(handle)['name']) + 
answer.add(das.get_atom(handle)["name"]) return answer @@ -36,9 +36,9 @@ def test_get(self, das): cursor = das.get_traversal_cursor(animal_base_handles.human) current_cursor = cursor.get() - assert current_cursor['handle'] == animal_base_handles.human - assert current_cursor['name'] == 'human' - assert current_cursor['named_type'] == 'Concept' + assert current_cursor["handle"] == animal_base_handles.human + assert current_cursor["name"] == "human" + assert current_cursor["named_type"] == "Concept" def test_get_links(self, das): def _build_atom_answer(handle: str) -> dict: @@ -177,7 +177,10 @@ def _vine_links(): def _plant_links(): answers = _build_atom_answer(animal_base_handles.plant) assert len(answers) == 2 - assert answers == {"Inheritance : ['vine', 'plant']", "Inheritance : ['ent', 'plant']"} + assert answers == { + "Inheritance : ['vine', 'plant']", + "Inheritance : ['ent', 'plant']", + } def _similarity_inheritance_links(): answers = _build_atom_answer(animal_base_handles.similarity_human_monkey) @@ -281,7 +284,7 @@ def _build_atom_answer(handle: str, **filters) -> dict: return simplify_links(links, das) def _human_links(): - answers = _build_atom_answer(animal_base_handles.human, link_type='Similarity') + answers = _build_atom_answer(animal_base_handles.human, link_type="Similarity") assert answers == { "Similarity : ['human', 'chimp']", "Similarity : ['human', 'monkey']", @@ -291,7 +294,7 @@ def _human_links(): "Similarity : ['chimp', 'human']", } answers = _build_atom_answer( - animal_base_handles.human, link_type='Similarity', cursor_position=0 + animal_base_handles.human, link_type="Similarity", cursor_position=0 ) assert answers == { "Similarity : ['human', 'chimp']", @@ -299,7 +302,7 @@ def _human_links(): "Similarity : ['human', 'ent']", } answers = _build_atom_answer( - animal_base_handles.human, link_type='Similarity', cursor_position=1 + animal_base_handles.human, link_type="Similarity", cursor_position=1 ) assert answers == { "Similarity : 
['ent', 'human']", @@ -308,49 +311,49 @@ def _human_links(): } answers = _build_atom_answer( animal_base_handles.human, - link_type='Similarity', + link_type="Similarity", cursor_position=0, - target_type='Concept', + target_type="Concept", ) assert answers == { "Similarity : ['human', 'chimp']", "Similarity : ['human', 'monkey']", "Similarity : ['human', 'ent']", } - answers = _build_atom_answer(animal_base_handles.human, link_type='Fake') + answers = _build_atom_answer(animal_base_handles.human, link_type="Fake") assert len(answers) == 0 answers = _build_atom_answer(animal_base_handles.human, cursor_position=2) assert len(answers) == 0 - answers = _build_atom_answer(animal_base_handles.human, target_type='Fake') + answers = _build_atom_answer(animal_base_handles.human, target_type="Fake") assert len(answers) == 0 das.add_link( { - 'type': 'Similarity', - 'targets': [ - {'type': 'Concept', 'name': 'human'}, - {'type': 'Concept', 'name': 'snet'}, + "type": "Similarity", + "targets": [ + {"type": "Concept", "name": "human"}, + {"type": "Concept", "name": "snet"}, ], - 'weight': 0.5, + "weight": 0.5, } ) def my_filter(link) -> bool: - if 'weight' in link: + if "weight" in link: return True return False answers = _build_atom_answer( animal_base_handles.human, - link_type='Similarity', + link_type="Similarity", cursor_position=0, - target_type='Concept', + target_type="Concept", filter=my_filter, ) assert answers == {"Similarity : ['human', 'snet']"} def _mammal_links(): - answers = _build_atom_answer(animal_base_handles.mammal, link_type='Inheritance') + answers = _build_atom_answer(animal_base_handles.mammal, link_type="Inheritance") assert answers == { "Inheritance : ['mammal', 'animal']", "Inheritance : ['monkey', 'mammal']", @@ -359,11 +362,11 @@ def _mammal_links(): "Inheritance : ['rhino', 'mammal']", } answers = _build_atom_answer( - animal_base_handles.mammal, link_type='Inheritance', cursor_position=0 + animal_base_handles.mammal, link_type="Inheritance", 
cursor_position=0 ) assert answers == {"Inheritance : ['mammal', 'animal']"} answers = _build_atom_answer( - animal_base_handles.mammal, link_type='Inheritance', cursor_position=1 + animal_base_handles.mammal, link_type="Inheritance", cursor_position=1 ) assert answers == { "Inheritance : ['monkey', 'mammal']", @@ -373,9 +376,9 @@ def _mammal_links(): } answers = _build_atom_answer( animal_base_handles.mammal, - link_type='Inheritance', + link_type="Inheritance", cursor_position=1, - target_type='Concept', + target_type="Concept", ) assert answers == { "Inheritance : ['monkey', 'mammal']", @@ -383,50 +386,50 @@ def _mammal_links(): "Inheritance : ['human', 'mammal']", "Inheritance : ['rhino', 'mammal']", } - answers = _build_atom_answer(animal_base_handles.mammal, link_type='Similarity') + answers = _build_atom_answer(animal_base_handles.mammal, link_type="Similarity") assert len(answers) == 0 answers = _build_atom_answer(animal_base_handles.mammal, cursor_position=5) assert len(answers) == 0 - answers = _build_atom_answer(animal_base_handles.mammal, target_type='Snet') + answers = _build_atom_answer(animal_base_handles.mammal, target_type="Snet") assert len(answers) == 0 das.add_link( { - 'type': 'Inheritance', - 'targets': [ - {'type': 'Fake', 'name': 'fake1'}, - {'type': 'Concept', 'name': 'mammal'}, + "type": "Inheritance", + "targets": [ + {"type": "Fake", "name": "fake1"}, + {"type": "Concept", "name": "mammal"}, ], - 'weight': 0.4, + "weight": 0.4, } ) das.add_link( { - 'type': 'Inheritance', - 'targets': [ - {'type': 'Fake', 'name': 'fake2'}, - {'type': 'Concept', 'name': 'mammal'}, + "type": "Inheritance", + "targets": [ + {"type": "Fake", "name": "fake2"}, + {"type": "Concept", "name": "mammal"}, ], - 'weight': 0.5, + "weight": 0.5, } ) def my_filter(link) -> bool: - if 'weight' in link and link['weight'] >= 0.5: + if "weight" in link and link["weight"] >= 0.5: return True return False answers = _build_atom_answer( animal_base_handles.mammal, - 
link_type='Inheritance', + link_type="Inheritance", cursor_position=1, - target_type='Fake', + target_type="Fake", filter=my_filter, ) assert answers == {"Inheritance : ['fake2', 'mammal']"} def _snake_links(): - answers = _build_atom_answer(animal_base_handles.snake, link_type='Similarity') + answers = _build_atom_answer(animal_base_handles.snake, link_type="Similarity") assert answers == { "Similarity : ['snake', 'earthworm']", "Similarity : ['earthworm', 'snake']", @@ -434,24 +437,24 @@ def _snake_links(): "Similarity : ['vine', 'snake']", } answers = _build_atom_answer( - animal_base_handles.snake, link_type='Inheritance', cursor_position=0 + animal_base_handles.snake, link_type="Inheritance", cursor_position=0 ) assert answers == { "Inheritance : ['snake', 'reptile']", } answers = _build_atom_answer( - animal_base_handles.snake, link_type='Inheritance', cursor_position=1 + animal_base_handles.snake, link_type="Inheritance", cursor_position=1 ) assert len(answers) == 0 answers = _build_atom_answer( - animal_base_handles.snake, link_type='Similarity', cursor_position=0 + animal_base_handles.snake, link_type="Similarity", cursor_position=0 ) assert answers == { "Similarity : ['snake', 'earthworm']", "Similarity : ['snake', 'vine']", } answers = _build_atom_answer( - animal_base_handles.snake, link_type='Similarity', cursor_position=1 + animal_base_handles.snake, link_type="Similarity", cursor_position=1 ) assert answers == { "Similarity : ['earthworm', 'snake']", @@ -459,73 +462,73 @@ def _snake_links(): } answers = _build_atom_answer( animal_base_handles.snake, - link_type='Inheritance', + link_type="Inheritance", cursor_position=0, - target_type='Concept', + target_type="Concept", ) assert answers == {"Inheritance : ['snake', 'reptile']"} - answers = _build_atom_answer(animal_base_handles.snake, link_type='Evaluation') + answers = _build_atom_answer(animal_base_handles.snake, link_type="Evaluation") assert len(answers) == 0 answers = 
_build_atom_answer(animal_base_handles.snake, cursor_position=5) assert len(answers) == 0 answers = _build_atom_answer( animal_base_handles.snake, - link_type='Inheritance', + link_type="Inheritance", cursor_position=0, - target_type='Snet', + target_type="Snet", ) assert len(answers) == 0 das.add_link( { - 'type': 'Similarity', - 'targets': [ - {'type': 'Fake', 'name': 'fake1'}, - {'type': 'Concept', 'name': 'snake'}, + "type": "Similarity", + "targets": [ + {"type": "Fake", "name": "fake1"}, + {"type": "Concept", "name": "snake"}, ], - 'weight': 0.2, + "weight": 0.2, } ) das.add_link( { - 'type': 'Similarity', - 'targets': [ - {'type': 'Concept', 'name': 'snake'}, - {'type': 'Fake', 'name': 'fake1'}, + "type": "Similarity", + "targets": [ + {"type": "Concept", "name": "snake"}, + {"type": "Fake", "name": "fake1"}, ], - 'weight': 0.5, + "weight": 0.5, } ) def my_filter(link) -> bool: - if 'weight' in link and link['weight'] >= 0.5: + if "weight" in link and link["weight"] >= 0.5: return True return False answers = _build_atom_answer( animal_base_handles.snake, - link_type='Similarity', - target_type='Fake', + link_type="Similarity", + target_type="Fake", filter=my_filter, ) assert answers == {"Similarity : ['snake', 'fake1']"} def _similarity_human_monkey_links(): answers = _build_atom_answer( - animal_base_handles.similarity_human_monkey, link_type='Similarity' + animal_base_handles.similarity_human_monkey, link_type="Similarity" ) assert len(answers) == 0 das.add_link( { - 'type': 'Inheritance', - 'targets': [ - {'type': 'Fake', 'name': 'fake'}, + "type": "Inheritance", + "targets": [ + {"type": "Fake", "name": "fake"}, { - 'type': 'Similarity', - 'targets': [ - {'type': 'Concept', 'name': 'human'}, - {'type': 'Concept', 'name': 'monkey'}, + "type": "Similarity", + "targets": [ + {"type": "Concept", "name": "human"}, + {"type": "Concept", "name": "monkey"}, ], }, ], @@ -533,7 +536,7 @@ def _similarity_human_monkey_links(): ) cursor = 
das.get_traversal_cursor(animal_base_handles.similarity_human_monkey) - links = cursor.get_links(link_type='Inheritance') + links = cursor.get_links(link_type="Inheritance") answers = [link for link in links] assert len(answers) == 1 @@ -747,17 +750,17 @@ def _build_neighbors(handle: str, **filters) -> dict: return ret def _human_neighbors(): - neighbors = _build_neighbors(animal_base_handles.human, link_type='Inheritance') + neighbors = _build_neighbors(animal_base_handles.human, link_type="Inheritance") assert das.get_atom(animal_base_handles.mammal) in neighbors assert len(neighbors) == 1 - neighbors = _build_neighbors(animal_base_handles.human, link_type='Similarity') + neighbors = _build_neighbors(animal_base_handles.human, link_type="Similarity") assert das.get_atom(animal_base_handles.monkey) in neighbors assert das.get_atom(animal_base_handles.chimp) in neighbors assert das.get_atom(animal_base_handles.ent) in neighbors assert len(neighbors) == 3 - neighbors = _build_neighbors(animal_base_handles.human, target_type='Concept') + neighbors = _build_neighbors(animal_base_handles.human, target_type="Concept") assert das.get_atom(animal_base_handles.mammal) in neighbors assert das.get_atom(animal_base_handles.monkey) in neighbors assert das.get_atom(animal_base_handles.chimp) in neighbors @@ -765,66 +768,66 @@ def _human_neighbors(): assert len(neighbors) == 4 neighbors = _build_neighbors( - animal_base_handles.human, link_type='Inheritance', target_type='Snet' + animal_base_handles.human, link_type="Inheritance", target_type="Snet" ) assert len(neighbors) == 0 neighbors = _build_neighbors( - animal_base_handles.human, link_type='Similarity', target_type='Snet' + animal_base_handles.human, link_type="Similarity", target_type="Snet" ) assert len(neighbors) == 0 das.add_link( { - 'type': 'Similarity', - 'targets': [ - {'type': 'Concept', 'name': 'human'}, - {'type': 'Fake', 'name': 'fake-h', 'weight': 0.7}, + "type": "Similarity", + "targets": [ + {"type": 
"Concept", "name": "human"}, + {"type": "Fake", "name": "fake-h", "weight": 0.7}, ], } ) das.add_link( { - 'type': 'Similarity', - 'targets': [ - {'type': 'Fake', 'name': 'fake-h', 'weight': 0.3}, - {'type': 'Concept', 'name': 'human'}, + "type": "Similarity", + "targets": [ + {"type": "Fake", "name": "fake-h", "weight": 0.3}, + {"type": "Concept", "name": "human"}, ], } ) das.add_link( { - 'type': 'Inheritance', - 'targets': [ + "type": "Inheritance", + "targets": [ { - 'type': 'Fake', - 'name': 'fake-h2', - 'weight': 0.3, + "type": "Fake", + "name": "fake-h2", + "weight": 0.3, }, { - 'type': 'Fake', - 'name': 'fake-h3', - 'weight': 0.3, + "type": "Fake", + "name": "fake-h3", + "weight": 0.3, }, { - 'type': 'Fake', - 'name': 'fake-h4', - 'weight': 1.3, + "type": "Fake", + "name": "fake-h4", + "weight": 1.3, }, - {'type': 'Concept', 'name': 'human'}, + {"type": "Concept", "name": "human"}, ], } ) def my_filter(target) -> bool: - if 'weight' in target and target['weight'] >= 1: + if "weight" in target and target["weight"] >= 1: return True return False - fake_h = AtomDB.node_handle('Fake', 'fake-h') - fake_h2 = AtomDB.node_handle('Fake', 'fake-h2') - fake_h3 = AtomDB.node_handle('Fake', 'fake-h3') - fake_h4 = AtomDB.node_handle('Fake', 'fake-h4') + fake_h = AtomDB.node_handle("Fake", "fake-h") + fake_h2 = AtomDB.node_handle("Fake", "fake-h2") + fake_h3 = AtomDB.node_handle("Fake", "fake-h3") + fake_h4 = AtomDB.node_handle("Fake", "fake-h4") neighbors = _build_neighbors(animal_base_handles.human) assert das.get_atom(animal_base_handles.mammal) in neighbors @@ -838,23 +841,23 @@ def my_filter(target) -> bool: assert len(neighbors) == 8 neighbors = _build_neighbors( - animal_base_handles.human, link_type='Similarity', target_type='Fake' + animal_base_handles.human, link_type="Similarity", target_type="Fake" ) assert das.get_atom(fake_h) in neighbors assert len(neighbors) == 1 neighbors = _build_neighbors( animal_base_handles.human, - link_type='Inheritance', - 
target_type='Fake', + link_type="Inheritance", + target_type="Fake", filters=(None, my_filter), ) assert len(neighbors) == 1 neighbors = _build_neighbors( animal_base_handles.human, - link_type='Inheritance', - target_type='Fake', + link_type="Inheritance", + target_type="Fake", filters=None, ) @@ -863,103 +866,107 @@ def my_filter(target) -> bool: with pytest.raises(ValueError): _build_neighbors( animal_base_handles.human, - link_type='Inheritance', - target_type='Fake', + link_type="Inheritance", + target_type="Fake", filters=my_filter, ) def _vine_neighbors(): - neighbors = _build_neighbors(animal_base_handles.vine, link_type='Similarity') + neighbors = _build_neighbors(animal_base_handles.vine, link_type="Similarity") assert das.get_atom(animal_base_handles.snake) in neighbors assert len(neighbors) == 1 - neighbors = _build_neighbors(animal_base_handles.vine, link_type='Inheritance') + neighbors = _build_neighbors(animal_base_handles.vine, link_type="Inheritance") assert das.get_atom(animal_base_handles.plant) in neighbors assert len(neighbors) == 1 - fake_v1 = AtomDB.node_handle('Fake', 'fake-v1') - fake_v2 = AtomDB.node_handle('Fake', 'fake-v2') + fake_v1 = AtomDB.node_handle("Fake", "fake-v1") + fake_v2 = AtomDB.node_handle("Fake", "fake-v2") das.add_link( { - 'type': 'Inheritance', - 'targets': [ + "type": "Inheritance", + "targets": [ { - 'type': 'Fake', - 'name': 'fake-v1', - 'weight': 1, + "type": "Fake", + "name": "fake-v1", + "weight": 1, }, - {'type': 'Concept', 'name': 'vine'}, + {"type": "Concept", "name": "vine"}, ], } ) das.add_link( { - 'type': 'Similarity', - 'targets': [ - {'type': 'Concept', 'name': 'vine'}, + "type": "Similarity", + "targets": [ + {"type": "Concept", "name": "vine"}, { - 'type': 'Fake', - 'name': 'fake-v2', - 'weight': 0.7, + "type": "Fake", + "name": "fake-v2", + "weight": 0.7, }, ], } ) das.add_link( { - 'type': 'Similarity', - 'targets': [ + "type": "Similarity", + "targets": [ { - 'type': 'Fake', - 'name': 'fake-v2', - 
'weight': 0.3, + "type": "Fake", + "name": "fake-v2", + "weight": 0.3, }, - {'type': 'Concept', 'name': 'vine'}, + {"type": "Concept", "name": "vine"}, ], } ) - neighbors = _build_neighbors(animal_base_handles.vine, link_type='Inheritance') + neighbors = _build_neighbors(animal_base_handles.vine, link_type="Inheritance") assert das.get_atom(animal_base_handles.plant) in neighbors assert das.get_atom(fake_v1) in neighbors assert len(neighbors) == 2 - neighbors = _build_neighbors(animal_base_handles.vine, link_type='Similarity') + neighbors = _build_neighbors(animal_base_handles.vine, link_type="Similarity") assert das.get_atom(animal_base_handles.snake) in neighbors assert das.get_atom(fake_v2) in neighbors assert len(neighbors) == 2 neighbors = _build_neighbors( - animal_base_handles.vine, link_type='Similarity', target_type='Concept' + animal_base_handles.vine, link_type="Similarity", target_type="Concept" ) assert das.get_atom(animal_base_handles.snake) in neighbors assert len(neighbors) == 1 neighbors = _build_neighbors( - animal_base_handles.vine, link_type='Inheritance', target_type='Fake' + animal_base_handles.vine, link_type="Inheritance", target_type="Fake" ) assert das.get_atom(fake_v1) in neighbors assert len(neighbors) == 1 neighbors = _build_neighbors( - animal_base_handles.vine, link_type='Similarity', target_type='Fake' + animal_base_handles.vine, link_type="Similarity", target_type="Fake" ) assert das.get_atom(fake_v2) in neighbors assert len(neighbors) == 1 def my_filter(target) -> bool: - if 'weight' in target and target['weight'] >= 1: + if "weight" in target and target["weight"] >= 1: return True return False neighbors = _build_neighbors( - animal_base_handles.vine, link_type='Similarity', filters=(None, my_filter) + animal_base_handles.vine, + link_type="Similarity", + filters=(None, my_filter), ) assert len(neighbors) == 0 neighbors = _build_neighbors( - animal_base_handles.vine, link_type='Inheritance', filters=(None, my_filter) + 
animal_base_handles.vine, + link_type="Inheritance", + filters=(None, my_filter), ) assert das.get_atom(fake_v1) in neighbors assert len(neighbors) == 1 @@ -968,55 +975,55 @@ def _inheritance_dinosaur_reptile(): neighbors = _build_neighbors(animal_base_handles.inheritance_dinosaur_reptile) assert len(neighbors) == 0 - fake_dr1 = AtomDB.node_handle('Fake', 'fake-dr1') - fake_dr2 = AtomDB.node_handle('Fake', 'fake-dr2') + fake_dr1 = AtomDB.node_handle("Fake", "fake-dr1") + fake_dr2 = AtomDB.node_handle("Fake", "fake-dr2") das.add_link( { - 'type': 'Inheritance', - 'targets': [ - {'type': 'Fake', 'name': 'fake-dr1'}, + "type": "Inheritance", + "targets": [ + {"type": "Fake", "name": "fake-dr1"}, { - 'type': 'Inheritance', - 'targets': [ - {'type': 'Concept', 'name': 'dinosaur'}, - {'type': 'Concept', 'name': 'reptile'}, + "type": "Inheritance", + "targets": [ + {"type": "Concept", "name": "dinosaur"}, + {"type": "Concept", "name": "reptile"}, ], }, ], - 'weight': 1, + "weight": 1, } ) das.add_link( { - 'type': 'Similarity', - 'targets': [ + "type": "Similarity", + "targets": [ { - 'type': 'Inheritance', - 'targets': [ - {'type': 'Concept', 'name': 'dinosaur'}, - {'type': 'Concept', 'name': 'reptile'}, + "type": "Inheritance", + "targets": [ + {"type": "Concept", "name": "dinosaur"}, + {"type": "Concept", "name": "reptile"}, ], }, - {'type': 'Fake', 'name': 'fake-dr2'}, + {"type": "Fake", "name": "fake-dr2"}, ], - 'weight': 0.7, + "weight": 0.7, } ) das.add_link( { - 'type': 'Similarity', - 'targets': [ - {'type': 'Fake', 'name': 'fake-dr2'}, + "type": "Similarity", + "targets": [ + {"type": "Fake", "name": "fake-dr2"}, { - 'type': 'Inheritance', - 'targets': [ - {'type': 'Concept', 'name': 'dinosaur'}, - {'type': 'Concept', 'name': 'reptile'}, + "type": "Inheritance", + "targets": [ + {"type": "Concept", "name": "dinosaur"}, + {"type": "Concept", "name": "reptile"}, ], }, ], - 'weight': 0.3, + "weight": 0.3, } ) @@ -1026,39 +1033,40 @@ def 
_inheritance_dinosaur_reptile(): assert len(neighbors) == 2 neighbors = _build_neighbors( - animal_base_handles.inheritance_dinosaur_reptile, link_type='Similarity' + animal_base_handles.inheritance_dinosaur_reptile, link_type="Similarity" ) assert das.get_atom(fake_dr2) in neighbors assert len(neighbors) == 1 neighbors = _build_neighbors( - animal_base_handles.inheritance_dinosaur_reptile, link_type='Inheritance' + animal_base_handles.inheritance_dinosaur_reptile, + link_type="Inheritance", ) assert das.get_atom(fake_dr1) in neighbors assert len(neighbors) == 1 neighbors = _build_neighbors( animal_base_handles.inheritance_dinosaur_reptile, - link_type='Inheritance', - target_type='Fake', + link_type="Inheritance", + target_type="Fake", ) assert das.get_atom(fake_dr1) in neighbors assert len(neighbors) == 1 neighbors = _build_neighbors( animal_base_handles.inheritance_dinosaur_reptile, - link_type='Inheritance', - target_type='Concept', + link_type="Inheritance", + target_type="Concept", ) assert len(neighbors) == 0 neighbors = _build_neighbors( - animal_base_handles.inheritance_dinosaur_reptile, target_type='Concept' + animal_base_handles.inheritance_dinosaur_reptile, target_type="Concept" ) assert len(neighbors) == 0 neighbors = _build_neighbors( - animal_base_handles.inheritance_dinosaur_reptile, target_type='Fake' + animal_base_handles.inheritance_dinosaur_reptile, target_type="Fake" ) assert das.get_atom(fake_dr1) in neighbors assert das.get_atom(fake_dr2) in neighbors @@ -1072,42 +1080,42 @@ def _inheritance_dinosaur_reptile(): def test_follow_link(self, das, n): def _mammal(): cursor = das.get_traversal_cursor(animal_base_handles.mammal) - assert cursor.get()['name'] == 'mammal' + assert cursor.get()["name"] == "mammal" cursor.follow_link() - current_cursor = cursor.get()['name'] - assert current_cursor in ('monkey', 'chimp', 'human', 'animal', 'rhino') + current_cursor = cursor.get()["name"] + assert current_cursor in ("monkey", "chimp", "human", "animal", 
"rhino") cursor.follow_link() previous_cursor = current_cursor - current_cursor = cursor.get()['name'] - if previous_cursor == 'monkey': - assert current_cursor in ('mammal', 'chimp', 'human') - elif previous_cursor == 'chimp': - assert current_cursor in ('mammal', 'monkey', 'human') - elif previous_cursor == 'human': - assert current_cursor in ('mammal', 'monkey', 'chimp', 'ent') - elif previous_cursor == 'animal': - assert current_cursor in ('mammal', 'reptile', 'earthworm') - elif previous_cursor == 'rhino': - assert current_cursor in ('mammal', 'triceratops') + current_cursor = cursor.get()["name"] + if previous_cursor == "monkey": + assert current_cursor in ("mammal", "chimp", "human") + elif previous_cursor == "chimp": + assert current_cursor in ("mammal", "monkey", "human") + elif previous_cursor == "human": + assert current_cursor in ("mammal", "monkey", "chimp", "ent") + elif previous_cursor == "animal": + assert current_cursor in ("mammal", "reptile", "earthworm") + elif previous_cursor == "rhino": + assert current_cursor in ("mammal", "triceratops") def _earthworm(): cursor = das.get_traversal_cursor(animal_base_handles.earthworm) - assert cursor.get()['name'] == 'earthworm' + assert cursor.get()["name"] == "earthworm" cursor.follow_link() - current_cursor = cursor.get()['name'] - assert current_cursor in ('animal', 'snake') + current_cursor = cursor.get()["name"] + assert current_cursor in ("animal", "snake") cursor.follow_link() previous_cursor = current_cursor - current_cursor = cursor.get()['name'] + current_cursor = cursor.get()["name"] - if previous_cursor == 'animal': - assert current_cursor in ('mammal', 'reptile', 'earthworm') - elif previous_cursor == 'snake': - assert current_cursor in ('earthworm', 'reptile', 'vine') + if previous_cursor == "animal": + assert current_cursor in ("mammal", "reptile", "earthworm") + elif previous_cursor == "snake": + assert current_cursor in ("earthworm", "reptile", "vine") _mammal() _earthworm() @@ -1116,42 
+1124,42 @@ def _earthworm(): def test_follow_link_with_filters(self, das, n): def _mammal(): cursor = das.get_traversal_cursor(animal_base_handles.mammal) - assert cursor.get()['name'] == 'mammal' + assert cursor.get()["name"] == "mammal" - cursor.follow_link(link_type='Similarity') - assert cursor.get()['name'] == 'mammal' + cursor.follow_link(link_type="Similarity") + assert cursor.get()["name"] == "mammal" - cursor.follow_link(link_type='Inheritance') - current_cursor = cursor.get()['name'] - assert current_cursor in ('monkey', 'chimp', 'human', 'animal', 'rhino') + cursor.follow_link(link_type="Inheritance") + current_cursor = cursor.get()["name"] + assert current_cursor in ("monkey", "chimp", "human", "animal", "rhino") - cursor.follow_link(link_type='Inheritance', target_type='Concept') + cursor.follow_link(link_type="Inheritance", target_type="Concept") previous_cursor = current_cursor - current_cursor = cursor.get()['name'] - if previous_cursor == 'monkey': - assert current_cursor in ('mammal', 'chimp') - elif previous_cursor == 'chimp': - assert current_cursor in ('mammal', 'monkey', 'human') - elif previous_cursor == 'human': - assert current_cursor in ('mammal', 'monkey', 'chimp', 'ent') - elif previous_cursor == 'animal': - assert current_cursor in ('mammal', 'reptile', 'earthworm') - elif previous_cursor == 'rhino': - assert current_cursor in ('mammal', 'triceratops') - - cursor.follow_link(link_type='Inheritance', target_type='Fake') + current_cursor = cursor.get()["name"] + if previous_cursor == "monkey": + assert current_cursor in ("mammal", "chimp") + elif previous_cursor == "chimp": + assert current_cursor in ("mammal", "monkey", "human") + elif previous_cursor == "human": + assert current_cursor in ("mammal", "monkey", "chimp", "ent") + elif previous_cursor == "animal": + assert current_cursor in ("mammal", "reptile", "earthworm") + elif previous_cursor == "rhino": + assert current_cursor in ("mammal", "triceratops") + + 
cursor.follow_link(link_type="Inheritance", target_type="Fake") previous_cursor = current_cursor - current_cursor = cursor.get()['name'] + current_cursor = cursor.get()["name"] assert previous_cursor == current_cursor _mammal() def test_goto(self, das): cursor = das.get_traversal_cursor(animal_base_handles.human) - cursor.get()['name'] == 'human' + assert cursor.get()["name"] == "human" cursor.goto(animal_base_handles.ent) - assert cursor.get()['name'] == 'ent' + assert cursor.get()["name"] == "ent" with pytest.raises(AtomDoesNotExist): - cursor.goto('snet') + cursor.goto("snet") diff --git a/src/tools/bazel/pytest/main.py b/src/tools/bazel/pytest/main.py index fb23061..9dedbb2 100644 --- a/src/tools/bazel/pytest/main.py +++ b/src/tools/bazel/pytest/main.py @@ -4,5 +4,3 @@ if __name__ == "__main__": sys.exit(pytest.main(sys.argv[1:])) - - diff --git a/src/tools/format/BUILD.bazel b/src/tools/format/BUILD.bazel new file mode 100644 index 0000000..08f130f --- /dev/null +++ b/src/tools/format/BUILD.bazel @@ -0,0 +1,13 @@ +load("@aspect_rules_lint//format:defs.bzl", "format_multirun") + + +format_multirun( + name = "format", + # cc = "@llvm_toolchain_llvm//:bin/clang-format", + python = "@aspect_rules_lint//format:ruff", + disable_git_attribute_checks = True, + # starlark = "@buildifier_prebuilt//:buildifier" + + # TODO: ADD rules for yaml, markdown, rust, c + visibility = ["//visibility:public"], +) diff --git a/src/tools/lint/BUILD b/src/tools/lint/BUILD new file mode 100644 index 0000000..e69de29 diff --git a/src/tools/lint/linters.bzl b/src/tools/lint/linters.bzl new file mode 100644 index 0000000..2265c8b --- /dev/null +++ b/src/tools/lint/linters.bzl @@ -0,0 +1,12 @@ +load("@aspect_rules_lint//lint:lint_test.bzl", "lint_test") +load("@aspect_rules_lint//lint:ruff.bzl", "lint_ruff_aspect") + +ruff = lint_ruff_aspect( + binary = "@multitool//tools/ruff", + configs = [ + Label("@//:.ruff.toml"), + ], +) + +ruff_test = lint_test(aspect = ruff) +