diff --git a/.gitignore b/.gitignore index 41598c50..1d19183e 100644 --- a/.gitignore +++ b/.gitignore @@ -6,11 +6,14 @@ # Node artifact files node_modules/ dist/ - +jsonschema/json/ # Compiled Java class files *.class # Compiled Python bytecode +venv/ +build/ +*.egg-info/ *.py[cod] # Log files diff --git a/Pipfile b/Pipfile index ed290f6d..103f6143 100644 --- a/Pipfile +++ b/Pipfile @@ -11,15 +11,22 @@ furo = "==2021.10.9" [packages] riocli = {path = "."} -click-spinner = "==0.1.10" -click-help-colors = "==0.9.1" -click-repl = "==0.2.0" -click-plugins = "==1.1.1" -click = "==8.0.1" -rapyuta-io = "==0.32.0" -pyyaml = "==5.4.1" -click-completion = "==0.5.2" -argparse = "==1.4.0" +pretty-traceback = ">=2022.1018" +argparse = ">=1.4.0" +click = ">=8.0.1" +click-completion = ">=0.5.2" +click-help-colors = ">=0.9.1" +click-plugins = ">=1.1.1" +click-repl = ">=0.2.0" +click-spinner = ">=0.1.10" +dictdiffer = ">=0.9.0" +fastjsonschema = ">=2.16.1" +graphlib-backport = ">=1.0.3" +jinja2 = ">=3.0.1" +munch = ">=2.4.0" +pyyaml = ">=5.4.1" +rapyuta-io = ">=1.5.0" +tabulate = ">=0.8.0" [requires] python_version = "3" diff --git a/Pipfile.lock b/Pipfile.lock index 27717d53..41654844 100644 --- a/Pipfile.lock +++ b/Pipfile.lock @@ -1,7 +1,7 @@ { "_meta": { "hash": { - "sha256": "652e4c057f270398ae15f7661a3f57eb8c8860c3ec56e29060d4721487b51dc5" + "sha256": "c48a467c0c01a1c06fe1506990c955028fbc11515edcee6586015db4b1a1c74e" }, "pipfile-spec": 6, "requires": { @@ -26,25 +26,27 @@ }, "certifi": { "hashes": [ - "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", - "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" + "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14", + "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382" ], - "version": "==2019.9.11" + "markers": "python_version >= '3.6'", + "version": "==2022.9.24" }, - "chardet": { + "charset-normalizer": { "hashes": [ - "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", - "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" + "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", + "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" ], - "version": "==3.0.4" + "markers": "python_version >= '3.6'", + "version": "==2.1.1" }, "click": { "hashes": [ - "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a", - "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6" + "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", + "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" ], "index": "pypi", - "version": "==8.0.1" + "version": "==8.1.3" }, "click-completion": { "hashes": [ @@ -61,6 +63,14 @@ "index": "pypi", "version": "==0.9.1" }, + "click-plugins": { + "hashes": [ + "sha256:46ab999744a9d831159c3411bb0c79346d94a444df9a3a3742e9ed63645f264b", + "sha256:5d262006d3222f5057fd81e1623d4443e41dcda5dc815c06b442aa3c02889fc8" + ], + "index": "pypi", + "version": "==1.1.1" + }, "click-repl": { "hashes": [ "sha256:94b3fbbc9406a236f176e0506524b2937e4b23b6f4c0c0b2a0a83f8a64e9194b", @@ -77,302 +87,248 @@ "index": "pypi", "version": "==0.1.10" }, - "concurrencytest": { - "hashes": [ - "sha256:64a9c5b5cdb9949a375fcc5e114a82180f6a07cc1a026d3956230aecf980c2d8" - ], - "version": "==0.1.2" - }, - "enum34": { + "colorama": { "hashes": [ - "sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850", - 
"sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a", - "sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79", - "sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1" + "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da", + "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4" ], - "version": "==1.1.6" + "markers": "python_version >= '3.6'", + "version": "==0.4.5" }, - "extras": { + "dictdiffer": { "hashes": [ - "sha256:132e36de10b9c91d5d4cc620160a476e0468a88f16c9431817a6729611a81b4e", - "sha256:f689f08df47e2decf76aa6208c081306e7bd472630eb1ec8a875c67de2366e87" + "sha256:17bacf5fbfe613ccf1b6d512bd766e6b21fb798822a133aa86098b8ac9997578", + "sha256:442bfc693cfcadaf46674575d2eba1c53b42f5e404218ca2c2ff549f2df56595" ], - "version": "==1.0.0" + "index": "pypi", + "version": "==0.9.0" }, - "fixtures": { + "fastjsonschema": { "hashes": [ - "sha256:2a551b0421101de112d9497fb5f6fd25e5019391c0fbec9bad591ecae981420d", - "sha256:fcf0d60234f1544da717a9738325812de1f42c2fa085e2d9252d8fff5712b2ef" + "sha256:01e366f25d9047816fe3d288cbfc3e10541daf0af2044763f3d0ade42476da18", + "sha256:21f918e8d9a1a4ba9c22e09574ba72267a6762d47822db9add95f6454e51cc1c" ], - "version": "==3.0.0" + "index": "pypi", + "version": "==2.16.2" }, - "funcsigs": { + "graphlib-backport": { "hashes": [ - "sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca", - "sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50" + "sha256:24246967b9e7e6a91550bc770e6169585d35aa32790258579a8a3899a8c18fde", + "sha256:7bb8fc7757b8ae4e6d8000a26cd49e9232aaa9a3aa57edb478474b8424bfaae2" ], - "version": "==1.0.2" + "index": "pypi", + "version": "==1.0.3" }, "idna": { "hashes": [ - "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f", - "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4" + "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", + "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" ], - "version": "==2.6" + "markers": "python_version >= '3.5'", + "version": "==3.4" }, "jinja2": { "hashes": [ - "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4", - "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4" + "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", + "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" ], - "markers": "python_version >= '3.6'", - "version": "==3.0.1" - }, - "linecache2": { - "hashes": [ - "sha256:4b26ff4e7110db76eeb6f5a7b64a82623839d595c2038eeda662f2a2db78e97c", - "sha256:e78be9c0a0dfcbac712fe04fbf92b96cddae80b1b842f24248214c8496f006ef" - ], - "version": "==1.0.0" + "index": "pypi", + "version": "==3.1.2" }, "markupsafe": { "hashes": [ - "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298", - "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64", - "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b", - "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194", - "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567", - "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff", - "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724", - "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74", - "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646", - 
"sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35", - "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6", - "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a", - "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6", - "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad", - "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26", - "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38", - "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac", - "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7", - "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6", - "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047", - "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75", - "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f", - "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b", - "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135", - "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8", - "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a", - "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a", - "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1", - "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9", - "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864", - "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914", - "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee", - "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f", - "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18", - "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8", - "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2", - "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d", - "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b", - "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b", - "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86", - "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6", - "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f", - "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb", - "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833", - "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28", - "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e", - "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415", - "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902", - "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f", - "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d", - "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9", - "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d", - "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145", - "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066", - "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c", - 
"sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1", - "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a", - "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207", - "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f", - "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53", - "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd", - "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134", - "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85", - "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9", - "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5", - "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94", - "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509", - "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51", - "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872" + "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003", + "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88", + "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5", + "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7", + "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a", + "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603", + "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1", + "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135", + "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247", + "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6", + "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601", + "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77", + "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02", + "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e", + "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63", + "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f", + "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980", + "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b", + "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812", + "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff", + "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96", + "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1", + "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925", + "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a", + "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6", + "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e", + "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f", + "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4", + "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f", + "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3", + "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c", + "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a", + 
"sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417", + "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a", + "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a", + "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37", + "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452", + "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933", + "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a", + "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.1" + }, + "munch": { + "hashes": [ + "sha256:2d735f6f24d4dba3417fa448cae40c6e896ec1fdab6cdb5e6510999758a4dbd2", + "sha256:6f44af89a2ce4ed04ff8de41f70b226b984db10a91dcc7b9ac2efc1c77022fdd" ], - "markers": "python_version >= '3.6'", - "version": "==2.0.1" - }, - "mock": { - "hashes": [ - "sha256:5ce3c71c5545b472da17b72268978914d0252980348636840bd34a00b5cc96c1", - "sha256:b158b6df76edd239b8208d481dc46b6afd45a846b7812ff0ce58971cf5bc8bba" - ], - "version": "==2.0.0" - }, - "nose": { - "hashes": [ - "sha256:85273b87ab3db9307e3b1452b071e25c1db1cc812bc337d2a97ea0b0cf2ab6ba" - ], - "version": "==1.3.1" + "index": "pypi", + "version": "==2.5.0" }, - "pbr": { + "pretty-traceback": { "hashes": [ - "sha256:139d2625547dbfa5fb0b81daebb39601c478c21956dc57e2e07b74450a8c506b", - "sha256:61aa52a0f18b71c5cc58232d2cf8f8d09cd67fcad60b742a60124cb8d6951488" + "sha256:36c7c11b450e6fc645ddf2b494dbc3c6e8e7485c10c69fa213314ab473655199", + "sha256:7d07d4e1330f74acc329727bf8dedb9e1f8edea53565bb1d542cab76afcaaec8" ], - "version": "==5.4.4" + "index": "pypi", + "version": "==2022.1018" }, "prompt-toolkit": { "hashes": [ - "sha256:6076e46efae19b1e0ca1ec003ed37a933dc94b4d20f486235d436e64771dcd5c", - "sha256:eb71d5a6b72ce6db177af4a7d4d7085b99756bf656d98ffcc4fecd36850eea6c" + "sha256:9696f386133df0fc8ca5af4895afe5d78f5fcfe5258111c2a79a1c3e41ffa96d", + "sha256:9ada952c9d1787f52ff6d5f3484d0b4df8952787c087edf6a1f7c2cb1ea88148" ], "markers": "python_full_version >= '3.6.2'", - "version": "==3.0.20" - }, - "pyfakefs": { - "hashes": [ - "sha256:74cb0c53959f185f6b0fe6c9f6882359b2390df861d4857a9f1a95481b466ace", - "sha256:dbe4c6e9d1f2d9c2e22e9b0a5778080de8213e44bf164b58d5a7a89fcce1c3e7" - ], - "version": "==3.7" + "version": "==3.0.31" }, "python-dateutil": { "hashes": [ - "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", - "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.1" - }, - "python-mimeparse": { - "hashes": [ - "sha256:76e4b03d700a641fd7761d3cd4fdbbdcd787eade1ebfac43f877016328334f78", - "sha256:a295f03ff20341491bfe4717a39cd0a8cc9afad619ba44b77e86b0ab8a2b8282" - ], - "version": "==1.6.0" - }, - "python-subunit": { - "hashes": [ - "sha256:042039928120fbf392e8c983d60f3d8ae1b88f90a9f8fd7188ddd9c26cad1e48", - "sha256:40f34660c3da3e513cf2e59498a87ef04ebe2b5fe144fa25d476e1f888b19659" - ], - "version": "==1.4.0" + "version": "==2.8.2" }, "pytz": { "hashes": [ - "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", - "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798" + 
"sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197", + "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5" ], - "version": "==2021.1" + "version": "==2022.2.1" }, "pyyaml": { "hashes": [ - "sha256:08682f6b72c722394747bddaf0aa62277e02557c0fd1c42cb853016a38f8dedf", - "sha256:0f5f5786c0e09baddcd8b4b45f20a7b5d61a7e7e99846e3c799b05c7c53fa696", - "sha256:129def1b7c1bf22faffd67b8f3724645203b79d8f4cc81f674654d9902cb4393", - "sha256:294db365efa064d00b8d1ef65d8ea2c3426ac366c0c4368d930bf1c5fb497f77", - "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922", - "sha256:3bd0e463264cf257d1ffd2e40223b197271046d09dadf73a0fe82b9c1fc385a5", - "sha256:4465124ef1b18d9ace298060f4eccc64b0850899ac4ac53294547536533800c8", - "sha256:49d4cdd9065b9b6e206d0595fee27a96b5dd22618e7520c33204a4a3239d5b10", - "sha256:4e0583d24c881e14342eaf4ec5fbc97f934b999a6828693a99157fde912540cc", - "sha256:5accb17103e43963b80e6f837831f38d314a0495500067cb25afab2e8d7a4018", - "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e", - "sha256:6c78645d400265a062508ae399b60b8c167bf003db364ecb26dcab2bda048253", - "sha256:72a01f726a9c7851ca9bfad6fd09ca4e090a023c00945ea05ba1638c09dc3347", - "sha256:74c1485f7707cf707a7aef42ef6322b8f97921bd89be2ab6317fd782c2d53183", - "sha256:895f61ef02e8fed38159bb70f7e100e00f471eae2bc838cd0f4ebb21e28f8541", - "sha256:8c1be557ee92a20f184922c7b6424e8ab6691788e6d86137c5d93c1a6ec1b8fb", - "sha256:bb4191dfc9306777bc594117aee052446b3fa88737cd13b7188d0e7aa8162185", - "sha256:bfb51918d4ff3d77c1c856a9699f8492c612cde32fd3bcd344af9be34999bfdc", - "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db", - "sha256:cb333c16912324fd5f769fff6bc5de372e9e7a202247b48870bc251ed40239aa", - "sha256:d2d9808ea7b4af864f35ea216be506ecec180628aced0704e34aca0b040ffe46", - "sha256:d483ad4e639292c90170eb6f7783ad19490e7a8defb3e46f97dfe4bacae89122", - "sha256:dd5de0646207f053eb0d6c74ae45ba98c3395a571a2891858e87df7c9b9bd51b", - "sha256:e1d4970ea66be07ae37a3c2e48b5ec63f7ba6804bdddfdbd3cfd954d25a82e63", - "sha256:e4fac90784481d221a8e4b1162afa7c47ed953be40d31ab4629ae917510051df", - "sha256:fa5ae20527d8e831e8230cbffd9f8fe952815b2b7dae6ffec25318803a7528fc", - "sha256:fd7f6999a8070df521b6384004ef42833b9bd62cfee11a09bda1079b4b704247", - "sha256:fdc842473cd33f45ff6bce46aea678a54e3d21f1b61a7750ce3c498eedfe25d6", - "sha256:fe69978f3f768926cfa37b867e3843918e012cf83f680806599ddce33c2c68b0" + "sha256:01b45c0191e6d66c470b6cf1b9531a771a83c1c4208272ead47a3ae4f2f603bf", + "sha256:0283c35a6a9fbf047493e3a0ce8d79ef5030852c51e9d911a27badfde0605293", + "sha256:055d937d65826939cb044fc8c9b08889e8c743fdc6a32b33e2390f66013e449b", + "sha256:07751360502caac1c067a8132d150cf3d61339af5691fe9e87803040dbc5db57", + "sha256:0b4624f379dab24d3725ffde76559cff63d9ec94e1736b556dacdfebe5ab6d4b", + "sha256:0ce82d761c532fe4ec3f87fc45688bdd3a4c1dc5e0b4a19814b9009a29baefd4", + "sha256:1e4747bc279b4f613a09eb64bba2ba602d8a6664c6ce6396a4d0cd413a50ce07", + "sha256:213c60cd50106436cc818accf5baa1aba61c0189ff610f64f4a3e8c6726218ba", + "sha256:231710d57adfd809ef5d34183b8ed1eeae3f76459c18fb4a0b373ad56bedcdd9", + "sha256:277a0ef2981ca40581a47093e9e2d13b3f1fbbeffae064c1d21bfceba2030287", + "sha256:2cd5df3de48857ed0544b34e2d40e9fac445930039f3cfe4bcc592a1f836d513", + "sha256:40527857252b61eacd1d9af500c3337ba8deb8fc298940291486c465c8b46ec0", + "sha256:432557aa2c09802be39460360ddffd48156e30721f5e8d917f01d31694216782", + "sha256:473f9edb243cb1935ab5a084eb238d842fb8f404ed2193a915d1784b5a6b5fc0", + 
"sha256:48c346915c114f5fdb3ead70312bd042a953a8ce5c7106d5bfb1a5254e47da92", + "sha256:50602afada6d6cbfad699b0c7bb50d5ccffa7e46a3d738092afddc1f9758427f", + "sha256:68fb519c14306fec9720a2a5b45bc9f0c8d1b9c72adf45c37baedfcd949c35a2", + "sha256:77f396e6ef4c73fdc33a9157446466f1cff553d979bd00ecb64385760c6babdc", + "sha256:81957921f441d50af23654aa6c5e5eaf9b06aba7f0a19c18a538dc7ef291c5a1", + "sha256:819b3830a1543db06c4d4b865e70ded25be52a2e0631ccd2f6a47a2822f2fd7c", + "sha256:897b80890765f037df3403d22bab41627ca8811ae55e9a722fd0392850ec4d86", + "sha256:98c4d36e99714e55cfbaaee6dd5badbc9a1ec339ebfc3b1f52e293aee6bb71a4", + "sha256:9df7ed3b3d2e0ecfe09e14741b857df43adb5a3ddadc919a2d94fbdf78fea53c", + "sha256:9fa600030013c4de8165339db93d182b9431076eb98eb40ee068700c9c813e34", + "sha256:a80a78046a72361de73f8f395f1f1e49f956c6be882eed58505a15f3e430962b", + "sha256:afa17f5bc4d1b10afd4466fd3a44dc0e245382deca5b3c353d8b757f9e3ecb8d", + "sha256:b3d267842bf12586ba6c734f89d1f5b871df0273157918b0ccefa29deb05c21c", + "sha256:b5b9eccad747aabaaffbc6064800670f0c297e52c12754eb1d976c57e4f74dcb", + "sha256:bfaef573a63ba8923503d27530362590ff4f576c626d86a9fed95822a8255fd7", + "sha256:c5687b8d43cf58545ade1fe3e055f70eac7a5a1a0bf42824308d868289a95737", + "sha256:cba8c411ef271aa037d7357a2bc8f9ee8b58b9965831d9e51baf703280dc73d3", + "sha256:d15a181d1ecd0d4270dc32edb46f7cb7733c7c508857278d3d378d14d606db2d", + "sha256:d4b0ba9512519522b118090257be113b9468d804b19d63c71dbcf4a48fa32358", + "sha256:d4db7c7aef085872ef65a8fd7d6d09a14ae91f691dec3e87ee5ee0539d516f53", + "sha256:d4eccecf9adf6fbcc6861a38015c2a64f38b9d94838ac1810a9023a0609e1b78", + "sha256:d67d839ede4ed1b28a4e8909735fc992a923cdb84e618544973d7dfc71540803", + "sha256:daf496c58a8c52083df09b80c860005194014c3698698d1a57cbcfa182142a3a", + "sha256:dbad0e9d368bb989f4515da330b88a057617d16b6a8245084f1b05400f24609f", + "sha256:e61ceaab6f49fb8bdfaa0f92c4b57bcfbea54c09277b1b4f7ac376bfb7a7c174", + "sha256:f84fbc98b019fef2ee9a1cb3ce93e3187a6df0b2538a651bfb890254ba9f90b5" ], "index": "pypi", - "version": "==5.4.1" + "version": "==6.0" }, "rapyuta-io": { "hashes": [ - "sha256:7502dcd00ab7939c730d5887ab4b6b6b7fe5e48198f4448caf449cfbd015b4d8" + "sha256:b69036ca16b2f4157fce4b5d4f6c9c3636082a43b1c9098731d8deba8389f8f1", + "sha256:cb25025eb78fc379db68c80024800b6955713b85a887feb6573ad4349beab36c" ], "index": "pypi", - "version": "==0.32.0" + "version": "==1.5.0" }, "rapyuta-io-cli": { "path": ".", - "version": "==0.1.0" + "version": "==0.3.1" }, "requests": { "hashes": [ - "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b", - "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e" + "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", + "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" ], - "version": "==2.18.4" + "markers": "python_version >= '3.7' and python_version < '4.0'", + "version": "==2.28.1" }, "riocli": { "path": "." 
}, - "shellingham": { - "hashes": [ - "sha256:4855c2458d6904829bd34c299f11fdeed7cfefbf8a2c522e4caea6cd76b3171e", - "sha256:536b67a0697f2e4af32ab176c00a50ac2899c5a05e0d8e2dadac8e58888283f9" - ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.4.0" - }, - "six": { + "setuptools": { "hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:a8f6e213b4b0661f590ccf40de95d28a177cd747d098624ad3f69c40287297e9", + "sha256:c2d2709550f15aab6c9110196ea312f468f41cd546bceb24127a1be6fdcaeeb1" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.13.0" + "markers": "python_version >= '3.7'", + "version": "==65.4.0" }, - "testtools": { + "shellingham": { "hashes": [ - "sha256:36ff4998177c7d32ffe5fed3d541cb9ee62618a3b8e745c55510698997774ba4", - "sha256:64c974a6cca4385d05f4bbfa2deca1c39ce88ede31c3448bee86a7259a9a61c8" + "sha256:72fb7f5c63103ca2cb91b23dee0c71fe8ad6fbfd46418ef17dbe40db51592dad", + "sha256:a8f02ba61b69baaa13facdba62908ca8690a94b8119b69f5ec5873ea85f7391b" ], - "version": "==2.4.0" + "markers": "python_version >= '3.4'", + "version": "==1.5.0" }, - "traceback2": { + "six": { "hashes": [ - "sha256:05acc67a09980c2ecfedd3423f7ae0104839eccb55fc645773e1caa0951c3030", - "sha256:8253cebec4b19094d67cc5ed5af99bf1dba1285292226e98a31929f87a5d6b23" + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "version": "==1.4.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" }, - "unittest2": { + "tabulate": { "hashes": [ - "sha256:13f77d0875db6d9b435e1d4f41e74ad4cc2eb6e1d5c824996092b3430f088bb8", - "sha256:22882a0e418c284e1f718a822b3b022944d53d2d908e1690b319a9d3eb2c0579" + "sha256:0ba055423dbaa164b9e456abe7920c5e8ed33fcc16f6d1b2f2d152c8e1e8b4fc", + "sha256:436f1c768b424654fce8597290d2764def1eea6a77cfa5c33be00b1bc0f4f63d", + "sha256:6c57f3f3dd7ac2782770155f3adb2db0b1a269637e42f27599925e64b114f519" ], - "version": "==1.1.0" + "index": "pypi", + "version": "==0.8.10" }, "urllib3": { "hashes": [ - "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b", - "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f" + "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e", + "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997" ], - "version": "==1.22" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4.0'", + "version": "==1.26.12" }, "wcwidth": { "hashes": [ @@ -399,27 +355,27 @@ }, "attrs": { "hashes": [ - "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1", - "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb" + "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6", + "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==21.2.0" + "markers": "python_version >= '3.5'", + "version": "==22.1.0" }, "babel": { "hashes": [ - "sha256:ab49e12b91d937cd11f0b67cb259a57ab4ad2b59ac7a3b41d6c06c0ac5b0def9", - "sha256:bc0c176f9f6a994582230df350aa6e05ba2ebe4b3ac317eab29d9be5d2768da0" + 
"sha256:7614553711ee97490f732126dc077f8d0ae084ebc6a96e23db1482afabdb2c51", + "sha256:ff56f4892c1c4bf0d814575ea23471c230d544203c7748e8c68f0089478d48eb" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.9.1" + "markers": "python_version >= '3.6'", + "version": "==2.10.3" }, "beautifulsoup4": { "hashes": [ - "sha256:9a315ce70049920ea4572a4055bc4bd700c940521d36fc858205ad4fcde149bf", - "sha256:c23ad23c521d818955a4151a67d81580319d4bf548d3d49f4223ae041ff98891" + "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30", + "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693" ], - "markers": "python_version >= '3.1'", - "version": "==4.10.0" + "markers": "python_version >= '3.6'", + "version": "==4.11.1" }, "black": { "hashes": [ @@ -444,41 +400,42 @@ }, "certifi": { "hashes": [ - "sha256:e4f3620cfea4f83eedc95b24abd9cd56f3c4b146dd0177e83a21b4eb49e21e50", - "sha256:fd7c7c74727ddcf00e9acd26bba8da604ffec95bf1c2144e67aff7a8b50e6cef" + "sha256:0d9c601124e5a6ba9712dbc60d9c53c21e34f5f641fe83002317394311bdce14", + "sha256:90c1a32f1d68f940488354e36370f6cca89f0f106db09518524c88d6ed83f382" ], - "version": "==2019.9.11" + "markers": "python_version >= '3.6'", + "version": "==2022.9.24" }, "charset-normalizer": { "hashes": [ - "sha256:e019de665e2bcf9c2b64e2e5aa025fa991da8720daa3c1138cadd2fd1856aed0", - "sha256:f7af805c321bfa1ce6714c51f254e0d5bb5e5834039bc17db7ebe3a4cec9492b" + "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845", + "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f" ], - "markers": "python_version >= '3'", - "version": "==2.0.7" + "markers": "python_version >= '3.6'", + "version": "==2.1.1" }, "click": { "hashes": [ - "sha256:8c04c11192119b1ef78ea049e0a6f0463e4c48ef00a30160c704337586f3ad7a", - "sha256:fba402a4a47334742d782209a7c79bc448911afe1149d07bdabdf480b3e2f4b6" + "sha256:7682dc8afb30297001674575ea00d1814d808d6a36af415a82bd481d37ba7b8e", + "sha256:bb4d8133cb15a609f44e8213d9b391b0809795062913b383c62be0ee95b1db48" ], "index": "pypi", - "version": "==8.0.1" + "version": "==8.1.3" }, "colorama": { "hashes": [ - "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b", - "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2" + "sha256:854bf444933e37f5824ae7bfc1e98d5bce2ebe4160d46b5edf346a89358e99da", + "sha256:e6c6b4334fc50988a639d9b98aa429a0b57da6e17b9a44f0451f930b6967b7a4" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.4.4" + "markers": "python_version >= '3.6'", + "version": "==0.4.5" }, "distlib": { "hashes": [ - "sha256:c8b54e8454e5bf6237cc84c20e8264c3e991e824ef27e8f1e81049867d861e31", - "sha256:d982d0751ff6eaaab5e2ec8e691d949ee80eddf01a62eaa96ddb11531fe16b05" + "sha256:14bad2d9b04d3a36127ac97f30b12a19268f211063d8f8ee4f47108896e11b46", + "sha256:f35c4b692542ca110de7ef0bea44d73981caeb34ca0b9b6b2e6d7790dda8f80e" ], - "version": "==0.3.3" + "version": "==0.3.6" }, "docutils": { "hashes": [ @@ -498,101 +455,73 @@ }, "idna": { "hashes": [ - "sha256:2c6a5de3089009e3da7c5dde64a141dbc8551d5b7f6cf4ed7c2568d0cc520a8f", - "sha256:8c7309c718f94b3a625cb648ace320157ad16ff131ae0af362c9f21b80ef6ec4" + "sha256:814f528e8dead7d329833b91c5faa87d60bf71824cd12a7530b5526063d02cb4", + "sha256:90b77e79eaa3eba6de819a0c442c0b4ceefc341a7a2ab77d7562bf49f425c5c2" ], - "version": "==2.6" + "markers": "python_version >= '3.5'", + "version": "==3.4" }, "imagesize": { "hashes": [ - 
"sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1", - "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1" + "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", + "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.2.0" + "version": "==1.4.1" }, "jinja2": { "hashes": [ - "sha256:1f06f2da51e7b56b8f238affdd6b4e2c61e39598a378cc49345bc1bd42a978a4", - "sha256:703f484b47a6af502e743c9122595cc812b0271f661722403114f71a79d0f5a4" + "sha256:31351a702a408a9e7595a8fc6150fc3f43bb6bf7e319770cbc0db9df9437e852", + "sha256:6088930bfe239f0e6710546ab9c19c9ef35e29792895fed6e6e31a023a182a61" ], - "markers": "python_version >= '3.6'", - "version": "==3.0.1" + "index": "pypi", + "version": "==3.1.2" }, "markupsafe": { "hashes": [ - "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298", - "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64", - "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b", - "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194", - "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567", - "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff", - "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724", - "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74", - "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646", - "sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35", - "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6", - "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a", - "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6", - "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad", - "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26", - "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38", - "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac", - "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7", - "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6", - "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047", - "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75", - "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f", - "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b", - "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135", - "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8", - "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a", - "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a", - "sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1", - "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9", - "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864", - "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914", - "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee", - "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f", - "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18", - 
"sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8", - "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2", - "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d", - "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b", - "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b", - "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86", - "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6", - "sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f", - "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb", - "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833", - "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28", - "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e", - "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415", - "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902", - "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f", - "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d", - "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9", - "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d", - "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145", - "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066", - "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c", - "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1", - "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a", - "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207", - "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f", - "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53", - "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd", - "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134", - "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85", - "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9", - "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5", - "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94", - "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509", - "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51", - "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872" - ], - "markers": "python_version >= '3.6'", - "version": "==2.0.1" + "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003", + "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88", + "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5", + "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7", + "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a", + "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603", + "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1", + "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135", + "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247", + "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6", + 
"sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601", + "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77", + "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02", + "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e", + "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63", + "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f", + "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980", + "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b", + "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812", + "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff", + "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96", + "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1", + "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925", + "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a", + "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6", + "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e", + "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f", + "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4", + "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f", + "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3", + "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c", + "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a", + "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417", + "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a", + "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a", + "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37", + "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452", + "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933", + "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a", + "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7" + ], + "markers": "python_version >= '3.7'", + "version": "==2.1.1" }, "orderedmultidict": { "hashes": [ @@ -611,25 +540,35 @@ }, "pathspec": { "hashes": [ - "sha256:7d15c4ddb0b5c802d161efc417ec1a2558ea2653c2e8ad9c19098201dc1c993a", - "sha256:e564499435a2673d586f6b2130bb5b95f04a3ba06f81b8f895b651a3c76aabb1" + "sha256:46846318467efc4556ccfd27816e004270a9eeeeb4d062ce5e6fc7a87c573f93", + "sha256:7ace6161b621d31e7902eb6b5ae148d12cfd23f4a249b9ffb6b9fee12084323d" ], - "version": "==0.9.0" + "markers": "python_version >= '3.7'", + "version": "==0.10.1" }, "pep517": { "hashes": [ - "sha256:931378d93d11b298cf511dd634cf5ea4cb249a28ef84160b3247ee9afb4e8ab0", - "sha256:dd884c326898e2c6e11f9e0b64940606a93eb10ea022a2e067959f3a110cf161" + "sha256:4ba4446d80aed5b5eac6509ade100bff3e7943a8489de249654a5ae9b33ee35b", + "sha256:ae69927c5c172be1add9203726d4b84cf3ebad1edcd5f71fcdc746e66e829f59" + ], + "markers": "python_version >= '3.6'", + "version": "==0.13.0" + }, + "pip": { + "hashes": [ + "sha256:3fd1929db052f056d7a998439176d3333fa1b3f6c1ad881de1885c0717608a4b", + "sha256:b61a374b5bc40a6e982426aede40c9b5a08ff20e640f5b56977f4f91fed1e39a" ], - "version": "==0.12.0" + "markers": "python_version >= '3.7'", + "version": "==22.2.2" }, "pip-shims": { 
"hashes": [ - "sha256:05b00ade9d1e686a98bb656dd9b0608a933897283dc21913fad6ea5409ff7e91", - "sha256:16ca9f87485667b16b978b68a1aae4f9cc082c0fa018aed28567f9f34a590569" + "sha256:089e3586a92b1b8dbbc16b2d2859331dc1c412d3e3dbcd91d80e6b30d73db96c", + "sha256:2ae9f21c0155ca5c37d2734eb5f9a7d98c4c42a122d1ba3eddbacc9d9ea9fbae" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.5.3" + "markers": "python_version >= '3.6'", + "version": "==0.7.3" }, "pipenv-setup": { "hashes": [ @@ -645,126 +584,196 @@ ], "version": "==0.0.2" }, + "platformdirs": { + "hashes": [ + "sha256:027d8e83a2d7de06bbac4e5ef7e023c02b863d7ea5d079477e722bb41ab25788", + "sha256:58c8abb07dcb441e6ee4b11d8df0ac856038f944ab98b7be6b27b2a3c7feef19" + ], + "markers": "python_version >= '3.7'", + "version": "==2.5.2" + }, "plette": { "extras": [ "validation" ], "hashes": [ - "sha256:46402c03e36d6eadddad2a5125990e322dd74f98160c8f2dcd832b2291858a26", - "sha256:d6c9b96981b347bddd333910b753b6091a2c1eb2ef85bb373b4a67c9d91dca16" + "sha256:965d7d1d9c1e8ca1f9ec1223df7bfbf3ce8ac82ec52374e8e8b26886cebd0c09", + "sha256:ac52c2dbb085b0ebe12f32be62b160d423bd79e2ee34bec04cafa4406b821afb" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.2.3" + "markers": "python_version >= '3.7'", + "version": "==0.3.1" }, "pygments": { "hashes": [ - "sha256:b8e67fe6af78f492b3c4b3e2970c0624cbf08beb1e493b2c99b9fa1b67a20380", - "sha256:f398865f7eb6874156579fdf36bc840a03cab64d1cde9e93d68f46a425ec52c6" + "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1", + "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42" ], - "markers": "python_version >= '3.5'", - "version": "==2.10.0" + "markers": "python_version >= '3.6'", + "version": "==2.13.0" }, "pyparsing": { "hashes": [ - "sha256:84196357aa3566d64ad123d7a3c67b0e597a115c4934b097580e5ce220b91531", - "sha256:fd93fc45c47893c300bd98f5dd1b41c0e783eaeb727e7cea210dcc09d64ce7c3" + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" ], - "markers": "python_version >= '3.6'", - "version": "==3.0.1" + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.4.7" }, "python-dateutil": { "hashes": [ - "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c", - "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a" + "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86", + "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==2.8.1" + "version": "==2.8.2" }, "pytz": { "hashes": [ - "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da", - "sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798" + "sha256:220f481bdafa09c3955dfbdddb7b57780e9a94f5127e35456a48589b9e0c0197", + "sha256:cea221417204f2d1a2aa03ddae3e867921971d0d76f14d87abb4414415bbdcf5" ], - "version": "==2021.1" + "version": "==2022.2.1" }, "regex": { "hashes": [ - "sha256:0c186691a7995ef1db61205e00545bf161fb7b59cdb8c1201c89b333141c438a", - "sha256:0dcc0e71118be8c69252c207630faf13ca5e1b8583d57012aae191e7d6d28b84", - "sha256:0f7552429dd39f70057ac5d0e897e5bfe211629652399a21671e53f2a9693a4e", - "sha256:129472cd06062fb13e7b4670a102951a3e655e9b91634432cfbdb7810af9d710", - 
"sha256:13ec99df95003f56edcd307db44f06fbeb708c4ccdcf940478067dd62353181e", - "sha256:1f2b59c28afc53973d22e7bc18428721ee8ca6079becf1b36571c42627321c65", - "sha256:2b20f544cbbeffe171911f6ce90388ad36fe3fad26b7c7a35d4762817e9ea69c", - "sha256:2fb698037c35109d3c2e30f2beb499e5ebae6e4bb8ff2e60c50b9a805a716f79", - "sha256:34d870f9f27f2161709054d73646fc9aca49480617a65533fc2b4611c518e455", - "sha256:391703a2abf8013d95bae39145d26b4e21531ab82e22f26cd3a181ee2644c234", - "sha256:450dc27483548214314640c89a0f275dbc557968ed088da40bde7ef8fb52829e", - "sha256:45b65d6a275a478ac2cbd7fdbf7cc93c1982d613de4574b56fd6972ceadb8395", - "sha256:5095a411c8479e715784a0c9236568ae72509450ee2226b649083730f3fadfc6", - "sha256:530fc2bbb3dc1ebb17f70f7b234f90a1dd43b1b489ea38cea7be95fb21cdb5c7", - "sha256:56f0c81c44638dfd0e2367df1a331b4ddf2e771366c4b9c5d9a473de75e3e1c7", - "sha256:5e9c9e0ce92f27cef79e28e877c6b6988c48b16942258f3bc55d39b5f911df4f", - "sha256:6d7722136c6ed75caf84e1788df36397efdc5dbadab95e59c2bba82d4d808a4c", - "sha256:74d071dbe4b53c602edd87a7476ab23015a991374ddb228d941929ad7c8c922e", - "sha256:7b568809dca44cb75c8ebb260844ea98252c8c88396f9d203f5094e50a70355f", - "sha256:80bb5d2e92b2258188e7dcae5b188c7bf868eafdf800ea6edd0fbfc029984a88", - "sha256:8d1cdcda6bd16268316d5db1038965acf948f2a6f43acc2e0b1641ceab443623", - "sha256:9f665677e46c5a4d288ece12fdedf4f4204a422bb28ff05f0e6b08b7447796d1", - "sha256:a30513828180264294953cecd942202dfda64e85195ae36c265daf4052af0464", - "sha256:a7a986c45d1099a5de766a15de7bee3840b1e0e1a344430926af08e5297cf666", - "sha256:a940ca7e7189d23da2bfbb38973832813eab6bd83f3bf89a977668c2f813deae", - "sha256:ab7c5684ff3538b67df3f93d66bd3369b749087871ae3786e70ef39e601345b0", - "sha256:be04739a27be55631069b348dda0c81d8ea9822b5da10b8019b789e42d1fe452", - "sha256:c0938ddd60cc04e8f1faf7a14a166ac939aac703745bfcd8e8f20322a7373019", - "sha256:cb46b542133999580ffb691baf67410306833ee1e4f58ed06b6a7aaf4e046952", - "sha256:d134757a37d8640f3c0abb41f5e68b7cf66c644f54ef1cb0573b7ea1c63e1509", - "sha256:de557502c3bec8e634246588a94e82f1ee1b9dfcfdc453267c4fb652ff531570", - "sha256:ded0c4a3eee56b57fcb2315e40812b173cafe79d2f992d50015f4387445737fa", - "sha256:e1dae12321b31059a1a72aaa0e6ba30156fe7e633355e445451e4021b8e122b6", - "sha256:eb672217f7bd640411cfc69756ce721d00ae600814708d35c930930f18e8029f", - "sha256:ee684f139c91e69fe09b8e83d18b4d63bf87d9440c1eb2eeb52ee851883b1b29", - "sha256:f3f9a91d3cc5e5b0ddf1043c0ae5fa4852f18a1c0050318baf5fc7930ecc1f9c" - ], - "version": "==2021.10.23" + "sha256:003a2e1449d425afc817b5f0b3d4c4aa9072dd5f3dfbf6c7631b8dc7b13233de", + "sha256:0385d66e73cdd4462f3cc42c76a6576ddcc12472c30e02a2ae82061bff132c32", + "sha256:0394265391a86e2bbaa7606e59ac71bd9f1edf8665a59e42771a9c9adbf6fd4f", + "sha256:03ff695518482b946a6d3d4ce9cbbd99a21320e20d94913080aa3841f880abcd", + "sha256:079c182f99c89524069b9cd96f5410d6af437e9dca576a7d59599a574972707e", + "sha256:091efcfdd4178a7e19a23776dc2b1fafb4f57f4d94daf340f98335817056f874", + "sha256:0b664a4d33ffc6be10996606dfc25fd3248c24cc589c0b139feb4c158053565e", + "sha256:14216ea15efc13f28d0ef1c463d86d93ca7158a79cd4aec0f9273f6d4c6bb047", + "sha256:14a7ab070fa3aec288076eed6ed828587b805ef83d37c9bfccc1a4a7cfbd8111", + "sha256:14c71437ffb89479c89cc7022a5ea2075a842b728f37205e47c824cc17b30a42", + "sha256:18e503b1e515a10282b3f14f1b3d856194ecece4250e850fad230842ed31227f", + "sha256:19a4da6f513045f5ba00e491215bd00122e5bd131847586522463e5a6b2bd65f", + "sha256:1a901ce5cd42658ab8f8eade51b71a6d26ad4b68c7cfc86b87efc577dfa95602", + 
"sha256:26df88c9636a0c3f3bd9189dd435850a0c49d0b7d6e932500db3f99a6dd604d1", + "sha256:2dda4b096a6f630d6531728a45bd12c67ec3badf44342046dc77d4897277d4f2", + "sha256:322bd5572bed36a5b39952d88e072738926759422498a96df138d93384934ff8", + "sha256:360ffbc9357794ae41336b681dff1c0463193199dfb91fcad3ec385ea4972f46", + "sha256:37e5a26e76c46f54b3baf56a6fdd56df9db89758694516413757b7d127d4c57b", + "sha256:3d64e1a7e6d98a4cdc8b29cb8d8ed38f73f49e55fbaa737bdb5933db99b9de22", + "sha256:3f3b4594d564ed0b2f54463a9f328cf6a5b2a32610a90cdff778d6e3e561d08b", + "sha256:4146cb7ae6029fc83b5c905ec6d806b7e5568dc14297c423e66b86294bad6c39", + "sha256:4318f69b79f9f7d84a7420e97d4bfe872dc767c72f891d4fea5fa721c74685f7", + "sha256:4cdbfa6d2befeaee0c899f19222e9b20fc5abbafe5e9c43a46ef819aeb7b75e5", + "sha256:50e764ffbd08b06aa8c4e86b8b568b6722c75d301b33b259099f237c46b2134e", + "sha256:518272f25da93e02af4f1e94985f5042cec21557ef3591027d0716f2adda5d0a", + "sha256:592b9e2e1862168e71d9e612bfdc22c451261967dbd46681f14e76dfba7105fd", + "sha256:59a786a55d00439d8fae4caaf71581f2aaef7297d04ee60345c3594efef5648a", + "sha256:59bac44b5a07b08a261537f652c26993af9b1bbe2a29624473968dd42fc29d56", + "sha256:5d0dd8b06896423211ce18fba0c75dacc49182a1d6514c004b535be7163dca0f", + "sha256:67a4c625361db04ae40ef7c49d3cbe2c1f5ff10b5a4491327ab20f19f2fb5d40", + "sha256:6adfe300848d61a470ec7547adc97b0ccf86de86a99e6830f1d8c8d19ecaf6b3", + "sha256:6b32b45433df1fad7fed738fe15200b6516da888e0bd1fdd6aa5e50cc16b76bc", + "sha256:6c57d50d4d5eb0c862569ca3c840eba2a73412f31d9ecc46ef0d6b2e621a592b", + "sha256:6d43bd402b27e0e7eae85c612725ba1ce7798f20f6fab4e8bc3de4f263294f03", + "sha256:6e521d9db006c5e4a0f8acfef738399f72b704913d4e083516774eb51645ad7c", + "sha256:6fe1dd1021e0f8f3f454ce2811f1b0b148f2d25bb38c712fec00316551e93650", + "sha256:73b985c9fc09a7896846e26d7b6f4d1fd5a20437055f4ef985d44729f9f928d0", + "sha256:7681c49da1a2d4b905b4f53d86c9ba4506e79fba50c4a664d9516056e0f7dfcc", + "sha256:77c2879d3ba51e5ca6c2b47f2dcf3d04a976a623a8fc8236010a16c9e0b0a3c7", + "sha256:7b0c5cc3d1744a67c3b433dce91e5ef7c527d612354c1f1e8576d9e86bc5c5e2", + "sha256:7fcf7f94ccad19186820ac67e2ec7e09e0ac2dac39689f11cf71eac580503296", + "sha256:83cc32a1a2fa5bac00f4abc0e6ce142e3c05d3a6d57e23bd0f187c59b4e1e43b", + "sha256:8418ee2cb857b83881b8f981e4c636bc50a0587b12d98cb9b947408a3c484fe7", + "sha256:86df2049b18745f3cd4b0f4c4ef672bfac4b80ca488e6ecfd2bbfe68d2423a2c", + "sha256:880dbeb6bdde7d926b4d8e41410b16ffcd4cb3b4c6d926280fea46e2615c7a01", + "sha256:8aba0d01e3dfd335f2cb107079b07fdddb4cd7fb2d8c8a1986f9cb8ce9246c24", + "sha256:8dcbcc9e72a791f622a32d17ff5011326a18996647509cac0609a7fc43adc229", + "sha256:944567bb08f52268d8600ee5bdf1798b2b62ea002cc692a39cec113244cbdd0d", + "sha256:995e70bb8c91d1b99ed2aaf8ec44863e06ad1dfbb45d7df95f76ef583ec323a9", + "sha256:99945ddb4f379bb9831c05e9f80f02f079ba361a0fb1fba1fc3b267639b6bb2e", + "sha256:9a165a05979e212b2c2d56a9f40b69c811c98a788964e669eb322de0a3e420b4", + "sha256:9bc8edc5f8ef0ebb46f3fa0d02bd825bbe9cc63d59e428ffb6981ff9672f6de1", + "sha256:a1aec4ae549fd7b3f52ceaf67e133010e2fba1538bf4d5fc5cd162a5e058d5df", + "sha256:a1c4d17879dd4c4432c08a1ca1ab379f12ab54af569e945b6fc1c4cf6a74ca45", + "sha256:a2b39ee3b280e15824298b97cec3f7cbbe6539d8282cc8a6047a455b9a72c598", + "sha256:a2effeaf50a6838f3dd4d3c5d265f06eabc748f476e8441892645ae3a697e273", + "sha256:a59d0377e58d96a6f11636e97992f5b51b7e1e89eb66332d1c01b35adbabfe8a", + "sha256:a926339356fe29595f8e37af71db37cd87ff764e15da8ad5129bbaff35bcc5a6", + "sha256:a9eb9558e1d0f78e07082d8a70d5c4d631c8dd75575fae92105df9e19c736730", + 
"sha256:ab07934725e6f25c6f87465976cc69aef1141e86987af49d8c839c3ffd367c72", + "sha256:ad75173349ad79f9d21e0d0896b27dcb37bfd233b09047bc0b4d226699cf5c87", + "sha256:b7b701dbc124558fd2b1b08005eeca6c9160e209108fbcbd00091fcfac641ac7", + "sha256:b7bee775ff05c9d519195bd9e8aaaccfe3971db60f89f89751ee0f234e8aeac5", + "sha256:b86548b8234b2be3985dbc0b385e35f5038f0f3e6251464b827b83ebf4ed90e5", + "sha256:b9d68eb704b24bc4d441b24e4a12653acd07d2c39940548761e0985a08bc1fff", + "sha256:c0b7cb9598795b01f9a3dd3f770ab540889259def28a3bf9b2fa24d52edecba3", + "sha256:cab548d6d972e1de584161487b2ac1aa82edd8430d1bde69587ba61698ad1cfb", + "sha256:ce331b076b2b013e7d7f07157f957974ef0b0881a808e8a4a4b3b5105aee5d04", + "sha256:cfa4c956ff0a977c4823cb3b930b0a4e82543b060733628fec7ab3eb9b1abe37", + "sha256:d23ac6b4bf9e32fcde5fcdb2e1fd5e7370d6693fcac51ee1d340f0e886f50d1f", + "sha256:d2885ec6eea629c648ecc9bde0837ec6b92208b7f36381689937fe5d64a517e8", + "sha256:d2a1371dc73e921f3c2e087c05359050f3525a9a34b476ebc8130e71bec55e97", + "sha256:d3102ab9bf16bf541ca228012d45d88d2a567c9682a805ae2c145a79d3141fdd", + "sha256:d5b003d248e6f292475cd24b04e5f72c48412231961a675edcb653c70730e79e", + "sha256:d5edd3eb877c9fc2e385173d4a4e1d792bf692d79e25c1ca391802d36ecfaa01", + "sha256:d7430f041755801b712ec804aaf3b094b9b5facbaa93a6339812a8e00d7bd53a", + "sha256:d837ccf3bd2474feabee96cd71144e991472e400ed26582edc8ca88ce259899c", + "sha256:dab81cc4d58026861445230cfba27f9825e9223557926e7ec22156a1a140d55c", + "sha256:db45016364eec9ddbb5af93c8740c5c92eb7f5fc8848d1ae04205a40a1a2efc6", + "sha256:df8fe00b60e4717662c7f80c810ba66dcc77309183c76b7754c0dff6f1d42054", + "sha256:e6e6e61e9a38b6cc60ca3e19caabc90261f070f23352e66307b3d21a24a34aaf", + "sha256:ee7045623a5ace70f3765e452528b4c1f2ce669ed31959c63f54de64fe2f6ff7", + "sha256:f06cc1190f3db3192ab8949e28f2c627e1809487e2cfc435b6524c1ce6a2f391", + "sha256:f07373b6e56a6f3a0df3d75b651a278ca7bd357a796078a26a958ea1ce0588fd", + "sha256:f6e0321921d2fdc082ef90c1fd0870f129c2e691bfdc4937dcb5cd308aba95c4", + "sha256:f6e167d1ccd41d27b7b6655bb7a2dcb1b1eb1e0d2d662043470bd3b4315d8b2b", + "sha256:fcbd1edff1473d90dc5cf4b52d355cf1f47b74eb7c85ba6e45f45d0116b8edbd", + "sha256:fe428822b7a8c486bcd90b334e9ab541ce6cc0d6106993d59f201853e5e14121" + ], + "markers": "python_version >= '3.6'", + "version": "==2022.9.13" }, "requests": { "hashes": [ - "sha256:6a1b267aa90cac58ac3a765d067950e7dbbf75b1da07e895d1f594193a40a38b", - "sha256:9c443e7324ba5b85070c4a818ade28bfabedf16ea10206da1132edaa6dda237e" + "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983", + "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349" ], - "version": "==2.18.4" + "markers": "python_version >= '3.7' and python_version < '4.0'", + "version": "==2.28.1" }, "requirementslib": { "hashes": [ - "sha256:50d20f27e4515a2393695b0d886219598302163438ae054253147b2bad9b4a44", - "sha256:9c1e8666ca4512724cdd1739adcc7df19ec7ad2ed21f0e748f9631ad6b54f321" + "sha256:28924cf11a2fa91adb03f8431d80c2a8c3dc386f1c48fb2be9a58e4c39072354", + "sha256:d26ec6ad45e1ffce9532303543996c9c71a99dc65f783908f112e3f2aae7e49c" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==1.5.16" + "markers": "python_version >= '3.7'", + "version": "==1.6.9" + }, + "setuptools": { + "hashes": [ + "sha256:a8f6e213b4b0661f590ccf40de95d28a177cd747d098624ad3f69c40287297e9", + "sha256:c2d2709550f15aab6c9110196ea312f468f41cd546bceb24127a1be6fdcaeeb1" + ], + "markers": "python_version >= '3.7'", + "version": "==65.4.0" }, "six": { 
"hashes": [ - "sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd", - "sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66" + "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926", + "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254" ], - "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==1.13.0" + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.16.0" }, "snowballstemmer": { "hashes": [ - "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2", - "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914" + "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1", + "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a" ], - "version": "==2.1.0" + "version": "==2.2.0" }, "soupsieve": { "hashes": [ - "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc", - "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b" + "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759", + "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d" ], "markers": "python_version >= '3.6'", - "version": "==2.2.1" + "version": "==2.3.2.post1" }, "sphinx": { "hashes": [ @@ -840,77 +849,73 @@ }, "tomli": { "hashes": [ - "sha256:c6ce0015eb38820eaf32b5db832dbc26deb3dd427bd5f6556cf0acac2c214fee", - "sha256:f04066f68f5554911363063a30b108d2b5a5b1a010aa8b6132af78489fe3aade" + "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc", + "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f" ], - "markers": "python_version >= '3.6'", - "version": "==1.2.2" + "markers": "python_version < '3.11'", + "version": "==2.0.1" }, "tomlkit": { "hashes": [ - "sha256:173ad840fa5d2aac140528ca1933c29791b79a374a0861a80347f42ec9328117", - "sha256:d7a454f319a7e9bd2e249f239168729327e4dd2d27b17dc68be264ad1ce36754" + "sha256:25d4e2e446c453be6360c67ddfb88838cfc42026322770ba13d1fbd403a93a5c", + "sha256:3235a9010fae54323e727c3ac06fb720752fe6635b3426e379daec60fbd44a83" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.7.2" + "markers": "python_version >= '3.6' and python_version < '4.0'", + "version": "==0.11.4" }, "typed-ast": { "hashes": [ - "sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace", - "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff", - "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266", - "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528", - "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6", - "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808", - "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4", - "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363", - "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341", - "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04", - "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41", - "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e", - "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3", - "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899", - 
"sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805", - "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c", - "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c", - "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39", - "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a", - "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3", - "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7", - "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f", - "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075", - "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0", - "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40", - "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428", - "sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927", - "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3", - "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f", - "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65" - ], - "version": "==1.4.3" + "sha256:0261195c2062caf107831e92a76764c81227dae162c4f75192c0d489faf751a2", + "sha256:0fdbcf2fef0ca421a3f5912555804296f0b0960f0418c440f5d6d3abb549f3e1", + "sha256:183afdf0ec5b1b211724dfef3d2cad2d767cbefac291f24d69b00546c1837fb6", + "sha256:211260621ab1cd7324e0798d6be953d00b74e0428382991adfddb352252f1d62", + "sha256:267e3f78697a6c00c689c03db4876dd1efdfea2f251a5ad6555e82a26847b4ac", + "sha256:2efae9db7a8c05ad5547d522e7dbe62c83d838d3906a3716d1478b6c1d61388d", + "sha256:370788a63915e82fd6f212865a596a0fefcbb7d408bbbb13dea723d971ed8bdc", + "sha256:39e21ceb7388e4bb37f4c679d72707ed46c2fbf2a5609b8b8ebc4b067d977df2", + "sha256:3e123d878ba170397916557d31c8f589951e353cc95fb7f24f6bb69adc1a8a97", + "sha256:4879da6c9b73443f97e731b617184a596ac1235fe91f98d279a7af36c796da35", + "sha256:4e964b4ff86550a7a7d56345c7864b18f403f5bd7380edf44a3c1fb4ee7ac6c6", + "sha256:639c5f0b21776605dd6c9dbe592d5228f021404dafd377e2b7ac046b0349b1a1", + "sha256:669dd0c4167f6f2cd9f57041e03c3c2ebf9063d0757dc89f79ba1daa2bfca9d4", + "sha256:6778e1b2f81dfc7bc58e4b259363b83d2e509a65198e85d5700dfae4c6c8ff1c", + "sha256:683407d92dc953c8a7347119596f0b0e6c55eb98ebebd9b23437501b28dcbb8e", + "sha256:79b1e0869db7c830ba6a981d58711c88b6677506e648496b1f64ac7d15633aec", + "sha256:7d5d014b7daa8b0bf2eaef684295acae12b036d79f54178b92a2b6a56f92278f", + "sha256:98f80dee3c03455e92796b58b98ff6ca0b2a6f652120c263efdba4d6c5e58f72", + "sha256:a94d55d142c9265f4ea46fab70977a1944ecae359ae867397757d836ea5a3f47", + "sha256:a9916d2bb8865f973824fb47436fa45e1ebf2efd920f2b9f99342cb7fab93f72", + "sha256:c542eeda69212fa10a7ada75e668876fdec5f856cd3d06829e6aa64ad17c8dfe", + "sha256:cf4afcfac006ece570e32d6fa90ab74a17245b83dfd6655a6f68568098345ff6", + "sha256:ebd9d7f80ccf7a82ac5f88c521115cc55d84e35bf8b446fcd7836eb6b98929a3", + "sha256:ed855bbe3eb3715fca349c80174cfcfd699c2f9de574d40527b8429acae23a66" + ], + "markers": "python_version >= '3.6'", + "version": "==1.5.4" }, "urllib3": { "hashes": [ - "sha256:06330f386d6e4b195fbfc736b297f58c5a892e4440e54d294d7004e3a9bbea1b", - "sha256:cc44da8e1145637334317feebd728bd869a35285b93cbb4cca2577da7e62db4f" + "sha256:3fa96cf423e6987997fc326ae8df396db2a8b7c667747d47ddd8ecba91f4a74e", + "sha256:b930dd878d5a8afb066a637fbb35144fe7901e3b209d1cd4f524bd0e9deee997" ], - "version": "==1.22" + "markers": 
"python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4, 3.5' and python_version < '4.0'", + "version": "==1.26.12" }, "vistir": { "hashes": [ - "sha256:a37079cdbd85d31a41cdd18457fe521e15ec08b255811e81aa061fd5f48a20fb", - "sha256:eff1d19ef50c703a329ed294e5ec0b0fbb35b96c1b3ee6dcdb266dddbe1e935a" + "sha256:1a89a612fb667c26ed6b4ed415b01e0261e13200a350c43d1990ace0ef44d35b", + "sha256:a8beb7643d07779cdda3941a08dad77d48de94883dbd3cb2b9b5ecb7eb7c0994" ], - "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", - "version": "==0.5.2" + "markers": "python_version not in '3.0, 3.1, 3.2, 3.3' and python_version >= '3.7'", + "version": "==0.6.1" }, "wheel": { "hashes": [ - "sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd", - "sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad" + "sha256:4bdcd7d840138086126cd09254dc6195fb4fc6f01c050a1d7236f2630db1d22a", + "sha256:e9a504e793efbca1b8e0e9cb979a249cf4a0a7b5b8c9e8b65a5e39d49529c1c4" ], "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", - "version": "==0.37.0" + "version": "==0.37.1" } } } diff --git a/examples/cvat/01-disks.yaml b/examples/cvat/01-disks.yaml new file mode 100644 index 00000000..508644ba --- /dev/null +++ b/examples/cvat/01-disks.yaml @@ -0,0 +1,23 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Disk" +metadata: + name: "cvat_minio" +spec: + runtime: cloud + capacity: 256 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Disk" +metadata: + name: "cvat_data_128gb" +spec: + runtime: cloud + capacity: 256 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Disk" +metadata: + name: "cvat_db" +spec: + runtime: cloud + capacity: 32 \ No newline at end of file diff --git a/examples/cvat/02-deployment.yaml b/examples/cvat/02-deployment.yaml new file mode 100644 index 00000000..cf9b9804 --- /dev/null +++ b/examples/cvat/02-deployment.yaml @@ -0,0 +1,64 @@ +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: cvat_db + depends: + kind: package + nameOrGUID: cvat_db_redis + version: "v1.0.2" + labels: + app: cvat +spec: + runtime: cloud + envArgs: + - name: PGDATA + value: "/var/lib/postgresql/data/pgdata" + - name: POSTGRES_DB + value: "cvat" + - name: POSTGRES_USER + value: root + - name: POSTGRES_HOST_AUTH_METHOD + value: "trust" + volumes: + - execName: postgres + mountPath: "/var/lib/postgresql/data" + depends: + kind: disk + nameOrGUID: "cvat_data_128gb" + staticRoutes: + - name: CVAT_UI + depends: + kind: staticroute + nameOrGUID: cvat-prod +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: cvat_prod + depends: + kind: package + nameOrGUID: cvat_custom + version: "v1.3.0" + labels: + app: cvat +spec: + depends: + - kind: deployment + nameOrGUID: cvat_db + runtime: cloud + envArgs: + - name: DJANGO_MODWSGI_EXTRA_ARGS + value: " " + - name: ALLOWED_HOSTS + value: "*" + volumes: + - execName: cvat_server + mountPath: "/home/django/user_data" + depends: + kind: disk + nameOrGUID: "cvat_data_128gb" + staticRoutes: + - name: CVAT_UI + depends: + kind: staticroute + nameOrGUID: cvat-prod \ No newline at end of file diff --git a/examples/cvat/03-extra-tools.yaml b/examples/cvat/03-extra-tools.yaml new file mode 100644 index 00000000..cba89ce7 --- /dev/null +++ b/examples/cvat/03-extra-tools.yaml @@ -0,0 +1,31 @@ +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: pgAdmin + depends: + kind: package + nameOrGUID: pgAdmin + 
version: "v1.0.1" + labels: + app: cvat +spec: + depends: + - kind: deployment + nameOrGUID: cvat_db + runtime: cloud + envArgs: + - name: PGADMIN_DEFAULT_PASSWORD + value: "pgadmin" + - name: PGADMIN_DEFAULT_EMAIL + value: "pgadmin@rapyuta-robotics.com" + volumes: + - execName: cvat_server + mountPath: "/home/django/user_data" + depends: + kind: disk + nameOrGUID: "cvat_data_128gb" + staticRoutes: + - name: endpoint + depends: + kind: staticroute + nameOrGUID: cvat-prod \ No newline at end of file diff --git a/examples/demo/all-in-one.yaml b/examples/demo/all-in-one.yaml new file mode 100644 index 00000000..8c0aacfc --- /dev/null +++ b/examples/demo/all-in-one.yaml @@ -0,0 +1,292 @@ +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_ims + depends: + kind: package + nameOrGUID: "sootballs_ims" + version: "1.13.0" + labels: + app: ims +spec: + runtime: cloud + depends: + - kind: deployment + nameOrGUID: sootballs_ims_pgbouncer + # TODO: Come back once Minio and DB is deployed + # - kind: deployment + # nameOrGUID: sootballs_minio + envArgs: + - name: ACC_KEY + value: "rooir+ucWD0ObAHVGdco3T7qFskEpZfgZm0lQdIxbsowLkct2uXi3zKYi+EKJmTpmtqsNGYVMswsp719klvl7Q==" + - name: ACC_NAME + value: "rrkibdem" + - name: AUTH_ENABLED + value: "True" + - name: AZURE_CONNECTION_STRING + value: "DefaultEndpointsProtocol=https;AccountName=rrkibdem;AccountKey=rooir+ucWD0ObAHVGdco3T7qFskEpZfgZm0lQdIxbsowLkct2uXi3zKYi+EKJmTpmtqsNGYVMswsp719klvl7Q==;EndpointSuffix=core.windows.net" + - name: TIMEZONE + value: "Asia/Tokyo" + - name: CONTAINER_NAME + value: "warehouse" + - name: DAYS_BEFORE_ARCHIVE + value: "15" + - name: DEFAULT_FILE_STORAGE + value: "storages.backends.gcloud.GoogleCloudStorage" + - name: DEPLOY_ON_CLOUD + value: "True" + - name: EODTIME + value: "19:00" + - name: GCP_CREDS + value: 
"U2FsdGVkX19WrtHt0rhAAhj61mBc8S43+r1Hp61wGeAM3HOksh893DzhmuBMNfrzISGwyU5EOove2VSlwhXVzBp9TapdlDS1LZ2GEmAMsthuHtsoRv/F6wt61pKQ6D0dTQME8mia/weqjzy4UKQPv/acUsSMvAsk6MyDTNwMuz0SuGTvbJwZfIXEVgkC7qXsX8h7yjczbol2vxlXTTNC8QC0iJGT923MoxGt6Xd1XUZjB/stp3y1DLXG7i9EdPr4U4C36ywpv13gDKSzRetWnNamhPVreMyNIwX9xs7c5eWRvfZXn70ayuLWizJajVSljOWDf+atgyUMHtvBKPrvXnhegOXmMwkyDmvKxhTb+lEYlcQCDzERqZB/Co0SbqQUJhs9O3/XnmLmxgY1T83EuSBgiT3Gbt9TQXI2p8l21QapMJ4uvItLD3dXvEMa/VlmLDRc7+thlhpP6z8UFOMdK84j15IFa4oEtPpPA6FKkZPZq8nGLSTVp1WAAf54yMb3H5W1tVz6ICva+lk35Lm8Zd7VxocZxoDnq36oOB9/y6VajU6IVDPP8iuIq3sHQD8JhbGvoqh9wRdcKWUwZsc3TNK3fxHtS0wTxa9TTuoX34A3FKFri2GskiEv55ICeCu733yL9KnR5bOHs11s0ECBH40M7a8W0XAxMIe5qeDOJwhjOXlO6tDaCsFjHAIGDs8NRjgdJJI13+8Vn1XqNj4LMxaLFm2oRG+krHCyyC2PoUWBUixu+6jpLpcIhi9G3JM98RUPLy6qX4Te9eQFuODMi9tPXLyBQAAFPXBxTbnGTJY1xyl+m+P8piT1t67V4FTK1JGollEEn7eL+V5tYWwPG/TVS9AKTcg7lBi/dEdwVZ/HH8PG6LUs1DKUmHm7DhX7KRv4tNqTyLlPNzUsFwTPzkWovdy8EeS6O6++sTS+DWPTTP1cVbckNcztyB1lgkhq0iK50WPkB5NWG5EDdih8yZpRdPXSGxNnap73HmxTfeEfIqDFnHm8avsZjc13qM2rYJhSM2wwRlQxWyoPbL3DcsYO5UlcRycxa1fwAY/rrXguaH1L84LvSDjDSzbmbgFw4zFD78q6wUUgXIidpxLtloxSoBFgqRJGokTdMPK6o7cfdBrvBPcOpIXOhpHe/z+wikzCRB3f/gRpc44Dj8JJLsjpL9XATrAF4FDT5P2h945RX4bqOfrX/EqFyaLMvlQAieUgvcyQu+XDP+VEMmq4lM9rQTTCygEIQqKEkjsVGvuJZyud/z1VwgeDIFs2Cad7eXPu+vjw6h993w+qj3Yc2rYgI1cXk6J67qYt6xziEc+8gFMMF7Y15qX9Cu2S47ERN3yg1lfw6sEEgHdlcCLSdp77SyzvYww/SdyLzNeWNyp/EbtnkgoXip+OvsKzNO6wMJ6cXfkSvn2K8xwyVLb1nqOemEsn/dhl7qMaQ5TqNkRLuZrNPhEotoDCLQROJJaophrsC0w2jtxhqtzF3hv3foEMDnBwY88Lr5YP72+JN2+ELa1ryb7gC48mdWRWQq8Am07b2shJMXa5Kc/0R7AuGJd9schQc6kRM6b2B1xTp8iC0bAwDB1m6osx5KCMkDNwyLOUdmVhOL4PcZ57Ns+jsBrU6oK2lVa4H+Ns2Ud4ei2DzFAqB4smsQ50ZMAyeKst9NJYmoKg4Fl7oUIAaQsrn85uUlDOXPnJHCO93WUzNzF+ZKsVW9FP0KwBzcX/lHpy0VbLHPK7ZfNMGuW+LGlk1QvE8L5temzSWFygZ4QbLiu2AlXo5bFKPZ05/Zotb41+EmD+lRlIZxPUBLI6niH2wpjtdpkSopOlB6VHeHdLQ1aAk1+DpX7E5AAAtCsR9EvbfJxriY9QxQumw/C0Z+XQ7R5qyveYTuC3sjRXf82qT5agrc8swt0g0tikbWP3vUT3gSPmYF0TzXSo/VvyOxHyIrSAhftljiS25UPP+e4KZk5cLUVxkmtP9t3ePu+aAM4LmeY+R41O60yaEKv4KEkKo0ORKdhjGwhrgk8D2NaUfnrPCPfykHC6ginfYVMK9UTps/jfbZjTKr1hbg1F4pYYshq9dn2K4IUjFiOeIZeA1xzY8Xt/TnLOTKWM5wI/CPZ71nU0VrdSi8qvA+vv2odhkB0P92+mjdPcEMMh3igrrRowjwAheGxMq8k34x2kBzMoe7mH6tOTMDYRHUxjxYNpVlfPDDQ2dV0+4wm76GBPesSwwOmh2T2TOlNRlaZGyVN79TIz5jy2iTJ4dLuTeHdh5JMXMS+OLZ8TftPZuRCEVri6I/h8lG5WhAhLnJS9aBNEhstAZX+cUnLxs61D80XVRd+izU6GXuOH/INpYV/9zPwlkl5CNpz0kneBhJTMEgWPaAJ3vERMkLzFe57mEeIOCz9Jba9AQ8mqKZBpIkMGfo9da06wWDRW1eVo+FPmlSzhomCYRX30uI+813KPQRF2l9orfTJvrDiuE8RqVXBXu/ap1FKbgyRmMp8PNrU3JU5HWRKY3ONNQAnLFoBcB0DUxxY1KzR4IwjJ4Vp+ugKcWN8CHH3knuyuJu+jHXkLBGeOExLM1IlL+yc2iBx2fasGZz/DlXkeCrjPdjcMCUmuhPRvAKi2C+nlnuBEITV0pjMkBq3M/o/Rob+lDZy/ravlz/0l/3nr1pExSMqQT70sSsUWMO4m/VkJYL5b86ZecjlJb8bFvVkbgAyFXCy3Iwpp+0CwFSAhcwAhh6cLR7P0bJE6/N16vHHKgc70pDx4Lu7Q4IL4xS5HqkhMwOq8eDmdcalMtYz3O8O+i4i4+FzLqK7SInj/Jr/yBquA78N7jYTNnCtN5k3WoNJOum1vohAXWax9nYPTdtPLyczpYVfG+dUZTKhnCLkoOacjKbe1mdTRXgR3jKIEoiW5eIXZKBbcMyh6V5uGOsiXYUqzPaa/pbDnk6r9c/fPUeVPqnsMydl1q/ry+fs0IIWHmIUS3ir/ZaBpsI3aI7uQ1/aUJ70Qtz6GGKPQoSA7tSrwnns4EJG4Qy8J9fVYcHhY8xavaLFaNL5LsEAKoFaCuAXjQd9Hrnv+IA2R17GqUC2J8Nq9Z2za14w/klBncOUPHRv4NJ1fTA==" + - name: GOOGLE_APPLICATION_CREDENTIALS + value: "/code/creds.json" + - name: GS_BUCKET_NAME + value: "rrkibdem" + - name: IMS_DB + value: "rrkibdem_ims" + - name: CELERY_BROKER_URL + value: "rediss://:QYTYXHGQHAdzx27jlxuJyqfnnaks61klHvPhD1vPx+Q=@sootballsdev.redis.cache.windows.net:6380/0" + - name: CELERY_RESULT_BACKEND + value: "rediss://:QYTYXHGQHAdzx27jlxuJyqfnnaks61klHvPhD1vPx+Q=@sootballsdev.redis.cache.windows.net:6380/0" + - name: SECRET_KEY 
+ value: "igy0moow=_qbsbiw4ln&wnpp5+ocvy*y(ov_9a$7j^1k4ccn86" + - name: SODTIME + value: "20:00" + staticRoutes: + - name: IMS_URL + depends: + kind: staticroute + nameOrGUID: ims-{{ routePrefix }} +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_ims_worker + depends: + kind: package + nameOrGUID: "sootballs_ims_worker" + version: "1.13.0" +spec: + runtime: cloud + depends: + - kind: deployment + nameOrGUID: sootballs_ims_pgbouncer + # TODO: Come back once Minio and DB is deployed + # - kind: deployment + # nameOrGUID: sootballs_minio + envArgs: + - name: ACC_KEY + value: "rooir+ucWD0ObAHVGdco3T7qFskEpZfgZm0lQdIxbsowLkct2uXi3zKYi+EKJmTpmtqsNGYVMswsp719klvl7Q==" + - name: ACC_NAME + value: "rrkibdem" + - name: AUTH_ENABLED + value: "True" + - name: AZURE_CONNECTION_STRING + value: "DefaultEndpointsProtocol=https;AccountName=rrkibdem;AccountKey=rooir+ucWD0ObAHVGdco3T7qFskEpZfgZm0lQdIxbsowLkct2uXi3zKYi+EKJmTpmtqsNGYVMswsp719klvl7Q==;EndpointSuffix=core.windows.net" + - name: TIMEZONE + value: "Asia/Tokyo" + - name: CONTAINER_NAME + value: "warehouse" + - name: DAYS_BEFORE_ARCHIVE + value: "15" + - name: DEFAULT_FILE_STORAGE + value: "storages.backends.gcloud.GoogleCloudStorage" + - name: DEPLOY_ON_CLOUD + value: "True" + - name: EODTIME + value: "19:00" + - name: GCP_CREDS + value: "U2FsdGVkX19WrtHt0rhAAhj61mBc8S43+r1Hp61wGeAM3HOksh893DzhmuBMNfrzISGwyU5EOove2VSlwhXVzBp9TapdlDS1LZ2GEmAMsthuHtsoRv/F6wt61pKQ6D0dTQME8mia/weqjzy4UKQPv/acUsSMvAsk6MyDTNwMuz0SuGTvbJwZfIXEVgkC7qXsX8h7yjczbol2vxlXTTNC8QC0iJGT923MoxGt6Xd1XUZjB/stp3y1DLXG7i9EdPr4U4C36ywpv13gDKSzRetWnNamhPVreMyNIwX9xs7c5eWRvfZXn70ayuLWizJajVSljOWDf+atgyUMHtvBKPrvXnhegOXmMwkyDmvKxhTb+lEYlcQCDzERqZB/Co0SbqQUJhs9O3/XnmLmxgY1T83EuSBgiT3Gbt9TQXI2p8l21QapMJ4uvItLD3dXvEMa/VlmLDRc7+thlhpP6z8UFOMdK84j15IFa4oEtPpPA6FKkZPZq8nGLSTVp1WAAf54yMb3H5W1tVz6ICva+lk35Lm8Zd7VxocZxoDnq36oOB9/y6VajU6IVDPP8iuIq3sHQD8JhbGvoqh9wRdcKWUwZsc3TNK3fxHtS0wTxa9TTuoX34A3FKFri2GskiEv55ICeCu733yL9KnR5bOHs11s0ECBH40M7a8W0XAxMIe5qeDOJwhjOXlO6tDaCsFjHAIGDs8NRjgdJJI13+8Vn1XqNj4LMxaLFm2oRG+krHCyyC2PoUWBUixu+6jpLpcIhi9G3JM98RUPLy6qX4Te9eQFuODMi9tPXLyBQAAFPXBxTbnGTJY1xyl+m+P8piT1t67V4FTK1JGollEEn7eL+V5tYWwPG/TVS9AKTcg7lBi/dEdwVZ/HH8PG6LUs1DKUmHm7DhX7KRv4tNqTyLlPNzUsFwTPzkWovdy8EeS6O6++sTS+DWPTTP1cVbckNcztyB1lgkhq0iK50WPkB5NWG5EDdih8yZpRdPXSGxNnap73HmxTfeEfIqDFnHm8avsZjc13qM2rYJhSM2wwRlQxWyoPbL3DcsYO5UlcRycxa1fwAY/rrXguaH1L84LvSDjDSzbmbgFw4zFD78q6wUUgXIidpxLtloxSoBFgqRJGokTdMPK6o7cfdBrvBPcOpIXOhpHe/z+wikzCRB3f/gRpc44Dj8JJLsjpL9XATrAF4FDT5P2h945RX4bqOfrX/EqFyaLMvlQAieUgvcyQu+XDP+VEMmq4lM9rQTTCygEIQqKEkjsVGvuJZyud/z1VwgeDIFs2Cad7eXPu+vjw6h993w+qj3Yc2rYgI1cXk6J67qYt6xziEc+8gFMMF7Y15qX9Cu2S47ERN3yg1lfw6sEEgHdlcCLSdp77SyzvYww/SdyLzNeWNyp/EbtnkgoXip+OvsKzNO6wMJ6cXfkSvn2K8xwyVLb1nqOemEsn/dhl7qMaQ5TqNkRLuZrNPhEotoDCLQROJJaophrsC0w2jtxhqtzF3hv3foEMDnBwY88Lr5YP72+JN2+ELa1ryb7gC48mdWRWQq8Am07b2shJMXa5Kc/0R7AuGJd9schQc6kRM6b2B1xTp8iC0bAwDB1m6osx5KCMkDNwyLOUdmVhOL4PcZ57Ns+jsBrU6oK2lVa4H+Ns2Ud4ei2DzFAqB4smsQ50ZMAyeKst9NJYmoKg4Fl7oUIAaQsrn85uUlDOXPnJHCO93WUzNzF+ZKsVW9FP0KwBzcX/lHpy0VbLHPK7ZfNMGuW+LGlk1QvE8L5temzSWFygZ4QbLiu2AlXo5bFKPZ05/Zotb41+EmD+lRlIZxPUBLI6niH2wpjtdpkSopOlB6VHeHdLQ1aAk1+DpX7E5AAAtCsR9EvbfJxriY9QxQumw/C0Z+XQ7R5qyveYTuC3sjRXf82qT5agrc8swt0g0tikbWP3vUT3gSPmYF0TzXSo/VvyOxHyIrSAhftljiS25UPP+e4KZk5cLUVxkmtP9t3ePu+aAM4LmeY+R41O60yaEKv4KEkKo0ORKdhjGwhrgk8D2NaUfnrPCPfykHC6ginfYVMK9UTps/jfbZjTKr1hbg1F4pYYshq9dn2K4IUjFiOeIZeA1xzY8Xt/TnLOTKWM5wI/CPZ71nU0VrdSi8qvA+vv2odhkB0P92+mjdPcEMMh3igrrRowjwAheGxMq8k34x2kBzMoe7mH6tOTMDYRHUxjxYNpVlfPDDQ2dV0+4w
m76GBPesSwwOmh2T2TOlNRlaZGyVN79TIz5jy2iTJ4dLuTeHdh5JMXMS+OLZ8TftPZuRCEVri6I/h8lG5WhAhLnJS9aBNEhstAZX+cUnLxs61D80XVRd+izU6GXuOH/INpYV/9zPwlkl5CNpz0kneBhJTMEgWPaAJ3vERMkLzFe57mEeIOCz9Jba9AQ8mqKZBpIkMGfo9da06wWDRW1eVo+FPmlSzhomCYRX30uI+813KPQRF2l9orfTJvrDiuE8RqVXBXu/ap1FKbgyRmMp8PNrU3JU5HWRKY3ONNQAnLFoBcB0DUxxY1KzR4IwjJ4Vp+ugKcWN8CHH3knuyuJu+jHXkLBGeOExLM1IlL+yc2iBx2fasGZz/DlXkeCrjPdjcMCUmuhPRvAKi2C+nlnuBEITV0pjMkBq3M/o/Rob+lDZy/ravlz/0l/3nr1pExSMqQT70sSsUWMO4m/VkJYL5b86ZecjlJb8bFvVkbgAyFXCy3Iwpp+0CwFSAhcwAhh6cLR7P0bJE6/N16vHHKgc70pDx4Lu7Q4IL4xS5HqkhMwOq8eDmdcalMtYz3O8O+i4i4+FzLqK7SInj/Jr/yBquA78N7jYTNnCtN5k3WoNJOum1vohAXWax9nYPTdtPLyczpYVfG+dUZTKhnCLkoOacjKbe1mdTRXgR3jKIEoiW5eIXZKBbcMyh6V5uGOsiXYUqzPaa/pbDnk6r9c/fPUeVPqnsMydl1q/ry+fs0IIWHmIUS3ir/ZaBpsI3aI7uQ1/aUJ70Qtz6GGKPQoSA7tSrwnns4EJG4Qy8J9fVYcHhY8xavaLFaNL5LsEAKoFaCuAXjQd9Hrnv+IA2R17GqUC2J8Nq9Z2za14w/klBncOUPHRv4NJ1fTA==" + - name: GOOGLE_APPLICATION_CREDENTIALS + value: "/code/creds.json" + - name: GS_BUCKET_NAME + value: "rrkibdem" + - name: IMS_DB + value: "rrkibdem_ims" + - name: CELERY_BROKER_URL + value: "rediss://:QYTYXHGQHAdzx27jlxuJyqfnnaks61klHvPhD1vPx+Q=@sootballsdev.redis.cache.windows.net:6380/0" + - name: CELERY_RESULT_BACKEND + value: "rediss://:QYTYXHGQHAdzx27jlxuJyqfnnaks61klHvPhD1vPx+Q=@sootballsdev.redis.cache.windows.net:6380/0" + - name: SECRET_KEY + value: "igy0moow=_qbsbiw4ln&wnpp5+ocvy*y(ov_9a$7j^1k4ccn86" + - name: SODTIME + value: "20:00" +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_ims_beat + depends: + kind: package + nameOrGUID: "sootballs_ims_beat" + version: "1.13.0" +spec: + runtime: cloud + depends: + - kind: deployment + nameOrGUID: sootballs_ims_pgbouncer + # TODO: Come back once Minio and DB is deployed + # - kind: deployment + # nameOrGUID: sootballs_minio + envArgs: + - name: ACC_KEY + value: "rooir+ucWD0ObAHVGdco3T7qFskEpZfgZm0lQdIxbsowLkct2uXi3zKYi+EKJmTpmtqsNGYVMswsp719klvl7Q==" + - name: ACC_NAME + value: "rrkibdem" + - name: AUTH_ENABLED + value: "True" + - name: AZURE_CONNECTION_STRING + value: "DefaultEndpointsProtocol=https;AccountName=rrkibdem;AccountKey=rooir+ucWD0ObAHVGdco3T7qFskEpZfgZm0lQdIxbsowLkct2uXi3zKYi+EKJmTpmtqsNGYVMswsp719klvl7Q==;EndpointSuffix=core.windows.net" + - name: TIMEZONE + value: "Asia/Tokyo" + - name: CONTAINER_NAME + value: "warehouse" + - name: DAYS_BEFORE_ARCHIVE + value: "15" + - name: DEFAULT_FILE_STORAGE + value: "storages.backends.gcloud.GoogleCloudStorage" + - name: DEPLOY_ON_CLOUD + value: "True" + - name: EODTIME + value: "19:00" + - name: GCP_CREDS + value: 
"U2FsdGVkX19WrtHt0rhAAhj61mBc8S43+r1Hp61wGeAM3HOksh893DzhmuBMNfrzISGwyU5EOove2VSlwhXVzBp9TapdlDS1LZ2GEmAMsthuHtsoRv/F6wt61pKQ6D0dTQME8mia/weqjzy4UKQPv/acUsSMvAsk6MyDTNwMuz0SuGTvbJwZfIXEVgkC7qXsX8h7yjczbol2vxlXTTNC8QC0iJGT923MoxGt6Xd1XUZjB/stp3y1DLXG7i9EdPr4U4C36ywpv13gDKSzRetWnNamhPVreMyNIwX9xs7c5eWRvfZXn70ayuLWizJajVSljOWDf+atgyUMHtvBKPrvXnhegOXmMwkyDmvKxhTb+lEYlcQCDzERqZB/Co0SbqQUJhs9O3/XnmLmxgY1T83EuSBgiT3Gbt9TQXI2p8l21QapMJ4uvItLD3dXvEMa/VlmLDRc7+thlhpP6z8UFOMdK84j15IFa4oEtPpPA6FKkZPZq8nGLSTVp1WAAf54yMb3H5W1tVz6ICva+lk35Lm8Zd7VxocZxoDnq36oOB9/y6VajU6IVDPP8iuIq3sHQD8JhbGvoqh9wRdcKWUwZsc3TNK3fxHtS0wTxa9TTuoX34A3FKFri2GskiEv55ICeCu733yL9KnR5bOHs11s0ECBH40M7a8W0XAxMIe5qeDOJwhjOXlO6tDaCsFjHAIGDs8NRjgdJJI13+8Vn1XqNj4LMxaLFm2oRG+krHCyyC2PoUWBUixu+6jpLpcIhi9G3JM98RUPLy6qX4Te9eQFuODMi9tPXLyBQAAFPXBxTbnGTJY1xyl+m+P8piT1t67V4FTK1JGollEEn7eL+V5tYWwPG/TVS9AKTcg7lBi/dEdwVZ/HH8PG6LUs1DKUmHm7DhX7KRv4tNqTyLlPNzUsFwTPzkWovdy8EeS6O6++sTS+DWPTTP1cVbckNcztyB1lgkhq0iK50WPkB5NWG5EDdih8yZpRdPXSGxNnap73HmxTfeEfIqDFnHm8avsZjc13qM2rYJhSM2wwRlQxWyoPbL3DcsYO5UlcRycxa1fwAY/rrXguaH1L84LvSDjDSzbmbgFw4zFD78q6wUUgXIidpxLtloxSoBFgqRJGokTdMPK6o7cfdBrvBPcOpIXOhpHe/z+wikzCRB3f/gRpc44Dj8JJLsjpL9XATrAF4FDT5P2h945RX4bqOfrX/EqFyaLMvlQAieUgvcyQu+XDP+VEMmq4lM9rQTTCygEIQqKEkjsVGvuJZyud/z1VwgeDIFs2Cad7eXPu+vjw6h993w+qj3Yc2rYgI1cXk6J67qYt6xziEc+8gFMMF7Y15qX9Cu2S47ERN3yg1lfw6sEEgHdlcCLSdp77SyzvYww/SdyLzNeWNyp/EbtnkgoXip+OvsKzNO6wMJ6cXfkSvn2K8xwyVLb1nqOemEsn/dhl7qMaQ5TqNkRLuZrNPhEotoDCLQROJJaophrsC0w2jtxhqtzF3hv3foEMDnBwY88Lr5YP72+JN2+ELa1ryb7gC48mdWRWQq8Am07b2shJMXa5Kc/0R7AuGJd9schQc6kRM6b2B1xTp8iC0bAwDB1m6osx5KCMkDNwyLOUdmVhOL4PcZ57Ns+jsBrU6oK2lVa4H+Ns2Ud4ei2DzFAqB4smsQ50ZMAyeKst9NJYmoKg4Fl7oUIAaQsrn85uUlDOXPnJHCO93WUzNzF+ZKsVW9FP0KwBzcX/lHpy0VbLHPK7ZfNMGuW+LGlk1QvE8L5temzSWFygZ4QbLiu2AlXo5bFKPZ05/Zotb41+EmD+lRlIZxPUBLI6niH2wpjtdpkSopOlB6VHeHdLQ1aAk1+DpX7E5AAAtCsR9EvbfJxriY9QxQumw/C0Z+XQ7R5qyveYTuC3sjRXf82qT5agrc8swt0g0tikbWP3vUT3gSPmYF0TzXSo/VvyOxHyIrSAhftljiS25UPP+e4KZk5cLUVxkmtP9t3ePu+aAM4LmeY+R41O60yaEKv4KEkKo0ORKdhjGwhrgk8D2NaUfnrPCPfykHC6ginfYVMK9UTps/jfbZjTKr1hbg1F4pYYshq9dn2K4IUjFiOeIZeA1xzY8Xt/TnLOTKWM5wI/CPZ71nU0VrdSi8qvA+vv2odhkB0P92+mjdPcEMMh3igrrRowjwAheGxMq8k34x2kBzMoe7mH6tOTMDYRHUxjxYNpVlfPDDQ2dV0+4wm76GBPesSwwOmh2T2TOlNRlaZGyVN79TIz5jy2iTJ4dLuTeHdh5JMXMS+OLZ8TftPZuRCEVri6I/h8lG5WhAhLnJS9aBNEhstAZX+cUnLxs61D80XVRd+izU6GXuOH/INpYV/9zPwlkl5CNpz0kneBhJTMEgWPaAJ3vERMkLzFe57mEeIOCz9Jba9AQ8mqKZBpIkMGfo9da06wWDRW1eVo+FPmlSzhomCYRX30uI+813KPQRF2l9orfTJvrDiuE8RqVXBXu/ap1FKbgyRmMp8PNrU3JU5HWRKY3ONNQAnLFoBcB0DUxxY1KzR4IwjJ4Vp+ugKcWN8CHH3knuyuJu+jHXkLBGeOExLM1IlL+yc2iBx2fasGZz/DlXkeCrjPdjcMCUmuhPRvAKi2C+nlnuBEITV0pjMkBq3M/o/Rob+lDZy/ravlz/0l/3nr1pExSMqQT70sSsUWMO4m/VkJYL5b86ZecjlJb8bFvVkbgAyFXCy3Iwpp+0CwFSAhcwAhh6cLR7P0bJE6/N16vHHKgc70pDx4Lu7Q4IL4xS5HqkhMwOq8eDmdcalMtYz3O8O+i4i4+FzLqK7SInj/Jr/yBquA78N7jYTNnCtN5k3WoNJOum1vohAXWax9nYPTdtPLyczpYVfG+dUZTKhnCLkoOacjKbe1mdTRXgR3jKIEoiW5eIXZKBbcMyh6V5uGOsiXYUqzPaa/pbDnk6r9c/fPUeVPqnsMydl1q/ry+fs0IIWHmIUS3ir/ZaBpsI3aI7uQ1/aUJ70Qtz6GGKPQoSA7tSrwnns4EJG4Qy8J9fVYcHhY8xavaLFaNL5LsEAKoFaCuAXjQd9Hrnv+IA2R17GqUC2J8Nq9Z2za14w/klBncOUPHRv4NJ1fTA==" + - name: GOOGLE_APPLICATION_CREDENTIALS + value: "/code/creds.json" + - name: GS_BUCKET_NAME + value: "rrkibdem" + - name: IMS_DB + value: "rrkibdem_ims" + - name: CELERY_BROKER_URL + value: "rediss://:QYTYXHGQHAdzx27jlxuJyqfnnaks61klHvPhD1vPx+Q=@sootballsdev.redis.cache.windows.net:6380/0" + - name: CELERY_RESULT_BACKEND + value: "rediss://:QYTYXHGQHAdzx27jlxuJyqfnnaks61klHvPhD1vPx+Q=@sootballsdev.redis.cache.windows.net:6380/0" + - name: SECRET_KEY 
+ value: "igy0moow=_qbsbiw4ln&wnpp5+ocvy*y(ov_9a$7j^1k4ccn86" + - name: SODTIME + value: "20:00" +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_systemui + depends: + kind: package + nameOrGUID: "sootballs_systemui" + version: "1.13.0" +spec: + runtime: cloud + envArgs: + - name: DOCKER_STDOUT + value: "true" + - name: IMS_AUTH_PASSWORD + value: "airborne_rr" + - name: IMS_AUTH_USERNAME + value: "root" + - name: IMS_URL + value: "https://ims-rrkibdem-buonj.ep-r.io" + - name: SENTRY_DSN + value: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18" + - name: SOOTBALLS_MAP + value: "rrkibdem" + - name: SYSTEM_UI_REMOTE_MODE + value: "true" + - name: USE_LOCAL_MAP + value: "false" + - name: WS_EXTERNAL_PORT + value: "80" + rosNetworks: + - depends: + kind: network + nameOrGUID: "sootballs" + staticRoutes: + - name: "SYSTEM_UI" + depends: + kind: staticroute + nameOrGUID: ui-{{ routePrefix }} +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_amr_06 + depends: + kind: package + nameOrGUID: "sootballs_robot" + version: "1.13.0" +spec: + runtime: device + # TODO: Come back when IMS is bindable + # depends: + # - kind: deployment + # nameOrGUID: sootballs_ims + rosNetworks: + - depends: + kind: network + nameOrGUID: sootballs + device: + depends: + kind: device + nameOrGUID: amr06 + envArgs: + - name: IMS_AUTH_USERNAME + value: root + - name: IMS_AUTH_PASSWORD + value: airborne_rr + - name: IMS_URL + value: "https://ims-rrkibdem-buonj.ep-r.io" + - name: "SOOTBALLS_MAP" + value: "rrkibdem" + - name: "SENTRY_DSN" + value: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18" + - name: "DOCKER_STDOUT" + value: "true" + - name: "DISABLE_MULTICAST" + value: "false" + - name: "ROS_DOMAIN_ID" + value: "5" + - name: "USE_PARAMS_IO" + value: "true" +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_edge_edge02 + depends: + kind: package + nameOrGUID: "sootballs_edge" + version: "1.13.0" +spec: + runtime: device + # TODO: Come back when IMS is bindable + # depends: + # - kind: deployment + # nameOrGUID: sootballs_ims + rosNetworks: + - depends: + kind: network + nameOrGUID: sootballs + device: + depends: + kind: device + nameOrGUID: edge02 + envArgs: + - name: IMS_AUTH_USERNAME + value: root + - name: IMS_AUTH_PASSWORD + value: airborne_rr + - name: IMS_URL + value: "https://ims-rrkibdem-buonj.ep-r.io" + - name: "SOOTBALLS_MAP" + value: "rrkibdem" + - name: "SENTRY_DSN" + value: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18" + - name: "DOCKER_STDOUT" + value: "true" + - name: "DISABLE_MULTICAST" + value: "false" + - name: "ROS_DOMAIN_ID" + value: "5" + - name: "USE_PARAMS_IO" + value: "true" diff --git a/examples/demo/values.yaml b/examples/demo/values.yaml new file mode 100644 index 00000000..01cb50ae --- /dev/null +++ b/examples/demo/values.yaml @@ -0,0 +1 @@ +routePrefix: "rrkibdem" diff --git a/examples/kiba-robots/00_disk.yaml b/examples/kiba-robots/00_disk.yaml new file mode 100644 index 00000000..4549163a --- /dev/null +++ b/examples/kiba-robots/00_disk.yaml @@ -0,0 +1,7 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Disk" +metadata: + name: "small-pvc-4" +spec: + runtime: cloud + capacity: 4 diff --git a/examples/kiba-robots/01_secret.yaml b/examples/kiba-robots/01_secret.yaml new file mode 100644 index 
00000000..c0ee3a6f
--- /dev/null
+++ b/examples/kiba-robots/01_secret.yaml
@@ -0,0 +1,12 @@
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Secret"
+metadata:
+  name: "sootballs"
+  guid: secret-vebkjobybhhwmyiwkwvndagu
+spec:
+  type: Docker
+  docker:
+    username: shaishavrapyuta
+    password: asdfg123$
+    email: shaishavrapyuta
+    # registry: https://index.docker.io/v1/
diff --git a/examples/kiba-robots/02_routednetwork.yaml b/examples/kiba-robots/02_routednetwork.yaml
new file mode 100644
index 00000000..67fea6da
--- /dev/null
+++ b/examples/kiba-robots/02_routednetwork.yaml
@@ -0,0 +1,9 @@
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Network"
+metadata:
+  name: "sootballs"
+spec:
+  runtime: "cloud"
+  type: "routed"
+  rosDistro: "melodic"
+  resourceLimits: "small"
\ No newline at end of file
diff --git a/examples/kiba-robots/03_edge_packages.yaml b/examples/kiba-robots/03_edge_packages.yaml
new file mode 100644
index 00000000..64031eef
--- /dev/null
+++ b/examples/kiba-robots/03_edge_packages.yaml
@@ -0,0 +1,207 @@
+---
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Package"
+metadata:
+  name: "Sootballs SUI2"
+  version: "1.0.0"
+  labels:
+    app: sootballs
+spec:
+  runtime: "cloud"
+  ros:
+    enabled: True
+    version: "melodic"
+    rosEndpoints:
+    - name: "/cmd_global_charge"
+      type: "topic"
+      qos: "hi"
+    - name: "/cmd_global_in"
+      type: "topic"
+      qos: "hi"
+    - name: "/cmd_move_to"
+      type: "topic"
+      qos: "hi"
+    - name: "/dispatcher/active_tote_unload_request"
+      type: "topic"
+      qos: "low"
+    - name: "/dispatcher/control_request"
+      type: "topic"
+      qos: "low"
+    - name: "/dispatcher/modify_order_request"
+      type: "topic"
+      qos: "low"
+    - name: "/dispatcher/productivity_configs_request"
+      type: "topic"
+      qos: "low"
+    - name: "/edge/emergency_released_request"
+      type: "topic"
+      qos: "low"
+    - name: "/manual_order_recovery"
+      type: "topic"
+      qos: "hi"
+    - name: "/reservation_request"
+      type: "topic"
+      qos: "hi"
+  cloud:
+    replicas: 1
+  executables:
+  - name: "systemui"
+    type: docker
+    docker:
+      image: "rrdockerhub/sootballs:1.14.0-rc4"
+      pullSecret:
+        depends:
+          kind: secret
+          nameOrGUID: sootballs
+    command: "roslaunch sootballs_applications_edge remote_ui.launch"
+    limits:
+      cpu: 2
+      memory: 8192
+
+  environmentVars:
+  - name: "IMS_AUTH_USERNAME"
+    description: "Username to authenticate to IMS"
+    defaultValue: "root"
+  - name: "IMS_AUTH_PASSWORD"
+    description: "Password to authenticate to IMS"
+    defaultValue: "airborne_rr"
+  - name: "SENTRY_DSN"
+    description: "Sentry DSN for error reporting"
+    defaultValue: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18"
+  - name: "SOOTBALLS_MAP"
+    defaultValue: "rrkibstg"
+  - name: "SYSTEM_UI_REMOTE_MODE"
+    defaultValue: "true"
+  - name: "WS_EXTERNAL_PORT"
+    defaultValue: "80"
+  - name: "DOCKER_STDOUT"
+    defaultValue: "true"
+  endpoints:
+  - name: "SYSTEM_UI"
+    type: external-https
+    port: 443
+    targetPort: 7099
+  - name: "SYSTEM_UI_ROSBRIDGE_URL"
+    type: external-https
+    port: 443
+    targetPort: 9092
+---
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Package"
+metadata:
+  name: "Sootballs Edge2"
+  version: "1.0.0"
+  labels:
+    app: sootballs
+spec:
+  runtime: "device"
+  device:
+    arch: amd64
+    restart: always
+  ros:
+    enabled: True
+    version: "melodic"
+    rosEndpoints:
+    - name: "/dispatcher/control_response"
+      type: "topic"
+      qos: "low"
+    - name: "/dispatcher/modify_order_response"
+      type: "topic"
+      qos: "low"
+    - name: "/dispatcher/productivity_configs_response"
+      type: "topic"
+      qos: "low"
+    - name: "/dispatcher/active_tote_unload_response"
+      type: "topic"
+      qos: "low"
+    - name: "/edge/emergency_released_response"
+      type: "topic"
+      qos: "low"
+    - name: "/robot_reservation"
+      type: "topic"
+      qos: "low"
+    - name: "/sui/status"
+      type: "topic"
+      qos: "low"
+    - name: "/sui/overview"
+      type: "topic"
+      qos: "low"
+    - name: "/sui/main"
+      type: "topic"
+      qos: "low"
+  executables:
+  - name: "systemui"
+    type: docker
+    docker:
+      image: "rrdockerhub/sootballs:1.14.0-rc4"
+      pullSecret:
+        depends:
+          kind: secret
+          nameOrGUID: sootballs
+    command: "roslaunch sootballs_applications_edge default.launch --wait"
+
+  environmentVars:
+  - name: "IMS_AUTH_USERNAME"
+    description: "Username to authenticate to IMS"
+    defaultValue: "root"
+  - name: "IMS_AUTH_PASSWORD"
+    description: "Password to authenticate to IMS"
+    defaultValue: "airborne_rr"
+  - name: "SENTRY_DSN"
+    description: "Sentry DSN for error reporting"
+    defaultValue: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18"
+  - name: "DOCKER_STDOUT"
+    defaultValue: "true"
+  - name: "DISABLE_MULTICAST"
+    defaultValue: "false"
+  - name: "ROS_DOMAIN_ID"
+    defaultValue: "10"
+  - name: "USE_PARAMS_IO"
+    defaultValue: "true"
+  - name: "EDGE_ALARM_MONITOR"
+    defaultValue: "true"
+---
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Package"
+metadata:
+  name: "Sootballs Robot2"
+  version: "1.0.0"
+  labels:
+    app: sootballs
+spec:
+  runtime: "device"
+  device:
+    arch: amd64
+    restart: always
+  ros:
+    enabled: True
+    version: "melodic"
+  executables:
+  - name: "systemui"
+    type: docker
+    docker:
+      image: "rrdockerhub/sootballs:1.14.0-rc4"
+      pullSecret:
+        depends:
+          kind: secret
+          nameOrGUID: sootballs
+    command: "roslaunch sootballs_applications_robot default.launch --wait"
+
+  environmentVars:
+  - name: "IMS_AUTH_USERNAME"
+    description: "Username to authenticate to IMS"
+    defaultValue: "root"
+  - name: "IMS_AUTH_PASSWORD"
+    description: "Password to authenticate to IMS"
+    defaultValue: "airborne_rr"
+  - name: "SENTRY_DSN"
+    description: "Sentry DSN for error reporting"
+    defaultValue: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18"
+  - name: "DOCKER_STDOUT"
+    defaultValue: "true"
+  - name: "DISABLE_MULTICAST"
+    defaultValue: "false"
+  - name: "ROS_DOMAIN_ID"
+    defaultValue: "10"
+  - name: "USE_PARAMS_IO"
+    defaultValue: "true"
\ No newline at end of file
diff --git a/examples/kiba-robots/03_packages.yaml b/examples/kiba-robots/03_packages.yaml
new file mode 100644
index 00000000..ba90a3fa
--- /dev/null
+++ b/examples/kiba-robots/03_packages.yaml
@@ -0,0 +1,173 @@
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Package"
+metadata:
+  name: sootballs_ims
+  version: v1.14.0-rc4-6
+  description: Sootballs IMS Package
+  labels:
+    app: ims
+spec:
+  runtime: cloud
+  cloud:
+    replicas: 1
+  executables:
+  - type: docker
+    name: Django
+    docker:
+      image: rrdockerhub/sootballs_ims:1.14.0-rc4
+      pullSecret:
+        depends:
+          kind: secret
+          nameOrGUID: sootballs
+    limits:
+      cpu: 0.5
+      memory: 2048
+  environmentVars:
+  - name: SECRET_KEY
+  - name: AUTH_ENABLED
+    default: "False"
+  - name: TIMEZONE
+    default: "Asia/Tokyo"
+  - name: CONTAINER_NAME
+    default: "warehouse"
+  - name: DAYS_BEFORE_ARCHIVE
+    default: "15"
+  - name: DEPLOY_ON_CLOUD
+    default: "False"
+  - name: SODTIME
+    default: "09:00"
+  - name: EODTIME
+    default: "19:00"
+  - name: AWS_STORAGE_BUCKET_NAME
+    default: "kiba-robots"
+  - name: IMS_DB
+    default: ims_db
+  - name: CELERY_BROKER_URL
+  - name:
CELERY_RESULT_BACKEND + + endpoints: + - name: IMS_URL + type: external-https + port: 443 + targetPort: 8002 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: sootballs_db + version: v1.0.11 + description: Sootballs DB package + labels: + app: ims +spec: + runtime: cloud + cloud: + replicas: 1 + executables: + - type: docker + name: redis + docker: + image: redis:4.0-alpine + limits: + cpu: 0.5 + memory: 2048 + - type: docker + name: postgres + docker: + image: postgis/postgis:9.6-3.2 + limits: + cpu: 1 + memory: 4096 + environmentVars: + - name: POSTGRES_MULTIPLE_DATABASES + default: ims_db,wcs_db + - name: PGDATA + default: /var/lib/postgresql/data/pgdata + - name: POSTGRES_HOST_AUTH_METHOD + default: trust + - name: POSTGRES_USER + default: postgres + exposed: true + exposedName: POSTGRES_USER + - name: POSTGRES_PASSWORD + default: password + exposed: true + exposedName: POSTGRES_PASSWORD + endpoints: + - name: POSTGRES + type: internal-tcp + port: 5432 + targetPort: 5432 + - name: REDIS + type: internal-tcp + port: 6379 + targetPort: 6379 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: MinIO File Server + version: v1.0.11 + description: Sootballs File Server package + labels: + app: ims +spec: + runtime: cloud + cloud: + replicas: 1 + executables: + - type: docker + name: minio_executable + docker: + image: rrdockerhub/minio-server + limits: + cpu: 1 + memory: 4096 + environmentVars: + - name: MINIO_ACCESS_KEY + - name: MINIO_SECRET_KEY + endpoints: + - name: MINIO + type: external-https + port: 443 + targetPort: 9000 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: sootballs_wcs + version: v1.14.0-rc4-2 + description: Sootballs WCS package + labels: + app: wcs +spec: + runtime: cloud + cloud: + replicas: 1 + executables: + - type: docker + name: django + command: /code/docker/entrypoint.sh + docker: + image: rrdockerhub/sootballs_wcs:1.14.0-rc4 + pullSecret: + depends: + kind: secret + nameOrGUID: sootballs + limits: + cpu: 1 + memory: 4096 + environmentVars: + - name: TIMEZONE + default: Asia/Tokyo + - name: WCS_DB + default: wcs_db + - name: CELERY_BROKER_URL + - name: CELERY_RESULT_BACKEND + - name: LOCAL_PRINT_SERVER_URL + - name: SECRET_KEY + endpoints: + - name: WCS_URL + type: external-https + port: 443 + targetPort: 8003 \ No newline at end of file diff --git a/examples/kiba-robots/04_ims.yaml b/examples/kiba-robots/04_ims.yaml new file mode 100644 index 00000000..50bbfa9c --- /dev/null +++ b/examples/kiba-robots/04_ims.yaml @@ -0,0 +1,144 @@ +--- +#sootballs_staticroutes +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "StaticRoute" +metadata: + name: "ims-kibarobots-apply" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "StaticRoute" +metadata: + name: "minio-kibarobots-apply" +--- +#sootballs_minio +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_minio + depends: + kind: package + nameOrGUID: "MinIO File Server" + version: "v1.0.11" + labels: + app: ims +spec: + runtime: cloud + envArgs: + - name: MINIO_ACCESS_KEY + value: access + - name: MINIO_SECRET_KEY + value: secret_key + staticRoutes: + - name: MINIO + depends: + kind: staticroute + nameOrGUID: minio-kibarobots-apply + volumes: + - execName: minio_executable + mountPath: "/data" + subPath: "data" + depends: + kind: disk + nameOrGUID: "minio-pvc" +--- +#sootballs_db +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_db + depends: + 
kind: package
+    nameOrGUID: sootballs_db
+    version: "v1.0.11"
+  labels:
+    app: ims
+spec:
+  runtime: cloud
+  envArgs:
+  - name: PGDATA
+    value: /var/lib/postgresql/data/pgdata
+  - name: POSTGRES_HOST_AUTH_METHOD
+    value: trust
+  - name: POSTGRES_MULTIPLE_DATABASES
+    value: ims_db,wcs_db
+  - name: POSTGRES_PASSWORD
+    value: sootballs
+  - name: POSTGRES_USER
+    value: postgres
+  volumes:
+  - execName: postgres
+    mountPath: "/var/lib/postgresql/data/pgdata"
+    subPath: "pgdata"
+    depends:
+      kind: disk
+      nameOrGUID: "postgres-pvc"
+
+#sootballs_ims
+---
+apiVersion: apiextensions.rapyuta.io/v1
+kind: Deployment
+metadata:
+  name: sootballs_ims
+  depends:
+    kind: package
+    nameOrGUID: "sootballs_ims"
+    version: "v1.14.0-rc4-6"
+  labels:
+    app: ims
+spec:
+  runtime: cloud
+  depends:
+  - kind: deployment
+    nameOrGUID: sootballs_db
+  - kind: deployment
+    nameOrGUID: sootballs_minio
+  envArgs:
+  - name: AUTH_ENABLED
+    value: "True"
+  - name: AWS_STORAGE_BUCKET_NAME
+    value: kiba-robots
+  #TODO this should be parsed from redis url in the docker container.
+  - name: CELERY_BROKER_URL
+    value: rediss://inst-lrxokslpvkctnstujixsczsw-redis-srv.dep-ns-inst-lrxokslpvkctnstujixsczsw.svc:6379/5
+  #TODO this should be parsed from redis url in the docker container.
+  - name: CELERY_RESULT_BACKEND
+    value: rediss://inst-lrxokslpvkctnstujixsczsw-redis-srv.dep-ns-inst-lrxokslpvkctnstujixsczsw.svc:6379/5
+  - name: CONTAINER_NAME
+    value: warehouse
+  - name: DAYS_BEFORE_ARCHIVE
+    value: "15"
+  - name: DEPLOY_ON_CLOUD
+    value: "False"
+  - name: EODTIME
+    value: "19:00"
+  - name: IMS_DB
+    value: ims_db
+  - name: SECRET_KEY
+    value: asdasd
+  - name: SODTIME
+    value: "09:00"
+  - name: TIMEZONE
+    value: Asia/Tokyo
+  - name: TEST_ENV
+    value: asdsad
+  staticRoutes:
+  - name: IMS_URL
+    depends:
+      kind: staticroute
+      nameOrGUID: ims-kibarobots-apply
+---
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Disk"
+metadata:
+  name: "postgres-pvc"
+spec:
+  runtime: cloud
+  capacity: 4
+---
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Disk"
+metadata:
+  name: "minio-pvc"
+spec:
+  runtime: cloud
+  capacity: 4
diff --git a/examples/kiba-robots/05_wcs.yaml b/examples/kiba-robots/05_wcs.yaml
new file mode 100644
index 00000000..c3033ac5
--- /dev/null
+++ b/examples/kiba-robots/05_wcs.yaml
@@ -0,0 +1,40 @@
+---
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "StaticRoute"
+metadata:
+  name: "wcs-kibarobots-apply"
+---
+apiVersion: apiextensions.rapyuta.io/v1
+kind: Deployment
+metadata:
+  name: sootballs_wcs
+  depends:
+    kind: package
+    nameOrGUID: sootballs_wcs
+    version: "v1.14.0-rc4-2"
+  labels:
+    app: wcs
+spec:
+  runtime: cloud
+  depends:
+  - kind: deployment
+    nameOrGUID: sootballs_db
+  - kind: deployment
+    nameOrGUID: sootballs_ims
+  envArgs:
+  - name: CELERY_BROKER_URL
+    value: rediss://inst-lrxokslpvkctnstujixsczsw-redis-srv.dep-ns-inst-lrxokslpvkctnstujixsczsw.svc:6379/5
+  - name: CELERY_RESULT_BACKEND
+    value: rediss://inst-lrxokslpvkctnstujixsczsw-redis-srv.dep-ns-inst-lrxokslpvkctnstujixsczsw.svc:6379/5
+  - name: LOCAL_PRINT_SERVER_URL
+    value: " "
+  - name: SECRET_KEY
+    value: asdasd
+  - name: TIMEZONE
+    value: Asia/Tokyo
+  - name: WCS_DB
+    value: wcs_db
+
+
+
+
\ No newline at end of file
diff --git a/examples/kiba-robots/06_onlydb.yaml b/examples/kiba-robots/06_onlydb.yaml
new file mode 100644
index 00000000..57fad914
--- /dev/null
+++ b/examples/kiba-robots/06_onlydb.yaml
@@ -0,0 +1,84 @@
+---
+#sootballs_db
+apiVersion: apiextensions.rapyuta.io/v1
+kind: Deployment
+metadata:
+  name: sootballs_db
+  depends:
+
kind: package + nameOrGUID: sootballs_db + version: "v1.0.11" + labels: + app: ims +spec: + runtime: cloud + envArgs: + - name: PGDATA + value: /var/lib/postgresql/data/pgdata + - name: POSTGRES_HOST_AUTH_METHOD + value: trust + - name: POSTGRES_MULTIPLE_DATABASES + value: ims_db,wcs_db + - name: POSTGRES_PASSWORD + value: sootballs + - name: POSTGRES_USER + value: postgres + volumes: + - execName: postgres + mountPath: "/var/lib/postgresql/data/pgdata" + subPath: "pgdata" + depends: + kind: disk + nameOrGUID: "postgres-pvc" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: sootballs_db + version: v1.0.11 + description: Sootballs DB package + labels: + app: ims +spec: + runtime: cloud + cloud: + replicas: 1 + executables: + - type: docker + name: redis + docker: + image: redis:4.0-alpine + limits: + cpu: 0.5 + memory: 2048 + - type: docker + name: postgres + docker: + image: postgis/postgis:9.6-3.2 + limits: + cpu: 1 + memory: 4096 + environmentVars: + - name: POSTGRES_MULTIPLE_DATABASES + default: ims_db,wcs_db + - name: PGDATA + default: /var/lib/postgresql/data/pgdata + - name: POSTGRES_HOST_AUTH_METHOD + default: trust + - name: POSTGRES_USER + default: postgres + exposed: true + exposedName: POSTGRES_USER + - name: POSTGRES_PASSWORD + default: password + exposed: true + exposedName: POSTGRES_PASSWORD + endpoints: + - name: POSTGRES + type: internal-tcp + port: 5432 + targetPort: 5432 + - name: REDIS + type: internal-tcp + port: 6379 + targetPort: 6379 diff --git a/examples/kiba-robots/all-in-one.yaml b/examples/kiba-robots/all-in-one.yaml new file mode 100644 index 00000000..3384be50 --- /dev/null +++ b/examples/kiba-robots/all-in-one.yaml @@ -0,0 +1,712 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Secret" +metadata: + name: "sootballs" + guid: secret-vebkjobybhhwmyiwkwvndagu +spec: + type: Docker + docker: + username: shaishavrapyuta + password: asdfg123$ + email: shaishavrapyuta + # registry: https://index.docker.io/v1/ +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: "Sootballs SUI2" + version: "1.0.0" + labels: + app: sootballs +spec: + runtime: "cloud" + ros: + enabled: True + version: "melodic" + rosEndpoints: + - name: "/cmd_global_charge" + type: "topic" + qos: "hi" + - name: "/cmd_global_in" + type: "topic" + qos: "hi" + - name: "/cmd_move_to" + type: "topic" + qos: "hi" + - name: "/dispatcher/active_tote_unload_request" + type: "topic" + qos: "low" + - name: "/dispatcher/control_request" + type: "topic" + qos: "low" + - name: "/dispatcher/modify_order_request" + type: "topic" + qos: "low" + - name: "/dispatcher/productivity_configs_request" + type: "topic" + qos: "low" + - name: "/edge/emergency_released_request" + type: "topic" + qos: "low" + - name: "/manual_order_recovery" + type: "topic" + qos: "hi" + - name: "/reservation_request" + type: "topic" + qos: "hi" + cloud: + replicas: 1 + executables: + - name: "systemui" + type: docker + docker: + image: "rrdockerhub/sootballs:{{ testlabel }}" + pullSecret: + depends: + kind: secret + nameOrGUID: sootballs + command: "roslaunch sootballs_applications_edge remote_ui.launch" + runAsBash: False + limits: + cpu: 2 + memory: 8192 + + environmentVars: + - name: "IMS_AUTH_USERNAME" + description: "Username to authenticate to IMS" + defaultValue: "root" + - name: "IMS_AUTH_PASSWORD" + description: "Password to authenticate to IMS" + defaultValue: "airborne_rr" + - name: "SENTRY_DSN" + description: "Password to authenticate to IMS" + defaultValue: 
"https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18" + - name: "SOOTBALLS_MAP" + defaultValue: "rrkibstg" + - name: "SYSTEM_UI_REMOTE_MODE" + defaultValue: "true" + - name: "WS_EXTERNAL_PORT" + defaultValue: "80" + - name: "DOCKER_STDOUT" + defaultValue: "true" + endpoints: + - name: "SYSTEM_UI" + type: external-https + port: 443 + targetPort: 7099 + - name: "SYSTEM_UI_ROSBRIDGE_URL" + type: external-https + port: 443 + targetPort: 9092 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: "Sootballs Edge2" + version: "1.0.0" + labels: + app: sootballs +spec: + runtime: "device" + device: + arch: amd64 + restart: always + ros: + enabled: True + version: "melodic" + rosEndpoints: + - name: "/dispatcher/control_response" + type: "topic" + qos: "low" + - name: "/dispatcher/modify_order_response" + type: "topic" + qos: "low" + - name: "/dispatcher/productivity_configs_response" + type: "topic" + qos: "low" + - name: "/dispatcher/active_tote_unload_response" + type: "topic" + qos: "low" + - name: "/edge/emergency_released_response" + type: "topic" + qos: "low" + - name: "/robot_reservation" + type: "topic" + qos: "low" + - name: "/sui/status" + type: "topic" + qos: "low" + - name: "/sui/overview" + type: "topic" + qos: "low" + - name: "/sui/main" + type: "topic" + qos: "low" + executables: + - name: "systemui" + type: docker + docker: + image: "rrdockerhub/sootballs:{{ testlabel }}" + pullSecret: + depends: + kind: secret + nameOrGUID: sootballs + command: "roslaunch sootballs_applications_edge default.launch --wait" + runAsBash: False + + environmentVars: + - name: "IMS_AUTH_USERNAME" + description: "Username to authenticate to IMS" + defaultValue: "root" + - name: "IMS_AUTH_PASSWORD" + description: "Password to authenticate to IMS" + defaultValue: "airborne_rr" + - name: "SENTRY_DSN" + description: "Password to authenticate to IMS" + defaultValue: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18" + - name: "DOCKER_STDOUT" + defaultValue: "true" + - name: "DISABLE_MULTICAST" + defaultValue: "false" + - name: "ROS_DOMAIN_ID" + defaultValue: "10" + - name: "USE_PARAMS_IO" + defaultValue: "true" + - name: "EDGE_ALARM_MONITOR" + defaultValue: "true" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: "Sootballs Robot2" + version: "1.0.0" + labels: + app: sootballs +spec: + runtime: "device" + device: + arch: amd64 + restart: always + ros: + enabled: True + version: "melodic" + executables: + - name: "systemui" + type: docker + docker: + image: "rrdockerhub/sootballs:{{ testlabel }}" + pullSecret: + depends: + kind: secret + nameOrGUID: sootballs + command: "roslaunch sootballs_applications_robot default.launch --wait" + runAsBash: False + + environmentVars: + - name: "IMS_AUTH_USERNAME" + description: "Username to authenticate to IMS" + defaultValue: "root" + - name: "IMS_AUTH_PASSWORD" + description: "Password to authenticate to IMS" + defaultValue: "airborne_rr" + - name: "SENTRY_DSN" + description: "Password to authenticate to IMS" + defaultValue: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18" + - name: "DOCKER_STDOUT" + defaultValue: "true" + - name: "DISABLE_MULTICAST" + defaultValue: "false" + - name: "ROS_DOMAIN_ID" + defaultValue: "10" + - name: "USE_PARAMS_IO" + defaultValue: "true" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: 
sootballs_ims + version: v1.14.0-rc4-6 + description: Sootballs IMS Package + labels: + app: ims +spec: + runtime: cloud + cloud: + replicas: 1 + executables: + - type: docker + name: Django + docker: + image: rrdockerhub/sootballs_ims:{{ testlabel }} + pullSecret: + depends: + kind: secret + nameOrGUID: sootballs + limits: + cpu: 0.5 + memory: 2048 + command: "/code/docker/entrypoint.sh" + environmentVars: + - name: SECRET_KEY + - name: AUTH_ENABLED + default: "False" + - name: TIMEZONE + default: "Asia/Tokyo" + - name: CONTAINER_NAME + default: "warehouse" + - name: DAYS_BEFORE_ARCHIVE + default: "15" + - name: DEPLOY_ON_CLOUD + default: "False" + - name: SODTIME + default: "09:00" + - name: EODTIME + default: "19:00" + - name: AWS_STORAGE_BUCKET_NAME + default: "kiba-robots" + - name: IMS_DB + default: ims_db + - name: CELERY_BROKER_URL + - name: CELERY_RESULT_BACKEND + + endpoints: + - name: IMS_URL + type: external-https + port: 443 + targetPort: 8002 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: sootballs_db + version: v1.0.11 + description: Sootballs DB package + labels: + app: ims +spec: + runtime: cloud + cloud: + replicas: 1 + executables: + - type: docker + name: redis + docker: + image: redis:4.0-alpine + limits: + cpu: 0.5 + memory: 2048 + - type: docker + name: postgres + docker: + image: postgis/postgis:9.6-3.2 + limits: + cpu: 1 + memory: 4096 + environmentVars: + - name: POSTGRES_MULTIPLE_DATABASES + default: ims_db,wcs_db + - name: PGDATA + default: /var/lib/postgresql/data/pgdata + - name: POSTGRES_HOST_AUTH_METHOD + default: trust + - name: POSTGRES_USER + default: postgres + exposed: true + exposedName: POSTGRES_USER + - name: POSTGRES_PASSWORD + default: password + exposed: true + exposedName: POSTGRES_PASSWORD + endpoints: + - name: POSTGRES + type: internal-tcp + port: 5432 + targetPort: 5432 + - name: REDIS + type: internal-tcp + port: 6379 + targetPort: 6379 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: MinIO File Server + version: v1.0.11 + description: Sootballs File Server package + labels: + app: ims +spec: + runtime: cloud + cloud: + replicas: 1 + executables: + - type: docker + name: minio_executable + docker: + image: rrdockerhub/minio-server + limits: + cpu: 1 + memory: 4096 + environmentVars: + - name: MINIO_ACCESS_KEY + - name: MINIO_SECRET_KEY + endpoints: + - name: MINIO + type: external-https + port: 443 + targetPort: 9000 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" +metadata: + name: sootballs_wcs + version: v1.14.0-rc4-2 + description: Sootballs WCS package + labels: + app: wcs +spec: + runtime: cloud + cloud: + replicas: 1 + executables: + - type: docker + name: django + command: /code/docker/entrypoint.sh + docker: + image: rrdockerhub/sootballs_wcs:{{ testlabel}} + pullSecret: + depends: + kind: secret + nameOrGUID: sootballs + limits: + cpu: 1 + memory: 4096 + environmentVars: + - name: TIMEZONE + default: Asia/Tokyo + - name: WCS_DB + default: wcs_db + - name: CELERY_BROKER_URL + - name: CELERY_RESULT_BACKEND + - name: LOCAL_PRINT_SERVER_URL + - name: SECRET_KEY + endpoints: + - name: WCS_URL + type: external-https + port: 443 + targetPort: 8003 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Disk" +metadata: + name: "postgres-pvc" +spec: + runtime: cloud + capacity: 4 +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Disk" +metadata: + name: "minio-pvc" +spec: + runtime: cloud + capacity: 4 +--- +#sootballs_staticroutes +apiVersion: 
"apiextensions.rapyuta.io/v1" +kind: "StaticRoute" +metadata: + name: "ims-{{ routePrefix }}" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "StaticRoute" +metadata: + name: "minio-{{ routePrefix }}" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "StaticRoute" +metadata: + name: "wcs-{{ routePrefix }}" +--- +#sootballs_minio +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_minio + depends: + kind: package + nameOrGUID: "MinIO File Server" + version: "v1.0.11" + labels: + app: ims +spec: + runtime: cloud + envArgs: + - name: MINIO_ACCESS_KEY + value: access + - name: MINIO_SECRET_KEY + value: secret_key + staticRoutes: + - name: MINIO + depends: + kind: staticroute + nameOrGUID: minio-{{ routePrefix }} + volumes: + - execName: minio_executable + mountPath: "/data" + subPath: "data" + depends: + kind: disk + nameOrGUID: "minio-pvc" +--- +#sootballs_db +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_db + depends: + kind: package + nameOrGUID: sootballs_db + version: "v1.0.11" + labels: + app: ims +spec: + runtime: cloud + envArgs: + - name: PGDATA + value: /var/lib/postgresql/data/pgdata + - name: POSTGRES_HOST_AUTH_METHOD + value: trust + - name: POSTGRES_MULTIPLE_DATABASES + value: ims_db,wcs_db + - name: POSTGRES_PASSWORD + value: sootballs + - name: POSTGRES_USER + value: postgres + volumes: + - execName: postgres + mountPath: "/var/lib/postgresql/data/pgdata" + subPath: "pgdata" + depends: + kind: disk + nameOrGUID: "postgres-pvc" + +#sootballs_ims +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: sootballs_ims + depends: + kind: package + nameOrGUID: "sootballs_ims" + version: "v1.14.0-rc4-6" + labels: + app: ims +spec: + runtime: cloud + depends: + - kind: deployment + nameOrGUID: sootballs_db + - kind: deployment + nameOrGUID: sootballs_minio + envArgs: + - name: AUTH_ENABLED + value: "True" + - name: AWS_STORAGE_BUCKET_NAME + value: kiba-robots + #TODO this should be parsed from redis url in the docker container. + - name: CELERY_BROKER_URL + value: rediss://inst-lrxokslpvkctnstujixsczsw-redis-srv.dep-ns-inst-lrxokslpvkctnstujixsczsw.svc:6379/5 + #TODO this should be parsed from redis url in the docker container. 
+  - name: CELERY_RESULT_BACKEND
+    value: rediss://inst-lrxokslpvkctnstujixsczsw-redis-srv.dep-ns-inst-lrxokslpvkctnstujixsczsw.svc:6379/5
+  - name: CONTAINER_NAME
+    value: warehouse
+  - name: DAYS_BEFORE_ARCHIVE
+    value: "15"
+  - name: DEPLOY_ON_CLOUD
+    value: "False"
+  - name: EODTIME
+    value: "19:00"
+  - name: IMS_DB
+    value: ims_db
+  - name: SECRET_KEY
+    value: asdasd
+  - name: SODTIME
+    value: "09:00"
+  - name: TIMEZONE
+    value: Asia/Tokyo
+  - name: TEST_ENV
+    value: asdsad
+  staticRoutes:
+  - name: IMS_URL
+    depends:
+      kind: staticroute
+      nameOrGUID: ims-{{ routePrefix }}
+---
+apiVersion: apiextensions.rapyuta.io/v1
+kind: Deployment
+metadata:
+  name: sootballs_wcs
+  depends:
+    kind: package
+    nameOrGUID: sootballs_wcs
+    version: "v1.14.0-rc4-2"
+  labels:
+    app: wcs
+spec:
+  runtime: cloud
+  depends:
+  - kind: deployment
+    nameOrGUID: sootballs_db
+  - kind: deployment
+    nameOrGUID: sootballs_ims
+  envArgs:
+  - name: CELERY_BROKER_URL
+    value: rediss://inst-lrxokslpvkctnstujixsczsw-redis-srv.dep-ns-inst-lrxokslpvkctnstujixsczsw.svc:6379/5
+  - name: CELERY_RESULT_BACKEND
+    value: rediss://inst-lrxokslpvkctnstujixsczsw-redis-srv.dep-ns-inst-lrxokslpvkctnstujixsczsw.svc:6379/5
+  - name: LOCAL_PRINT_SERVER_URL
+    value: " "
+  - name: SECRET_KEY
+    value: asdasd
+  - name: TIMEZONE
+    value: Asia/Tokyo
+  - name: WCS_DB
+    value: wcs_db
+---
+#sootballs_staticroutes
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "StaticRoute"
+metadata:
+  name: "ui-{{ routePrefix }}"
+---
+apiVersion: "apiextensions.rapyuta.io/v1"
+kind: "Network"
+metadata:
+  name: "sootballs"
+spec:
+  runtime: "cloud"
+  type: "routed"
+  rosDistro: "melodic"
+  resourceLimits: "small"
+---
+apiVersion: apiextensions.rapyuta.io/v1
+kind: Deployment
+metadata:
+  name: sootballs_systemui
+  depends:
+    kind: package
+    nameOrGUID: Sootballs SUI2
+    version: "1.0.0"
+  labels:
+    app: systemui
+spec:
+  runtime: cloud
+  depends:
+  - kind: deployment
+    nameOrGUID: sootballs_ims
+  rosNetworks:
+  - depends:
+      kind: network
+      nameOrGUID: sootballs
+  envArgs:
+  - name: DOCKER_STDOUT
+    value: "true"
+  - name: IMS_AUTH_PASSWORD
+    value: airborne_rr
+  - name: IMS_AUTH_USERNAME
+    value: root
+  - name: SENTRY_DSN
+    value: https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18
+  - name: SOOTBALLS_MAP
+    value: rrkibstg
+  - name: SYSTEM_UI_REMOTE_MODE
+    value: "true"
+  - name: USE_LOCAL_MAP
+    value: "false"
+  - name: WS_EXTERNAL_PORT
+    value: "80"
+  staticRoutes:
+  - name: SYSTEM_UI
+    depends:
+      kind: staticroute
+      nameOrGUID: ui-{{ routePrefix }}
+
+---
+apiVersion: apiextensions.rapyuta.io/v1
+kind: Deployment
+metadata:
+  name: sootballs_edge_01
+  depends:
+    kind: package
+    nameOrGUID: "Sootballs Edge2"
+    version: "1.0.0"
+spec:
+  runtime: device
+  depends:
+  - kind: deployment
+    nameOrGUID: sootballs_ims
+  rosNetworks:
+  - depends:
+      kind: network
+      nameOrGUID: sootballs
+  device:
+    depends:
+      kind: device
+      nameOrGUID: kibstg-edge-1
+  envArgs:
+  - name: IMS_AUTH_USERNAME
+    value: root
+  - name: IMS_AUTH_PASSWORD
+    value: airborne_rr
+  - name: "SENTRY_DSN"
+    value: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18"
+  - name: "DOCKER_STDOUT"
+    value: "true"
+  - name: "DISABLE_MULTICAST"
+    value: "false"
+  - name: "ROS_DOMAIN_ID"
+    value: "10"
+  - name: "USE_PARAMS_IO"
+    value: "true"
+  - name: "EDGE_ALARM_MONITOR"
+    value: "true"
+---
+apiVersion: apiextensions.rapyuta.io/v1
+kind: Deployment
+metadata:
+  name: sootballs_amr_01
+  depends:
+    kind: package
+ nameOrGUID: "Sootballs Robot2" + version: "1.0.0" +spec: + runtime: device + depends: + - kind: deployment + nameOrGUID: sootballs_ims + rosNetworks: + - depends: + kind: network + nameOrGUID: sootballs + device: + depends: + kind: device + nameOrGUID: kibstg-amr-1 + envArgs: + - name: IMS_AUTH_USERNAME + value: root + - name: IMS_AUTH_PASSWORD + value: airborne_rr + - name: "SENTRY_DSN" + value: "https://a83904fb2f394a768557eaea8ec39db4:7d8a02b629994bf884534917dfb08511@sentry.svc.rapyuta.io/18" + - name: "DOCKER_STDOUT" + value: "true" + - name: "DISABLE_MULTICAST" + value: "false" + - name: "ROS_DOMAIN_ID" + value: "10" + - name: "USE_PARAMS_IO" + value: "true" diff --git a/examples/kiba-robots/secrets.staging.yaml b/examples/kiba-robots/secrets.staging.yaml new file mode 100644 index 00000000..d3c4ebb8 --- /dev/null +++ b/examples/kiba-robots/secrets.staging.yaml @@ -0,0 +1,15 @@ +global: + dbHost: ENC[AES256_GCM,data:+7cvT+9J09CigDcvxm837Hfz0+WFwBzA1TcQvI75SFs5PmIs7A==,iv:km6KEq0AYSHBxgL1a1WlIa8gvguU4dEaB9oogaVTU4o=,tag:G+fxpM7FB7TQYsWeekrJ0g==,type:str] + dbUser: ENC[AES256_GCM,data:4j9zoiRsJhMJ0MWW0XEQCHt4,iv:Q4NboLJnT4oA6Z6uxp7ans1V4sjgYl/quemOXNEulco=,tag:AxLp7Xj75MN+JU0+x5YfSA==,type:str] + dbPass: ENC[AES256_GCM,data:YyfHySZ4bcAvR3qnIAa2BMLMFQuUkg==,iv:FJJTQ+TQcjnzo7iNXg/a7P5do8b8ButaaPALvf4Q+FQ=,tag:OZIrjBt0CGAhVbWpzvfAYg==,type:str] +sops: + kms: [] + gcp_kms: + - resource_id: projects/rapyuta-io/locations/global/keyRings/sops_key/cryptoKeys/sops-key + created_at: '2022-04-12T19:53:13Z' + enc: CiQAJwkHSPloHZMqFLMdQRRActkGdVWHzMZOwLw/YrTDyS/ZE3gSSQD+fVSX3pLI/l0n8INc9C6nY+bGWBIxbuXWELl3d5jYVurpxGqXeikLhKsJZo6C2v1gknKv734uZgcPXVG7Y5dLDMGaMYeBpzA= + lastmodified: '2022-04-12T19:53:14Z' + mac: ENC[AES256_GCM,data:t/lH23kZ3lZymEpuiTwVxICJacw5jgRSXJ1REuCxcoN2GT9Dz76CsS5g9RYJpawx7LtdTU8SqRN17eZ/7f4gctuqvVWFOqu/vh/ui1SDYonkclh4JVxU4wTGAkXKnDJG5qGF/1z0d2zHr/I4CfWHmJFBWbvD1MxMCpJaU0T11n4=,iv:q62pfYu3Ku9EAKwvR0k+69A9goJ4KOz4FBO6DqRKN0U=,tag:ICqGK8A6+bHWnGam4ECIsg==,type:str] + pgp: [] + unencrypted_suffix: _unencrypted + version: 3.0.5 diff --git a/examples/kiba-robots/values.yaml b/examples/kiba-robots/values.yaml new file mode 100644 index 00000000..90983848 --- /dev/null +++ b/examples/kiba-robots/values.yaml @@ -0,0 +1,3 @@ +testlabel: 1.14.0-rc4 +routePrefix: rc4 +# this is a simple yaml file which can be used to abstract out frequently changed values from your resource yamls. 
diff --git a/examples/manifests b/examples/manifests
new file mode 120000
index 00000000..91067c47
--- /dev/null
+++ b/examples/manifests
@@ -0,0 +1 @@
+../riocli/apply/manifests/
\ No newline at end of file
diff --git a/examples/marketplace_dependencies_format.sample.yaml b/examples/marketplace/dependencies.yaml
similarity index 100%
rename from examples/marketplace_dependencies_format.sample.yaml
rename to examples/marketplace/dependencies.yaml
diff --git a/jsonschema/build-schema.yaml b/jsonschema/build-schema.yaml
new file mode 100644
index 00000000..5662e403
--- /dev/null
+++ b/jsonschema/build-schema.yaml
@@ -0,0 +1,214 @@
+---
+$schema: http://json-schema.org/draft-07/schema#
+title: Build
+$ref: "#/definitions/build"
+definitions:
+  build:
+    type: object
+    properties:
+      apiVersion:
+        const: apiextensions.rapyuta.io/v1
+      kind:
+        const: Build
+      metadata:
+        "$ref": "#/definitions/metadata"
+      spec:
+        "$ref": "#/definitions/buildSpec"
+      status:
+        "$ref": "#/definitions/buildStatus"
+    required:
+      - apiVersion
+      - kind
+      - metadata
+      - spec
+  metadata:
+    type: object
+    properties:
+      name:
+        type: string
+      guid:
+        "$ref": "#/definitions/buildGUID"
+      creator:
+        "$ref": "#/definitions/uuid"
+      project:
+        "$ref": "#/definitions/projectGUID"
+      labels:
+        "$ref": "#/definitions/stringMap"
+        uniqueItems: true
+    required:
+      - name
+  projectGUID:
+    type: string
+    pattern: "^project-[a-z]{24}$"
+  buildGUID:
+    type: string
+    pattern: "^build-[a-z]{24}$"
+  secretGUID:
+    type: string
+    pattern: "^secret-[a-z]{24}$"
+  stringMap:
+    type: object
+    additionalProperties:
+      type: string
+  uuid:
+    type: string
+    pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$"
+  buildSpec:
+    type: object
+    properties:
+      repository:
+        type: object
+        properties:
+          url:
+            type: string
+          ref:
+            type: string
+          contextDir:
+            type: string
+          gitSecret:
+            "$ref": "#/definitions/secretGUID"
+      buildMethod:
+        "$ref": "#/definitions/buildRecipe"
+    required:
+      - buildMethod
+      - repository
+      - image
+    dependencies:
+      buildMethod:
+        oneOf:
+          - properties:
+              buildMethod:
+                enum:
+                  - Docker
+              docker:
+                type: object
+                "$ref": "#/definitions/docker"
+              image:
+                "$ref": "#/definitions/imageArtifact"
+          - properties:
+              buildMethod:
+                enum:
+                  - Source
+              catkin:
+                type: object
+                "$ref": "#/definitions/catkin"
+              image:
+                "$ref": "#/definitions/imageArtifact"
+  docker:
+    type: object
+    properties:
+      architecture:
+        "$ref": "#/definitions/architecture"
+      dockerfile:
+        type: string
+        default: Dockerfile
+      pullSecret:
+        "$ref": "#/definitions/secretGUID"
+      isRos:
+        type: boolean
+      rosDistro:
+        "$ref": "#/definitions/rosDistro"
+      simulation:
+        type: boolean
+        default: false
+    # draft-07 spells conditional requirements "dependencies";
+    # "dependentRequired" only exists from draft 2019-09 onwards.
+    dependencies:
+      isRos:
+        - rosDistro
+        - simulation
+  catkin:
+    type: object
+    properties:
+      architecture:
+        "$ref": "#/definitions/architecture"
+      isRos:
+        type: boolean
+        const: true
+        default: true
+      rosDistro:
+        "$ref": "#/definitions/rosDistro"
+      simulation:
+        type: boolean
+        default: false
+      catkinParameters:
+        "$ref": "#/definitions/catkinParameters"
+    required:
+      - isRos
+      - rosDistro
+      - simulation
+      - architecture
+  imageArtifact:
+    type: object
+    properties:
+      registry:
+        type: string
+      pushSecret:
+        type: string
+      tagName:
+        type: string
+      triggerName:
+        type: string
+      webhookURL:
+        type: string
+  buildStatus:
+    type: object
+    properties:
+      status:
+        "$ref": "#/definitions/buildStatusType"
+      generation:
+        type: integer
+  buildStatusType:
+    enum:
+      - Complete
+      - BuildFailed
+      - BuildInProgress
+  gitInfo:
type: object + properties: + repository: + type: string + gitRef: + type: string + secret: + "$ref": "#/definitions/secretGUID" + required: + - repository + catkinParameters: + type: array + items: + "$ref": "#/definitions/catkinParameter" + catkinParameter: + type: object + properties: + rosPackages: + type: string + cmakeArguments: + type: string + makeArguments: + type: string + catkinMakeArguments: + type: string + blacklist: + type: string + buildRecipe: + enum: + - Docker + - Source + architecture: + enum: + - amd64 + - arm32v7 + - arm64v8 + rosDistro: + enum: + - melodic + - kinetic + - noetic diff --git a/jsonschema/deployment-schema.yaml b/jsonschema/deployment-schema.yaml new file mode 100644 index 00000000..223b7ff3 --- /dev/null +++ b/jsonschema/deployment-schema.yaml @@ -0,0 +1,275 @@ +--- +$schema: http://json-schema.org/draft-07/schema# +title: Deployment +$ref: "#/definitions/deployment" +definitions: + deployment: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + default: apiextensions.rapyuta.io/v1 + kind: + const: Deployment + default: Deployment + metadata: + "$ref": "#/definitions/metadata" + spec: + "$ref": "#/definitions/componentSpec" + required: + - apiVersion + - kind + - metadata + - spec + + metadata: + type: object + properties: + name: + type: string + depends: + "$ref": "#/definitions/packageDepends" + labels: + "$ref": "#/definitions/stringMap" + uniqueItems: true + guid: + "$ref": "#/definitions/packageGUID" + creator: + "$ref": "#/definitions/uuid" + project: + "$ref": "#/definitions/projectGUID" + + required: + - name + - depends + + envArgsSpec: + type: object + properties: + name: + type: string + value: + type: string + + deviceNetworkAttachSpec: + properties: + depends: + "$ref": "#/definitions/networkDepends" + interface: + type: string + topics: + type: array + items: + type: string + + cloudNetworkAttachSpec: + properties: + depends: + "$ref": "#/definitions/networkDepends" + topics: + type: array + items: + type: string + + cloudVolumeAttachSpec: + type: object + properties: + execName: + type: string + mountPath: + type: string + subPath: + type: string + depends: + "$ref": "#/definitions/diskDepends" + + deviceVolumeAttachSpec: + type: object + properties: + execName: + type: string + mountPath: + type: string + subPath: + type: string + + endpointSpec: + properties: + name: + type: string + depends: + properties: + kind: + const: staticroute + default: staticroute + nameOrGUID: + type: string + + componentSpec: + properties: + runtime: + type: string + enum: + - device + - cloud + default: cloud + depends: + type: array + items: + "$ref": "#/definitions/deploymentDepends" + dependencies: + runtime: + oneOf: + - properties: + runtime: + type: string + enum: + - device + depends: + type: object + "$ref": "#/definitions/deviceDepends" + restart: + type: string + enum: + - always + - onfailure + - never + default: always + envArgs: + type: array + items: + "$ref": "#/definitions/envArgsSpec" + + volumes: + type: array + items: + "$ref": "#/definitions/deviceVolumeAttachSpec" + + rosNetworks: + type: array + items: + "$ref": "#/definitions/deviceNetworkAttachSpec" + + - properties: + runtime: + type: string + enum: + - cloud + + envArgs: + type: array + items: + "$ref": "#/definitions/envArgsSpec" + + volumes: + type: array + items: + "$ref": "#/definitions/cloudVolumeAttachSpec" + + staticRoutes: + type: array + items: + "$ref": "#/definitions/endpointSpec" + + rosNetworks: + type: array + items: + "$ref": 
"#/definitions/cloudNetworkAttachSpec" + + stringMap: + type: object + additionalProperties: + type: string + organizationGUID: + type: string + pattern: "^org-[a-z]{24}$" + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + secretGUID: + type: string + pattern: "^secret-[a-z]{24}$" + diskGUID: + type: string + pattern: "^disk-[a-z]{24}$" + buildGUID: + type: string + pattern: "^build-[a-z]{24}$" + packageGUID: + type: string + pattern: "^pkg-[a-z]{24}$" + deploymentGUID: + type: string + pattern: "^dep-[a-z]{24}$" + networkGUID: + type: string + pattern: "^network-[a-z]{24}$" + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" + + packageDepends: + properties: + kind: + const: package + default: package + nameOrGUID: + type: string + version: + type: string + guid: + type: string + deviceDepends: + properties: + kind: + const: device + default: device + nameOrGUID: + type: string + guid: + type: string + networkDepends: + properties: + kind: + const: network + default: network + nameOrGUID: + type: string + guid: + type: string + secretDepends: + properties: + kind: + const: secret + default: secret + nameOrGUID: + type: string + guid: + type: string + diskDepends: + properties: + kind: + const: disk + default: disk + nameOrGUID: + type: string + guid: + type: string + staticRouteDepends: + properties: + kind: + const: static-route + default: static-route + nameOrGUID: + type: string + guid: + type: string + deploymentDepends: + properties: + kind: + const: deployment + default: deployment + nameOrGUID: + type: string + guid: + type: string diff --git a/jsonschema/device-schema.yaml b/jsonschema/device-schema.yaml new file mode 100644 index 00000000..44799479 --- /dev/null +++ b/jsonschema/device-schema.yaml @@ -0,0 +1,104 @@ +--- +$schema: http://json-schema.org/draft-07/schema# +title: Device +$ref: "#/definitions/device" +definitions: + metadata: + type: object + properties: + name: + type: string + guid: + $ref: "#/definitions/uuid" + creator: + $ref: "#/definitions/uuid" + project: + $ref: "#/definitions/projectGUID" + labels: + $ref: "#/definitions/stringMap" + uniqueItems: true + required: + - name + + device: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + kind: + const: Device + metadata: + $ref: "#/definitions/metadata" + spec: + $ref: "#/definitions/deviceSpec" + required: + - apiVersion + - kind + - metadata + - spec + + deviceSpec: + type: object + properties: + rosDistro: + type: string + enum: + - kinetic + - melodic + - noetic + default: melodic + python: + type: string + enum: + - "2" + - "3" + default: "3" + dependencies: + docker: + oneOf: + - properties: + docker: + type: object + properties: + enabled: + enum: + - False + - properties: + docker: + type: object + properties: + enabled: + enum: + - True + rosbagMountPath: + type: string + default: /opt/rapyuta/volumes/rosbag + preinstalled: + oneOf: + - properties: + preinstalled: + type: object + properties: + enabled: + enum: + - False + - properties: + preinstalled: + type: object + properties: + enabled: + enum: + - True + catkinWorkspace: + type: string + + stringMap: + type: object + additionalProperties: + type: string + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" diff --git a/jsonschema/disk-schema.yaml b/jsonschema/disk-schema.yaml new file mode 100644 index 00000000..7134dcb4 
--- /dev/null +++ b/jsonschema/disk-schema.yaml @@ -0,0 +1,71 @@ +--- +$schema: "http://json-schema.org/draft-07/schema#" +title: Disk +$ref: "#/definitions/disk" +definitions: + disk: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + default: apiextensions.rapyuta.io/v1 + kind: + const: Disk + default: Disk + metadata: + "$ref": "#/definitions/metadata" + spec: + "$ref": "#/definitions/diskSpec" + required: + - apiVersion + - kind + - metadata + - spec + metadata: + type: object + properties: + name: + type: string + guid: + "$ref": "#/definitions/diskGUID" + creator: + "$ref": "#/definitions/uuid" + project: + "$ref": "#/definitions/projectGUID" + labels: + "$ref": "#/definitions/stringMap" + required: + - name + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + diskGUID: + type: string + pattern: "^disk-[a-z]{24}$" + packageGUID: + type: string + pattern: "^pkg-[a-z]{24}$" + stringMap: + type: object + additionalProperties: + type: string + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" + diskSpec: + type: object + properties: + runtime: + default: cloud + const: cloud + capacity: + type: number + enum: + - 4 + - 8 + - 16 + - 32 + - 64 + - 128 + - 256 + - 512 diff --git a/jsonschema/network-schema.yaml b/jsonschema/network-schema.yaml new file mode 100644 index 00000000..02393437 --- /dev/null +++ b/jsonschema/network-schema.yaml @@ -0,0 +1,115 @@ +--- +$schema: http://json-schema.org/draft-07/schema# +title: Network +$ref: "#/definitions/network" +definitions: + network: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + kind: + const: Network + metadata: + "$ref": "#/definitions/metadata" + spec: + "$ref": "#/definitions/networkSpec" + required: + - apiVersion + - kind + - metadata + - spec + networkSpec: + type: object + properties: + type: + "$ref": "#/definitions/networkType" + rosDistro: + "$ref": "#/definitions/rosDistro" + runtime: + "$ref": "#/definitions/runtime" + required: + - type + - rosDistro + - runtime + dependencies: + runtime: + oneOf: + - properties: + runtime: + enum: + - cloud + resourceLimits: + "$ref": "#/definitions/resourceLimits" + required: + - runtime + - resourceLimits + - properties: + runtime: + enum: + - device + deviceGUID: + "$ref": "#/definitions/uuid" + networkInterface: + type: string + restartPolicy: + "$ref": "#/definitions/restartPolicy" + default: Always + + required: + - deviceGUID + - networkInterface + + resourceLimits: + enum: + - xSmall + - small + - medium + - large + rosDistro: + enum: + - melodic + - kinetic + - noetic + restartPolicy: + enum: + - always + - never + - onFailure + runtime: + enum: + - cloud + - device + networkType: + enum: + - routed + - native + metadata: + type: object + properties: + name: + type: string + guid: + "$ref": "#/definitions/networkGUID" + creator: + "$ref": "#/definitions/uuid" + project: + "$ref": "#/definitions/projectGUID" + labels: + "$ref": "#/definitions/stringMap" + uniqueItems: true + required: + - name + networkGUID: + type: string + pattern: "^network-[a-z]{24}$" + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + stringMap: + type: object + additionalProperties: + type: string + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" diff --git a/jsonschema/package-schema.yaml b/jsonschema/package-schema.yaml new file mode 100644 index 00000000..93ffa93c --- /dev/null +++ 
b/jsonschema/package-schema.yaml @@ -0,0 +1,579 @@ +--- +$schema: http://json-schema.org/draft-07/schema# +title: Package +$ref: "#/definitions/package" +definitions: + package: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + default: apiextensions.rapyuta.io/v1 + kind: + const: Package + default: Package + metadata: + "$ref": "#/definitions/metadata" + spec: + "$ref": "#/definitions/componentSpec" + required: + - apiVersion + - kind + - metadata + - spec + + metadata: + type: object + properties: + name: + type: string + version: + type: string + tag: + type: string + description: + type: string + guid: + "$ref": "#/definitions/packageGUID" + creator: + "$ref": "#/definitions/uuid" + project: + "$ref": "#/definitions/projectGUID" + labels: + "$ref": "#/definitions/stringMap" + uniqueItems: true + required: + - name + - version + + componentSpec: + type: object + properties: + runtime: + type: string + enum: + - device + - cloud + default: cloud + + # TODO: validate + ros: + type: object + "$ref": "#/definitions/rosComponentSpec" + dependencies: + runtime: + oneOf: + - properties: + runtime: + enum: + - device + device: + type: object + "$ref": "#/definitions/deviceComponentInfoSpec" + executables: + type: array + items: + "$ref": "#/definitions/deviceExecutableSpec" + + environmentArgs: + type: array + items: + "$ref": "#/definitions/environmentSpec" + + - properties: + runtime: + enum: + - cloud + cloud: + type: object + "$ref": "#/definitions/cloudComponentInfoSpec" + + executables: + type: array + items: + "$ref": "#/definitions/cloudExecutableSpec" + + environmentVars: + type: array + items: + "$ref": "#/definitions/environmentSpec" + + endpoints: + type: array + items: + "$ref": "#/definitions/endpointSpec" + + deviceExecutableSpec: + type: object + properties: + name: + type: string + type: + type: string + default: docker + enum: + - docker + - build + - preInstalled + command: + type: string + runAsBash: + type: boolean + default: True + required: + - type + dependencies: + type: + oneOf: + - properties: + type: + enum: + - docker + docker: + type: object + properties: + image: + type: string + pullSecret: + "$ref": "#/definitions/secretDepends" + required: + - image + - properties: + type: + enum: + - build + build: + type: object + properties: + depends: + "$ref": "#/definitions/buildDepends" + required: + - depends + - properties: + type: + enum: + - preInstalled + + cloudExecutableSpec: + type: object + properties: + name: + type: string + type: + type: string + default: docker + enum: + - docker + - build + command: + type: string + runAsBash: + type: boolean + default: True + simulation: + type: boolean + default: False + limits: + type: object + properties: + cpu: + type: number + min: 0.1 + max: 8 + memory: + type: number + min: 256 + max: 32678 + required: + - type + dependencies: + type: + oneOf: + - properties: + type: + enum: + - docker + docker: + type: object + properties: + image: + type: string + pullSecret: + type: object + properties: + depends: + "$ref": "#/definitions/secretDepends" + required: + - depends + required: + - image + - properties: + type: + enum: + - build + build: + type: object + properties: + depends: + "$ref": "#/definitions/secretDepends" + required: + - depends + - properties: + type: + enum: + - preInstalled + + portNumber: + type: integer + min: 1 + max: 65531 + + endpointSpec: + type: object + properties: + name: + type: string + type: + type: string + default: external-http + enum: + - external-http + - 
external-https + - external-tls-tcp + - internal-tcp + - internal-udp + - internal-tcp-range + - internal-udp-range + required: + - name + - type + dependencies: + type: + oneOf: + - properties: + type: + enum: + - external-http + port: + "$ref": "#/definitions/portNumber" + default: 80 + targetPort: + "$ref": "#/definitions/portNumber" + required: + - port + - targetPort + - properties: + type: + enum: + - external-https + port: + "$ref": "#/definitions/portNumber" + default: 443 + targetPort: + "$ref": "#/definitions/portNumber" + required: + - port + - targetPort + - properties: + type: + enum: + - external-tls-tcp + port: + "$ref": "#/definitions/portNumber" + default: 443 + targetPort: + "$ref": "#/definitions/portNumber" + required: + - port + - targetPort + - properties: + type: + enum: + - internal-tcp + port: + "$ref": "#/definitions/portNumber" + default: 80 + targetPort: + "$ref": "#/definitions/portNumber" + required: + - port + - targetPort + + - properties: + type: + enum: + - internal-udp + + port: + "$ref": "#/definitions/portNumber" + default: 80 + targetPort: + "$ref": "#/definitions/portNumber" + required: + - port + - targetPort + - properties: + type: + enum: + - internal-tcp-range + portRange: + type: string + default: 22,80, 1024-1030 + required: + - portRange + + - properties: + type: + enum: + - internal-udp-range + + portRange: + type: string + default: 53,1024-1025 + required: + - portRange + + environmentSpec: + type: object + properties: + name: + type: string + description: + type: string + default: + type: string + exposed: + type: boolean + default: False + required: + - name + dependencies: + exposed: + oneOf: + - properties: + exposed: + enum: + - True + exposedName: + type: string + required: + - exposedName + - properties: + exposed: + enum: + - False + + cloudComponentInfoSpec: + type: object + properties: + replicas: + type: number + default: 1 + + deviceComponentInfoSpec: + type: object + properties: + arch: + type: string + enum: + - arm32v7 + - arm64v8 + - amd64 + default: amd64 + restart: + type: string + default: always + enum: + - always + - never + - onfailure + + rosComponentSpec: + type: object + properties: + enabled: + type: boolean + default: False + dependencies: + enabled: + oneOf: + - properties: + enabled: + enum: + - False + - properties: + enabled: + type: boolean + enum: + - True + version: + type: string + enum: + - kinetic + - melodic + - noetic + default: melodic + inboundScopedTargeted: + type: boolean + default: false + rosEndpoints: + type: array + items: + "$ref": "#/definitions/rosEndpointSpec" + + rosEndpointSpec: + type: object + properties: + type: + type: string + default: topic + enum: + - topic + - service + - action + name: + type: string + compression: + type: boolean + default: false + scoped: + type: boolean + default: false + targeted: + type: boolean + default: false + required: + - type + - name + dependencies: + type: + oneOf: + - properties: + type: + enum: + - topic + qos: + type: string + enum: + - low + - medium + - hi + - max + default: low + - properties: + type: + enum: + - service + timeout: + type: number + default: 120 + min: 0 + - properties: + type: + enum: + - action + + stringMap: + type: object + additionalProperties: + type: string + organizationGUID: + type: string + pattern: "^org-[a-z]{24}$" + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + secretGUID: + type: string + pattern: "^secret-[a-z]{24}$" + diskGUID: + type: string + pattern: "^disk-[a-z]{24}$" + buildGUID: + type: 
string + pattern: "^build-[a-z]{24}$" + packageGUID: + type: string + pattern: "^pkg-[a-z]{24}$" + deploymentGUID: + type: string + pattern: "^dep-[a-z]{24}$" + networkGUID: + type: string + pattern: "^network-[a-z]{24}$" + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" + + buildDepends: + properties: + kind: + const: build + default: build + nameOrGUID: + type: string + guid: + type: string + packageDepends: + properties: + kind: + const: package + default: package + nameOrGUID: + type: string + guid: + type: string + deviceDepends: + properties: + kind: + const: device + default: device + nameOrGUID: + type: string + guid: + type: string + networkDepends: + properties: + kind: + const: network + default: network + nameOrGUID: + type: string + guid: + type: string + secretDepends: + properties: + kind: + const: secret + default: secret + nameOrGUID: + type: string + guid: + type: string + diskDepends: + properties: + kind: + const: disk + default: disk + nameOrGUID: + type: string + guid: + type: string + staticRouteDepends: + properties: + kind: + const: static-route + default: static-route + nameOrGUID: + type: string + guid: + type: string + deploymentDepends: + properties: + kind: + const: deployment + default: deployment + nameOrGUID: + type: string + guid: + type: string diff --git a/jsonschema/primitives.yaml b/jsonschema/primitives.yaml new file mode 100644 index 00000000..213cbecc --- /dev/null +++ b/jsonschema/primitives.yaml @@ -0,0 +1,90 @@ +title: Primitives +definitions: + stringMap: + type: object + additionalProperties: + type: string + organizationGUID: + type: string + pattern: "^org-[a-z]{24}$" + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + secretGUID: + type: string + pattern: "^secret-[a-z]{24}$" + diskGUID: + type: string + pattern: "^disk-[a-z]{24}$" + buildGUID: + type: string + pattern: "^build-[a-z]{24}$" + packageGUID: + type: string + pattern: "^pkg-[a-z]{24}$" + deploymentGUID: + type: string + pattern: "^dep-[a-z]{24}$" + networkGUID: + type: string + pattern: "^network-[a-z]{24}$" + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" + + buildDepends: + properties: + kind: + const: build + default: build + nameOrGUID: + type: string + packageDepends: + properties: + kind: + const: package + default: package + nameOrGUID: + type: string + deviceDepends: + properties: + kind: + const: device + default: device + nameOrGUID: + type: string + networkDepends: + properties: + kind: + const: network + default: network + nameOrGUID: + type: string + secretDepends: + properties: + kind: + const: secret + default: secret + nameOrGUID: + type: string + diskDepends: + properties: + kind: + const: disk + default: disk + nameOrGUID: + type: string + staticRouteDepends: + properties: + kind: + const: static-route + default: static-route + nameOrGUID: + type: string + deploymentDepends: + properties: + kind: + const: deployment + default: deployment + nameOrGUID: + type: string diff --git a/jsonschema/project-schema.yaml b/jsonschema/project-schema.yaml new file mode 100644 index 00000000..d210d77f --- /dev/null +++ b/jsonschema/project-schema.yaml @@ -0,0 +1,87 @@ +--- +$schema: http://json-schema.org/draft-07/schema# +title: Project +description: A namespace for resources on Rapyuta +$ref: "#/definitions/project" +definitions: + project: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + default: 
apiextensions.rapyuta.io/v1 + kind: + const: Project + metadata: + "$ref": "#/definitions/metadata" + spec: + "$ref": "#/definitions/projectSpec" + status: + "$ref": "#/definitions/projectStatus" + required: + - apiVersion + - kind + - metadata + - spec + metadata: + type: object + properties: + name: + type: string + guid: + "$ref": "#/definitions/projectGUID" + creator: + "$ref": "#/definitions/uuid" + project: + "$ref": "#/definitions/projectGUID" + labels: + "$ref": "#/definitions/stringMap" + uniqueItems: true + required: + - name + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + stringMap: + type: object + additionalProperties: + type: string + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" + projectSpec: + type: object + properties: + users: + type: array + items: + type: string + required: + - users + user: + type: object + properties: + email_id: + type: string + first_name: + type: string + last_name: + type: string + guid: + "$ref": "#/definitions/uuid" + state: + type: string + required: + - guid + - state + - email_id + projectStatus: + type: object + properties: + users: + type: array + items: + "$ref": "#/definitions/user" + uniqueItems: true + required: + - users diff --git a/jsonschema/secret-schema.yaml b/jsonschema/secret-schema.yaml new file mode 100644 index 00000000..1b05614f --- /dev/null +++ b/jsonschema/secret-schema.yaml @@ -0,0 +1,145 @@ +--- +$schema: http://json-schema.org/draft-07/schema# +title: Secret +$ref: "#/definitions/secret" +definitions: + secret: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + default: apiextensions.rapyuta.io/v1 + kind: + const: Secret + metadata: + "$ref": "#/definitions/metadata" + spec: + "$ref": "#/definitions/secretSpec" + required: + - apiVersion + - kind + - metadata + - spec + metadata: + type: object + properties: + name: + type: string + guid: + "$ref": "#/definitions/secretGUID" + creator: + "$ref": "#/definitions/uuid" + project: + "$ref": "#/definitions/projectGUID" + labels: + "$ref": "#/definitions/stringMap" + uniqueItems: true + required: + - name + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + secretGUID: + type: string + pattern: "^secret-[a-z]{24}$" + stringMap: + type: object + additionalProperties: + type: string + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" + secretSpec: + type: object + properties: + type: + type: string + default: Docker + enum: + - Docker + - Git + required: + - type + dependencies: + type: + oneOf: + - properties: + type: + enum: + - Docker + docker: + type: object + "$ref": "#/definitions/docker" + required: + - properties: + type: + enum: + - Git + git: + type: object + "$ref": "#/definitions/git" + + git: + type: object + properties: + authMethod: + type: string + default: HTTP/S Basic Auth + enum: + - HTTP/S Basic Auth + - HTTP/S Token Auth + - SSH Auth + + dependencies: + authMethod: + oneOf: + - properties: + authMethod: + type: string + enum: + - HTTP/S Basic Auth + username: + type: string + password: + type: string + caCert: + type: string + required: + - username + - password + - properties: + authMethod: + type: string + enum: + - HTTP/S Token Auth + token: + type: string + caCert: + type: string + required: + - token + - properties: + authMethod: + type: string + enum: + - SSH Auth + privateKey: + type: string + required: + - privateKey + docker: + type: object + properties: + 
registry: + type: string + default: "https://index.docker.io/v1/" + username: + type: string + password: + type: string + email: + type: string + required: + - username + - password + - email diff --git a/jsonschema/static_route-schema.yaml b/jsonschema/static_route-schema.yaml new file mode 100644 index 00000000..731c2c47 --- /dev/null +++ b/jsonschema/static_route-schema.yaml @@ -0,0 +1,46 @@ +--- +$schema: http://json-schema.org/draft-07/schema# +title: Static Route +description: A named route for the Deployment endpoint +$ref: "#/definitions/staticRoute" +definitions: + staticRoute: + type: object + properties: + apiVersion: + const: apiextensions.rapyuta.io/v1 + default: apiextensions.rapyuta.io/v1 + kind: + const: StaticRoute + metadata: + "$ref": "#/definitions/metadata" + required: + - apiVersion + - kind + - metadata + metadata: + type: object + properties: + name: + type: string + guid: + "$ref": "#/definitions/projectGUID" + creator: + "$ref": "#/definitions/uuid" + project: + "$ref": "#/definitions/projectGUID" + labels: + "$ref": "#/definitions/stringMap" + uniqueItems: true + required: + - name + projectGUID: + type: string + pattern: "^project-[a-z]{24}$" + stringMap: + type: object + additionalProperties: + type: string + uuid: + type: string + pattern: "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$" diff --git a/riocli/__init__.py b/riocli/__init__.py index 931aeb73..f2d67e5d 100644 --- a/riocli/__init__.py +++ b/riocli/__init__.py @@ -11,3 +11,9 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. + +try: + import pretty_traceback + pretty_traceback.install() +except ImportError: + pass # no need to fail because of missing dev dependency \ No newline at end of file diff --git a/riocli/apply/__init__.py b/riocli/apply/__init__.py new file mode 100644 index 00000000..2bad685b --- /dev/null +++ b/riocli/apply/__init__.py @@ -0,0 +1,77 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import click +from click_help_colors import HelpColorsCommand +from typing import Iterable + +from riocli.apply.parse import Applier +from riocli.apply.util import process_files_values_secrets +from riocli.apply.explain import explain + +@click.command( + 'apply', + cls=HelpColorsCommand, + help_headers_color='yellow', + help_options_color='green', +) +@click.option('--dryrun', '-d', is_flag=True, default=False, help='dry run the yaml files without applying any change') +@click.option('--values', '-v', help="path to values yaml file. key/values specified in the values file can be used as variables in template yamls") +@click.option('--secrets', '-s', help="secret files are sops encoded value files. rio-cli expects sops to be authorized for decoding files on this computer") +@click.option('--workers', '-w', help="number of parallel workers while running apply command. 
Defaults to 6.", type=int)
+@click.argument('files', nargs=-1)
+def apply(values: str, secrets: str, files: Iterable[str], dryrun: bool = False, workers: int = 6) -> None:
+    """
+    Apply resource manifests
+    """
+    glob_files, abs_values, abs_secrets = process_files_values_secrets(files, values, secrets)
+
+    if len(glob_files) == 0:
+        click.secho('no files specified', fg='red')
+        raise SystemExit(1)
+
+    click.secho("----- Files Processed -----", fg="yellow")
+    for file in glob_files:
+        click.secho(file, fg="yellow")
+
+    rc = Applier(glob_files, abs_values, abs_secrets)
+    rc.parse_dependencies()
+    rc.apply(dryrun=dryrun, workers=workers)
+
+
+@click.command(
+    'delete',
+    cls=HelpColorsCommand,
+    help_headers_color='yellow',
+    help_options_color='green',
+)
+@click.option('--dryrun', '-d', is_flag=True, default=False, help='dry run the yaml files without applying any change')
+@click.option('--values', '-v', help="path to values yaml file. key/values specified in the values file can be used as variables in template yamls")
+@click.option('--secrets', '-s', help="secret files are sops encoded value files. rio-cli expects sops to be authorized for decoding files on this computer")
+@click.argument('files', nargs=-1)
+def delete(values: str, secrets: str, files: Iterable[str], dryrun: bool = False) -> None:
+    """
+    Delete resource manifests
+    """
+    glob_files, abs_values, abs_secrets = process_files_values_secrets(files, values, secrets)
+
+    if len(glob_files) == 0:
+        click.secho('no files specified', fg='red')
+        raise SystemExit(1)
+
+    rc = Applier(glob_files, abs_values, abs_secrets)
+    rc.parse_dependencies(check_missing=False, delete=True)
+    rc.delete(dryrun=dryrun)
diff --git a/riocli/apply/explain.py b/riocli/apply/explain.py
new file mode 100644
index 00000000..f36ab566
--- /dev/null
+++ b/riocli/apply/explain.py
@@ -0,0 +1,43 @@
+# Copyright 2022 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
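The `apply` and `delete` commands above share the same files/values/secrets plumbing, differing only in whether missing dependencies are checked and in the traversal direction. A quick way to exercise them is Click's test runner; this sketch assumes a configured rio-cli environment and a hypothetical manifest path:

```python
# Sketch: invoke the apply command defined above; --dryrun avoids real changes.
from click.testing import CliRunner

from riocli.apply import apply

runner = CliRunner()
result = runner.invoke(apply, ['--dryrun', '-v', 'values.yaml', 'manifests/deployment.yaml'])
print(result.exit_code, result.output)
```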
+from pathlib import Path + +import click +from click_help_colors import HelpColorsCommand + + +@click.command( + 'explain', + cls=HelpColorsCommand, + help_headers_color='yellow', + help_options_color='green', + help='Generates a sample resource manifest for the given type' +) +@click.option('--templates', help='Alternate root for templates', + default=None) +@click.argument('resource') +def explain(resource: str, templates: str = None) -> None: + if templates: + path = Path(templates) + else: + path = Path(__file__).parent.joinpath('manifests') + + for each in path.glob('**/*'): + if resource in each.name: + with open(each) as f: + click.secho(f.readlines()) + raise SystemExit(0) + + click.secho("[Err] Resource \"{}\" not found".format(resource), fg='red') + raise SystemExit(1) diff --git a/riocli/apply/manifests/04-build-catkin.yaml b/riocli/apply/manifests/04-build-catkin.yaml new file mode 100644 index 00000000..54ecd239 --- /dev/null +++ b/riocli/apply/manifests/04-build-catkin.yaml @@ -0,0 +1,19 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Build" +metadata: + name: "catkin-build" +spec: + recipe: "Source" + architecture: "amd64" + git: + repository: "https://github.com/rapyuta-robotics/io_tutorials" + # gitRef: "master" + # secret: "secret-guid" + # contextDir: "talker/talker" + rosDistro: "melodic" + # catkinParameters: + # - rosPackages: + # cmakeArguments: + # makeArguments: + # catkinMakeArguments: + # blacklist: diff --git a/riocli/apply/manifests/05-build-docker.yaml b/riocli/apply/manifests/05-build-docker.yaml new file mode 100644 index 00000000..9b14862f --- /dev/null +++ b/riocli/apply/manifests/05-build-docker.yaml @@ -0,0 +1,19 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Build" +metadata: + name: "docker-build" +spec: + recipe: "Docker" + architecture: "amd64" + git: + repository: "https://github.com/rapyuta-robotics/io_tutorial.git" + # gitRef: "master" + # secret: "secret-guid" + # dockerfile: "Dockerfile" + # pullSecret: "secret-guid" + # contextDir: "talker/talker" + # pushRepository: "rrdockerhub/test" + # pushSecret: "secret-guid" + # tagName: "tag-name" + # triggerName: "trigger-name" + # webhookURL: "http://webhook.url" diff --git a/riocli/apply/manifests/06-build-docker-ros.yaml b/riocli/apply/manifests/06-build-docker-ros.yaml new file mode 100644 index 00000000..ffb88c3d --- /dev/null +++ b/riocli/apply/manifests/06-build-docker-ros.yaml @@ -0,0 +1,13 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Build" +metadata: + name: "docker-ros-build" +spec: + recipe: "Docker" + architecture: "amd64" + git: + repository: "https://github.com/rapyuta-robotics/io_tutorial.git" + # gitRef: "master" + # secret: "secret-guid" + # contextDir: "talker/talker" + rosDistro: "melodic" diff --git a/riocli/apply/manifests/07-device-preinstalled.yaml b/riocli/apply/manifests/07-device-preinstalled.yaml new file mode 100644 index 00000000..42b214bd --- /dev/null +++ b/riocli/apply/manifests/07-device-preinstalled.yaml @@ -0,0 +1,10 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Device" +metadata: + name: "apply-device-preinstalled" + project: "project-guid" +spec: + python: "3" + rosDistro: "melodic" + preinstalled: + enabled: True diff --git a/riocli/apply/manifests/08-device-docker.yaml b/riocli/apply/manifests/08-device-docker.yaml new file mode 100644 index 00000000..9f1d3f2d --- /dev/null +++ b/riocli/apply/manifests/08-device-docker.yaml @@ -0,0 +1,10 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Device" +metadata: + name: 
"apply-device-docker" + project: "project-guid" +spec: + python: "3" + rosDistro: "melodic" + docker: + enabled: True diff --git a/riocli/apply/manifests/09-cloud-routed-network.yaml b/riocli/apply/manifests/09-cloud-routed-network.yaml new file mode 100644 index 00000000..a5b8fcb5 --- /dev/null +++ b/riocli/apply/manifests/09-cloud-routed-network.yaml @@ -0,0 +1,41 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Network" +metadata: + name: "cloud-routed-network" +spec: + runtime: "cloud" + type: "routed" + rosDistro: "kinetic" + resourceLimits: "small" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Network" +metadata: + name: "device-routed-network" +spec: + runtime: "device" + type: "routed" + rosDistro: "melodic" + deviceGUID: "eca8de7b-932a-499b-8b9a-3cebeb2a8c7c" + networkInterface: "wlp0s20f3" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Network" +metadata: + name: "cloud-native-network" +spec: + runtime: "cloud" + type: "native" + rosDistro: "melodic" + resourceLimits: "xSmall" +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Network" +metadata: + name: "device-native-network" +spec: + runtime: "device" + type: "native" + rosDistro: "melodic" + deviceGUID: "eca8de7b-932a-499b-8b9a-3cebeb2a8c7c" + networkInterface: "wlp0s20f3" diff --git a/riocli/apply/manifests/10-device-routed-network.yaml b/riocli/apply/manifests/10-device-routed-network.yaml new file mode 100644 index 00000000..d113788c --- /dev/null +++ b/riocli/apply/manifests/10-device-routed-network.yaml @@ -0,0 +1,10 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Network" +metadata: + name: "device-routed-network" +spec: + runtime: "device" + type: "routed" + rosDistro: "melodic" + deviceGUID: "299a436f-9d7f-4bbf-adce-21be06bfbbce" + networkInterface: "eth0" diff --git a/riocli/apply/manifests/11-cloud-native-network.yaml b/riocli/apply/manifests/11-cloud-native-network.yaml new file mode 100644 index 00000000..6382e1bb --- /dev/null +++ b/riocli/apply/manifests/11-cloud-native-network.yaml @@ -0,0 +1,9 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Network" +metadata: + name: "cloud-native-network" +spec: + runtime: "cloud" + type: "native" + rosDistro: "melodic" + resourceLimits: "xSmall" diff --git a/riocli/apply/manifests/12-device-native-network.yaml b/riocli/apply/manifests/12-device-native-network.yaml new file mode 100644 index 00000000..f2b15407 --- /dev/null +++ b/riocli/apply/manifests/12-device-native-network.yaml @@ -0,0 +1,10 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Network" +metadata: + name: "device-native-network" +spec: + runtime: "device" + type: "native" + rosDistro: "melodic" + deviceGUID: "299a436f-9d7f-4bbf-adce-21be06bfbbce" + networkInterface: "eth0" diff --git a/riocli/apply/manifests/deployment.yaml b/riocli/apply/manifests/deployment.yaml new file mode 100644 index 00000000..ef64850a --- /dev/null +++ b/riocli/apply/manifests/deployment.yaml @@ -0,0 +1,42 @@ +--- +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: cloud-deployment + depends: + kind: package + nameOrGUID: "non-ros-cloud" + version: "v1.0.0" + labels: {} +spec: + runtime: device + depends: + kind: device + nameOrGUID: shaishav-thinkpad + envArgs: + - name: TEST_ENV + value: asdsad + volumes: + - execName: exec + mountPath: "/tmp" + subPath: "/tmp" + depends: + kind: disk + nameOrGUID: "PVC" + staticRoutes: + - name: asd + depends: + kind: staticroute + nameOrGUID: asdasd + rosNetworks: + - depends: + kind: network + nameOrGUID: asdasd + topics: + - 
"/telemetry" + - depends: + kind: network + nameOrGUID: "native" + - depends: + kind: network + nameOrGUID: "routed" diff --git a/riocli/apply/manifests/device-with-both-runtimes.yaml b/riocli/apply/manifests/device-with-both-runtimes.yaml new file mode 100644 index 00000000..02a445e4 --- /dev/null +++ b/riocli/apply/manifests/device-with-both-runtimes.yaml @@ -0,0 +1,13 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Device" +metadata: + name: "apply-device-both" + project: "project-guid" +spec: + python: "3" + rosDistro: "melodic" + preinstalled: + enabled: True + catkinWorkspace: "/home/rapyuta/catkin_ws" + docker: + enabled: True diff --git a/riocli/apply/manifests/disk.yaml b/riocli/apply/manifests/disk.yaml new file mode 100644 index 00000000..4899bc84 --- /dev/null +++ b/riocli/apply/manifests/disk.yaml @@ -0,0 +1,7 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Disk" +metadata: + name: "pvc-name" +spec: + runtime: cloud + capacity: 4 diff --git a/riocli/apply/manifests/package.yaml b/riocli/apply/manifests/package.yaml new file mode 100644 index 00000000..1472d7eb --- /dev/null +++ b/riocli/apply/manifests/package.yaml @@ -0,0 +1,49 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Secret" +metadata: + name: "rrdockerhub" +spec: + type: Docker + docker: + username: user + password: password + email: user@example.net + # registry: https://index.docker.io/v1/ +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" #We will create a package +metadata: + name: "non-ros-cloud" #it will be called non-ros-cloud. + version: "1.0.0" + labels: + app: test +spec: + runtime: "cloud" + cloud: + replicas: 1 + executables: + - name: "http" + type: docker + docker: + image: "nginx" + pullSecret: # docker with a pull secret + depends: + kind: secret + nameOrGUID: "rrdockerhub" #here we referene by names. 
not GUIDs + environmentVars: #it has environment args + - name: "TEST_VAR" + description: "This is a test var" + defaultValue: "45" + exposed: true + exposedName: "TEST_VAR_EXPOSED" + - name: "TEST_INTERNAL_VAR" + defaultValue: "45" + endpoints: #These are cloud endpoints + - name: "HTTP" + type: external-http + port: 80 + targetPort: 80 + - name: "HTTPS" + type: external-https + port: 443 + targetPort: 443 diff --git a/riocli/apply/manifests/project.yaml b/riocli/apply/manifests/project.yaml new file mode 100644 index 00000000..c37ac984 --- /dev/null +++ b/riocli/apply/manifests/project.yaml @@ -0,0 +1,7 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Project" +metadata: + name: "test-project-1" +spec: + users: + - "user@example.net" diff --git a/riocli/apply/manifests/secret.yaml b/riocli/apply/manifests/secret.yaml new file mode 100644 index 00000000..946350bd --- /dev/null +++ b/riocli/apply/manifests/secret.yaml @@ -0,0 +1,46 @@ +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Secret" +metadata: + name: "git-ssh" +spec: + type: Git + git: + authMethod: SSH Auth + privateKey: | + private-key +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Secret" +metadata: + name: "git-basic" +spec: + type: Git + git: + authMethod: HTTP/S Basic Auth + username: user + password: password + # caCert: +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Secret" +metadata: + name: "git-token" +spec: + type: Git + git: + authMethod: HTTP/S Token Auth + token: token + # caCert: +--- +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Secret" +metadata: + name: "docker-secret" +spec: + type: Docker + docker: + username: user + password: password + email: user@example.net + # registry: https://index.docker.io/v1/ diff --git a/riocli/apply/manifests/static-route.yaml b/riocli/apply/manifests/static-route.yaml new file mode 100644 index 00000000..9893ec88 --- /dev/null +++ b/riocli/apply/manifests/static-route.yaml @@ -0,0 +1,4 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "StaticRoute" +metadata: + name: "unreal-server-apply" diff --git a/riocli/apply/parse.py b/riocli/apply/parse.py new file mode 100644 index 00000000..d6939591 --- /dev/null +++ b/riocli/apply/parse.py @@ -0,0 +1,387 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
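The sample manifests above are exactly the shape described by the jsonschema/ files added earlier in this diff, and the PR pulls in fastjsonschema to check them. A sketch of validating one manifest against its schema (paths assumed relative to the repository root):

```python
# Sketch: compile a schema from this PR and validate a sample manifest with it.
import fastjsonschema
import yaml

with open('jsonschema/project-schema.yaml') as f:
    validate = fastjsonschema.compile(yaml.safe_load(f))

with open('riocli/apply/manifests/project.yaml') as f:
    validate(yaml.safe_load(f))  # raises fastjsonschema.JsonSchemaException if invalid
```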
+import os
+import copy
+import json
+import queue
+import threading
+import typing
+from graphlib import TopologicalSorter
+from shutil import get_terminal_size
+
+import click
+import jinja2
+import yaml
+from tabulate import tabulate
+
+from riocli.apply.resolver import ResolverCache
+from riocli.config import Configuration
+from riocli.utils import run_bash
+from riocli.utils.mermaid import mermaid_link, mermaid_safe
+
+
+class Applier(object):
+    DEFAULT_MAX_WORKERS = 6
+
+    EXPECTED_TIME = {
+        "organization": 3,
+        "project": 3,
+        "secret": 3,
+        "package": 3,
+        "staticroute": 3,
+        "build": 180,
+        "disk": 180,
+        "deployment": 240,
+        "network": 120,
+        "device": 5,
+        "user": 3,
+    }
+
+    def __init__(self, files: typing.List, values, secrets):
+        self.environment = None
+        self.input_file_paths = files
+        self.config = Configuration()
+        self.client = self.config.new_client()
+        self.dependencies = {}
+        self.objects = {}
+        self.resolved_objects = {}
+        self.files = {}
+        self.graph = TopologicalSorter()
+        self.rc = ResolverCache(self.client)
+        self.secrets = {}
+        self.values = {}
+        self.diagram = ["flowchart LR"]
+        if values or secrets:
+            self.environment = jinja2.Environment()
+
+        if values:
+            self.values = self._load_file_content(values, is_value=True, is_secret=False)[0]
+
+        if secrets:
+            self.secrets = self._load_file_content(secrets, is_value=True, is_secret=True)[0]
+
+        self._process_file_list(files)
+
+    # Public Functions
+    def order(self):
+        return self.graph.static_order()
+
+    def apply(self, *args, **kwargs):
+        # The --workers flag arrives as None when it is not passed on the CLI.
+        WORKERS = int(kwargs.get('workers') or self.DEFAULT_MAX_WORKERS)
+        if WORKERS == 1:
+            return self.apply_sync(*args, **kwargs)
+        else:
+            return self.apply_async(*args, **kwargs)
+
+    def apply_async(self, *args, **kwargs):
+        WORKERS = int(kwargs.get('workers') or self.DEFAULT_MAX_WORKERS)
+        task_queue = queue.Queue()
+        done_queue = queue.Queue()
+
+        def worker():
+            while True:
+                obj = task_queue.get()
+                if obj in self.resolved_objects and 'manifest' in self.resolved_objects[obj]:
+                    # click.secho("obj {} is being applied".format(obj))
+                    try:
+                        self._apply_manifest(obj, *args, **kwargs)
+                    except Exception as ex:
+                        click.secho('[Err] Object "{}" apply failed: {}. Apply will not progress further.'.format(obj, str(ex)))
+                        raise ex
+
+                task_queue.task_done()
+                done_queue.put(obj)
+
+        # spawn the requested number of worker threads
+        worker_list = []
+        for worker_id in range(WORKERS):
+            worker_list.append(threading.Thread(target=worker, daemon=True))
+            worker_list[worker_id].start()
+
+        self.graph.prepare()
+        while self.graph.is_active():
+            for obj in self.graph.get_ready():
+                task_queue.put(obj)
+
+            done_obj = done_queue.get()
+            self.graph.done(done_obj)
+
+        task_queue.join()
+
+    def apply_sync(self, *args, **kwargs):
+        self.graph.prepare()
+        while self.graph.is_active():
+            for obj in self.graph.get_ready():
+                if obj in self.resolved_objects and 'manifest' in self.resolved_objects[obj]:
+                    self._apply_manifest(obj, *args, **kwargs)
+                self.graph.done(obj)
+
+    def delete(self, *args, **kwargs):
+        delete_order = list(self.graph.static_order())
+        delete_order.reverse()
+        for obj in delete_order:
+            if obj in self.resolved_objects and 'manifest' in self.resolved_objects[obj]:
+                self._delete_manifest(obj, *args, **kwargs)
+
+    def parse_dependencies(self, check_missing=True, delete=False):
+        number_of_objects = 0
+        for f, data in self.files.items():
+            for model in data:
+                key = self._get_object_key(model)
+                self._parse_dependency(key, model)
+                self._add_graph_node(key)
+                number_of_objects = number_of_objects + 1
+
+        resource_list = []
+        total_time = 0
+
+        for node in copy.deepcopy(self.graph).static_order():
+            action = 'CREATE' if self.resolved_objects[node]['src'] != 'remote' else 'UPDATE'
+            if delete:
+                action = 'DELETE'
+            kind = node.split(":")[0]
+            expected_time = round(self.EXPECTED_TIME.get(kind.lower(), 5) / 60, 2)
+            total_time = total_time + expected_time
+            resource_list.append([node, action, expected_time])
+
+        self._display_context(total_time=total_time, total_objects=number_of_objects, resource_list=resource_list)
+
+        if check_missing:
+            missing_resources = []
+            for key, item in self.resolved_objects.items():
+                if 'src' in item and item['src'] == 'missing':
+                    missing_resources.append(key)
+
+            if missing_resources:
+                click.secho("missing resources found in yaml. " + \
+                            "Please ensure the following are either available in your yaml " + \
+                            "or created on the server. {}".format(set(missing_resources)), fg="red")
+                raise SystemExit(1)
+
+    # Manifest Operations via base.py
+    def _apply_manifest(self, obj_key, *args, **kwargs):
+        obj = self.objects[obj_key]
+        cls = ResolverCache.get_model(obj)
+        ist = cls.from_dict(self.client, obj)
+        setattr(ist, 'rc', ResolverCache(self.client))
+        ist.apply(self.client, *args, **kwargs)
+
+    def _delete_manifest(self, obj_key, *args, **kwargs):
+        obj = self.objects[obj_key]
+        cls = ResolverCache.get_model(obj)
+        ist = cls.from_dict(self.client, obj)
+        setattr(ist, 'rc', ResolverCache(self.client))
+        ist.delete(self.client, obj, *args, **kwargs)
+
+    # File Loading Operations
+    def _process_file_list(self, files):
+        for f in files:
+            data = self._load_file_content(f)
+            if data:
+                for obj in data:
+                    self._register_object(obj)
+
+            self.files[f] = data
+
+    def _register_object(self, data):
+        try:
+            key = self._get_object_key(data)
+            self.objects[key] = data
+            self.resolved_objects[key] = {'src': 'local', 'manifest': data}
+        except KeyError:
+            print("key error {}".format(data))
+            return
+
+    def _load_file_content(self, file_name, is_value=False, is_secret=False):
+        if not is_secret:
+            with open(file_name) as opened:
+                data = opened.read()
+        else:
+            data = run_bash('sops -d {}'.format(file_name))
+
+        # TODO: If no Kind in yaml/json, then skip
+        if not (is_value or is_secret):
+            if self.environment or file_name.endswith('.j2'):
+                # A .j2 file may be passed without values/secrets, in which
+                # case no Jinja2 environment has been created yet.
+                environment = self.environment or jinja2.Environment()
+                template = environment.from_string(data)
+                template_args = self.values
+                if self.secrets:
+                    template_args['secrets'] = self.secrets
+                try:
+                    data = template.render(**template_args)
+                except Exception as ex:
+                    click.secho('{} template rendering error. Msg: {}'.format(file_name, str(ex)))
+                    raise ex
+
+                # str.rstrip() strips a character set, not a suffix, and would
+                # mangle names like "app2.j2"; strip the extension explicitly.
+                if file_name.endswith('.j2'):
+                    file_name = file_name[:-len('.j2')]
+
+        loaded_data = []
+        if file_name.endswith('json'):
+            # FIXME: Handle for JSON List.
+            try:
+                loaded = json.loads(data)
+                loaded_data.append(loaded)
+            except json.JSONDecodeError as ex:
+                ex_message = '{} json parsing error. Msg: {}'.format(file_name, str(ex))
+                raise Exception(ex_message)
+
+        elif file_name.endswith('yaml') or file_name.endswith('yml'):
+            try:
+                loaded = yaml.safe_load_all(data)
+                loaded_data = list(loaded)
+            except yaml.YAMLError as e:
+                ex_message = '{} yaml parsing error. Msg: {}'.format(file_name, str(e))
+                raise Exception(ex_message)
+
+        if not loaded_data:
+            click.secho('{} file is empty'.format(file_name))
+
+        return loaded_data
+
+    # Graph Operations
+    def _add_graph_node(self, key):
+        self.graph.add(key)
+        self.diagram.append('\t{}[{}]'.format(mermaid_safe(key), key))
+
+    def _add_graph_edge(self, dependent_key, key):
+        self.graph.add(dependent_key, key)
+        self.diagram.append('\t{}[{}] --> {}[{}]'.format(mermaid_safe(key), key, mermaid_safe(dependent_key), dependent_key))
+
+    # Dependency Resolution
+    def _parse_dependency(self, dependent_key, model):
+        for key, value in model.items():
+            if key == "depends":
+                if isinstance(value, dict) and value.get('kind'):
+                    self._resolve_dependency(dependent_key, value)
+                if isinstance(value, list):
+                    for each in value:
+                        if isinstance(each, dict) and each.get('kind'):
+                            self._resolve_dependency(dependent_key, each)
+
+                continue
+
+            if isinstance(value, dict):
+                self._parse_dependency(dependent_key, value)
+                continue
+
+            if isinstance(value, list):
+                for each in value:
+                    if isinstance(each, dict):
+                        self._parse_dependency(dependent_key, each)
+
+    def _resolve_dependency(self, dependent_key, dependency):
+        kind = dependency.get('kind')
+        name_or_guid = dependency.get('nameOrGUID')
+        key = '{}:{}'.format(kind, name_or_guid)
+
+        self._initialize_kind_dependency(kind)
+        guid = ResolverCache._maybe_guid(kind, name_or_guid)
+
+        obj_list = self.rc.list_objects(kind)
+        for obj in obj_list:
+            obj_guid = self._get_attr(obj, ResolverCache.GUID_KEYS)
+            obj_name = self._get_attr(obj, ResolverCache.NAME_KEYS)
+
+            if kind == 'package':
+                if guid and obj_guid == guid:
+                    self._add_remote_object_to_resolve_tree(dependent_key, obj_guid, dependency, obj)
+
+                if (name_or_guid == obj_name) and (
+                        'version' in dependency and obj['packageVersion'] == dependency.get('version')):
+                    self._add_remote_object_to_resolve_tree(dependent_key, obj_guid, dependency, obj)
+
+            # Special handling for Static route since it doesn't have a name field.
+            # StaticRoute sends a URLPrefix field with name being the prefix along with short org guid.
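+            # e.g. a manifest asking for nameOrGUID "ui-rc4" matches a remote
+            # urlPrefix such as "ui-rc4-orgxy" (illustrative values only).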
+ elif kind == 'staticroute' and name_or_guid in obj_name: + self._add_remote_object_to_resolve_tree(dependent_key, obj_guid, dependency, obj) + + elif (guid and obj_guid == guid) or (name_or_guid == obj_name): + self._add_remote_object_to_resolve_tree(dependent_key, obj_guid, dependency, obj) + + self.dependencies[kind][name_or_guid] = {'local': True} + self._add_graph_edge(dependent_key, key) + + if key not in self.resolved_objects: + self.resolved_objects[key] = {'src': 'missing'} + + def _add_remote_object_to_resolve_tree(self, dependent_key, guid, dependency, obj): + kind = dependency.get('kind') + name_or_guid = dependency.get('nameOrGUID') + key = '{}:{}'.format(kind, name_or_guid) + + self.dependencies[kind][name_or_guid] = {'guid': guid, 'raw': obj, 'local': False} + if key not in self.resolved_objects: + self.resolved_objects[key] = {} + self.resolved_objects[key]['guid'] = guid + self.resolved_objects[key]['raw'] = obj + self.resolved_objects[key]['src'] = 'remote' + + self._add_graph_edge(dependent_key, key) + + dependency['guid'] = guid + if kind.lower() == "disk": + dependency['depGuid'] = obj['internalDeploymentGUID'] + + if kind.lower() == "deployment": + dependency['guid'] = obj['deploymentId'] + + def _initialize_kind_dependency(self, kind): + if not self.dependencies.get(kind): + self.dependencies[kind] = {} + + #Utils + def _display_context(self, total_time: int, total_objects: int, resource_list: typing.List) -> None: + # Display context + + if os.environ.get('MERMAID'): + diagram_link = mermaid_link("\n".join(self.diagram)) + click.launch(diagram_link) + + headers = [click.style('Resource Context', bold=True, fg='yellow')] + context = [ + ['Expected Time (mins)', round(total_time, 2)], + ['Files', len(self.files)], + ['Resources', total_objects], + ] + click.echo(tabulate(context, headers=headers, tablefmt='simple', numalign='center')) + + # Display Resource Inventory + headers = [] + for header in ['Resource', 'Action', 'Expected Time (mins)']: + headers.append(click.style(header, fg='yellow', bold=True)) + + col, _ = get_terminal_size() + click.secho(" " * col, bg='blue') + click.echo(tabulate(resource_list, headers=headers, tablefmt='simple', numalign='center')) + click.secho(" " * col, bg='blue') + + @staticmethod + def _get_attr(obj, accept_keys): + for key in accept_keys: + if hasattr(obj, key): + return getattr(obj, key) + + raise Exception('guid resolve failed') + + @staticmethod + def _get_object_key(obj: dict) -> str: + kind = obj.get('kind').lower() + name_or_guid = obj['metadata']['name'] + + return '{}:{}'.format(kind, name_or_guid) diff --git a/riocli/apply/resolver.py b/riocli/apply/resolver.py new file mode 100644 index 00000000..77edeaf4 --- /dev/null +++ b/riocli/apply/resolver.py @@ -0,0 +1,195 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
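The Applier class above orders resources with `graphlib.TopologicalSorter` (the PR adds graphlib-backport for older Pythons): dependencies are applied first, and whatever becomes ready is handed to worker threads. A minimal sketch of that prepare/get_ready/done loop, with illustrative resource keys:

```python
# Sketch of the ordering pattern used by Applier.apply_async above.
from graphlib import TopologicalSorter

graph = TopologicalSorter()
graph.add('deployment:sootballs_ims', 'package:sootballs_ims')  # deployment needs package
graph.add('package:sootballs_ims', 'secret:rrdockerhub')        # package needs secret

graph.prepare()
while graph.is_active():
    for node in graph.get_ready():  # nodes whose predecessors are all done
        print('applying', node)     # Applier enqueues these for its workers
        graph.done(node)            # unblocks the nodes that depend on it
```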
+import functools +import json +import re +import typing + +from munch import munchify +from rapyuta_io import DeploymentPhaseConstants +from rapyuta_io.utils.rest_client import HttpMethod, RestClient + +from riocli.build.model import Build +from riocli.config.config import Configuration +from riocli.deployment.model import Deployment +from riocli.device.model import Device +from riocli.disk.model import Disk +from riocli.network.model import Network +from riocli.package.model import Package +from riocli.project.model import Project +from riocli.secret.model import Secret +from riocli.static_route.model import StaticRoute + + +class _Singleton(type): + _instances = {} + + def __call__(cls, *args, **kwargs): + if cls not in cls._instances: + cls._instances[cls] = super(_Singleton, cls).__call__(*args, **kwargs) + return cls._instances[cls] + + +class ResolverCache(object, metaclass=_Singleton): + KIND_TO_CLASS = { + 'Project': Project, + 'Secret': Secret, + 'Build': Build, + 'Device': Device, + 'Network': Network, + 'StaticRoute': StaticRoute, + 'Package': Package, + 'Disk': Disk, + 'Deployment': Deployment, + } + + KIND_REGEX = { + "organization": "^org-[a-z]{24}$", + "project": "^project-[a-z]{24}$", + "secret": "^secret-[a-z]{24}$", + "package": "^pkg-[a-z]{24}$", + "staticroute": "^staticroute-[a-z]{24}$", + "build": "^build-[a-z]{24}$", + "disk": "^disk-[a-z]{24}$", + "deployment": "^dep-[a-z]{24}$", + "network": "^net-[a-z]{24}$", + "device": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", + "user": "^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", + } + + GUID_KEYS = ['guid', 'GUID', 'uuid', 'ID', 'Id', 'id'] + NAME_KEYS = ['name', 'urlPrefix'] + + def __init__(self, client): + self.client = client + + @functools.lru_cache() + def list_objects(self, kind): + return self._list_functors(kind)() + + @functools.lru_cache() + def find_guid(self, name, kind, *args): + obj_list = self.list_objects(kind) + obj_match = list(self._find_functors(kind)(name, obj_list, *args)) + if obj_match and isinstance(obj_match, list) and len(obj_match) > 0: + return obj_match[0] + else: + return None + + def find_depends(self, depends, *args): + if 'depGuid' in depends and depends['kind'] == 'disk': + return depends['depGuid'], None + elif 'guid' in depends and depends['kind'] != 'network': + return depends['guid'], None + + elif 'nameOrGUID' in depends: + obj_list = self._list_functors(depends['kind'])() + obj_match = list(self._find_functors(depends['kind'])(depends['nameOrGUID'], obj_list, *args)) + if not obj_list or (isinstance(obj_list, list) and len(obj_list) == 0): + return None, None + if obj_match and isinstance(obj_match, list) and len(obj_match) > 0: + return self._guid_functor(depends['kind'])(obj_match[0]), obj_match[0] + else: + return None, None + return None, None + + def _guid_functor(self, kind): + mapping = { + 'secret': lambda x: munchify(x).guid, + "project": lambda x: munchify(x).guid, + "package": lambda x: munchify(x)['id'], + "staticroute": lambda x: munchify(x)['guid'], + "build": lambda x: munchify(x)['guid'], + "deployment": lambda x: munchify(x)['deploymentId'], + "network": lambda x: munchify(x)['guid'], + "disk": lambda x: munchify(x)['internalDeploymentGUID'], # This is only temporary + "device": lambda x: munchify(x)['uuid'] + } + return mapping[kind] + + def _list_functors(self, kind): + mapping = { + 'secret': self.client.list_secrets, + "project": self.client.list_projects, + "package":
self.client.get_all_packages, + "staticroute": self.client.get_all_static_routes, + "build": self.client.list_builds, + "deployment": functools.partial(self.client.get_all_deployments, + phases=[DeploymentPhaseConstants.SUCCEEDED, + DeploymentPhaseConstants.PROVISIONING]), + "network": self._list_networks, + "disk": self._list_disks, + "device": self.client.get_all_devices, + } + + return mapping[kind] + + def _find_functors(self, kind): + mapping = { + 'secret': self._generate_find_guid_functor(), + "project": self._generate_find_guid_functor(), + "package": lambda name, obj_list, version: filter(lambda x: name == x.name and version == x['packageVersion'], obj_list), + "staticroute": lambda name, obj_list: filter(lambda x: name == '-'.join(x.urlPrefix.split('-')[:-1]), obj_list), + "build": self._generate_find_guid_functor(), + "deployment": self._generate_find_guid_functor(), + "network": self._generate_find_guid_functor(), + "disk": self._generate_find_guid_functor(), + "device": self._generate_find_guid_functor(), + } + + return mapping[kind] + + def _generate_find_guid_functor(self, name_field='name'): + return lambda name, obj_list: filter(lambda x: name == getattr(x, name_field), obj_list) + + def _list_networks(self): + native = self.client.list_native_networks() + routed = self.client.get_all_routed_networks() + + networks = [] + if native: + networks.extend(native) + + if routed: + networks.extend(routed) + return networks + + def _list_disks(self): + config = Configuration() + catalog_host = config.data.get('catalog_host', 'https://gacatalog.apps.rapyuta.io') + url = '{}/disk'.format(catalog_host) + headers = config.get_auth_header() + response = RestClient(url).method(HttpMethod.GET).headers(headers).execute() + data = json.loads(response.text) + if not response.ok: + err_msg = data.get('error') + raise Exception(err_msg) + return munchify(data) + + @classmethod + def _maybe_guid(cls, kind: str, name_or_guid: str) -> typing.Union[str, None]: + if re.fullmatch(cls.KIND_REGEX[kind], name_or_guid): + return name_or_guid + + @classmethod + def get_model(cls, data: dict) -> typing.Any: + kind = data.get('kind', None) + if not kind: + raise Exception('kind is missing') + + kind_cls = cls.KIND_TO_CLASS.get(kind, None) + if not kind_cls: + raise Exception('invalid kind {}'.format(kind)) + + return kind_cls diff --git a/riocli/apply/util.py b/riocli/apply/util.py new file mode 100644 index 00000000..702379e2 --- /dev/null +++ b/riocli/apply/util.py @@ -0,0 +1,56 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import os +import glob + +def parse_varidac_pathargs(pathItem): + glob_files = [] + abs_path = os.path.abspath(pathItem) + # make it absolute, then: + # does the path exist? + # is it a dir? scan recursively + # not a dir but has special characters in it? [*?^!] + # assume it's a valid glob, use it to glob recursively + # if all else fails + # consider it a file path directly.
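+ # e.g. (illustrative): passing "manifests/" expands to every file under it + # recursively, "manifests/*.yaml" (no literal match on disk) is treated as a + # glob pattern, and an existing plain file path is returned as-is.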
+ if os.path.exists(abs_path): + if os.path.isdir(abs_path): + # TODO: Should we keep this recursive? + glob_files = glob.glob(abs_path + "/**/*", recursive=True) + else: + glob_files = [abs_path] + else: + # The path does not exist literally; assume it is a glob pattern. + glob_files = glob.glob(abs_path, recursive=True) + return glob_files + +def process_files_values_secrets(files, values, secrets): + glob_files = [] + + for pathItem in files: + path_glob = parse_varidac_pathargs(pathItem) + glob_files.extend(path_glob) + + abs_values = values + if values and values != "": + abs_values = os.path.abspath(values) + if abs_values in glob_files: + glob_files.remove(abs_values) + + abs_secrets = secrets + if secrets and secrets != "": + abs_secrets = os.path.abspath(secrets) + if abs_secrets in glob_files: + glob_files.remove(abs_secrets) + + glob_files = list(set(glob_files)) + return glob_files, abs_values, abs_secrets diff --git a/riocli/auth/login.py b/riocli/auth/login.py index ae846c34..a380c78d 100644 --- a/riocli/auth/login.py +++ b/riocli/auth/login.py @@ -15,7 +15,7 @@ from click_help_colors import HelpColorsCommand from riocli.auth.util import select_project, get_token -from riocli.config import Configuration +from riocli.utils.context import get_root_context @@ -23,30 +23,48 @@ help_headers_color='yellow', help_options_color='green', ) -@click.option('--email', prompt='Email', +@click.option('--email', type=str, help='Email of the Rapyuta.io account') -@click.option('--password', prompt='Password', hide_input=True, +@click.option('--password', type=str, help='Password for the Rapyuta.io account') -def login(email: str, password: str): +@click.option('--project', type=str, default=None, + help='Context will be set to the Project after authentication') +@click.option('--interactive/--no-interactive', is_flag=True, type=bool, default=True, + help='Make login interactive') +@click.pass_context +def login(ctx: click.Context, email: str, password: str, project: str, interactive: bool): """ Log into the Rapyuta.io account using the CLI. This is required to use most of the functionalities of the CLI. """ - config = Configuration() - config.data['email_id'] = email - config.data['password'] = password + if interactive: + email = email or click.prompt('Email') + password = password or click.prompt('Password', hide_input=True) - config.data['auth_token'] = get_token(email, password) + if not email: + click.secho('email not specified') + raise SystemExit(1) + if not password: + click.secho('password not specified') + raise SystemExit(1) + + ctx = get_root_context(ctx) + ctx.obj.data['email_id'] = email + ctx.obj.data['password'] = password + ctx.obj.data['auth_token'] = get_token(email, password) # Save if the file does not already exist - if not config.exists: - click.echo('Logged in successfully!') - config.save() + if not ctx.obj.exists or not interactive: + ctx.obj.save() else: click.echo("[Warning] rio already has a config file present") click.confirm('Do you want to override the config', abort=True) - select_project(config) - config.save() + if not interactive and not project: + click.echo('Logged in successfully!') + return + + select_project(ctx.obj, project=project) + ctx.obj.save() click.echo('Logged in successfully!') diff --git a/riocli/auth/logout.py b/riocli/auth/logout.py index fcc495da..120616d7 100644 --- a/riocli/auth/logout.py +++ b/riocli/auth/logout.py @@ -17,19 +17,19 @@ @click.command() -def logout(): +@click.pass_context +def logout(ctx: click.Context): """ Log out from the Rapyuta.io account using the CLI.
""" - config = Configuration() - if not config.exists: + if not ctx.obj.exists: return - config.data.pop('auth_token', None) - config.data.pop('password', None) - config.data.pop('email_id', None) - config.data.pop('project_id', None) - config.save() + ctx.obj.data.pop('auth_token', None) + ctx.obj.data.pop('password', None) + ctx.obj.data.pop('email_id', None) + ctx.obj.data.pop('project_id', None) + ctx.obj.save() click.secho('Logged out successfully!', fg='green') diff --git a/riocli/auth/refresh_token.py b/riocli/auth/refresh_token.py index e4c33ce3..0822b06e 100644 --- a/riocli/auth/refresh_token.py +++ b/riocli/auth/refresh_token.py @@ -19,18 +19,18 @@ @click.command() -def refresh_token(): +@click.pass_context +def refresh_token(ctx: click.Context): """ Refreshes the authentication Token after it expires """ - config = Configuration() - email = config.data.get('email_id', None) - password = config.data.get('password', None) - if not config.exists or not email or not password: + email = ctx.obj.data.get('email_id', None) + password = ctx.obj.data.get('password', None) + if not ctx.obj.exists or not email or not password: raise LoggedOut - config.data['auth_token'] = get_token(email, password) + ctx.obj.data['auth_token'] = get_token(email, password) - config.save() + ctx.obj.save() click.echo('Token refreshed successfully!') diff --git a/riocli/auth/staging.py b/riocli/auth/staging.py index 9e151347..db7e38ad 100644 --- a/riocli/auth/staging.py +++ b/riocli/auth/staging.py @@ -16,6 +16,7 @@ from riocli.auth.login import select_project from riocli.auth.util import get_token from riocli.config import Configuration +from riocli.utils.context import get_root_context _STAGING_ENVIRONMENT_SUBDOMAIN = "apps.okd4v2.okd4beta.rapyuta.io" _NAMED_ENVIRONMENTS = ["v11", "v12", "v13", "v14", "v15", "qa"] @@ -23,37 +24,38 @@ @click.command('environment', hidden=True) @click.argument('name', type=str) -def environment(name: str): +@click.pass_context +def environment(ctx: click.Context, name: str): """ Sets the Rapyuta.io environment to use (Internal use) """ - config = Configuration() + ctx = get_root_context(ctx) if name == 'ga': - config.data.pop('environment', None) - config.data.pop('catalog_host', None) - config.data.pop('core_api_host', None) - config.data.pop('rip_host', None) + ctx.obj.data.pop('environment', None) + ctx.obj.data.pop('catalog_host', None) + ctx.obj.data.pop('core_api_host', None) + ctx.obj.data.pop('rip_host', None) else: - _configure_environment(config, name) + _configure_environment(ctx.obj, name) - config.data.pop('project_id', None) - email = config.data.get('email_id', None) - password = config.data.get('password', None) - config.save() + ctx.obj.data.pop('project_id', None) + email = ctx.obj.data.get('email_id', None) + password = ctx.obj.data.get('password', None) + ctx.obj.save() - config.data['auth_token'] = get_token(email, password) + ctx.obj.data['auth_token'] = get_token(email, password) - select_project(config) - config.save() + select_project(ctx.obj) + ctx.obj.save() def _validate_environment(name: str) -> bool: valid = name in _NAMED_ENVIRONMENTS or name.startswith('pr') if not valid: click.secho('Invalid staging environment!', fg='red') - exit(1) + raise SystemExit(1) def _configure_environment(config: Configuration, name: str) -> None: diff --git a/riocli/auth/status.py b/riocli/auth/status.py index 8d345d0f..0a50fb7f 100644 --- a/riocli/auth/status.py +++ b/riocli/auth/status.py @@ -17,15 +17,15 @@ @click.command() -def status(): +@click.pass_context 
+def status(ctx: click.Context): """ Shows the Login status of the CLI """ - config = Configuration() - if not config.exists: + if not ctx.obj.exists: click.secho('Logged out 🔒', fg='red') - exit(1) + raise SystemExit(1) - if 'auth_token' in config.data: + if 'auth_token' in ctx.obj.data: click.secho('Logged in 🎉', fg='green') diff --git a/riocli/auth/util.py b/riocli/auth/util.py index a20f966a..458ca10f 100644 --- a/riocli/auth/util.py +++ b/riocli/auth/util.py @@ -20,22 +20,31 @@ from riocli.config import Configuration from riocli.utils.selector import show_selection +from riocli.project.util import find_project_guid -def select_project(config: Configuration) -> str: +def select_project(config: Configuration, project: str = None) -> None: """ Launches the project selection prompt by listing all the projects. Sets the choice in the given configuration. """ client = config.new_client(with_project=False) - projects = client.list_projects() + project_guid = None + if project: + project_guid = project if project.startswith('project-') else find_project_guid(client, project) + + projects = client.list_projects() project_map = dict() + for project in projects: project_map[project.guid] = project.name - choice = show_selection(project_map, header='Select the project to activate') - config.data['project_id'] = choice + if not project_guid: + project_guid = show_selection(project_map, header='Select the project to activate') + + config.data['project_id'] = project_guid + config.data['project_name'] = project_map[project_guid] def get_token(email: str, password: str) -> str: @@ -52,7 +61,7 @@ def get_token(email: str, password: str) -> str: return token except UnauthorizedError: click.secho("incorrect email/password", fg='red') - exit(1) + raise SystemExit(1) except Exception as e: click.secho(e, fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/bootstrap.py b/riocli/bootstrap.py index e237ccef..618d60ee 100644 --- a/riocli/bootstrap.py +++ b/riocli/bootstrap.py @@ -17,22 +17,28 @@ import click import rapyuta_io.version +from click import Context from click_help_colors import HelpColorsGroup from click_plugins import with_plugins -from click_repl import register_repl from pkg_resources import iter_entry_points +from riocli.chart import chart +from riocli.apply import apply, explain, delete from riocli.auth import auth from riocli.build import build from riocli.completion import completion +from riocli.config import Configuration from riocli.deployment import deployment from riocli.device import device +from riocli.disk import disk from riocli.marketplace import marketplace from riocli.network import network from riocli.package import package +from riocli.parameter import parameter from riocli.project import project from riocli.rosbag import rosbag from riocli.secret import secret +from riocli.shell import shell, deprecated_repl from riocli.static_route import static_route @@ -40,14 +46,15 @@ @click.group( invoke_without_command=False, cls=HelpColorsGroup, - help_headers_color='yellow', - help_options_color='green', + help_headers_color="yellow", + help_options_color="green", ) -def cli(): - pass +@click.pass_context +def cli(ctx: Context, config: str = None): + ctx.obj = Configuration(filepath=config) -@cli.command('help') +@cli.command("help") @click.pass_context def cli_help(ctx): """ @@ -61,10 +68,14 @@ def version(): """ Version of the CLI/SDK """ - click.echo("rio {} / SDK {}".format(__version__, rapyuta_io.VERSIONSTR)) + click.echo("rio {} / SDK {}".format(__version__, 
rapyuta_io.__version__)) return +cli.add_command(apply) +cli.add_command(chart) +cli.add_command(explain) +cli.add_command(delete) cli.add_command(auth) cli.add_command(project) cli.add_command(device) @@ -77,4 +88,7 @@ def version(): cli.add_command(network) cli.add_command(completion) cli.add_command(marketplace) -register_repl(cli) +cli.add_command(parameter) +cli.add_command(disk) +cli.add_command(shell) +cli.add_command(deprecated_repl) diff --git a/riocli/build/create.py b/riocli/build/create.py index 61126a31..65a8459c 100644 --- a/riocli/build/create.py +++ b/riocli/build/create.py @@ -61,4 +61,4 @@ def create_build(build_name: str, repository: str, strategy: str, branch: str, c click.secho('Created build successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/build/delete.py b/riocli/build/delete.py index 883f634a..705d5509 100644 --- a/riocli/build/delete.py +++ b/riocli/build/delete.py @@ -36,4 +36,4 @@ def delete_build(build_name: str, build_guid: str, force: bool): click.echo(click.style('Build deleted successfully!', fg='green')) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/build/import_build.py b/riocli/build/import_build.py index 85eb78d2..31ebb391 100644 --- a/riocli/build/import_build.py +++ b/riocli/build/import_build.py @@ -65,7 +65,7 @@ def import_docker_build(arch: str, ros: bool, ros_distro: str, sim: bool, wait: click.secho('Created build successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _gather_information() -> Build: diff --git a/riocli/build/inspect.py b/riocli/build/inspect.py index a8473b5e..6345cfb6 100644 --- a/riocli/build/inspect.py +++ b/riocli/build/inspect.py @@ -35,7 +35,7 @@ def inspect_build(format_type: str, build_guid: str, build_name: str) -> None: inspect_with_format(data, format_type) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def make_build_inspectable(build: Build) -> dict: diff --git a/riocli/build/list.py b/riocli/build/list.py index 3a3cc2ae..2939590d 100644 --- a/riocli/build/list.py +++ b/riocli/build/list.py @@ -33,7 +33,7 @@ def list_builds(status: typing.List[str]) -> None: _display_build_list(builds, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_build_list(builds: typing.List[Build], show_header: bool = True): diff --git a/riocli/build/logs.py b/riocli/build/logs.py index d162db14..bcf40cfa 100644 --- a/riocli/build/logs.py +++ b/riocli/build/logs.py @@ -32,7 +32,7 @@ def build_logs(build_name: str, build_guid: str) -> None: stream_build_logs(build_guid) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def stream_build_logs(build_guid: str) -> None: diff --git a/riocli/build/model.py b/riocli/build/model.py new file mode 100644 index 00000000..d14d23ee --- /dev/null +++ b/riocli/build/model.py @@ -0,0 +1,73 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +import typing + +import click +from rapyuta_io import Build as v1Build, Client, BuildOptions, CatkinOption + +from riocli.build.util import find_build_guid, BuildNotFound +from riocli.build.validation import validate +from riocli.model import Model + + +class Build(Model): + + def __init__(self, *args, **kwargs): + self.update(*args, **kwargs) + + def find_object(self, client: Client) -> bool: + guid, obj = self.rc.find_depends({"kind": "build", "nameOrGUID": self.metadata.name}) + if not guid: + return False + + return obj + + def create_object(self, client: Client) -> v1Build: + build = client.create_build(build=self.to_v1()) + return build + + def update_object(self, client: Client, obj: typing.Any) -> None: + pass + + def delete_object(self, client: Client, obj: typing.Any) -> typing.Any: + obj.delete() + + def to_v1(self) -> v1Build: + build_opts = None + if self.spec.recipe == 'Source' and self.spec.get('catkinParameters', None): + catkin_opts = [] + for each in self.spec.catkinParameters: + catkin_opt = CatkinOption(rosPkgs=each.get('rosPackages', None), + makeArgs=each.get('makeArguments', None), + cmakeArgs=each.get('cmakeArguments', None), + catkinMakeArgs=each.get('catkinMakeArguments', None), + blacklist=each.get('blacklist', None)) + catkin_opts.append(catkin_opt) + build_opts = BuildOptions(catkin_opts) + + return v1Build(buildName=self.metadata.name, strategyType=self.spec.recipe, repository=self.spec.git.repository, + architecture=self.spec.architecture, rosDistro=self.spec.get('rosDistro', ''), + isRos=self.spec.get('rosDistro', '') != '', dockerPullSecret=self.spec.get('pullSecret', ''), + contextDir=self.spec.get('contextDir', ''), dockerFilePath=self.spec.get('dockerfile', ''), + dockerPushRepository=self.spec.get('pushSecret', ''), branch=self.spec.git.get('gitRef', ''), + triggerName=self.spec.get('triggerName', ''), tagName=self.spec.get('tagName', ''), + buildOptions=build_opts) + + @classmethod + def pre_process(cls, client: Client, d: typing.Dict) -> None: + pass + + @staticmethod + def validate(data) -> None: + validate(data) diff --git a/riocli/build/trigger.py b/riocli/build/trigger.py index 9d8ebcd7..c928dab3 100644 --- a/riocli/build/trigger.py +++ b/riocli/build/trigger.py @@ -37,7 +37,7 @@ def trigger_build(build_name: str, build_guid: str, trigger_name: str, tail: boo click.secho('Triggered build successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) if tail: stream_build_logs(build_guid) diff --git a/riocli/build/util.py b/riocli/build/util.py index 4109ce2c..2d4f3f0a 100644 --- a/riocli/build/util.py +++ b/riocli/build/util.py @@ -35,7 +35,11 @@ def decorated(**kwargs: typing.Any): name = get_build_name(client, guid) if guid is None: - guid = find_build_guid(client, name) + try: + guid = find_build_guid(client, name) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) kwargs['build_name'] = name kwargs['build_guid'] = guid @@ -55,5 +59,10 @@ def find_build_guid(client: Client, name: str) -> str: if build.buildName == name: return build.guid - click.secho("Build not found", fg='red') - exit(1) + raise BuildNotFound() + + +class BuildNotFound(Exception): + def __init__(self, message='build not found!'): + self.message = message + super().__init__(self.message) diff --git a/riocli/build/validation.py b/riocli/build/validation.py new file mode 100644 index 
00000000..641f7a39 --- /dev/null +++ b/riocli/build/validation.py @@ -0,0 +1,273 @@ +VERSION = "2.16.1" +import re +from fastjsonschema import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^secret-[a-z]{24}$': re.compile('^secret-[a-z]{24}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_buildspec(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_buildspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'repository': {'type': 'object', 'properties': {'url': {'type': 'string'}, 'ref': {'type': 'string'}, 'contextDir': {'type': 'string'}, 'gitSecret': {'type': 'string', 'pattern': '^secret-[a-z]{24}$'}}}, 'buildMethod': {'enum': ['Docker', 'Source']}}, 'required': ['buildMethod', 'repository', 'image'], 'dependencies': {'buildMethod': {'oneOf': [{'properties': {'buildMethod': {'enum': ['Docker']}, 'docker': {'type': 'object', 'properties': {'architecture': {'$ref': '#/definitions/architecture'}, 'dockerfile': {'type': 'string', 'default': 'Dockerfile'}, 'pullSecret': {'$ref': '#/definitions/secretGUID'}, 'isRos': {'type': 'boolean'}, 'rosDistro': {'$ref': '#/definitions/rosDistro'}, 'simulation': {'type': 'boolean', 'default': False}}, 'dependentRequired': {'isRos': ['rosDistro', 'simulation']}}, 'image': {'type': 'object', 'properties': {'registry': {'type': 'string'}, 'pushSecret': {'type': 'string'}, 'tagName': {'type': 'string'}, 'triggerName': {'type': 'string'}, 'webhookURL': {'type': 'string'}}}}}, {'properties': {'buildMethod': {'enum': ['Source']}, 'catkin': {'type': 'object', 'properties': {'architecture': {'$ref': '#/definitions/architecture'}, 'isRos': {'type': 'boolean', 'const': True, 'default': True}, 'rosDistro': {'$ref': '#/definitions/rosDistro'}, 'simulation': {'type': 'boolean', 'default': False}, 'catkinParameters': {'$ref': '#/definitions/catkinParameters'}}, 'required': ['isRos', 'rosDistro', 'simulation', 'architecture']}, 'image': {'type': 'object', 'properties': {'registry': {'type': 'string'}, 'pushSecret': {'type': 'string'}, 'tagName': {'type': 'string'}, 'triggerName': {'type': 'string'}, 'webhookURL': {'type': 'string'}}}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['buildMethod', 'repository', 'image']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['buildMethod', 'repository', 'image'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'repository': {'type': 'object', 'properties': {'url': {'type': 'string'}, 'ref': {'type': 'string'}, 'contextDir': {'type': 'string'}, 'gitSecret': {'type': 'string', 'pattern': '^secret-[a-z]{24}$'}}}, 'buildMethod': {'enum': ['Docker', 'Source']}}, 'required': ['buildMethod', 'repository', 'image'], 'dependencies': {'buildMethod': {'oneOf': [{'properties': {'buildMethod': {'enum': ['Docker']}, 'docker': {'type': 'object', 'properties': {'architecture': {'$ref': '#/definitions/architecture'}, 'dockerfile': {'type': 'string', 'default': 'Dockerfile'}, 'pullSecret': {'$ref': '#/definitions/secretGUID'}, 'isRos': {'type': 'boolean'}, 'rosDistro': {'$ref': '#/definitions/rosDistro'}, 'simulation': {'type': 'boolean', 
'default': False}}, 'dependentRequired': {'isRos': ['rosDistro', 'simulation']}}, 'image': {'type': 'object', 'properties': {'registry': {'type': 'string'}, 'pushSecret': {'type': 'string'}, 'tagName': {'type': 'string'}, 'triggerName': {'type': 'string'}, 'webhookURL': {'type': 'string'}}}}}, {'properties': {'buildMethod': {'enum': ['Source']}, 'catkin': {'type': 'object', 'properties': {'architecture': {'$ref': '#/definitions/architecture'}, 'isRos': {'type': 'boolean', 'const': True, 'default': True}, 'rosDistro': {'$ref': '#/definitions/rosDistro'}, 'simulation': {'type': 'boolean', 'default': False}, 'catkinParameters': {'$ref': '#/definitions/catkinParameters'}}, 'required': ['isRos', 'rosDistro', 'simulation', 'architecture']}, 'image': {'type': 'object', 'properties': {'registry': {'type': 'string'}, 'pushSecret': {'type': 'string'}, 'tagName': {'type': 'string'}, 'triggerName': {'type': 'string'}, 'webhookURL': {'type': 'string'}}}}}]}}}, rule='required') + if "buildMethod" in data: + data_one_of_count1 = 0 + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "buildMethod" in data_keys: + data_keys.remove("buildMethod") + data__buildMethod = data["buildMethod"] + if data__buildMethod not in ['Docker']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".buildMethod must be one of ['Docker']", value=data__buildMethod, name="" + (name_prefix or "data") + ".buildMethod", definition={'enum': ['Docker']}, rule='enum') + if "docker" in data_keys: + data_keys.remove("docker") + data__docker = data["docker"] + validate___definitions_docker(data__docker, custom_formats, (name_prefix or "data") + ".docker") + if "image" in data_keys: + data_keys.remove("image") + data__image = data["image"] + validate___definitions_imageartifact(data__image, custom_formats, (name_prefix or "data") + ".image") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "buildMethod" in data_keys: + data_keys.remove("buildMethod") + data__buildMethod = data["buildMethod"] + if data__buildMethod not in ['Source']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".buildMethod must be one of ['Source']", value=data__buildMethod, name="" + (name_prefix or "data") + ".buildMethod", definition={'enum': ['Source']}, rule='enum') + if "catkin" in data_keys: + data_keys.remove("catkin") + data__catkin = data["catkin"] + validate___definitions_catkin(data__catkin, custom_formats, (name_prefix or "data") + ".catkin") + if "image" in data_keys: + data_keys.remove("image") + data__image = data["image"] + validate___definitions_imageartifact(data__image, custom_formats, (name_prefix or "data") + ".image") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'buildMethod': {'enum': ['Docker']}, 'docker': {'type': 'object', 'properties': {'architecture': {'$ref': '#/definitions/architecture'}, 'dockerfile': {'type': 'string', 'default': 'Dockerfile'}, 'pullSecret': {'$ref': '#/definitions/secretGUID'}, 'isRos': {'type': 'boolean'}, 'rosDistro': {'$ref': '#/definitions/rosDistro'}, 'simulation': 
{'type': 'boolean', 'default': False}}, 'dependentRequired': {'isRos': ['rosDistro', 'simulation']}}, 'image': {'type': 'object', 'properties': {'registry': {'type': 'string'}, 'pushSecret': {'type': 'string'}, 'tagName': {'type': 'string'}, 'triggerName': {'type': 'string'}, 'webhookURL': {'type': 'string'}}}}}, {'properties': {'buildMethod': {'enum': ['Source']}, 'catkin': {'type': 'object', 'properties': {'architecture': {'$ref': '#/definitions/architecture'}, 'isRos': {'type': 'boolean', 'const': True, 'default': True}, 'rosDistro': {'$ref': '#/definitions/rosDistro'}, 'simulation': {'type': 'boolean', 'default': False}, 'catkinParameters': {'$ref': '#/definitions/catkinParameters'}}, 'required': ['isRos', 'rosDistro', 'simulation', 'architecture']}, 'image': {'type': 'object', 'properties': {'registry': {'type': 'string'}, 'pushSecret': {'type': 'string'}, 'tagName': {'type': 'string'}, 'triggerName': {'type': 'string'}, 'webhookURL': {'type': 'string'}}}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "repository" in data_keys: + data_keys.remove("repository") + data__repository = data["repository"] + if not isinstance(data__repository, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".repository must be object", value=data__repository, name="" + (name_prefix or "data") + ".repository", definition={'type': 'object', 'properties': {'url': {'type': 'string'}, 'ref': {'type': 'string'}, 'contextDir': {'type': 'string'}, 'gitSecret': {'type': 'string', 'pattern': '^secret-[a-z]{24}$'}}}, rule='type') + data__repository_is_dict = isinstance(data__repository, dict) + if data__repository_is_dict: + data__repository_keys = set(data__repository.keys()) + if "url" in data__repository_keys: + data__repository_keys.remove("url") + data__repository__url = data__repository["url"] + if not isinstance(data__repository__url, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".repository.url must be string", value=data__repository__url, name="" + (name_prefix or "data") + ".repository.url", definition={'type': 'string'}, rule='type') + if "ref" in data__repository_keys: + data__repository_keys.remove("ref") + data__repository__ref = data__repository["ref"] + if not isinstance(data__repository__ref, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".repository.ref must be string", value=data__repository__ref, name="" + (name_prefix or "data") + ".repository.ref", definition={'type': 'string'}, rule='type') + if "contextDir" in data__repository_keys: + data__repository_keys.remove("contextDir") + data__repository__contextDir = data__repository["contextDir"] + if not isinstance(data__repository__contextDir, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".repository.contextDir must be string", value=data__repository__contextDir, name="" + (name_prefix or "data") + ".repository.contextDir", definition={'type': 'string'}, rule='type') + if "gitSecret" in data__repository_keys: + data__repository_keys.remove("gitSecret") + data__repository__gitSecret = data__repository["gitSecret"] + validate___definitions_secretguid(data__repository__gitSecret, custom_formats, (name_prefix or "data") + ".repository.gitSecret") + if "buildMethod" in data_keys: + data_keys.remove("buildMethod") + data__buildMethod = data["buildMethod"] + validate___definitions_buildrecipe(data__buildMethod, custom_formats, (name_prefix or "data") + ".buildMethod") + return data + +def validate___definitions_buildrecipe(data, 
custom_formats={}, name_prefix=None): + if data not in ['Docker', 'Source']: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be one of ['Docker', 'Source']", value=data, name="" + (name_prefix or "data") + "", definition={'enum': ['Docker', 'Source']}, rule='enum') + return data + +def validate___definitions_secretguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^secret-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^secret-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^secret-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^secret-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_catkin(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'architecture': {'enum': ['amd64', 'arm32v7', 'arm64v8']}, 'isRos': {'type': 'boolean', 'const': True, 'default': True}, 'rosDistro': {'enum': ['melodic', 'kinetic', 'noetic']}, 'simulation': {'type': 'boolean', 'default': False}, 'catkinParameters': {'type': 'array', 'items': {'$ref': '#/definitions/catkinParameter'}}}, 'required': ['isRos', 'rosDistro', 'simulation', 'architecture']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['isRos', 'rosDistro', 'simulation', 'architecture']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['isRos', 'rosDistro', 'simulation', 'architecture'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'architecture': {'enum': ['amd64', 'arm32v7', 'arm64v8']}, 'isRos': {'type': 'boolean', 'const': True, 'default': True}, 'rosDistro': {'enum': ['melodic', 'kinetic', 'noetic']}, 'simulation': {'type': 'boolean', 'default': False}, 'catkinParameters': {'type': 'array', 'items': {'$ref': '#/definitions/catkinParameter'}}}, 'required': ['isRos', 'rosDistro', 'simulation', 'architecture']}, rule='required') + data_keys = set(data.keys()) + if "architecture" in data_keys: + data_keys.remove("architecture") + data__architecture = data["architecture"] + validate___definitions_architecture(data__architecture, custom_formats, (name_prefix or "data") + ".architecture") + if "isRos" in data_keys: + data_keys.remove("isRos") + data__isRos = data["isRos"] + if not isinstance(data__isRos, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".isRos must be boolean", value=data__isRos, name="" + (name_prefix or "data") + ".isRos", definition={'type': 'boolean', 'const': True, 'default': True}, rule='type') + if data__isRos != True: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".isRos must be same as const definition: True", value=data__isRos, name="" + (name_prefix or "data") + ".isRos", definition={'type': 'boolean', 'const': True, 'default': True}, rule='const') + else: data["isRos"] = True + if "rosDistro" in data_keys: + data_keys.remove("rosDistro") + data__rosDistro = data["rosDistro"] + 
validate___definitions_rosdistro(data__rosDistro, custom_formats, (name_prefix or "data") + ".rosDistro") + if "simulation" in data_keys: + data_keys.remove("simulation") + data__simulation = data["simulation"] + if not isinstance(data__simulation, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".simulation must be boolean", value=data__simulation, name="" + (name_prefix or "data") + ".simulation", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["simulation"] = False + if "catkinParameters" in data_keys: + data_keys.remove("catkinParameters") + data__catkinParameters = data["catkinParameters"] + validate___definitions_catkinparameters(data__catkinParameters, custom_formats, (name_prefix or "data") + ".catkinParameters") + return data + +def validate___definitions_catkinparameters(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be array", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'rosPackages': {'type': 'string'}, 'cmakeArguments': {'type': 'string'}, 'makeArguments': {'type': 'string'}, 'catkinMakeArguments': {'type': 'string'}, 'blacklist': {'type': 'string'}}}}, rule='type') + data_is_list = isinstance(data, (list, tuple)) + if data_is_list: + data_len = len(data) + for data_x, data_item in enumerate(data): + validate___definitions_catkinparameter(data_item, custom_formats, (name_prefix or "data") + "[{data_x}]") + return data + +def validate___definitions_catkinparameter(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'rosPackages': {'type': 'string'}, 'cmakeArguments': {'type': 'string'}, 'makeArguments': {'type': 'string'}, 'catkinMakeArguments': {'type': 'string'}, 'blacklist': {'type': 'string'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "rosPackages" in data_keys: + data_keys.remove("rosPackages") + data__rosPackages = data["rosPackages"] + if not isinstance(data__rosPackages, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosPackages must be string", value=data__rosPackages, name="" + (name_prefix or "data") + ".rosPackages", definition={'type': 'string'}, rule='type') + if "cmakeArguments" in data_keys: + data_keys.remove("cmakeArguments") + data__cmakeArguments = data["cmakeArguments"] + if not isinstance(data__cmakeArguments, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".cmakeArguments must be string", value=data__cmakeArguments, name="" + (name_prefix or "data") + ".cmakeArguments", definition={'type': 'string'}, rule='type') + if "makeArguments" in data_keys: + data_keys.remove("makeArguments") + data__makeArguments = data["makeArguments"] + if not isinstance(data__makeArguments, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".makeArguments must be string", value=data__makeArguments, name="" + (name_prefix or "data") + ".makeArguments", definition={'type': 'string'}, rule='type') + if "catkinMakeArguments" in data_keys: + data_keys.remove("catkinMakeArguments") + data__catkinMakeArguments = data["catkinMakeArguments"] + if not isinstance(data__catkinMakeArguments, (str)): + raise 
JsonSchemaValueException("" + (name_prefix or "data") + ".catkinMakeArguments must be string", value=data__catkinMakeArguments, name="" + (name_prefix or "data") + ".catkinMakeArguments", definition={'type': 'string'}, rule='type') + if "blacklist" in data_keys: + data_keys.remove("blacklist") + data__blacklist = data["blacklist"] + if not isinstance(data__blacklist, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".blacklist must be string", value=data__blacklist, name="" + (name_prefix or "data") + ".blacklist", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_rosdistro(data, custom_formats={}, name_prefix=None): + if data not in ['melodic', 'kinetic', 'noetic']: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be one of ['melodic', 'kinetic', 'noetic']", value=data, name="" + (name_prefix or "data") + "", definition={'enum': ['melodic', 'kinetic', 'noetic']}, rule='enum') + return data + +def validate___definitions_architecture(data, custom_formats={}, name_prefix=None): + if data not in ['amd64', 'arm32v7', 'arm64v8']: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be one of ['amd64', 'arm32v7', 'arm64v8']", value=data, name="" + (name_prefix or "data") + "", definition={'enum': ['amd64', 'arm32v7', 'arm64v8']}, rule='enum') + return data + +def validate___definitions_imageartifact(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'registry': {'type': 'string'}, 'pushSecret': {'type': 'string'}, 'tagName': {'type': 'string'}, 'triggerName': {'type': 'string'}, 'webhookURL': {'type': 'string'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "registry" in data_keys: + data_keys.remove("registry") + data__registry = data["registry"] + if not isinstance(data__registry, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".registry must be string", value=data__registry, name="" + (name_prefix or "data") + ".registry", definition={'type': 'string'}, rule='type') + if "pushSecret" in data_keys: + data_keys.remove("pushSecret") + data__pushSecret = data["pushSecret"] + if not isinstance(data__pushSecret, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".pushSecret must be string", value=data__pushSecret, name="" + (name_prefix or "data") + ".pushSecret", definition={'type': 'string'}, rule='type') + if "tagName" in data_keys: + data_keys.remove("tagName") + data__tagName = data["tagName"] + if not isinstance(data__tagName, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".tagName must be string", value=data__tagName, name="" + (name_prefix or "data") + ".tagName", definition={'type': 'string'}, rule='type') + if "triggerName" in data_keys: + data_keys.remove("triggerName") + data__triggerName = data["triggerName"] + if not isinstance(data__triggerName, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".triggerName must be string", value=data__triggerName, name="" + (name_prefix or "data") + ".triggerName", definition={'type': 'string'}, rule='type') + if "webhookURL" in data_keys: + data_keys.remove("webhookURL") + data__webhookURL = data["webhookURL"] + if not isinstance(data__webhookURL, (str)): + raise 
JsonSchemaValueException("" + (name_prefix or "data") + ".webhookURL must be string", value=data__webhookURL, name="" + (name_prefix or "data") + ".webhookURL", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_docker(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'architecture': {'enum': ['amd64', 'arm32v7', 'arm64v8']}, 'dockerfile': {'type': 'string', 'default': 'Dockerfile'}, 'pullSecret': {'type': 'string', 'pattern': '^secret-[a-z]{24}$'}, 'isRos': {'type': 'boolean'}, 'rosDistro': {'enum': ['melodic', 'kinetic', 'noetic']}, 'simulation': {'type': 'boolean', 'default': False}}, 'dependentRequired': {'isRos': ['rosDistro', 'simulation']}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "architecture" in data_keys: + data_keys.remove("architecture") + data__architecture = data["architecture"] + validate___definitions_architecture(data__architecture, custom_formats, (name_prefix or "data") + ".architecture") + if "dockerfile" in data_keys: + data_keys.remove("dockerfile") + data__dockerfile = data["dockerfile"] + if not isinstance(data__dockerfile, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".dockerfile must be string", value=data__dockerfile, name="" + (name_prefix or "data") + ".dockerfile", definition={'type': 'string', 'default': 'Dockerfile'}, rule='type') + else: data["dockerfile"] = 'Dockerfile' + if "pullSecret" in data_keys: + data_keys.remove("pullSecret") + data__pullSecret = data["pullSecret"] + validate___definitions_secretguid(data__pullSecret, custom_formats, (name_prefix or "data") + ".pullSecret") + if "isRos" in data_keys: + data_keys.remove("isRos") + data__isRos = data["isRos"] + if not isinstance(data__isRos, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".isRos must be boolean", value=data__isRos, name="" + (name_prefix or "data") + ".isRos", definition={'type': 'boolean'}, rule='type') + if "rosDistro" in data_keys: + data_keys.remove("rosDistro") + data__rosDistro = data["rosDistro"] + validate___definitions_rosdistro(data__rosDistro, custom_formats, (name_prefix or "data") + ".rosDistro") + if "simulation" in data_keys: + data_keys.remove("simulation") + data__simulation = data["simulation"] + if not isinstance(data__simulation, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".simulation must be boolean", value=data__simulation, name="" + (name_prefix or "data") + ".simulation", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["simulation"] = False + return data \ No newline at end of file diff --git a/riocli/chart/__init__.py b/riocli/chart/__init__.py new file mode 100644 index 00000000..803e0986 --- /dev/null +++ b/riocli/chart/__init__.py @@ -0,0 +1,39 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +import click +from click_help_colors import HelpColorsGroup + +from riocli.chart.apply import apply_chart +from riocli.chart.delete import delete_chart +from riocli.chart.search import list_charts, search_chart, info_chart + + +@click.group( + invoke_without_command=False, + cls=HelpColorsGroup, + help_headers_color='yellow', + help_options_color='green', +) +def chart() -> None: + """ + Rapyuta Charts are a way to package complete applications for the Rapyuta.io platform. + """ + pass + + +chart.add_command(search_chart) +chart.add_command(info_chart) +chart.add_command(apply_chart) +chart.add_command(delete_chart) +chart.add_command(list_charts) diff --git a/riocli/chart/apply.py b/riocli/chart/apply.py new file mode 100644 index 00000000..3026c7ad --- /dev/null +++ b/riocli/chart/apply.py @@ -0,0 +1,46 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import click +from click_help_colors import HelpColorsCommand + +from riocli.chart.chart import Chart +from riocli.chart.util import find_chart + + +@click.command( + 'apply', + cls=HelpColorsCommand, + help_headers_color='yellow', + help_options_color='green', + help='Apply a new Rapyuta Chart in the Project', +) +@click.option('--dryrun', '-d', is_flag=True, default=False, help='dry run the yaml files without applying any change') +@click.option('--values', '-v', help="path to values yaml file. key/values specified in the values file can be used as variables in template yamls") +@click.option('--secrets', '-s', help="secret files are sops encoded value files. rio-cli expects sops to be authorized for decoding files on this computer") +@click.option('--workers', '-w', default=6, type=int, help="number of parallel workers while running apply command. defaults to 6.") +@click.argument('chart', type=str) +def apply_chart(chart: str, values: str, secrets: str, dryrun: bool, workers: int = 6) -> None: + versions = find_chart(chart) + if len(versions) > 1: + click.secho('More than one chart is available, please specify the version!', fg='red') + raise SystemExit(1) + + chart = Chart(**versions[0]) + chart.apply_chart(values, secrets, dryrun, workers) + chart.cleanup() diff --git a/riocli/chart/chart.py b/riocli/chart/chart.py new file mode 100644 index 00000000..1125ce80 --- /dev/null +++ b/riocli/chart/chart.py @@ -0,0 +1,82 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and +# limitations under the License. +import tarfile +from pathlib import Path +from tempfile import TemporaryDirectory + +import click +import requests +from munch import Munch + +from riocli.apply import apply, delete + + +class Chart(Munch): + def __init__(self, *args, **kwargs): + super(Chart, self).__init__(*args, **kwargs) + self.tmp_dir = None + self.downloaded = False + + def apply_chart(self, values: str = None, secrets: str = None, dryrun: bool = None, workers: int = 6): + if not self.downloaded: + self.download_chart() + + templates_dir = Path(self.tmp_dir.name, self.name, 'templates') + if not values: + values = Path(self.tmp_dir.name, self.name, 'values.yaml').as_posix() + + apply.callback(values=values, files=[templates_dir], secrets=secrets, dryrun=dryrun, workers=workers) + + def delete_chart(self, values: str = None, secrets: str = None, dryrun: bool = None): + if not self.downloaded: + self.download_chart() + + templates_dir = Path(self.tmp_dir.name, self.name, 'templates') + if not values: + values = Path(self.tmp_dir.name, self.name, 'values.yaml').as_posix() + + delete.callback(values=values, files=[templates_dir], secrets=secrets, dryrun=dryrun) + + def download_chart(self): + self._create_temp_directory() + click.secho('Downloading chart {}:{} to {}'.format(self.name, self.version, self.tmp_dir.name), fg='yellow') + chart_filepath = Path(self.tmp_dir.name, self._chart_filename()) + + with open(chart_filepath, 'wb') as f: + resp = requests.get(self.urls[0]) + f.write(resp.content) + + self.extract_chart() + self.downloaded = True + + def extract_chart(self): + chart_tarball = None + try: + chart_filepath = Path(self.tmp_dir.name, self._chart_filename()) + chart_tarball = tarfile.open(chart_filepath) + chart_tarball.extractall(path=self.tmp_dir.name) + finally: + if chart_tarball: + chart_tarball.close() + + def cleanup(self): + if self.tmp_dir: + self.tmp_dir.cleanup() + + def _chart_filename(self): + return self.urls[0].split('/')[-1] + + def _create_temp_directory(self): + prefix = 'rio-chart-{}-'.format(self.name) + self.tmp_dir = TemporaryDirectory(prefix=prefix) diff --git a/riocli/chart/delete.py b/riocli/chart/delete.py new file mode 100644 index 00000000..d2a8f2da --- /dev/null +++ b/riocli/chart/delete.py @@ -0,0 +1,38 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
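+# The CHART argument takes the same "<name>" or "<name>:<version>" form that +# the apply command accepts (parsed by riocli.chart.util.parse_chart), e.g. +# (illustrative): rio chart delete my-chart:0.1.0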
+import click +from click_help_colors import HelpColorsCommand + +from riocli.chart.chart import Chart +from riocli.chart.util import find_chart + + +@click.command( + 'delete', + cls=HelpColorsCommand, + help_headers_color='yellow', + help_options_color='green', + help='Delete the Rapyuta Chart from the Project', +) +@click.option('--values', help='Path to the values yaml file') +@click.option('--dryrun', '-d', is_flag=True, default=False, help='Perform a dry run without deleting the chart resources') +@click.argument('chart', type=str) +def delete_chart(chart: str, values: str, dryrun: bool) -> None: + versions = find_chart(chart) + if len(versions) > 1: + click.secho('More than one chart is available, please specify the version!', fg='red') + raise SystemExit(1) + + chart = Chart(**versions[0]) + chart.delete_chart(values=values, dryrun=dryrun) + chart.cleanup() diff --git a/riocli/chart/search.py b/riocli/chart/search.py new file mode 100644 index 00000000..c44abf03 --- /dev/null +++ b/riocli/chart/search.py @@ -0,0 +1,77 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import typing + +import click +from click_help_colors import HelpColorsCommand +from munch import munchify +from tabulate import tabulate +from yaml import safe_dump_all + +from riocli.chart.util import find_chart, fetch_index + + +@click.command( + 'info', + cls=HelpColorsCommand, + help_headers_color='yellow', + help_options_color='green', + help='Describe the available chart with versions', +) +@click.argument('chart', type=str) +def info_chart(chart: str) -> None: + versions = find_chart(chart) + click.echo(safe_dump_all(versions)) + + +@click.command( + 'search', + cls=HelpColorsCommand, + help_headers_color='yellow', + help_options_color='green', + help='Search for available charts in the repository', +) +@click.argument('chart', type=str) +def search_chart(chart: str) -> None: + versions = find_chart(chart) + _display_entries(versions) + + +@click.command( + 'list', + cls=HelpColorsCommand, + help_headers_color='yellow', + help_options_color='green', +) +def list_charts() -> None: + index = fetch_index() + if 'entries' not in index: + raise Exception('No entries found!') + + entries = [] + for chart in index['entries'].values(): + for version in chart: + entries.append(version) + + _display_entries(munchify(entries)) + + +def _display_entries(entries: typing.List) -> None: + headers, table = [], [] + for header in ['Name', 'Version', 'Description', 'Created At']: + headers.append(click.style(header, fg='yellow')) + + for entry in entries: + table.append([entry.get('name'), entry.get('version'), entry.get('description'), entry.get('created')]) + + click.echo(tabulate(table, headers=headers, tablefmt='simple')) diff --git a/riocli/chart/util.py b/riocli/chart/util.py new file mode 100644 index 00000000..596932c9 --- /dev/null +++ b/riocli/chart/util.py @@ -0,0 +1,63 @@ +import typing + +import requests +from yaml import safe_load + +DEFAULT_REPOSITORY = 'https://rapyuta-robotics.github.io/rapyuta-charts/incubator/index.yaml' + + +def find_chart(chart: str) ->
+    chart, ver = parse_chart(chart)
+
+    index = fetch_index()
+    if 'entries' not in index:
+        raise Exception('No entries found!')
+
+    if chart not in index['entries']:
+        raise Exception('No such chart found!')
+
+    versions = index['entries'][chart]
+    if ver:
+        versions = _find_version(entries=versions, version=ver)
+
+    return versions
+
+
+def fetch_index(repository=DEFAULT_REPOSITORY) -> typing.Dict:
+    response = requests.get(repository)
+    if not response.ok:
+        raise Exception('Fetching index failed: {}'.format(repository))
+
+    index = safe_load(response.text)
+    return index
+
+
+def parse_chart(val: str) -> (str, str):
+    # TODO: Add support for repository
+    chart, ver = None, None
+
+    # Separate repository and chart
+    # splits = val.split('/')
+    # if len(splits) > 2:
+    #     raise Exception('Multiple / are not allowed in the chart!')
+    # elif len(splits) == 2:
+    #     repo, chart = splits[0], splits[1]
+    # else:
+    #     chart = splits[0]
+
+    # Separate version
+    splits = val.split(':')
+    if len(splits) > 2:
+        raise Exception('Multiple : are not allowed in the chart!')
+    elif len(splits) == 2:
+        chart, ver = splits[0], splits[1]
+    else:
+        chart = splits[0]
+
+    return chart, ver
+
+
+def _find_version(entries: typing.List, version: str) -> typing.List:
+    for entry in entries:
+        if entry.get('version') == version:
+            return [entry]
+
+    raise Exception('No such version found!')
diff --git a/riocli/deployment/delete.py b/riocli/deployment/delete.py
index 0b9e11aa..e6f98561 100644
--- a/riocli/deployment/delete.py
+++ b/riocli/deployment/delete.py
@@ -37,4 +37,4 @@ def delete_deployment(force: bool, deployment_name: str, deployment_guid: str)
         click.secho('Deployment deleted successfully!', fg='green')
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
diff --git a/riocli/deployment/inspect.py b/riocli/deployment/inspect.py
index e2772b09..bac4675e 100644
--- a/riocli/deployment/inspect.py
+++ b/riocli/deployment/inspect.py
@@ -35,7 +35,7 @@ def inspect_deployment(format_type: str, deployment_name: str, deployment_guid:
         inspect_with_format(data, format_type)
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
 
 
 def make_deployment_inspectable(deployment: Deployment) -> dict:
diff --git a/riocli/deployment/list.py b/riocli/deployment/list.py
index b4a0728f..0d61b3a2 100644
--- a/riocli/deployment/list.py
+++ b/riocli/deployment/list.py
@@ -37,7 +37,7 @@ def list_deployments(device: str, phase: typing.List[str]) -> None:
         display_deployment_list(deployments, show_header=True)
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
 
 
 def display_deployment_list(deployments: typing.List[Deployment], show_header: bool = True):
diff --git a/riocli/deployment/logs.py b/riocli/deployment/logs.py
index 98cc0d56..4113c135 100644
--- a/riocli/deployment/logs.py
+++ b/riocli/deployment/logs.py
@@ -35,7 +35,7 @@ def deployment_logs(component_name: str, exec_name: str, deployment_name: str, d
         stream_deployment_logs(deployment_guid, comp_id, exec_id, pod_name)
     except Exception as e:
         click.secho(e, fg='red')
-        exit(1)
+        raise SystemExit(1)
 
 
 def stream_deployment_logs(deployment_id, component_id, exec_id, pod_name=None):
diff --git a/riocli/deployment/model.py b/riocli/deployment/model.py
new file mode 100644
index 00000000..af86fd17
--- /dev/null
+++ b/riocli/deployment/model.py
@@ -0,0 +1,212 @@
+# Copyright 2022 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+import typing
+
+import click
+from rapyuta_io import Client
+from rapyuta_io.clients.catalog_client import Package
+from rapyuta_io.clients.native_network import NativeNetwork
+from rapyuta_io.clients.package import ProvisionConfiguration, RestartPolicy, ExecutableMount
+from rapyuta_io.clients.routed_network import RoutedNetwork
+
+from riocli.deployment.util import add_mount_volume_provision_config
+from riocli.deployment.validation import validate
+from riocli.model import Model
+from riocli.package.util import find_package_guid
+from riocli.static_route.util import find_static_route_guid
+
+
+class Deployment(Model):
+    RESTART_POLICY = {
+        'always': RestartPolicy.Always,
+        'never': RestartPolicy.Never,
+        'onfailure': RestartPolicy.OnFailure
+    }
+
+    def find_object(self, client: Client) -> typing.Any:
+        guid, obj = self.rc.find_depends({"kind": "deployment", "nameOrGUID": self.metadata.name})
+        if not guid:
+            return False
+
+        return obj
+
+    def create_object(self, client: Client) -> typing.Any:
+        pkg_guid, pkg = self.rc.find_depends(self.metadata.depends, self.metadata.depends.version)
+
+        if pkg_guid:
+            pkg = client.get_package(pkg_guid)
+            pkg.update()
+
+        default_plan = pkg['plans'][0]
+        internal_component = default_plan['internalComponents'][0]
+
+        __planId = default_plan['planId']
+        __componentName = internal_component.componentName
+        runtime = internal_component['runtime']
+
+        if 'runtime' in self.spec and runtime != self.spec.runtime:
+            click.secho('>> runtime mismatch => ' + \
+                        'deployment:{}.runtime !== package:{}.runtime '.format(
+                            self.metadata.name, pkg['packageName']
+                        ), fg="red")
+            return
+
+        provision_config = pkg.get_provision_configuration(__planId)
+
+        # add label
+        if 'labels' in self.metadata:
+            for key, value in self.metadata.labels.items():
+                provision_config.add_label(key, value)
+
+        # Add envArgs
+        if 'envArgs' in self.spec:
+            for items in self.spec.envArgs:
+                provision_config.add_parameter(__componentName, items.name, items.value)
+
+        # Add Dependent Deployment
+        if 'depends' in self.spec:
+            for item in self.spec.depends:
+                dep_guid, dep = self.rc.find_depends(item)
+                if dep is None and dep_guid:
+                    dep = client.get_deployment(dep_guid)
+                provision_config.add_dependent_deployment(dep)
+
+        # Add Network
+        if 'rosNetworks' in self.spec:
+            for network_depends in self.spec.rosNetworks:
+                network_guid, network_obj = self.rc.find_depends(network_depends.depends)
+
+                if isinstance(network_obj, RoutedNetwork):
+                    provision_config.add_routed_network(network_obj, network_interface=network_depends.get('interface', None))
+                elif isinstance(network_obj, NativeNetwork):
+                    provision_config.add_native_network(network_obj, network_interface=network_depends.get('interface', None))
+
+        if self.spec.runtime == 'cloud':
+            if 'staticRoutes' in self.spec:
+                for stroute in self.spec.staticRoutes:
+                    route_guid, route = self.rc.find_depends(stroute.depends)
+                    if route is None and route_guid:
+                        route = client.get_static_route(route_guid)
+                    provision_config.add_static_route(__componentName, stroute.name, route)
+
+            # Add Disk
+            if 'volumes' in self.spec:
+                disk_mounts = {}
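+                # Note: the loop below groups executable mounts by disk GUID so
+                # that each volume instance is fetched and mounted exactly once
+                # with all of its executable mounts.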
+                for vol in self.spec.volumes:
+                    disk_guid, disk = self.rc.find_depends(vol.depends)
+                    if disk_guid not in disk_mounts:
+                        disk_mounts[disk_guid] = []
+
+                    disk_mounts[disk_guid].append(ExecutableMount(vol.execName, vol.mountPath, vol.subPath))
+
+                for disk_guid in disk_mounts.keys():
+                    disk = client.get_volume_instance(disk_guid)
+                    provision_config.mount_volume(__componentName, volume=disk,
+                                                  executable_mounts=disk_mounts[disk_guid])
+
+        if self.spec.runtime == 'device':
+            device_guid, device = self.rc.find_depends(self.spec.device.depends)
+            if device is None and device_guid:
+                device = client.get_device(device_guid)
+            provision_config.add_device(__componentName, device=device)
+
+            if 'restart' in self.spec:
+                provision_config.add_restart_policy(__componentName, self.RESTART_POLICY[self.spec.restart.lower()])
+
+            # Add Network
+            # if self.spec.rosNetworks:
+            #     for network in self.spec.rosNetworks:
+            #         network_type =
+
+            # Add Disk
+            exec_mounts = []
+            if 'volumes' in self.spec:
+                for vol in self.spec.volumes:
+                    exec_mounts.append(ExecutableMount(vol.execName, vol.mountPath, vol.subPath))
+            if len(exec_mounts) > 0:
+                provision_config = add_mount_volume_provision_config(provision_config, __componentName, device,
+                                                                     exec_mounts)
+
+        if os.environ.get('DEBUG'):
+            print(provision_config)
+        deployment = pkg.provision(self.metadata.name, provision_config)
+        deployment.poll_deployment_till_ready()
+        deployment.get_status()
+        return deployment
+
+    def update_object(self, client: Client, obj: typing.Any) -> typing.Any:
+        pass
+
+    def delete_object(self, client: Client, obj: typing.Any) -> typing.Any:
+        obj.deprovision()
+
+    @classmethod
+    def pre_process(cls, client: Client, d: typing.Dict) -> None:
+        pass
+
+    @staticmethod
+    def validate(data):
+        validate(data)
+
+    def _get_package(self, client: Client) -> Package:
+        name = self.metadata.depends.nameOrGUID
+        if name.startswith('pkg-') or name.startswith('io-'):
+            guid = name
+        else:
+            guid = find_package_guid(client, name, self.metadata.depends.version)
+
+        return client.get_package(package_id=guid)
+
+    def _get_provision_config(self, client: Client, pkg: Package):
+        comp_name = pkg['plans']['components'][0]['name']
+        prov_config = pkg.get_provision_configuration()
+        self._configure_static_routes(client, prov_config, comp_name)
+
+        return prov_config
+
+    def _configure_networks(self, client: Client, prov_config: ProvisionConfiguration):
+        if not self.spec.get('rosNetworks'):
+            return
+
+        native_networks = client.list_native_networks()
+        routed_networks = client.get_all_routed_networks()
+
+    def _configure_disks(self, client: Client, prov_config: ProvisionConfiguration, component: str):
+        if not self.spec.get('volumes'):
+            return
+
+        # for volume in self.spec.volumes:
+        #     # TODO: no support for the disk resource.
+        #     # TODO: subpath is not there in the SDK.
+ # prov_config.mount_volume(component_name=component, volume='', mount_path=volume.mountPath) + + def _configure_static_routes(self, client: Client, prov_config: ProvisionConfiguration, component: str): + if not self.spec.get('staticRoutes'): + return + + # TODO: List instead of get calls again and again + + for route in self.spec.staticRoutes: + name = route.depends.nameOrGUID + if name.startswith('staticroute-'): + guid = name + else: + guid = find_static_route_guid(client, name) + static_route = client.get_static_route(route_guid=guid) + prov_config.add_static_route(component_name=component, endpoint_name=route.name, static_route=static_route) diff --git a/riocli/deployment/ssh.py b/riocli/deployment/ssh.py index c0931e29..b4e1907c 100644 --- a/riocli/deployment/ssh.py +++ b/riocli/deployment/ssh.py @@ -39,7 +39,7 @@ def ssh_init(deployment_name: str, deployment_guid: str) -> None: click.secho('Deployment ready for SSH', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _ssh_setup(deployment_guid: str) -> None: diff --git a/riocli/deployment/status.py b/riocli/deployment/status.py index ea279450..f883484e 100644 --- a/riocli/deployment/status.py +++ b/riocli/deployment/status.py @@ -30,4 +30,4 @@ def status(deployment_name: str, deployment_guid: str) -> None: click.secho(deployment.status) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/deployment/util.py b/riocli/deployment/util.py index 95144e7d..c99d0c43 100644 --- a/riocli/deployment/util.py +++ b/riocli/deployment/util.py @@ -11,12 +11,18 @@ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. 
+import copy import functools import typing import click from rapyuta_io import Client, DeploymentPhaseConstants +from rapyuta_io.clients import Device +from rapyuta_io.clients.package import ExecutableMount +from rapyuta_io.utils import InvalidParameterException, OperationNotAllowedError +from rapyuta_io.utils.constants import DEVICE_ID + from riocli.config import new_client from riocli.utils.selector import show_selection @@ -28,7 +34,7 @@ def decorated(**kwargs: typing.Any) -> None: client = new_client() except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) name = kwargs.pop('deployment_name') guid = None @@ -56,13 +62,15 @@ def get_deployment_name(client: Client, guid: str) -> str: def find_deployment_guid(client: Client, name: str) -> str: - deployments = client.get_all_deployments() + find_func = functools.partial(client.get_all_deployments, + phases=[DeploymentPhaseConstants.SUCCEEDED, + DeploymentPhaseConstants.PROVISIONING]) + deployments = find_func() for deployment in deployments: if deployment.name == name: return deployment.deploymentId - click.secho("Deployment not found", fg='red') - exit(1) + raise DeploymentNotFound() def select_details(deployment_guid, component_name=None, exec_name=None) -> (str, str, str): @@ -98,3 +106,50 @@ def select_details(deployment_guid, component_name=None, exec_name=None) -> (str pod_name = show_selection(pods, 'Choose the pod') return selected_component.componentID, exec_meta.id, pod_name + + +class DeploymentNotFound(Exception): + def __init__(self, message='deployment not found!'): + self.message = message + super().__init__(self.message) + + +def add_mount_volume_provision_config(provision_config, component_name, device, executable_mounts): + if not isinstance(device, Device): + raise InvalidParameterException('device must be of type Device') + + component_id = provision_config.plan.get_component_id(component_name) + if not isinstance(executable_mounts, list) or not all( + isinstance(mount, ExecutableMount) for mount in executable_mounts): + raise InvalidParameterException( + 'executable_mounts must be a list of rapyuta_io.clients.package.ExecutableMount') + if not device.is_online(): + raise OperationNotAllowedError('Device should be online') + if device.get_runtime() != Device.DOCKER_COMPOSE and not device.is_docker_enabled(): + raise OperationNotAllowedError('Device must be a {} device'.format(Device.DOCKER_COMPOSE)) + component_params = provision_config.parameters.get(component_id) + if component_params.get(DEVICE_ID) != device.deviceId: + raise OperationNotAllowedError('Device must be added to the component') + # self._add_disk_mount_info(device.deviceId, component_id, executable_mounts) + + dep_info = dict() + dep_info['diskResourceId'] = device.deviceId + dep_info['applicableComponentId'] = component_id + dep_info['config'] = dict() + + for mount in executable_mounts: + exec_mount = { + 'mountPath': mount.mount_path + } + if mount.sub_path: + exec_mount['subPath'] = mount.sub_path + else: + exec_mount['subPath'] = '/' + + tmp_info = copy.deepcopy(dep_info) + tmp_info['config']['mountPaths'] = { + mount.exec_name: exec_mount, + } + provision_config.context['diskMountInfo'].append(tmp_info) + + return provision_config diff --git a/riocli/deployment/validation.py b/riocli/deployment/validation.py new file mode 100644 index 00000000..2cc6484e --- /dev/null +++ b/riocli/deployment/validation.py @@ -0,0 +1,516 @@ +VERSION = "2.16.1" +import re +from fastjsonschema import JsonSchemaValueException + + 
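+# NOTE: this module appears to be code generated by fastjsonschema (VERSION
+# above is the generator version) from the deployment JSON schema; regenerate
+# it from the schema rather than editing it by hand.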
+REGEX_PATTERNS = { + '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z'), + '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z'), + '^pkg-[a-z]{24}$': re.compile('^pkg-[a-z]{24}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_deployment(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_deployment(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 
'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + data_keys = set(data.keys()) + if "apiVersion" in data_keys: + data_keys.remove("apiVersion") + data__apiVersion = data["apiVersion"] + if data__apiVersion != "apiextensions.rapyuta.io/v1": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, rule='const') + else: data["apiVersion"] = 'apiextensions.rapyuta.io/v1' + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "Deployment": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: Deployment", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'Deployment', 'default': 'Deployment'}, rule='const') + else: data["kind"] = 'Deployment' + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + if "spec" in data_keys: + data_keys.remove("spec") + data__spec = data["spec"] + validate___definitions_componentspec(data__spec, custom_formats, (name_prefix or "data") + ".spec") + return data + +def validate___definitions_componentspec(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + if "runtime" in data: + data_one_of_count1 = 0 + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if not isinstance(data__runtime, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be string", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'type': 'string', 'enum': ['device']}, rule='type') + if data__runtime not in ['device']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be one of ['device']", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'type': 'string', 'enum': ['device']}, rule='enum') + if "depends" in data_keys: + data_keys.remove("depends") + data__depends = data["depends"] + 
validate___definitions_devicedepends(data__depends, custom_formats, (name_prefix or "data") + ".depends") + if "restart" in data_keys: + data_keys.remove("restart") + data__restart = data["restart"] + if not isinstance(data__restart, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".restart must be string", value=data__restart, name="" + (name_prefix or "data") + ".restart", definition={'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, rule='type') + if data__restart not in ['always', 'onfailure', 'never']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".restart must be one of ['always', 'onfailure', 'never']", value=data__restart, name="" + (name_prefix or "data") + ".restart", definition={'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, rule='enum') + else: data["restart"] = 'always' + if "envArgs" in data_keys: + data_keys.remove("envArgs") + data__envArgs = data["envArgs"] + if not isinstance(data__envArgs, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".envArgs must be array", value=data__envArgs, name="" + (name_prefix or "data") + ".envArgs", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, rule='type') + data__envArgs_is_list = isinstance(data__envArgs, (list, tuple)) + if data__envArgs_is_list: + data__envArgs_len = len(data__envArgs) + for data__envArgs_x, data__envArgs_item in enumerate(data__envArgs): + validate___definitions_envargsspec(data__envArgs_item, custom_formats, (name_prefix or "data") + ".envArgs[{data__envArgs_x}]") + if "volumes" in data_keys: + data_keys.remove("volumes") + data__volumes = data["volumes"] + if not isinstance(data__volumes, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".volumes must be array", value=data__volumes, name="" + (name_prefix or "data") + ".volumes", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}}}}, rule='type') + data__volumes_is_list = isinstance(data__volumes, (list, tuple)) + if data__volumes_is_list: + data__volumes_len = len(data__volumes) + for data__volumes_x, data__volumes_item in enumerate(data__volumes): + validate___definitions_devicevolumeattachspec(data__volumes_item, custom_formats, (name_prefix or "data") + ".volumes[{data__volumes_x}]") + if "rosNetworks" in data_keys: + data_keys.remove("rosNetworks") + data__rosNetworks = data["rosNetworks"] + if not isinstance(data__rosNetworks, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosNetworks must be array", value=data__rosNetworks, name="" + (name_prefix or "data") + ".rosNetworks", definition={'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'interface': {'type': 'string'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}, rule='type') + data__rosNetworks_is_list = isinstance(data__rosNetworks, (list, tuple)) + if data__rosNetworks_is_list: + data__rosNetworks_len = len(data__rosNetworks) + for data__rosNetworks_x, data__rosNetworks_item in enumerate(data__rosNetworks): + validate___definitions_devicenetworkattachspec(data__rosNetworks_item, custom_formats, (name_prefix or "data") + ".rosNetworks[{data__rosNetworks_x}]") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if 
data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if not isinstance(data__runtime, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be string", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'type': 'string', 'enum': ['cloud']}, rule='type') + if data__runtime not in ['cloud']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be one of ['cloud']", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'type': 'string', 'enum': ['cloud']}, rule='enum') + if "envArgs" in data_keys: + data_keys.remove("envArgs") + data__envArgs = data["envArgs"] + if not isinstance(data__envArgs, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".envArgs must be array", value=data__envArgs, name="" + (name_prefix or "data") + ".envArgs", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, rule='type') + data__envArgs_is_list = isinstance(data__envArgs, (list, tuple)) + if data__envArgs_is_list: + data__envArgs_len = len(data__envArgs) + for data__envArgs_x, data__envArgs_item in enumerate(data__envArgs): + validate___definitions_envargsspec(data__envArgs_item, custom_formats, (name_prefix or "data") + ".envArgs[{data__envArgs_x}]") + if "volumes" in data_keys: + data_keys.remove("volumes") + data__volumes = data["volumes"] + if not isinstance(data__volumes, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".volumes must be array", value=data__volumes, name="" + (name_prefix or "data") + ".volumes", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}, 'depends': {'$ref': '#/definitions/diskDepends'}}}}, rule='type') + data__volumes_is_list = isinstance(data__volumes, (list, tuple)) + if data__volumes_is_list: + data__volumes_len = len(data__volumes) + for data__volumes_x, data__volumes_item in enumerate(data__volumes): + validate___definitions_cloudvolumeattachspec(data__volumes_item, custom_formats, (name_prefix or "data") + ".volumes[{data__volumes_x}]") + if "staticRoutes" in data_keys: + data_keys.remove("staticRoutes") + data__staticRoutes = data["staticRoutes"] + if not isinstance(data__staticRoutes, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".staticRoutes must be array", value=data__staticRoutes, name="" + (name_prefix or "data") + ".staticRoutes", definition={'type': 'array', 'items': {'properties': {'name': {'type': 'string'}, 'depends': {'properties': {'kind': {'const': 'staticroute', 'default': 'staticroute'}, 'nameOrGUID': {'type': 'string'}}}}}}, rule='type') + data__staticRoutes_is_list = isinstance(data__staticRoutes, (list, tuple)) + if data__staticRoutes_is_list: + data__staticRoutes_len = len(data__staticRoutes) + for data__staticRoutes_x, data__staticRoutes_item in enumerate(data__staticRoutes): + validate___definitions_endpointspec(data__staticRoutes_item, custom_formats, (name_prefix or "data") + ".staticRoutes[{data__staticRoutes_x}]") + if "rosNetworks" in data_keys: + data_keys.remove("rosNetworks") + data__rosNetworks = data["rosNetworks"] + if not isinstance(data__rosNetworks, (list, tuple)): + raise 
JsonSchemaValueException("" + (name_prefix or "data") + ".rosNetworks must be array", value=data__rosNetworks, name="" + (name_prefix or "data") + ".rosNetworks", definition={'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}, rule='type') + data__rosNetworks_is_list = isinstance(data__rosNetworks, (list, tuple)) + if data__rosNetworks_is_list: + data__rosNetworks_len = len(data__rosNetworks) + for data__rosNetworks_x, data__rosNetworks_item in enumerate(data__rosNetworks): + validate___definitions_cloudnetworkattachspec(data__rosNetworks_item, custom_formats, (name_prefix or "data") + ".rosNetworks[{data__rosNetworks_x}]") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'properties': {'kind': {'const': 'device', 'default': 'device'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'interface': {'type': 'string'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}, 'depends': {'$ref': '#/definitions/diskDepends'}}}}, 'staticRoutes': {'type': 'array', 'items': {'properties': {'name': {'type': 'string'}, 'depends': {'properties': {'kind': {'const': 'staticroute', 'default': 'staticroute'}, 'nameOrGUID': {'type': 'string'}}}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if not isinstance(data__runtime, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be string", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, rule='type') + if data__runtime not in ['device', 'cloud']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be one of ['device', 'cloud']", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, rule='enum') + else: data["runtime"] = 'cloud' + if "depends" in data_keys: + data_keys.remove("depends") + data__depends = 
data["depends"] + if not isinstance(data__depends, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".depends must be array", value=data__depends, name="" + (name_prefix or "data") + ".depends", definition={'type': 'array', 'items': {'properties': {'kind': {'const': 'deployment', 'default': 'deployment'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, rule='type') + data__depends_is_list = isinstance(data__depends, (list, tuple)) + if data__depends_is_list: + data__depends_len = len(data__depends) + for data__depends_x, data__depends_item in enumerate(data__depends): + validate___definitions_deploymentdepends(data__depends_item, custom_formats, (name_prefix or "data") + ".depends[{data__depends_x}]") + return data + +def validate___definitions_deploymentdepends(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "deployment": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: deployment", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'deployment', 'default': 'deployment'}, rule='const') + else: data["kind"] = 'deployment' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_cloudnetworkattachspec(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "depends" in data_keys: + data_keys.remove("depends") + data__depends = data["depends"] + validate___definitions_networkdepends(data__depends, custom_formats, (name_prefix or "data") + ".depends") + if "topics" in data_keys: + data_keys.remove("topics") + data__topics = data["topics"] + if not isinstance(data__topics, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topics must be array", value=data__topics, name="" + (name_prefix or "data") + ".topics", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__topics_is_list = isinstance(data__topics, (list, tuple)) + if data__topics_is_list: + data__topics_len = len(data__topics) + for data__topics_x, data__topics_item in enumerate(data__topics): + if not isinstance(data__topics_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topics[{data__topics_x}]".format(**locals()) + " must be string", value=data__topics_item, name="" + (name_prefix or "data") + ".topics[{data__topics_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_networkdepends(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in 
data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "network": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: network", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'network', 'default': 'network'}, rule='const') + else: data["kind"] = 'network' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "depends" in data_keys: + data_keys.remove("depends") + data__depends = data["depends"] + data__depends_is_dict = isinstance(data__depends, dict) + if data__depends_is_dict: + data__depends_keys = set(data__depends.keys()) + if "kind" in data__depends_keys: + data__depends_keys.remove("kind") + data__depends__kind = data__depends["kind"] + if data__depends__kind != "staticroute": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".depends.kind must be same as const definition: staticroute", value=data__depends__kind, name="" + (name_prefix or "data") + ".depends.kind", definition={'const': 'staticroute', 'default': 'staticroute'}, rule='const') + else: data__depends["kind"] = 'staticroute' + if "nameOrGUID" in data__depends_keys: + data__depends_keys.remove("nameOrGUID") + data__depends__nameOrGUID = data__depends["nameOrGUID"] + if not isinstance(data__depends__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".depends.nameOrGUID must be string", value=data__depends__nameOrGUID, name="" + (name_prefix or "data") + ".depends.nameOrGUID", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_cloudvolumeattachspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}, 'depends': {'properties': {'kind': {'const': 'disk', 'default': 'disk'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "execName" in data_keys: + data_keys.remove("execName") + data__execName = data["execName"] + if not isinstance(data__execName, (str)): + raise JsonSchemaValueException("" 
+ (name_prefix or "data") + ".execName must be string", value=data__execName, name="" + (name_prefix or "data") + ".execName", definition={'type': 'string'}, rule='type') + if "mountPath" in data_keys: + data_keys.remove("mountPath") + data__mountPath = data["mountPath"] + if not isinstance(data__mountPath, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".mountPath must be string", value=data__mountPath, name="" + (name_prefix or "data") + ".mountPath", definition={'type': 'string'}, rule='type') + if "subPath" in data_keys: + data_keys.remove("subPath") + data__subPath = data["subPath"] + if not isinstance(data__subPath, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".subPath must be string", value=data__subPath, name="" + (name_prefix or "data") + ".subPath", definition={'type': 'string'}, rule='type') + if "depends" in data_keys: + data_keys.remove("depends") + data__depends = data["depends"] + validate___definitions_diskdepends(data__depends, custom_formats, (name_prefix or "data") + ".depends") + return data + +def validate___definitions_diskdepends(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "disk": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: disk", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'disk', 'default': 'disk'}, rule='const') + else: data["kind"] = 'disk' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_devicenetworkattachspec(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "depends" in data_keys: + data_keys.remove("depends") + data__depends = data["depends"] + validate___definitions_networkdepends(data__depends, custom_formats, (name_prefix or "data") + ".depends") + if "interface" in data_keys: + data_keys.remove("interface") + data__interface = data["interface"] + if not isinstance(data__interface, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".interface must be string", value=data__interface, name="" + (name_prefix or "data") + ".interface", definition={'type': 'string'}, rule='type') + if "topics" in data_keys: + data_keys.remove("topics") + data__topics = data["topics"] + if not isinstance(data__topics, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topics must be array", value=data__topics, name="" + (name_prefix or "data") + ".topics", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__topics_is_list = isinstance(data__topics, (list, tuple)) + if data__topics_is_list: + data__topics_len = 
len(data__topics) + for data__topics_x, data__topics_item in enumerate(data__topics): + if not isinstance(data__topics_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topics[{data__topics_x}]".format(**locals()) + " must be string", value=data__topics_item, name="" + (name_prefix or "data") + ".topics[{data__topics_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_devicevolumeattachspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "execName" in data_keys: + data_keys.remove("execName") + data__execName = data["execName"] + if not isinstance(data__execName, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".execName must be string", value=data__execName, name="" + (name_prefix or "data") + ".execName", definition={'type': 'string'}, rule='type') + if "mountPath" in data_keys: + data_keys.remove("mountPath") + data__mountPath = data["mountPath"] + if not isinstance(data__mountPath, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".mountPath must be string", value=data__mountPath, name="" + (name_prefix or "data") + ".mountPath", definition={'type': 'string'}, rule='type') + if "subPath" in data_keys: + data_keys.remove("subPath") + data__subPath = data["subPath"] + if not isinstance(data__subPath, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".subPath must be string", value=data__subPath, name="" + (name_prefix or "data") + ".subPath", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_envargsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "value" in data_keys: + data_keys.remove("value") + data__value = data["value"] + if not isinstance(data__value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".value must be string", value=data__value, name="" + (name_prefix or "data") + ".value", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_devicedepends(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "device": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: device", value=data__kind, name="" + 
(name_prefix or "data") + ".kind", definition={'const': 'device', 'default': 'device'}, rule='const') + else: data["kind"] = 'device' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'properties': {'kind': {'const': 'package', 'default': 'package'}, 'nameOrGUID': {'type': 'string'}, 'version': {'type': 'string'}, 'guid': {'type': 'string'}}}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}, 'guid': {'type': 'string', 'pattern': '^pkg-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}}, 'required': ['name', 'depends']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name', 'depends']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name', 'depends'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'properties': {'kind': {'const': 'package', 'default': 'package'}, 'nameOrGUID': {'type': 'string'}, 'version': {'type': 'string'}, 'guid': {'type': 'string'}}}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}, 'guid': {'type': 'string', 'pattern': '^pkg-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}}, 'required': ['name', 'depends']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "depends" in data_keys: + data_keys.remove("depends") + data__depends = data["depends"] + validate___definitions_packagedepends(data__depends, custom_formats, (name_prefix or "data") + ".depends") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_packageguid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if 
"creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + return data + +def validate___definitions_projectguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_uuid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern') + return data + +def validate___definitions_packageguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^pkg-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^pkg-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^pkg-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^pkg-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_packagedepends(data, custom_formats={}, name_prefix=None): 
+ data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "package": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: package", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'package', 'default': 'package'}, rule='const') + else: data["kind"] = 'package' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "version" in data_keys: + data_keys.remove("version") + data__version = data["version"] + if not isinstance(data__version, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data \ No newline at end of file diff --git a/riocli/deployment/wait.py b/riocli/deployment/wait.py index cc733181..011a0f0e 100644 --- a/riocli/deployment/wait.py +++ b/riocli/deployment/wait.py @@ -41,7 +41,7 @@ def wait_for_deployment(deployment_name: str, deployment_guid: str) -> None: click.secho('Device is either offline or not reachable', fg='red') else: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/config.py b/riocli/device/config.py index 94a7b090..9a3a6d6c 100644 --- a/riocli/device/config.py +++ b/riocli/device/config.py @@ -50,7 +50,7 @@ def list_config(device_name: str, device_guid: str) -> None: _display_config_list(config_variables, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_config.command('create') @@ -70,7 +70,7 @@ def create_config(device_name: str, device_guid: str, key: str, value: str) -> N click.secho('Config Variable added successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_config.command('update') @@ -88,7 +88,7 @@ def update_config(device_name: str, device_guid: str, key: str, value: str) -> N click.secho('Config variable updated successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_config.command('delete') @@ -105,7 +105,7 @@ def delete_config(device_name: str, device_guid: str, key: str) -> None: click.secho('Config variable deleted successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_config_list(config_variables: typing.List[DeviceConfig], show_header: bool = True) -> None: diff --git a/riocli/device/create.py b/riocli/device/create.py index b40af625..9013a0de 100644 --- a/riocli/device/create.py +++ b/riocli/device/create.py @@ -21,7 +21,7 @@ @click.command('create') 
@click.option('--description', type=str, help='Description of the device', default='') -@click.option('--runtime', help='Runtime of the Device', default='dockercompose', +@click.option('--runtime', help='Runtime of the Device', multiple=True, type=click.Choice(['preinstalled', 'dockercompose'], case_sensitive=False)) @click.option('--ros', help='ROS Distribution for the Device', default='melodic', type=click.Choice(['kinetic', 'melodic', 'noetic'], case_sensitive=False)) @@ -35,7 +35,7 @@ def create_device( device_name: str, description: str, - runtime: str, + runtime: tuple, ros: str, python: str, rosbag_mount_path: str, @@ -49,12 +49,14 @@ def create_device( with spinner(): python_version = DevicePythonVersion(python) ros_distro = ROSDistro(ros) - runtime = DeviceRuntime(runtime) - device = Device(name=device_name, description=description, runtime=runtime, ros_distro=ros_distro, + runtime_docker = DeviceRuntime.DOCKER in runtime + runtime_preinstalled = DeviceRuntime.PREINSTALLED in runtime + device = Device(name=device_name, description=description, ros_distro=ros_distro, + runtime_docker=runtime_docker, runtime_preinstalled=runtime_preinstalled, python_version=python_version, rosbag_mount_path=rosbag_mount_path, ros_workspace=catkin_workspace) client.create_device(device) click.secho('Device created successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/delete.py b/riocli/device/delete.py index 3e7f3e89..68cc3399 100644 --- a/riocli/device/delete.py +++ b/riocli/device/delete.py @@ -36,4 +36,4 @@ def delete_device(device_name: str, device_guid: str, force: bool): click.secho('Device deleted successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/deployment.py b/riocli/device/deployment.py index 092d944a..554c4d98 100644 --- a/riocli/device/deployment.py +++ b/riocli/device/deployment.py @@ -39,6 +39,6 @@ def list_deployments(device_name: str, device_guid: str) -> None: display_deployment_list(deployments, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/execute.py b/riocli/device/execute.py index c3e9fdbb..8955f0aa 100644 --- a/riocli/device/execute.py +++ b/riocli/device/execute.py @@ -34,5 +34,5 @@ def execute_command(device_name: str, device_guid: str, user: str, shell: str, c click.secho(response) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/files.py b/riocli/device/files.py index f69e4917..7bcfab70 100644 --- a/riocli/device/files.py +++ b/riocli/device/files.py @@ -47,7 +47,7 @@ def list_uploads(device_name: str, device_guid: str) -> None: _display_upload_list(uploads=uploads, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_uploads.command('create') @@ -79,7 +79,7 @@ def create_upload( click.secho('File upload requested successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_uploads.command('status') @@ -95,7 +95,7 @@ def upload_status(device_name: str, device_guid: str, file_name: str, request_id click.secho(status.status) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_uploads.command('delete') @@ -112,7 +112,7 @@ def delete_upload(device_name: str, device_guid: str, file_name: str, request_id
click.secho('Deleted upload successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_uploads.command('download') @@ -129,7 +129,7 @@ def download_log(device_name: str, device_guid: str, file_name: str, request_id: click.secho(url) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_uploads.command('cancel') @@ -146,7 +146,7 @@ def cancel_upload(device_name: str, device_guid: str, file_name: str, request_id click.secho('Cancelled upload successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_uploads.command('share') @@ -165,16 +165,16 @@ def shared_url(device_name: str, device_guid: str, file_name: str, request_id: s click.secho(public_url.url, fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_upload_list(uploads: LogUploads, show_header: bool = True) -> None: if show_header: - click.secho('{:34} {:20} {:16} {:<12} {:64}'. - format('Upload ID', 'Name', 'Status', 'Total Size', 'Error'), + click.secho('{:34} {:20} {:16} {:<12}'. + format('Upload ID', 'Name', 'Status', 'Total Size'), fg='yellow') for upload in uploads: - click.secho('{:34} {:20} {:16} {:<12} {:64}'.format(upload.request_uuid, upload.filename, upload.status, - upload.total_size, upload.error_message)) + click.secho('{:34} {:20} {:16} {:<12}'.format(upload.request_uuid, upload.filename, upload.status, + upload.total_size)) diff --git a/riocli/device/label.py b/riocli/device/label.py index 90020153..09b4460c 100644 --- a/riocli/device/label.py +++ b/riocli/device/label.py @@ -50,7 +50,7 @@ def list_labels(device_name: str, device_guid: str) -> None: _display_label_list(labels, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_labels.command('create') @@ -70,7 +70,7 @@ def create_label(device_name: str, device_guid: str, key: str, value: str) -> No click.secho('Label added successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_labels.command('update') @@ -88,7 +88,7 @@ def update_label(device_name: str, device_guid: str, key: str, value: str) -> No click.secho('Label updated successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_labels.command('delete') @@ -105,7 +105,7 @@ def delete_label(device_name: str, device_guid: str, key: str) -> None: click.secho('Label deleted successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_label_list(labels: typing.List[Label], show_header: bool = True) -> None: diff --git a/riocli/device/list.py b/riocli/device/list.py index fb57add2..968ecc15 100644 --- a/riocli/device/list.py +++ b/riocli/device/list.py @@ -30,7 +30,7 @@ def list_devices() -> None: _display_device_list(devices, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_device_list(devices: typing.List[Device], show_header: bool = True) -> None: diff --git a/riocli/device/metric.py b/riocli/device/metric.py index 1c979eb9..2b73bccd 100644 --- a/riocli/device/metric.py +++ b/riocli/device/metric.py @@ -51,7 +51,7 @@ def list_metrics(device_name: str, device_guid: str) -> None: _display_metric_list(metrics, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + 
raise SystemExit(1) @device_metrics.command('subscribe') @@ -70,7 +70,7 @@ def subscribe_metrics(device_name: str, device_guid: str, metric: str) -> None: click.secho('Metrics subscribed successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_metrics.command('unsubscribe') @@ -89,7 +89,7 @@ def unsubscribe_metrics(device_name: str, device_guid: str, metric: str) -> None click.secho('Metrics un-subscribed successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_metric_list(metrics: typing.List[Metric], show_header: bool = True) -> None: diff --git a/riocli/device/model.py b/riocli/device/model.py new file mode 100644 index 00000000..46c4e104 --- /dev/null +++ b/riocli/device/model.py @@ -0,0 +1,71 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import typing + +import click +from rapyuta_io import Client +from rapyuta_io.clients.device import Device as v1Device, DevicePythonVersion, DeviceRuntime + +from riocli.device.util import find_device_guid, DeviceNotFound +from riocli.device.validation import validate +from riocli.model import Model + + +class Device(Model): + + def __init__(self, *args, **kwargs): + self.update(*args, **kwargs) + + def find_object(self, client: Client) -> bool: + guid, obj = self.rc.find_depends({"kind": "device", "nameOrGUID": self.metadata.name}) + if not guid: + return False + + return obj + + def create_object(self, client: Client) -> v1Device: + device = client.create_device(self.to_v1()) + return device + + def update_object(self, client: Client, obj: typing.Any) -> typing.Any: + pass + + def delete_object(self, client: Client, obj: typing.Any) -> typing.Any: + obj.delete() + + def to_v1(self) -> v1Device: + python_version = DevicePythonVersion(self.spec.python) + rosbag_mount_path = None + ros_workspace = None + + docker_enabled = self.spec.get('docker', False) and self.spec.docker.enabled + if docker_enabled: + rosbag_mount_path = self.spec.docker.rosbagMountPath + + preinstalled_enabled = self.spec.get('preinstalled', False) and self.spec.preinstalled.enabled + if preinstalled_enabled and self.spec.preinstalled.get('catkinWorkspace'): + ros_workspace = self.spec.preinstalled.catkinWorkspace + + return v1Device(name=self.metadata.name, description=self.spec.get('description'), + runtime_docker=docker_enabled, runtime_preinstalled=preinstalled_enabled, + ros_distro=self.spec.rosDistro, python_version=python_version, + rosbag_mount_path=rosbag_mount_path, ros_workspace=ros_workspace) + + @classmethod + def pre_process(cls, client: Client, d: typing.Dict) -> None: + pass + + @staticmethod + def validate(data) -> None: + validate(data) diff --git a/riocli/device/onboard.py b/riocli/device/onboard.py index 7bf0147f..fc1e5383 100644 --- a/riocli/device/onboard.py +++ b/riocli/device/onboard.py @@ -35,4 +35,4 @@ def device_onboard(device_name: str, device_guid: str) -> None: 
click.secho(script.full_command()) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/tools/device_init.py b/riocli/device/tools/device_init.py index e6235b01..8f61d703 100644 --- a/riocli/device/tools/device_init.py +++ b/riocli/device/tools/device_init.py @@ -35,7 +35,7 @@ def device_init(device_name: str, device_guid: str) -> None: _setup_local() except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _setup_device(device_guid: str) -> None: diff --git a/riocli/device/tools/forward.py b/riocli/device/tools/forward.py index 6f9291b1..20e2613c 100644 --- a/riocli/device/tools/forward.py +++ b/riocli/device/tools/forward.py @@ -38,4 +38,4 @@ def port_forward(device_name: str, device_guid: str, remote_port: int, local_por run_tunnel_on_local(local_port=local_port, path=path, background=False) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/tools/rapyuta_logs.py b/riocli/device/tools/rapyuta_logs.py index 0b7e8e6d..de58ea0b 100644 --- a/riocli/device/tools/rapyuta_logs.py +++ b/riocli/device/tools/rapyuta_logs.py @@ -33,4 +33,4 @@ def rapyuta_agent_logs(device_name: str, device_guid: str) -> None: os.system('ssh -p {} -o StrictHostKeyChecking=no root@localhost tail -f /var/log/salt/minion'.format(local_port)) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/tools/scp.py b/riocli/device/tools/scp.py index 9a9ccc23..92853e76 100644 --- a/riocli/device/tools/scp.py +++ b/riocli/device/tools/scp.py @@ -34,7 +34,7 @@ def scp(source, destination) -> None: if src_device_guid is None and dest_device_guid is None: click.secho('One of source or destination paths should be a remote path of format ' ':path', fg='red') - exit(1) + raise SystemExit(1) if src_device_guid is not None: copy_from_device(src_device_guid, src, dest) @@ -43,4 +43,4 @@ def scp(source, destination) -> None: copy_to_device(dest_device_guid, src, dest) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/device/tools/service.py b/riocli/device/tools/service.py index 296cad77..7121a04c 100644 --- a/riocli/device/tools/service.py +++ b/riocli/device/tools/service.py @@ -27,7 +27,7 @@ def status_all(device_name: str, device_guid: str) -> None: click.secho(response) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def run_service_cmd(device_guid, service_name, service_cmd=""): @@ -37,7 +37,7 @@ def run_service_cmd(device_guid, service_name, service_cmd=""): click.secho(response) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @click.command('status') diff --git a/riocli/device/tools/ssh.py b/riocli/device/tools/ssh.py index b56bcd7a..8a4fd5d1 100644 --- a/riocli/device/tools/ssh.py +++ b/riocli/device/tools/ssh.py @@ -46,7 +46,7 @@ def device_ssh(device_name: str, device_guid: str, user: str, local_port: int, r os.system('ssh -p {} {} -o StrictHostKeyChecking=no {}@localhost'.format(local_port, extra_args, user)) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @click.command('ssh-authorize') @@ -64,4 +64,4 @@ def ssh_authorize_key(device_name: str, device_guid: str, public_key_file: click click.secho('Keys added successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git 
a/riocli/device/tools/util.py b/riocli/device/tools/util.py index 498c05c7..04586144 100644 --- a/riocli/device/tools/util.py +++ b/riocli/device/tools/util.py @@ -41,13 +41,13 @@ def run_tunnel_on_local(local_port: int, path: str, background: bool = False) -> def copy_from_device(device_guid: str, src: str, dest: str) -> None: - file = '{}-{}'.format(src, random_string(7, 5)) + file = '{}-{}'.format(src, random_string(7, 5)).lstrip('/').replace('/', '-') client = new_client() device = client.get_device(device_id=device_guid) - request_uuid = device.upload_log_file(LogsUploadRequest(src, file_name=file, metadata={'cli_req_id': file})) + request_uuid = device.upload_log_file(LogsUploadRequest(src, file_name=file)) while True: status = device.get_log_upload_status(request_uuid) - if status.status != "IN PROGRESS": + if status.status not in ["IN PROGRESS", "PENDING"]: break time.sleep(10) diff --git a/riocli/device/topic.py b/riocli/device/topic.py index 1500e89b..bca30ba8 100644 --- a/riocli/device/topic.py +++ b/riocli/device/topic.py @@ -48,7 +48,7 @@ def list_topics(device_name: str, device_guid: str) -> None: _display_topic_list(device.topic_status()) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_topics.command('subscribe') @@ -69,7 +69,7 @@ def subscribe_topic(device_name: str, device_guid: str, topic: str, kind: str) - click.secho('Topic subscribed successfully', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @device_topics.command('unsubscribe') @@ -90,7 +90,7 @@ def unsubscribe_topic(device_name: str, device_guid: str, topic: str, kind: str) click.secho('Topic un-subscribed successfully', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_topic_list(status: TopicsStatus, show_header: bool = True) -> None: diff --git a/riocli/device/util.py b/riocli/device/util.py index 0a81004b..c740458e 100644 --- a/riocli/device/util.py +++ b/riocli/device/util.py @@ -30,7 +30,7 @@ def decorated(**kwargs: typing.Any): client = new_client() except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) name = kwargs.pop('device_name') @@ -49,7 +49,11 @@ def decorated(**kwargs: typing.Any): name = get_device_name(client, guid) if guid is None: - guid = find_device_guid(client, name) + try: + guid = find_device_guid(client, name) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) kwargs['device_name'] = name kwargs['device_guid'] = guid @@ -69,8 +73,7 @@ def find_device_guid(client: Client, name: str) -> str: if device.name == name: return device.uuid - click.secho("device not found", fg='red') - exit(1) + raise DeviceNotFound() def name_to_request_id(f: typing.Callable) -> typing.Callable: @@ -80,7 +83,7 @@ def decorated(**kwargs): client = new_client() except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) device_guid = kwargs.get('device_guid') device = client.get_device(device_id=device_guid) @@ -103,7 +106,7 @@ def find_request_id(requests: typing.List[LogUploads], file_name: str) -> (str, return request.filename, request.request_uuid click.secho("file not found", fg='red') - exit(1) + raise SystemExit(1) def device_identity(src, devices=[]): @@ -127,3 +130,8 @@ def is_remote_path(src, devices=[]): if device.name == parts[0]: return device.uuid, Path(parts[1]).absolute().as_posix() return None, src + +class DeviceNotFound(Exception): + def __init__(self, 
message='device not found'): + self.message = message + super().__init__(self.message) diff --git a/riocli/device/validation.py b/riocli/device/validation.py new file mode 100644 index 00000000..4d081e94 --- /dev/null +++ b/riocli/device/validation.py @@ -0,0 +1,228 @@ +VERSION = "2.16.1" +import re +from fastjsonschema import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z'), + '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_device(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_device(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Device'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/uuid'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'rosDistro': {'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, 'python': {'type': 'string', 'enum': ['2', '3'], 'default': '3'}}, 'dependencies': {'docker': {'oneOf': [{'properties': {'docker': {'type': 'object', 'properties': {'enabled': {'enum': [False]}}}}}, {'properties': {'docker': {'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'rosbagMountPath': {'type': 'string', 'default': '/opt/rapyuta/volumes/rosbag'}}}}}]}, 'preinstalled': {'oneOf': [{'properties': {'preinstalled': {'type': 'object', 'properties': {'enabled': {'enum': [False]}}}}}, {'properties': {'preinstalled': {'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'catkinWorkspace': {'type': 'string'}}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Device'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/uuid'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'rosDistro': {'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, 'python': {'type': 'string', 'enum': ['2', '3'], 'default': '3'}}, 'dependencies': {'docker': {'oneOf': [{'properties': {'docker': {'type': 'object', 'properties': {'enabled': {'enum': [False]}}}}}, {'properties': {'docker': {'type': 'object', 'properties': {'enabled': {'enum': [True]}, 
'rosbagMountPath': {'type': 'string', 'default': '/opt/rapyuta/volumes/rosbag'}}}}}]}, 'preinstalled': {'oneOf': [{'properties': {'preinstalled': {'type': 'object', 'properties': {'enabled': {'enum': [False]}}}}}, {'properties': {'preinstalled': {'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'catkinWorkspace': {'type': 'string'}}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + data_keys = set(data.keys()) + if "apiVersion" in data_keys: + data_keys.remove("apiVersion") + data__apiVersion = data["apiVersion"] + if data__apiVersion != "apiextensions.rapyuta.io/v1": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 'apiextensions.rapyuta.io/v1'}, rule='const') + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "Device": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: Device", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'Device'}, rule='const') + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + if "spec" in data_keys: + data_keys.remove("spec") + data__spec = data["spec"] + validate___definitions_devicespec(data__spec, custom_formats, (name_prefix or "data") + ".spec") + return data + +def validate___definitions_devicespec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'rosDistro': {'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, 'python': {'type': 'string', 'enum': ['2', '3'], 'default': '3'}}, 'dependencies': {'docker': {'oneOf': [{'properties': {'docker': {'type': 'object', 'properties': {'enabled': {'enum': [False]}}}}}, {'properties': {'docker': {'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'rosbagMountPath': {'type': 'string', 'default': '/opt/rapyuta/volumes/rosbag'}}}}}]}, 'preinstalled': {'oneOf': [{'properties': {'preinstalled': {'type': 'object', 'properties': {'enabled': {'enum': [False]}}}}}, {'properties': {'preinstalled': {'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'catkinWorkspace': {'type': 'string'}}}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + if "docker" in data: + data_one_of_count1 = 0 + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "docker" in data_keys: + data_keys.remove("docker") + data__docker = data["docker"] + if not isinstance(data__docker, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker must be object", value=data__docker, name="" + (name_prefix or "data") + ".docker", definition={'type': 'object', 'properties': {'enabled': {'enum': [False]}}}, rule='type') + data__docker_is_dict = isinstance(data__docker, dict) + if data__docker_is_dict: + data__docker_keys = set(data__docker.keys()) + if "enabled" in data__docker_keys: + data__docker_keys.remove("enabled") + data__docker__enabled = 
data__docker["enabled"] + if data__docker__enabled not in [False]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker.enabled must be one of [False]", value=data__docker__enabled, name="" + (name_prefix or "data") + ".docker.enabled", definition={'enum': [False]}, rule='enum') + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "docker" in data_keys: + data_keys.remove("docker") + data__docker = data["docker"] + if not isinstance(data__docker, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker must be object", value=data__docker, name="" + (name_prefix or "data") + ".docker", definition={'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'rosbagMountPath': {'type': 'string', 'default': '/opt/rapyuta/volumes/rosbag'}}}, rule='type') + data__docker_is_dict = isinstance(data__docker, dict) + if data__docker_is_dict: + data__docker_keys = set(data__docker.keys()) + if "enabled" in data__docker_keys: + data__docker_keys.remove("enabled") + data__docker__enabled = data__docker["enabled"] + if data__docker__enabled not in [True]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker.enabled must be one of [True]", value=data__docker__enabled, name="" + (name_prefix or "data") + ".docker.enabled", definition={'enum': [True]}, rule='enum') + if "rosbagMountPath" in data__docker_keys: + data__docker_keys.remove("rosbagMountPath") + data__docker__rosbagMountPath = data__docker["rosbagMountPath"] + if not isinstance(data__docker__rosbagMountPath, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker.rosbagMountPath must be string", value=data__docker__rosbagMountPath, name="" + (name_prefix or "data") + ".docker.rosbagMountPath", definition={'type': 'string', 'default': '/opt/rapyuta/volumes/rosbag'}, rule='type') + else: data__docker["rosbagMountPath"] = '/opt/rapyuta/volumes/rosbag' + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'docker': {'type': 'object', 'properties': {'enabled': {'enum': [False]}}}}}, {'properties': {'docker': {'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'rosbagMountPath': {'type': 'string', 'default': '/opt/rapyuta/volumes/rosbag'}}}}}]}, rule='oneOf') + if "preinstalled" in data: + data_one_of_count2 = 0 + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "preinstalled" in data_keys: + data_keys.remove("preinstalled") + data__preinstalled = data["preinstalled"] + if not isinstance(data__preinstalled, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".preinstalled must be object", value=data__preinstalled, name="" + (name_prefix or "data") + ".preinstalled", definition={'type': 'object', 'properties': {'enabled': {'enum': [False]}}}, rule='type') + data__preinstalled_is_dict = isinstance(data__preinstalled, dict) + if data__preinstalled_is_dict: + data__preinstalled_keys = set(data__preinstalled.keys()) + if "enabled" in data__preinstalled_keys: + data__preinstalled_keys.remove("enabled") + 
data__preinstalled__enabled = data__preinstalled["enabled"] + if data__preinstalled__enabled not in [False]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".preinstalled.enabled must be one of [False]", value=data__preinstalled__enabled, name="" + (name_prefix or "data") + ".preinstalled.enabled", definition={'enum': [False]}, rule='enum') + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "preinstalled" in data_keys: + data_keys.remove("preinstalled") + data__preinstalled = data["preinstalled"] + if not isinstance(data__preinstalled, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".preinstalled must be object", value=data__preinstalled, name="" + (name_prefix or "data") + ".preinstalled", definition={'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'catkinWorkspace': {'type': 'string'}}}, rule='type') + data__preinstalled_is_dict = isinstance(data__preinstalled, dict) + if data__preinstalled_is_dict: + data__preinstalled_keys = set(data__preinstalled.keys()) + if "enabled" in data__preinstalled_keys: + data__preinstalled_keys.remove("enabled") + data__preinstalled__enabled = data__preinstalled["enabled"] + if data__preinstalled__enabled not in [True]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".preinstalled.enabled must be one of [True]", value=data__preinstalled__enabled, name="" + (name_prefix or "data") + ".preinstalled.enabled", definition={'enum': [True]}, rule='enum') + if "catkinWorkspace" in data__preinstalled_keys: + data__preinstalled_keys.remove("catkinWorkspace") + data__preinstalled__catkinWorkspace = data__preinstalled["catkinWorkspace"] + if not isinstance(data__preinstalled__catkinWorkspace, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".preinstalled.catkinWorkspace must be string", value=data__preinstalled__catkinWorkspace, name="" + (name_prefix or "data") + ".preinstalled.catkinWorkspace", definition={'type': 'string'}, rule='type') + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count2) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'preinstalled': {'type': 'object', 'properties': {'enabled': {'enum': [False]}}}}}, {'properties': {'preinstalled': {'type': 'object', 'properties': {'enabled': {'enum': [True]}, 'catkinWorkspace': {'type': 'string'}}}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "rosDistro" in data_keys: + data_keys.remove("rosDistro") + data__rosDistro = data["rosDistro"] + if not isinstance(data__rosDistro, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosDistro must be string", value=data__rosDistro, name="" + (name_prefix or "data") + ".rosDistro", definition={'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, rule='type') + if data__rosDistro not in ['kinetic', 'melodic', 'noetic']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosDistro must be one of ['kinetic', 'melodic', 'noetic']", value=data__rosDistro, name="" + (name_prefix or "data") + ".rosDistro", definition={'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, rule='enum') + else: 
data["rosDistro"] = 'melodic' + if "python" in data_keys: + data_keys.remove("python") + data__python = data["python"] + if not isinstance(data__python, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".python must be string", value=data__python, name="" + (name_prefix or "data") + ".python", definition={'type': 'string', 'enum': ['2', '3'], 'default': '3'}, rule='type') + if data__python not in ['2', '3']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".python must be one of ['2', '3']", value=data__python, name="" + (name_prefix or "data") + ".python", definition={'type': 'string', 'enum': ['2', '3'], 'default': '3'}, rule='enum') + else: data["python"] = '3' + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_uuid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " 
must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_projectguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_uuid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern') + return data \ No newline at end of file diff --git a/riocli/disk/__init__.py b/riocli/disk/__init__.py new file mode 100644 index 00000000..5771dd43 --- /dev/null +++ b/riocli/disk/__init__.py @@ -0,0 +1,38 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+import click +from click_help_colors import HelpColorsGroup + +from riocli.disk.create import create_disk +from riocli.disk.list import list_disks +from riocli.disk.delete import delete_disk + + +@click.group( + invoke_without_command=False, + cls=HelpColorsGroup, + help_headers_color='yellow', + help_options_color='green', +) +def disk() -> None: + """ + Persistent disks + """ + pass + + +disk.add_command(list_disks) +disk.add_command(create_disk) +disk.add_command(delete_disk) +#disk.add_command(inspect_project) diff --git a/riocli/disk/create.py b/riocli/disk/create.py new file mode 100644 index 00000000..f313d480 --- /dev/null +++ b/riocli/disk/create.py @@ -0,0 +1,44 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import click +from click_spinner import spinner +from rapyuta_io.clients.persistent_volumes import DiskCapacity +from rapyuta_io.utils.rest_client import HttpMethod + +from riocli.disk.util import _api_call + + +@click.command('create') +@click.argument('disk-name', type=str) +@click.option('--capacity', 'capacity', type=int) +def create_disk(disk_name: str, capacity: int) -> None: + """ + Creates a new disk + """ + try: + capacity = DiskCapacity(capacity) + with spinner(): + payload = { + "name": disk_name, + "diskType": "ssd", + "runtime": "cloud", + "capacity": DiskCapacity(capacity).value, + } + disk = _api_call(HttpMethod.POST, payload=payload) + + click.secho('Disk {} ({}) created successfully!'. + format(disk['name'], disk['guid']), fg='green') + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) diff --git a/riocli/disk/delete.py b/riocli/disk/delete.py new file mode 100644 index 00000000..c98e5359 --- /dev/null +++ b/riocli/disk/delete.py @@ -0,0 +1,38 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
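+# 'delete' command for disks; the name_to_guid decorator resolves the disk name to its GUID before deletion.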
+import click +from click_spinner import spinner +from rapyuta_io.utils.rest_client import HttpMethod + +from riocli.disk.util import name_to_guid, _api_call + + +@click.command('delete') +@click.option('--force', '-f', 'force', is_flag=True, default=False, help='Skip confirmation') +@click.argument('disk-name', required=True) +@name_to_guid +def delete_disk(disk_name: str, disk_guid: str, force: bool): + """ + Delete the disk from the Platform + """ + if not force: + click.confirm('Deleting disk {} ({})'.format(disk_name, disk_guid), abort=True) + + try: + with spinner(): + _api_call(HttpMethod.DELETE, guid=disk_guid, load_response=False) + click.echo(click.style('Disk deleted successfully!', fg='green')) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) diff --git a/riocli/disk/list.py b/riocli/disk/list.py new file mode 100644 index 00000000..b02f9b3a --- /dev/null +++ b/riocli/disk/list.py @@ -0,0 +1,43 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import typing + +import click +from rapyuta_io import Build +from rapyuta_io.utils.rest_client import HttpMethod + +from riocli.disk.util import _api_call + + +@click.command('list') +def list_disks() -> None: + """ + List the disks in the selected project + """ + try: + disks = _api_call(HttpMethod.GET) + _display_disk_list(disks, show_header=True) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) + + +def _display_disk_list(disks: typing.Any, show_header: bool = True): + if show_header: + click.secho('{:30} {:25} {:12} {:8} {:<64}'.format('Disk ID', 'Name', 'Status', 'Capacity', 'Used By'), + fg='yellow') + + for disk in disks: + click.secho('{:30} {:25} {:12} {:8} {:<64}'.format(disk['guid'], disk['name'], disk['status'], disk['capacity'], + disk['usedBy'])) diff --git a/riocli/disk/model.py b/riocli/disk/model.py new file mode 100644 index 00000000..762513f9 --- /dev/null +++ b/riocli/disk/model.py @@ -0,0 +1,83 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
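+# Disk model for the declarative apply flow: creates disks through the catalog API and deletes them via their backing volume instance.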
+from time import sleep +import typing + +import click +import click_spinner +from munch import munchify +from rapyuta_io import Client +from rapyuta_io.utils.rest_client import HttpMethod + +from riocli.disk.util import _api_call, find_disk_guid, DiskNotFound +from riocli.model import Model + + +class Disk(Model): + def find_object(self, client: Client) -> typing.Any: + _, disk = self.rc.find_depends({'kind': 'disk', 'nameOrGUID': self.metadata.name}) + if not disk: + return False + + return disk + + def create_object(self, client: Client) -> typing.Any: + labels = self.metadata.get('labels', None) + payload = { + "labels": labels, + "name": self.metadata.name, + "diskType": "ssd", + "runtime": self.spec.runtime, + "capacity": self.spec.capacity, + } + with click_spinner.spinner(): + result = _api_call(HttpMethod.POST, payload=payload) + result = munchify(result) + disk_dep_guid, disk = self.rc.find_depends({'kind': self.kind.lower(), 'nameOrGUID': self.metadata.name}) + volume_instance = client.get_volume_instance(disk_dep_guid) + try: + volume_instance.poll_deployment_till_ready(sleep_interval=5) + return result + except Exception: + click.secho(">> Warning: Error Polling for disk ({}:{})".format(self.kind.lower(), self.metadata.name), fg="yellow") + return result + + def update_object(self, client: Client, obj: typing.Any) -> typing.Any: + pass + + def delete_object(self, client: Client, obj: typing.Any) -> typing.Any: + self._poll_till_available(client, obj) + volume_instance = client.get_volume_instance(obj.internalDeploymentGUID) + volume_instance.destroy_volume_instance() + + def _poll_till_available(self, client: Client, obj: typing.Any, sleep_interval=5, retries=10): + dep_guid = obj.internalDeploymentGUID + deployment = client.get_deployment(deployment_id=dep_guid) + + # Return as soon as the backing deployment reports Available; otherwise sleep and retry. + for _ in range(retries): + status = deployment.get_status().status + if status == 'Available': + return + sleep(sleep_interval) + + @classmethod + def pre_process(cls, client: Client, d: typing.Dict) -> None: + pass + + @staticmethod + def validate(d): + pass + + diff --git a/riocli/disk/util.py b/riocli/disk/util.py new file mode 100644 index 00000000..524a70b2 --- /dev/null +++ b/riocli/disk/util.py @@ -0,0 +1,93 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License.
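+# Shared helpers for the disk commands: a name-to-GUID resolution decorator and a thin wrapper over the catalog /disk REST endpoint.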
+import json +import typing +import functools +import click + +from rapyuta_io import Client +from rapyuta_io.utils.rest_client import RestClient, HttpMethod + +from riocli.config import Configuration, new_client + + +def name_to_guid(f: typing.Callable) -> typing.Callable: + @functools.wraps(f) + def decorated(**kwargs: typing.Any): + client = new_client() + name = kwargs.pop('disk_name') + guid = None + + if name.startswith('disk-'): + guid = name + name = None + + if name is None: + name = get_disk_name(client, guid) + + if guid is None: + try: + guid = find_disk_guid(client, name) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) + + kwargs['disk_name'] = name + kwargs['disk_guid'] = guid + f(**kwargs) + + return decorated + + +def get_disk_name(client: Client, guid: str) -> str: + disk = _api_call(HttpMethod.GET, guid=guid) + return disk['name'] + + +def find_disk_guid(client: Client, name: str) -> str: + try: + disks = _api_call(HttpMethod.GET) + for disk in disks: + if disk['name'] == name: + return disk['guid'] + raise DiskNotFound() + except Exception: + raise DiskNotFound() + + +def _api_call(method: str, guid: typing.Union[str, None] = None, + payload: typing.Union[typing.Dict, None] = None, load_response: bool = True, +) -> typing.Any: + config = Configuration() + catalog_host = config.data.get('catalog_host', 'https://gacatalog.apps.rapyuta.io') + url = '{}/disk'.format(catalog_host) + if guid: + url = '{}/{}'.format(url, guid) + headers = config.get_auth_header() + response = RestClient(url).method(method).headers(headers).execute(payload=payload) + data = None + err_msg = 'error in the api call' + if load_response: + data = json.loads(response.text) + + if not response.ok: + # data is None when load_response is False; fall back to the generic message. + err_msg = data.get('error') if data else err_msg + raise Exception(err_msg) + return data + + +class DiskNotFound(Exception): + def __init__(self, message='disk not found!'): + self.message = message + super().__init__(self.message) diff --git a/riocli/disk/validation.py b/riocli/disk/validation.py new file mode 100644 index 00000000..8addb6c7 --- /dev/null +++ b/riocli/disk/validation.py @@ -0,0 +1,137 @@ +VERSION = "2.16.1" +import re +from fastjsonschema import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z'), + '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z'), + '^disk-[a-z]{24}$': re.compile('^disk-[a-z]{24}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_disk(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_disk(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Disk', 'default': 'Disk'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/diskGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap'}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'runtime': {'default': 'cloud', 'const': 'cloud'}, 'capacity':
{'type': 'number', 'enum': [4, 8, 16, 32, 64, 128, 256, 512]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Disk', 'default': 'Disk'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/diskGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap'}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'runtime': {'default': 'cloud', 'const': 'cloud'}, 'capacity': {'type': 'number', 'enum': [4, 8, 16, 32, 64, 128, 256, 512]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + data_keys = set(data.keys()) + if "apiVersion" in data_keys: + data_keys.remove("apiVersion") + data__apiVersion = data["apiVersion"] + if data__apiVersion != "apiextensions.rapyuta.io/v1": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, rule='const') + else: data["apiVersion"] = 'apiextensions.rapyuta.io/v1' + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "Disk": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: Disk", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'Disk', 'default': 'Disk'}, rule='const') + else: data["kind"] = 'Disk' + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + if "spec" in data_keys: + data_keys.remove("spec") + data__spec = data["spec"] + validate___definitions_diskspec(data__spec, custom_formats, (name_prefix or "data") + ".spec") + return data + +def validate___definitions_diskspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'runtime': {'default': 'cloud', 'const': 'cloud'}, 'capacity': {'type': 'number', 'enum': [4, 8, 16, 32, 64, 128, 256, 512]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if data__runtime != "cloud": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be same as const definition: cloud", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'default': 'cloud', 'const': 'cloud'}, rule='const') + else: data["runtime"] = 'cloud' + if "capacity" in data_keys: + data_keys.remove("capacity") + 
data__capacity = data["capacity"] + if not isinstance(data__capacity, (int, float)) or isinstance(data__capacity, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".capacity must be number", value=data__capacity, name="" + (name_prefix or "data") + ".capacity", definition={'type': 'number', 'enum': [4, 8, 16, 32, 64, 128, 256, 512]}, rule='type') + if data__capacity not in [4, 8, 16, 32, 64, 128, 256, 512]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".capacity must be one of [4, 8, 16, 32, 64, 128, 256, 512]", value=data__capacity, name="" + (name_prefix or "data") + ".capacity", definition={'type': 'number', 'enum': [4, 8, 16, 32, 64, 128, 256, 512]}, rule='enum') + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^disk-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^disk-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_diskguid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 
'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_projectguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_uuid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern') + return data + +def validate___definitions_diskguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^disk-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^disk-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^disk-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^disk-[a-z]{24}$'}, rule='pattern') + return data \ No newline at end of file diff --git a/riocli/marketplace/inspect.py b/riocli/marketplace/inspect.py index 1858bb60..9778c4c5 100644 --- a/riocli/marketplace/inspect.py +++ b/riocli/marketplace/inspect.py @@ -40,5 +40,5 @@ def inspect_marketplace(format_type: str, rrn: str, version: str = None) -> None inspect_with_format(package, format_type=format_type) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/marketplace/install.py b/riocli/marketplace/install.py index 2e964b62..75b85b7b 100644 --- a/riocli/marketplace/install.py +++ b/riocli/marketplace/install.py @@ -40,7 +40,7 @@ def install_product(rrn: str, version: str, dry_run: bool, ignore_missing: bool, """ if not dependency_file and not rrn: 
        click.secho('Either one of RRN or Dependency Filename must be provided', fg='red')
-        exit(1)
+        raise SystemExit(1)
     try:
         if dependency_file is not None:
             dependencies = parse_dependency_file(dependency_file)
@@ -49,7 +49,7 @@ def install_product(rrn: str, version: str, dry_run: bool, ignore_missing: bool,
             single_product(rrn, version, dry_run, format_type)
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)


 def bulk_product(products: dict, dry_run: bool, ignore_missing: bool) -> None:
diff --git a/riocli/marketplace/list.py b/riocli/marketplace/list.py
index 8bbeb582..1c4f6df4 100644
--- a/riocli/marketplace/list.py
+++ b/riocli/marketplace/list.py
@@ -31,7 +31,7 @@ def list_marketplace() -> None:
         display_marketplace_list(packages=packages, show_header=True)
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)


 def display_marketplace_list(packages: list, show_header: bool = True) -> None:
diff --git a/riocli/model/__init__.py b/riocli/model/__init__.py
new file mode 100644
index 00000000..24985462
--- /dev/null
+++ b/riocli/model/__init__.py
@@ -0,0 +1 @@
+from riocli.model.base import Model
diff --git a/riocli/model/base.py b/riocli/model/base.py
new file mode 100644
index 00000000..3df86a92
--- /dev/null
+++ b/riocli/model/base.py
@@ -0,0 +1,141 @@
+# Copyright 2022 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import typing
+from abc import ABC, abstractmethod
+from datetime import datetime
+from shutil import get_terminal_size
+
+import click
+from munch import Munch, munchify
+from rapyuta_io import Client
+
+from riocli.project.util import find_project_guid
+
+prompt = ">> {}{}{} [{}]"  # >> msg spacer right_msg time
+
+DELETE_POLICY_LABEL = 'rapyuta.io/deletionPolicy'
+
+
+def message_with_prompt(msg, right_msg="", fg='white', with_time=True):
+    columns, _ = get_terminal_size()
+    time = datetime.now().isoformat('T')
+    spacer = ' ' * (int(columns) - len(msg + right_msg + time) - 12)
+    msg = prompt.format(msg, spacer, right_msg, time)
+    click.secho(msg, fg=fg)
+
+
+class Model(ABC, Munch):
+
+    def apply(self, client: Client, *args, **kwargs) -> typing.Any:
+        try:
+            self._set_project_in_client(client)
+            obj = self.find_object(client)
+            dryrun = kwargs.get("dryrun", False)
+            if not obj:
+                message_with_prompt("⌛ Create {}:{}".format(self.kind.lower(), self.metadata.name), fg='yellow')
+                if not dryrun:
+                    result = self.create_object(client)
+                    message_with_prompt("✅ Created {}:{}".format(self.kind.lower(), self.metadata.name), fg='green')
+                    return result
+            else:
+                message_with_prompt('🔎 {}:{} exists. Will be updated'.format(self.kind.lower(), self.metadata.name))
+                message_with_prompt("⌛ Update {}:{}".format(self.kind.lower(), self.metadata.name), fg='yellow')
+                if not dryrun:
+                    result = self.update_object(client, obj)
+                    message_with_prompt("✅ Updated {}:{}".format(self.kind.lower(), self.metadata.name), fg='green')
+                    return result
+        except Exception as e:
+            message_with_prompt("‼ ERR {}:{}. {} ‼".format(self.kind.lower(), self.metadata.name, str(e)), fg="red")
+            raise e
+
+    def delete(self, client: Client, obj: typing.Any, *args, **kwargs):
+        try:
+            self._set_project_in_client(client)
+            obj = self.find_object(client)
+            dryrun = kwargs.get("dryrun", False)
+
+            if not obj:
+                message_with_prompt('⁉ {}:{} does not exist'.format(self.kind.lower(), self.metadata.name))
+                return
+            else:
+                message_with_prompt("⌛ Delete {}:{}".format(self.kind.lower(), self.metadata.name), fg='yellow')
+                if not dryrun:
+                    labels = self.metadata.get('labels', {})
+                    if DELETE_POLICY_LABEL in labels and \
+                            labels.get(DELETE_POLICY_LABEL) and \
+                            labels.get(DELETE_POLICY_LABEL).lower() == "retain":
+                        click.secho(">> Warning: delete protection enabled on {}:{}. Resource will be retained".format(self.kind.lower(), self.metadata.name), fg="yellow")
+                        return
+
+                    self.delete_object(client, obj)
+                    message_with_prompt("❌ Deleted {}:{}".format(self.kind.lower(), self.metadata.name), fg='red')
+
+        except Exception as e:
+            message_with_prompt("‼ ERR {}:{}. {} ‼".format(self.kind.lower(), self.metadata.name, str(e)), fg="red")
+            raise e
+
+    @abstractmethod
+    def find_object(self, client: Client) -> typing.Any:
+        pass
+
+    @abstractmethod
+    def create_object(self, client: Client) -> typing.Any:
+        pass
+
+    @abstractmethod
+    def update_object(self, client: Client, obj: typing.Any) -> typing.Any:
+        pass
+
+    @abstractmethod
+    def delete_object(self, client: Client, obj: typing.Any) -> typing.Any:
+        pass
+
+    @classmethod
+    @abstractmethod
+    def pre_process(cls, client: Client, d: typing.Dict) -> None:
+        pass
+
+    @staticmethod
+    @abstractmethod
+    def validate(d):
+        pass
+
+    @classmethod
+    def from_dict(cls, client: Client, d: typing.Dict):
+        cls.pre_process(client, d)
+        cls.validate(d)
+        return cls(munchify(d))
+
+    def _set_project_in_client(self, client: Client) -> Client:
+        # If the Type is Project itself then no need to configure Client.
+        if self.kind == 'Project':
+            return client
+
+        # If Project is not specified then no need to configure the Client. It
+        # will use the pre-configured Project by default.
+        #
+        # TODO(ankit): Move this to the pre-processing step, once implemented.
+ project = self.metadata.get('project', None) + if not project: + return client + + # This should work unless someone has a Project Name starting with + # 'project-' prefix + if not project.startswith('project-'): + project = find_project_guid(client, project) + + client.set_project(project_guid=project) + return client diff --git a/riocli/network/create.py b/riocli/network/create.py index 2a5a2f81..b5bb38ba 100644 --- a/riocli/network/create.py +++ b/riocli/network/create.py @@ -46,4 +46,4 @@ def create_network(name: str, network: str, **kwargs: typing.Any) -> None: create_native_network(name, **kwargs) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/network/delete.py b/riocli/network/delete.py index fc236d31..0ae802fe 100644 --- a/riocli/network/delete.py +++ b/riocli/network/delete.py @@ -45,4 +45,4 @@ def delete_network(force: bool, network_name: str, network_guid: str, network_ty click.secho('{} Network deleted successfully!'.format(network_type.capitalize()), fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/network/inspect.py b/riocli/network/inspect.py index 09389bd3..9f8acf07 100644 --- a/riocli/network/inspect.py +++ b/riocli/network/inspect.py @@ -37,4 +37,4 @@ def inspect_network(format_type: str, network_name: str, network_guid: str, netw inspect_with_format(data, format_type) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/network/list.py b/riocli/network/list.py index e23d350a..2e2c1fd7 100644 --- a/riocli/network/list.py +++ b/riocli/network/list.py @@ -40,7 +40,7 @@ def list_networks(network: str) -> None: _display_network_list(networks, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_network_list( diff --git a/riocli/network/logs.py b/riocli/network/logs.py index 4a61bbd5..0789d4ce 100644 --- a/riocli/network/logs.py +++ b/riocli/network/logs.py @@ -27,7 +27,7 @@ def network_logs(network_name: str, network_guid: str, network_type: str) -> Non if network_type == 'routed': # FIXME: For routed network, it returns Pod not found error click.secho('Not implemented yet!', fg='red') - exit(1) + raise SystemExit(1) elif network_type == 'native': native_network_logs(network_name, network_guid) @@ -43,4 +43,4 @@ def native_network_logs(network_name: str, network_guid: str) -> None: stream_deployment_logs(deployment.deploymentId, comp_id, exec_id, pod_name) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/network/model.py b/riocli/network/model.py new file mode 100644 index 00000000..26178458 --- /dev/null +++ b/riocli/network/model.py @@ -0,0 +1,112 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
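+# The Network model below maps one declarative manifest onto either a routed
+# or a native network. A minimal manifest that passes the bundled validator
+# (values are hypothetical but schema-valid) would look like:
+#
+#   apiVersion: apiextensions.rapyuta.io/v1
+#   kind: Network
+#   metadata:
+#     name: my-network
+#   spec:
+#     type: routed
+#     rosDistro: melodic
+#     runtime: cloud
+#     resourceLimits: small
+#
+# A device-runtime spec instead requires deviceGUID and networkInterface.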
+import typing +from typing import Union, Any, Dict + +from rapyuta_io import Client +from rapyuta_io.clients.native_network import NativeNetwork, NativeNetworkLimits, Parameters as NativeNetworkParameters +from rapyuta_io.clients.routed_network import RoutedNetwork, RoutedNetworkLimits, Parameters as RoutedNetworkParameters + +from riocli.model import Model +from riocli.network.util import find_network_name, NetworkNotFound +from riocli.network.validation import validate + + +class Network(Model): + _RoutedNetworkLimits = { + 'small': RoutedNetworkLimits.SMALL, + 'medium': RoutedNetworkLimits.MEDIUM, + 'large': RoutedNetworkLimits.LARGE, + } + + _NativeNetworkLimits = { + 'xSmall': NativeNetworkLimits.X_SMALL, + 'small': NativeNetworkLimits.SMALL, + 'medium': NativeNetworkLimits.MEDIUM, + 'large': NativeNetworkLimits.LARGE, + + } + + def __init__(self, *args, **kwargs): + self.update(*args, **kwargs) + + def find_object(self, client: Client) -> bool: + try: + network, _ = find_network_name(client, self.metadata.name, self.spec.type, is_resolve_conflict=False) + return network + except NetworkNotFound: + return False + + def create_object(self, client: Client) -> Union[NativeNetwork, RoutedNetwork]: + if self.spec.type == 'routed': + return self._create_routed_network(client) + + network = client.create_native_network(self.to_v1(client)) + return network + + def update_object(self, client: Client, obj: Union[RoutedNetwork, NativeNetwork]) -> Any: + # try: + # obj.delete() + # self.create_object(client) + # except Exception as e: + # click.secho(str(e), fg='red') + # raise SystemExit(1) + pass + + def delete_object(self, client: Client, obj: typing.Any) -> typing.Any: + obj.delete() + + @classmethod + def pre_process(cls, client: Client, d: Dict) -> None: + pass + + @staticmethod + def validate(data) -> None: + validate(data) + + def to_v1(self, client: Client) -> NativeNetwork: + if self.spec.runtime == 'cloud': + limits = self._get_limits() + parameters = NativeNetworkParameters(limits=limits) + else: + device = client.get_device(self.spec.deviceGUID) + parameters = NativeNetworkParameters(device=device, + network_interface=self.spec.networkInterface) + + return NativeNetwork(self.metadata.name, self.spec.runtime.lower(), self.spec.rosDistro, parameters=parameters) + + def _create_routed_network(self, client: Client) -> RoutedNetwork: + if self.spec.runtime == 'cloud': + network = self._create_cloud_routed_network(client) + else: + network = self._create_device_routed_network(client) + + return network + + def _create_cloud_routed_network(self, client: Client) -> RoutedNetwork: + limits = self._get_limits() + parameters = RoutedNetworkParameters(limits) + return client.create_cloud_routed_network(self.metadata.name, self.spec.rosDistro, True, parameters=parameters) + + def _create_device_routed_network(self, client: Client) -> RoutedNetwork: + device = client.get_device(self.spec.deviceGUID) + return client.create_device_routed_network(name=self.metadata.name, ros_distro=self.spec.rosDistro, shared=True, + device=device, + network_interface=self.spec.networkInterface) + + def _get_limits(self) -> Union[RoutedNetworkLimits, NativeNetworkLimits]: + if self.spec.type == 'routed': + return self._RoutedNetworkLimits[self.spec.resourceLimits] + else: + return self._NativeNetworkLimits[self.spec.resourceLimits] diff --git a/riocli/network/util.py b/riocli/network/util.py index 9721f933..758f2895 100644 --- a/riocli/network/util.py +++ b/riocli/network/util.py @@ -12,7 +12,7 @@ # See the License 
for the specific language governing permissions and # limitations under the License. import functools -import typing +from typing import Optional, Union, Tuple, Callable, Any import click from rapyuta_io import DeploymentPhaseConstants, Client @@ -23,12 +23,12 @@ from riocli.utils.selector import show_selection -def name_to_guid(f: typing.Callable) -> typing.Callable: +def name_to_guid(f: Callable) -> Callable: @functools.wraps(f) - def decorated(**kwargs: typing.Any): + def decorated(**kwargs: Any): client = new_client() - name = kwargs.pop('network_name', None) + name = kwargs.pop('network_name') network_type = kwargs.pop('network', None) guid = None @@ -56,7 +56,7 @@ def find_network_guid( client: Client, guid: str, network_type: str, -) -> (typing.Union[NativeNetwork, RoutedNetwork], str): +) -> Tuple[Union[RoutedNetwork, NativeNetwork], str]: if network_type is None or network_type == 'routed': routed_networks = client.get_all_routed_networks() for network in routed_networks: @@ -74,12 +74,15 @@ def find_network_guid( if network.guid == guid: return network, 'native' + raise NetworkNotFound() + def find_network_name( client: Client, name: str, - network_type: str, -) -> (typing.Union[RoutedNetwork, NativeNetwork], str): + network_type: Optional[str], + is_resolve_conflict: bool = True +) -> Tuple[Optional[Union[RoutedNetwork, NativeNetwork]], str]: routed, native = None, None if network_type in [None, 'routed']: routed = find_routed_network_name(client, name) @@ -87,10 +90,10 @@ def find_network_name( if network_type in [None, 'native']: native = find_native_network_name(client, name) - return resolve_conflict(routed, native, network_type) + return resolve_conflict(routed, native, network_type, is_resolve_conflict) -def find_native_network_name(client: Client, name: str) -> NativeNetwork: +def find_native_network_name(client: Client, name: str) -> Optional[NativeNetwork]: native_networks = client.list_native_networks() for network in native_networks: phase = network.internal_deployment_status.phase @@ -100,7 +103,7 @@ def find_native_network_name(client: Client, name: str) -> NativeNetwork: return network -def find_routed_network_name(client: Client, name: str) -> RoutedNetwork: +def find_routed_network_name(client: Client, name: str) -> Optional[RoutedNetwork]: routed_networks = client.get_all_routed_networks() for network in routed_networks: if network.phase == DeploymentPhaseConstants.DEPLOYMENT_STOPPED.value: @@ -110,16 +113,24 @@ def find_routed_network_name(client: Client, name: str) -> RoutedNetwork: def resolve_conflict( - routed: RoutedNetwork, - native: NativeNetwork, - network_type: str, -) -> (typing.Union[RoutedNetwork, NativeNetwork], str): - # If only routed, or only native network was found, there is no conflict to resolve. + routed: Optional[RoutedNetwork], + native: Optional[NativeNetwork], + network_type: Optional[str], + is_resolve_conflict: bool = True +) -> Tuple[Optional[Union[RoutedNetwork, NativeNetwork]], str]: + if not routed and not native: + raise NetworkNotFound() + + # If only routed, or only native network was found, there is no conflict to + # resolve. if routed and not native: return routed, 'routed' elif native and not routed: return native, 'native' + if not is_resolve_conflict: + raise NetworkConflict() + # Check if user already offered a choice in case of conflict if network_type: choice = network_type @@ -138,14 +149,14 @@ def resolve_conflict( return native, choice else: click.secho('Invalid choice. 
Try again', fg='red') - exit(1) + raise SystemExit(1) def get_network( client: Client, network_guid: str, network_type: str, -) -> typing.Union[RoutedNetwork, NativeNetwork]: +) -> Optional[Union[RoutedNetwork, NativeNetwork]]: if network_type == 'routed': return client.get_routed_network(network_guid) elif network_type == 'native': @@ -153,10 +164,21 @@ def get_network( def get_network_internal_deployment( - network: typing.Union[RoutedNetwork, NativeNetwork], + network: Union[RoutedNetwork, NativeNetwork], network_type: str, -) -> str: +) -> Optional[str]: if network_type == 'routed': return network.internalDeploymentGUID elif network_type == 'native': return network.internal_deployment_guid + + +class NetworkNotFound(Exception): + def __init__(self, message='network not found!'): + self.message = message + super().__init__(self.message) + +class NetworkConflict(Exception): + def __init__(self, message='both routed and native networks exist with the same name!'): + self.message = message + super().__init__(self.message) diff --git a/riocli/network/validation.py b/riocli/network/validation.py new file mode 100644 index 00000000..4215abba --- /dev/null +++ b/riocli/network/validation.py @@ -0,0 +1,214 @@ +VERSION = "2.16.1" +import re +from fastjsonschema import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z'), + '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z'), + '^network-[a-z]{24}$': re.compile('^network-[a-z]{24}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_network(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_network(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Network'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/networkGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'type': {'$ref': '#/definitions/networkType'}, 'rosDistro': {'$ref': '#/definitions/rosDistro'}, 'runtime': {'$ref': '#/definitions/runtime'}}, 'required': ['type', 'rosDistro', 'runtime'], 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['cloud']}, 'resourceLimits': {'$ref': '#/definitions/resourceLimits'}}, 'required': ['runtime', 'resourceLimits']}, {'properties': {'runtime': {'enum': ['device']}, 'deviceGUID': {'$ref': '#/definitions/uuid'}, 'networkInterface': {'type': 'string'}, 'restartPolicy': {'$ref': '#/definitions/restartPolicy', 'default': 'Always'}}, 'required': ['deviceGUID', 'networkInterface']}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 
'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Network'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/networkGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'type': {'$ref': '#/definitions/networkType'}, 'rosDistro': {'$ref': '#/definitions/rosDistro'}, 'runtime': {'$ref': '#/definitions/runtime'}}, 'required': ['type', 'rosDistro', 'runtime'], 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['cloud']}, 'resourceLimits': {'$ref': '#/definitions/resourceLimits'}}, 'required': ['runtime', 'resourceLimits']}, {'properties': {'runtime': {'enum': ['device']}, 'deviceGUID': {'$ref': '#/definitions/uuid'}, 'networkInterface': {'type': 'string'}, 'restartPolicy': {'$ref': '#/definitions/restartPolicy', 'default': 'Always'}}, 'required': ['deviceGUID', 'networkInterface']}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + data_keys = set(data.keys()) + if "apiVersion" in data_keys: + data_keys.remove("apiVersion") + data__apiVersion = data["apiVersion"] + if data__apiVersion != "apiextensions.rapyuta.io/v1": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 'apiextensions.rapyuta.io/v1'}, rule='const') + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "Network": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: Network", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'Network'}, rule='const') + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + if "spec" in data_keys: + data_keys.remove("spec") + data__spec = data["spec"] + validate___definitions_networkspec(data__spec, custom_formats, (name_prefix or "data") + ".spec") + return data + +def validate___definitions_networkspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'type': {'enum': ['routed', 'native']}, 'rosDistro': {'enum': ['melodic', 'kinetic', 'noetic']}, 'runtime': {'enum': ['cloud', 'device']}}, 'required': ['type', 'rosDistro', 'runtime'], 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['cloud']}, 'resourceLimits': {'enum': ['xSmall', 'small', 'medium', 'large']}}, 'required': ['runtime', 'resourceLimits']}, {'properties': {'runtime': {'enum': ['device']}, 'deviceGUID': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'networkInterface': {'type': 'string'}, 'restartPolicy': {'enum': ['always', 'never', 'onFailure']}}, 'required': ['deviceGUID', 'networkInterface']}]}}}, rule='type') + data_is_dict = isinstance(data, dict) 
+ if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['type', 'rosDistro', 'runtime']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['type', 'rosDistro', 'runtime'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'type': {'enum': ['routed', 'native']}, 'rosDistro': {'enum': ['melodic', 'kinetic', 'noetic']}, 'runtime': {'enum': ['cloud', 'device']}}, 'required': ['type', 'rosDistro', 'runtime'], 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['cloud']}, 'resourceLimits': {'enum': ['xSmall', 'small', 'medium', 'large']}}, 'required': ['runtime', 'resourceLimits']}, {'properties': {'runtime': {'enum': ['device']}, 'deviceGUID': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'networkInterface': {'type': 'string'}, 'restartPolicy': {'enum': ['always', 'never', 'onFailure']}}, 'required': ['deviceGUID', 'networkInterface']}]}}}, rule='required') + if "runtime" in data: + data_one_of_count1 = 0 + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['runtime', 'resourceLimits']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['runtime', 'resourceLimits'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'runtime': {'enum': ['cloud']}, 'resourceLimits': {'enum': ['xSmall', 'small', 'medium', 'large']}}, 'required': ['runtime', 'resourceLimits']}, rule='required') + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if data__runtime not in ['cloud']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be one of ['cloud']", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'enum': ['cloud']}, rule='enum') + if "resourceLimits" in data_keys: + data_keys.remove("resourceLimits") + data__resourceLimits = data["resourceLimits"] + validate___definitions_resourcelimits(data__resourceLimits, custom_formats, (name_prefix or "data") + ".resourceLimits") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['deviceGUID', 'networkInterface']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['deviceGUID', 'networkInterface'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'runtime': {'enum': ['device']}, 'deviceGUID': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'networkInterface': {'type': 'string'}, 'restartPolicy': {'enum': ['always', 'never', 'onFailure']}}, 'required': ['deviceGUID', 'networkInterface']}, rule='required') + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if data__runtime not in ['device']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be one of ['device']", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'enum': ['device']}, rule='enum') + if "deviceGUID" in data_keys: + data_keys.remove("deviceGUID") + data__deviceGUID = 
data["deviceGUID"] + validate___definitions_uuid(data__deviceGUID, custom_formats, (name_prefix or "data") + ".deviceGUID") + if "networkInterface" in data_keys: + data_keys.remove("networkInterface") + data__networkInterface = data["networkInterface"] + if not isinstance(data__networkInterface, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".networkInterface must be string", value=data__networkInterface, name="" + (name_prefix or "data") + ".networkInterface", definition={'type': 'string'}, rule='type') + if "restartPolicy" in data_keys: + data_keys.remove("restartPolicy") + data__restartPolicy = data["restartPolicy"] + validate___definitions_restartpolicy(data__restartPolicy, custom_formats, (name_prefix or "data") + ".restartPolicy") + else: data["restartPolicy"] = 'Always' + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'enum': ['cloud']}, 'resourceLimits': {'enum': ['xSmall', 'small', 'medium', 'large']}}, 'required': ['runtime', 'resourceLimits']}, {'properties': {'runtime': {'enum': ['device']}, 'deviceGUID': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'networkInterface': {'type': 'string'}, 'restartPolicy': {'enum': ['always', 'never', 'onFailure']}}, 'required': ['deviceGUID', 'networkInterface']}]}, rule='oneOf') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + validate___definitions_networktype(data__type, custom_formats, (name_prefix or "data") + ".type") + if "rosDistro" in data_keys: + data_keys.remove("rosDistro") + data__rosDistro = data["rosDistro"] + validate___definitions_rosdistro(data__rosDistro, custom_formats, (name_prefix or "data") + ".rosDistro") + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + validate___definitions_runtime(data__runtime, custom_formats, (name_prefix or "data") + ".runtime") + return data + +def validate___definitions_runtime(data, custom_formats={}, name_prefix=None): + if data not in ['cloud', 'device']: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be one of ['cloud', 'device']", value=data, name="" + (name_prefix or "data") + "", definition={'enum': ['cloud', 'device']}, rule='enum') + return data + +def validate___definitions_rosdistro(data, custom_formats={}, name_prefix=None): + if data not in ['melodic', 'kinetic', 'noetic']: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be one of ['melodic', 'kinetic', 'noetic']", value=data, name="" + (name_prefix or "data") + "", definition={'enum': ['melodic', 'kinetic', 'noetic']}, rule='enum') + return data + +def validate___definitions_networktype(data, custom_formats={}, name_prefix=None): + if data not in ['routed', 'native']: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be one of ['routed', 'native']", value=data, name="" + (name_prefix or "data") + "", definition={'enum': ['routed', 'native']}, rule='enum') + return data + +def validate___definitions_restartpolicy(data, custom_formats={}, name_prefix=None): + if data not in ['always', 'never', 'onFailure']: + raise JsonSchemaValueException("" + (name_prefix or 
"data") + " must be one of ['always', 'never', 'onFailure']", value=data, name="" + (name_prefix or "data") + "", definition={'enum': ['always', 'never', 'onFailure']}, rule='enum') + return data + +def validate___definitions_uuid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern') + return data + +def validate___definitions_resourcelimits(data, custom_formats={}, name_prefix=None): + if data not in ['xSmall', 'small', 'medium', 'large']: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be one of ['xSmall', 'small', 'medium', 'large']", value=data, name="" + (name_prefix or "data") + "", definition={'enum': ['xSmall', 'small', 'medium', 'large']}, rule='enum') + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^network-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^network-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_networkguid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + 
validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_projectguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_networkguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^network-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^network-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^network-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^network-[a-z]{24}$'}, rule='pattern') + return data \ No newline at end of file diff --git a/riocli/package/create.py b/riocli/package/create.py index 5de42c08..f97fb2f6 100644 --- a/riocli/package/create.py +++ b/riocli/package/create.py @@ -42,4 +42,4 @@ def create_package(manifest: click.File, format_type: str) -> None: click.secho('Package created successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/package/delete.py b/riocli/package/delete.py index 3640b753..7a05b97c 100644 --- a/riocli/package/delete.py +++ b/riocli/package/delete.py @@ -37,4 +37,4 @@ def delete_package(force: bool, package_name: str, package_guid: str) -> None: click.secho('Package deleted successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/package/deployment.py 
b/riocli/package/deployment.py index 65926e2f..ca80b0da 100644 --- a/riocli/package/deployment.py +++ b/riocli/package/deployment.py @@ -35,4 +35,4 @@ def list_package_deployments(package_name: str, package_guid: str) -> None: display_deployment_list(deployments, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/package/inspect.py b/riocli/package/inspect.py index 0ca92f74..b0c89baf 100644 --- a/riocli/package/inspect.py +++ b/riocli/package/inspect.py @@ -37,7 +37,7 @@ def inspect_package(format_type: str, package_name: str, package_guid: str) -> N inspect_with_format(data, format_type) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def make_package_inspectable(package: Package) -> dict: diff --git a/riocli/package/list.py b/riocli/package/list.py index f98837c3..8b42a620 100644 --- a/riocli/package/list.py +++ b/riocli/package/list.py @@ -32,29 +32,38 @@ def list_packages(filter_word: str) -> None: _display_package_list(packages, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_package_list( packages: typing.List[Package], show_header: bool = True, - truncate_limit: int = 32, + truncate_limit: int = 48, ) -> None: if show_header: - click.secho('{:30} {:34} {:10} {:<32}'. - format('Package ID', 'Name', 'Version', 'Description'), + click.secho('{:30} {:10} {:34} {:<48}'. + format('Name', 'Version', 'Package ID', 'Description'), fg='yellow') # Show IO Packages first - packages.sort(key=lambda p: p.packageId) - - for package in packages: - description = package.description - name = package.packageName - if truncate_limit: - if len(description) > truncate_limit: - description = description[:truncate_limit] + '..' - if len(name) > truncate_limit: - name = name[:truncate_limit] + '..' - click.echo('{:30} {:34} {:10} {:<32}'. - format(package.packageId, name, package.packageVersion, description)) + iter_pkg = list(map(lambda x: x.packageName, packages)) + iter_pkg.sort() + + package_dict = {} + for pkgName in iter_pkg: + filtered_pkg = list(filter(lambda x: x.packageName == pkgName, packages)) + filtered_pkg.sort(key=lambda x: x.packageVersion) + package_dict[pkgName] = filtered_pkg + + + for pkgName, pkgVersionList in package_dict.items(): + for package in pkgVersionList: + description = package.description + name = package.packageName + if truncate_limit: + if len(description) > truncate_limit: + description = description[:truncate_limit] + '..' + if len(name) > truncate_limit: + name = name[:truncate_limit] + '..' + click.echo('{:30} {:10} {:34} {:<48}'. + format(name, package.packageVersion, package.packageId, description)) diff --git a/riocli/package/model.py b/riocli/package/model.py new file mode 100644 index 00000000..458490c7 --- /dev/null +++ b/riocli/package/model.py @@ -0,0 +1,246 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
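+# Package flattens the single-component manifest into the v2 package payload
+# that client.create_package() accepts. A minimal manifest that the bundled
+# validator accepts (values are hypothetical) would be:
+#
+#   apiVersion: apiextensions.rapyuta.io/v1
+#   kind: Package
+#   metadata:
+#     name: my-package
+#     version: v1.0.0
+#   spec:
+#     runtime: cloud
+#     executables:
+#       - name: main
+#         type: docker
+#         docker:
+#           image: ubuntu:20.04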
+import typing
+import os
+
+import click
+from munch import munchify
+from rapyuta_io import Project as v1Project, Client
+
+from riocli.model import Model
+# from riocli.package.util import find_project_guid, ProjectNotFound
+from riocli.package.validation import validate
+from rapyuta_io.clients.package import RestartPolicy
+
+
+class Package(Model):
+    RESTART_POLICY = {
+        'always': RestartPolicy.Always,
+        'never': RestartPolicy.Never,
+        'onfailure': RestartPolicy.OnFailure
+    }
+
+    def __init__(self, *args, **kwargs):
+        self.update(*args, **kwargs)
+
+    def find_object(self, client: Client):
+        guid, obj = self.rc.find_depends({"kind": self.kind.lower(), "nameOrGUID": self.metadata.name},
+                                         self.metadata.version)
+        if not guid:
+            return False
+
+        return obj
+
+    def create_object(self, client: Client):
+        # click.secho('{}/{} {} created'.format(self.apiVersion, self.kind, self.metadata.name), fg='green')
+        pkg_object = munchify({
+            'name': 'default',
+            'packageVersion': 'v1.0.0',
+            'apiVersion': "2.1.0",
+            'plans': [
+                {
+                    "inboundROSInterfaces": {
+                        "anyIncomingScopedOrTargetedRosConfig": False
+                    },
+                    'singleton': False,
+                    'bindable': True,
+                    'name': 'default',
+                    'dependentDeployments': [],
+                    'exposedParameters': [],
+                    'components': []
+                }
+            ],
+        })
+        component_obj = munchify({
+            'requiredRuntime': 'cloud',
+            'architecture': 'amd64',
+            'executables': [],
+            'parameters': [],
+            'ros': {'services': [], 'topics': [], 'isROS': False, 'actions': []},
+            'exposedParameters': [],
+            'metadata': {},
+        })
+
+        # metadata
+        # ✓ name, ✓ description, ✓ version
+        pkg_object.name = self.metadata.name
+        pkg_object.packageVersion = self.metadata.version
+
+        if 'description' in self.metadata:
+            pkg_object.description = self.metadata.description
+
+        # spec
+        # executables
+        component_obj.name = 'default'  # package == component in the single-component model
+
+        # TODO validate transform. especially nested secret.
+        component_obj.executables = list(map(self._map_executable, self.spec.executables))
+        component_obj.requiredRuntime = self.spec.runtime
+
+        # ✓ parameters
+        # TODO validate transform.
+        if 'environmentVars' in self.spec:
+            fixed_default = []
+            for envVar in self.spec.environmentVars:
+                obj = envVar.copy()
+                if 'defaultValue' in obj:
+                    obj['default'] = obj['defaultValue']
+                    del obj['defaultValue']
+
+                fixed_default.append(obj)
+            component_obj.parameters = fixed_default
+
+            # handle exposed params
+            exposed_parameters = []
+            for entry in filter(lambda x: 'exposed' in x and x.exposed, self.spec.environmentVars):
+                if os.environ.get('DEBUG'):
+                    print(entry.name)
+                exposed_parameters.append({'component': component_obj.name, 'param': entry.name, 'targetParam': entry.exposedName})
+            pkg_object.plans[0].exposedParameters = exposed_parameters
+
+        # device
+        # ✓ arch, ✓ restart
+        if self.spec.runtime == 'device':
+            component_obj.requiredRuntime = 'device'
+            component_obj.architecture = self.spec.device.arch
+            if 'restart' in self.spec.device:
+                component_obj.restart_policy = self.RESTART_POLICY[self.spec.device.restart.lower()]
+
+        # cloud
+        # ✓ replicas
+        # ✓ endpoints
+        if 'cloud' in self.spec:
+            component_obj.cloudInfra = munchify(dict())
+            if 'replicas' in self.spec.cloud:
+                component_obj.cloudInfra.replicas = self.spec.cloud.replicas
+            else:
+                component_obj.cloudInfra.replicas = 1
+
+        if 'endpoints' in self.spec:
+            endpoints = list(map(self._map_endpoints, self.spec.endpoints))
+            component_obj.cloudInfra.endpoints = endpoints
+
+        # ros:
+        # ✓ isros
+        # ✓ topic
+        # ✓ service
+        # ✓ action
+        # rosbagjob
+        if 'ros' in self.spec:
+            component_obj.ros.isROS = True
+            component_obj.ros.ros_distro = self.spec.ros.version
+            pkg_object.inboundROSInterfaces = munchify({})
+
+            pkg_object.inboundROSInterfaces.anyIncomingScopedOrTargetedRosConfig = self.spec.ros.inboundScopedTargeted if 'inboundScopedTargeted' in self.spec.ros else False
+            if 'rosEndpoints' in self.spec.ros:
+                component_obj.ros.topics = list(self._get_rosendpoint_struct(self.spec.ros.rosEndpoints, 'topic'))
+                component_obj.ros.services = list(self._get_rosendpoint_struct(self.spec.ros.rosEndpoints, 'service'))
+                component_obj.ros.actions = list(self._get_rosendpoint_struct(self.spec.ros.rosEndpoints, 'action'))
+
+        pkg_object.plans[0].components = [component_obj]
+        return client.create_package(pkg_object)
+
+    def update_object(self, client: Client, obj: typing.Any) -> typing.Any:
+        pass
+
+    def delete_object(self, client: Client, obj: typing.Any) -> typing.Any:
+        client.delete_package(obj.packageId)
+
+    def to_v1(self):
+        # return v1Project(self.metadata.name)
+        pass
+
+    def _get_rosendpoint_struct(self, rosEndpoints, filter_type):
+        topic_list = filter(lambda x: x.type == filter_type, rosEndpoints)
+        return_list = []
+        for topic in topic_list:
+            if topic.compression is False:
+                topic.compression = ""
+            else:
+                topic.compression = "snappy"
+            return_list.append(topic)
+        return return_list
+
+    def _map_executable(self, exec):
+        exec_object = munchify({
+            "name": exec.name,
+            "simulationOptions": {
+                "simulation": exec.simulation if 'simulation' in exec else False
+            }
+        })
+
+        if 'limits' in exec:
+            exec_object.limits = {
+                "cpu": exec.limits.cpu,
+                "memory": exec.limits.memory
+            }
+
+        if exec.runAsBash:
+            if 'command' in exec:
+                exec_object.cmd = ['/bin/bash', '-c', exec.command]
+        else:
+            # TODO verify this is right for secret?
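+            # Without runAsBash the command is passed as a single argv entry
+            # instead of being wrapped in ['/bin/bash', '-c', command].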
+ if 'command' in exec: + exec_object.cmd = [exec.command] + + + if exec.type == 'docker': + exec_object.docker = exec.docker.image + if 'pullSecret' in exec.docker and exec.docker.pullSecret.depends: + secret_guid, secret = self.rc.find_depends(exec.docker.pullSecret.depends) + exec_object.secret = secret_guid + + if exec.type == 'build': + exec_object.buildGUID = exec.build.depends.guid + #TODO verify this is right for secret? + # if exec.docker.pullSecret and exec.docker.pullSecret.depends and exec.docker.pullSecret.depends.guid: + # exec_object.secret = exec.docker.pullSecret.depends.guid + + #TODO handle preinstalled + + return exec_object + + def _map_endpoints(self, endpoint): + exposedExternally = endpoint.type.split("-")[0] == 'external' + proto = "-".join(endpoint.type.split("-")[1:]) + if 'tls-tcp' in proto: + proto = 'tcp' + + + if 'range' in endpoint.type: + proto = proto.replace("-range", '') + return { + "name": endpoint.name, "exposeExternally": exposedExternally, + "portRange": endpoint.portRange, "proto": proto.upper()} + else: + return { + "name": endpoint.name, "exposeExternally": exposedExternally, + "port": endpoint.port, "targetPort": endpoint.targetPort, "proto": proto.upper()} + + @classmethod + def pre_process(cls, client: Client, d: typing.Dict) -> None: + pass + + @staticmethod + def validate(data) -> None: + validate(data) diff --git a/riocli/package/util.py b/riocli/package/util.py index 86d8ec9b..2fa68959 100644 --- a/riocli/package/util.py +++ b/riocli/package/util.py @@ -28,7 +28,7 @@ def decorated(**kwargs: typing.Any): client = new_client() except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) name = kwargs.pop('package_name') guid = None @@ -60,7 +60,7 @@ def find_package_guid(client: Client, name: str, version: str = None) -> str: packages = client.get_all_packages(name=name, version=version) if len(packages) == 0: click.secho("package not found", fg='red') - exit(1) + raise SystemExit(1) if len(packages) == 1: return packages[0].packageId diff --git a/riocli/package/validation.py b/riocli/package/validation.py new file mode 100644 index 00000000..92a70cf6 --- /dev/null +++ b/riocli/package/validation.py @@ -0,0 +1,1001 @@ +VERSION = "2.16.1" +import re +from fastjsonschema import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z'), + '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z'), + '^pkg-[a-z]{24}$': re.compile('^pkg-[a-z]{24}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_package(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_package(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Package', 'default': 'Package'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'version': {'type': 'string'}, 'tag': {'type': 'string'}, 'description': {'type': 'string'}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': 
'#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name', 'version']}, 'spec': {'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', '$ref': '#/definitions/rosComponentSpec'}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', '$ref': '#/definitions/deviceComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/deviceExecutableSpec'}}, 'environmentArgs': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', '$ref': '#/definitions/cloudComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/cloudExecutableSpec'}}, 'environmentVars': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}, 'endpoints': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Package', 'default': 'Package'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'version': {'type': 'string'}, 'tag': {'type': 'string'}, 'description': {'type': 'string'}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name', 'version']}, 'spec': {'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', '$ref': '#/definitions/rosComponentSpec'}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', '$ref': '#/definitions/deviceComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/deviceExecutableSpec'}}, 'environmentArgs': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', '$ref': '#/definitions/cloudComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/cloudExecutableSpec'}}, 'environmentVars': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}, 'endpoints': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + data_keys = set(data.keys()) + if "apiVersion" in data_keys: + data_keys.remove("apiVersion") + data__apiVersion = data["apiVersion"] + if data__apiVersion != "apiextensions.rapyuta.io/v1": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 
'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, rule='const') + else: data["apiVersion"] = 'apiextensions.rapyuta.io/v1' + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "Package": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: Package", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'Package', 'default': 'Package'}, rule='const') + else: data["kind"] = 'Package' + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + if "spec" in data_keys: + data_keys.remove("spec") + data__spec = data["spec"] + validate___definitions_componentspec(data__spec, custom_formats, (name_prefix or "data") + ".spec") + return data + +def validate___definitions_componentspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', 'properties': {'enabled': {'type': 'boolean', 'default': False}}, 'dependencies': {'enabled': {'oneOf': [{'properties': {'enabled': {'enum': [False]}}}, {'properties': {'enabled': {'type': 'boolean', 'enum': [True]}, 'version': {'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, 'inboundScopedTargeted': {'type': 'boolean', 'default': False}, 'rosEndpoints': {'type': 'array', 'items': {'$ref': '#/definitions/rosEndpointSpec'}}}}]}}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', 'properties': {'arch': {'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, 'restart': {'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'$ref': '#/definitions/secretDepends'}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', 'properties': {'replicas': {'type': 'number', 'default': 1}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 
'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32678}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentVars': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, 'endpoints': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + if "runtime" in data: + data_one_of_count1 = 0 + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if data__runtime not in ['device']: + raise JsonSchemaValueException("" + (name_prefix or 
"data") + ".runtime must be one of ['device']", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'enum': ['device']}, rule='enum') + if "device" in data_keys: + data_keys.remove("device") + data__device = data["device"] + validate___definitions_devicecomponentinfospec(data__device, custom_formats, (name_prefix or "data") + ".device") + if "executables" in data_keys: + data_keys.remove("executables") + data__executables = data["executables"] + if not isinstance(data__executables, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".executables must be array", value=data__executables, name="" + (name_prefix or "data") + ".executables", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'$ref': '#/definitions/secretDepends'}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, rule='type') + data__executables_is_list = isinstance(data__executables, (list, tuple)) + if data__executables_is_list: + data__executables_len = len(data__executables) + for data__executables_x, data__executables_item in enumerate(data__executables): + validate___definitions_deviceexecutablespec(data__executables_item, custom_formats, (name_prefix or "data") + ".executables[{data__executables_x}]") + if "environmentArgs" in data_keys: + data_keys.remove("environmentArgs") + data__environmentArgs = data["environmentArgs"] + if not isinstance(data__environmentArgs, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".environmentArgs must be array", value=data__environmentArgs, name="" + (name_prefix or "data") + ".environmentArgs", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, rule='type') + data__environmentArgs_is_list = isinstance(data__environmentArgs, (list, tuple)) + if data__environmentArgs_is_list: + data__environmentArgs_len = len(data__environmentArgs) + for data__environmentArgs_x, data__environmentArgs_item in enumerate(data__environmentArgs): + validate___definitions_environmentspec(data__environmentArgs_item, custom_formats, (name_prefix or "data") + ".environmentArgs[{data__environmentArgs_x}]") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if data__runtime not in ['cloud']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be one of ['cloud']", 
value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'enum': ['cloud']}, rule='enum') + if "cloud" in data_keys: + data_keys.remove("cloud") + data__cloud = data["cloud"] + validate___definitions_cloudcomponentinfospec(data__cloud, custom_formats, (name_prefix or "data") + ".cloud") + if "executables" in data_keys: + data_keys.remove("executables") + data__executables = data["executables"] + if not isinstance(data__executables, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".executables must be array", value=data__executables, name="" + (name_prefix or "data") + ".executables", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32678}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, rule='type') + data__executables_is_list = isinstance(data__executables, (list, tuple)) + if data__executables_is_list: + data__executables_len = len(data__executables) + for data__executables_x, data__executables_item in enumerate(data__executables): + validate___definitions_cloudexecutablespec(data__executables_item, custom_formats, (name_prefix or "data") + ".executables[{data__executables_x}]") + if "environmentVars" in data_keys: + data_keys.remove("environmentVars") + data__environmentVars = data["environmentVars"] + if not isinstance(data__environmentVars, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".environmentVars must be array", value=data__environmentVars, name="" + (name_prefix or "data") + ".environmentVars", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, rule='type') + data__environmentVars_is_list = isinstance(data__environmentVars, (list, tuple)) + if data__environmentVars_is_list: + data__environmentVars_len = len(data__environmentVars) + for data__environmentVars_x, data__environmentVars_item in enumerate(data__environmentVars): + validate___definitions_environmentspec(data__environmentVars_item, custom_formats, (name_prefix or "data") + ".environmentVars[{data__environmentVars_x}]") + if "endpoints" in data_keys: + data_keys.remove("endpoints") + data__endpoints = data["endpoints"] + if not isinstance(data__endpoints, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".endpoints 
must be array", value=data__endpoints, name="" + (name_prefix or "data") + ".endpoints", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}}, rule='type') + data__endpoints_is_list = isinstance(data__endpoints, (list, tuple)) + if data__endpoints_is_list: + data__endpoints_len = len(data__endpoints) + for data__endpoints_x, data__endpoints_item in enumerate(data__endpoints): + validate___definitions_endpointspec(data__endpoints_item, custom_formats, (name_prefix or "data") + ".endpoints[{data__endpoints_x}]") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', 'properties': {'arch': {'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, 'restart': {'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'$ref': '#/definitions/secretDepends'}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentArgs': 
{'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', 'properties': {'replicas': {'type': 'number', 'default': 1}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32678}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentVars': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, 'endpoints': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 
1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "runtime" in data_keys: + data_keys.remove("runtime") + data__runtime = data["runtime"] + if not isinstance(data__runtime, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be string", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, rule='type') + if data__runtime not in ['device', 'cloud']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runtime must be one of ['device', 'cloud']", value=data__runtime, name="" + (name_prefix or "data") + ".runtime", definition={'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, rule='enum') + else: data["runtime"] = 'cloud' + if "ros" in data_keys: + data_keys.remove("ros") + data__ros = data["ros"] + validate___definitions_roscomponentspec(data__ros, custom_formats, (name_prefix or "data") + ".ros") + return data + +def validate___definitions_roscomponentspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'enabled': {'type': 'boolean', 'default': False}}, 'dependencies': {'enabled': {'oneOf': [{'properties': {'enabled': {'enum': [False]}}}, {'properties': {'enabled': {'type': 'boolean', 'enum': [True]}, 'version': {'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, 'inboundScopedTargeted': {'type': 'boolean', 'default': False}, 'rosEndpoints': {'type': 'array', 'items': {'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'topic', 'enum': ['topic', 'service', 'action']}, 'name': {'type': 'string'}, 'compression': {'type': 'boolean', 'default': False}, 'scoped': {'type': 'boolean', 'default': False}, 'targeted': {'type': 'boolean', 'default': False}}, 'required': ['type', 'name'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['topic']}, 'qos': {'type': 'string', 'enum': ['low', 'medium', 'hi', 'max'], 'default': 'low'}}}, {'properties': {'type': {'enum': ['service']}, 'timeout': {'type': 'number', 'default': 120, 'min': 0}}}, {'properties': {'type': {'enum': ['action']}}}]}}}}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + if "enabled" in data: + data_one_of_count2 = 0 + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "enabled" in data_keys: + data_keys.remove("enabled") + data__enabled = data["enabled"] + if data__enabled not in [False]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".enabled must be one of [False]", value=data__enabled, name="" + (name_prefix or "data") + ".enabled", definition={'enum': [False]}, rule='enum') + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "enabled" in data_keys: + data_keys.remove("enabled") + data__enabled = data["enabled"] + if not isinstance(data__enabled, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".enabled must be 
boolean", value=data__enabled, name="" + (name_prefix or "data") + ".enabled", definition={'type': 'boolean', 'enum': [True]}, rule='type') + if data__enabled not in [True]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".enabled must be one of [True]", value=data__enabled, name="" + (name_prefix or "data") + ".enabled", definition={'type': 'boolean', 'enum': [True]}, rule='enum') + if "version" in data_keys: + data_keys.remove("version") + data__version = data["version"] + if not isinstance(data__version, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, rule='type') + if data__version not in ['kinetic', 'melodic', 'noetic']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be one of ['kinetic', 'melodic', 'noetic']", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, rule='enum') + else: data["version"] = 'melodic' + if "inboundScopedTargeted" in data_keys: + data_keys.remove("inboundScopedTargeted") + data__inboundScopedTargeted = data["inboundScopedTargeted"] + if not isinstance(data__inboundScopedTargeted, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".inboundScopedTargeted must be boolean", value=data__inboundScopedTargeted, name="" + (name_prefix or "data") + ".inboundScopedTargeted", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["inboundScopedTargeted"] = False + if "rosEndpoints" in data_keys: + data_keys.remove("rosEndpoints") + data__rosEndpoints = data["rosEndpoints"] + if not isinstance(data__rosEndpoints, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosEndpoints must be array", value=data__rosEndpoints, name="" + (name_prefix or "data") + ".rosEndpoints", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'topic', 'enum': ['topic', 'service', 'action']}, 'name': {'type': 'string'}, 'compression': {'type': 'boolean', 'default': False}, 'scoped': {'type': 'boolean', 'default': False}, 'targeted': {'type': 'boolean', 'default': False}}, 'required': ['type', 'name'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['topic']}, 'qos': {'type': 'string', 'enum': ['low', 'medium', 'hi', 'max'], 'default': 'low'}}}, {'properties': {'type': {'enum': ['service']}, 'timeout': {'type': 'number', 'default': 120, 'min': 0}}}, {'properties': {'type': {'enum': ['action']}}}]}}}}, rule='type') + data__rosEndpoints_is_list = isinstance(data__rosEndpoints, (list, tuple)) + if data__rosEndpoints_is_list: + data__rosEndpoints_len = len(data__rosEndpoints) + for data__rosEndpoints_x, data__rosEndpoints_item in enumerate(data__rosEndpoints): + validate___definitions_rosendpointspec(data__rosEndpoints_item, custom_formats, (name_prefix or "data") + ".rosEndpoints[{data__rosEndpoints_x}]") + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count2) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'enabled': {'enum': [False]}}}, 
{'properties': {'enabled': {'type': 'boolean', 'enum': [True]}, 'version': {'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, 'inboundScopedTargeted': {'type': 'boolean', 'default': False}, 'rosEndpoints': {'type': 'array', 'items': {'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'topic', 'enum': ['topic', 'service', 'action']}, 'name': {'type': 'string'}, 'compression': {'type': 'boolean', 'default': False}, 'scoped': {'type': 'boolean', 'default': False}, 'targeted': {'type': 'boolean', 'default': False}}, 'required': ['type', 'name'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['topic']}, 'qos': {'type': 'string', 'enum': ['low', 'medium', 'hi', 'max'], 'default': 'low'}}}, {'properties': {'type': {'enum': ['service']}, 'timeout': {'type': 'number', 'default': 120, 'min': 0}}}, {'properties': {'type': {'enum': ['action']}}}]}}}}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "enabled" in data_keys: + data_keys.remove("enabled") + data__enabled = data["enabled"] + if not isinstance(data__enabled, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".enabled must be boolean", value=data__enabled, name="" + (name_prefix or "data") + ".enabled", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["enabled"] = False + return data + +def validate___definitions_rosendpointspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'topic', 'enum': ['topic', 'service', 'action']}, 'name': {'type': 'string'}, 'compression': {'type': 'boolean', 'default': False}, 'scoped': {'type': 'boolean', 'default': False}, 'targeted': {'type': 'boolean', 'default': False}}, 'required': ['type', 'name'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['topic']}, 'qos': {'type': 'string', 'enum': ['low', 'medium', 'hi', 'max'], 'default': 'low'}}}, {'properties': {'type': {'enum': ['service']}, 'timeout': {'type': 'number', 'default': 120, 'min': 0}}}, {'properties': {'type': {'enum': ['action']}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['type', 'name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['type', 'name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'topic', 'enum': ['topic', 'service', 'action']}, 'name': {'type': 'string'}, 'compression': {'type': 'boolean', 'default': False}, 'scoped': {'type': 'boolean', 'default': False}, 'targeted': {'type': 'boolean', 'default': False}}, 'required': ['type', 'name'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['topic']}, 'qos': {'type': 'string', 'enum': ['low', 'medium', 'hi', 'max'], 'default': 'low'}}}, {'properties': {'type': {'enum': ['service']}, 'timeout': {'type': 'number', 'default': 120, 'min': 0}}}, {'properties': {'type': {'enum': ['action']}}}]}}}, rule='required') + if "type" in data: + data_one_of_count3 = 0 + if data_one_of_count3 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + 
data__type = data["type"] + if data__type not in ['topic']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['topic']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['topic']}, rule='enum') + if "qos" in data_keys: + data_keys.remove("qos") + data__qos = data["qos"] + if not isinstance(data__qos, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".qos must be string", value=data__qos, name="" + (name_prefix or "data") + ".qos", definition={'type': 'string', 'enum': ['low', 'medium', 'hi', 'max'], 'default': 'low'}, rule='type') + if data__qos not in ['low', 'medium', 'hi', 'max']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".qos must be one of ['low', 'medium', 'hi', 'max']", value=data__qos, name="" + (name_prefix or "data") + ".qos", definition={'type': 'string', 'enum': ['low', 'medium', 'hi', 'max'], 'default': 'low'}, rule='enum') + else: data["qos"] = 'low' + data_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data_one_of_count3 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['service']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['service']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['service']}, rule='enum') + if "timeout" in data_keys: + data_keys.remove("timeout") + data__timeout = data["timeout"] + if not isinstance(data__timeout, (int, float)) or isinstance(data__timeout, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeout must be number", value=data__timeout, name="" + (name_prefix or "data") + ".timeout", definition={'type': 'number', 'default': 120, 'min': 0}, rule='type') + else: data["timeout"] = 120 + data_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data_one_of_count3 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['action']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['action']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['action']}, rule='enum') + data_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data_one_of_count3 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count3) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['topic']}, 'qos': {'type': 'string', 'enum': ['low', 'medium', 'hi', 'max'], 'default': 'low'}}}, {'properties': {'type': {'enum': ['service']}, 'timeout': {'type': 'number', 'default': 120, 'min': 0}}}, {'properties': {'type': {'enum': ['action']}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if not isinstance(data__type, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be string", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'topic', 'enum': ['topic', 'service', 'action']}, rule='type') + if data__type not in ['topic', 'service', 
'action']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['topic', 'service', 'action']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'topic', 'enum': ['topic', 'service', 'action']}, rule='enum') + else: data["type"] = 'topic' + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "compression" in data_keys: + data_keys.remove("compression") + data__compression = data["compression"] + if not isinstance(data__compression, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".compression must be boolean", value=data__compression, name="" + (name_prefix or "data") + ".compression", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["compression"] = False + if "scoped" in data_keys: + data_keys.remove("scoped") + data__scoped = data["scoped"] + if not isinstance(data__scoped, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".scoped must be boolean", value=data__scoped, name="" + (name_prefix or "data") + ".scoped", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["scoped"] = False + if "targeted" in data_keys: + data_keys.remove("targeted") + data__targeted = data["targeted"] + if not isinstance(data__targeted, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".targeted must be boolean", value=data__targeted, name="" + (name_prefix or "data") + ".targeted", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["targeted"] = False + return data + +def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': 
{'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name', 'type']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name', 'type'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}, rule='required') + if "type" in data: + data_one_of_count4 = 0 + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['port', 'targetPort']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['port', 'targetPort'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'type': {'enum': ['external-http']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, rule='required') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['external-http']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['external-http']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['external-http']}, rule='enum') + if "port" in data_keys: + data_keys.remove("port") + data__port = data["port"] + validate___definitions_portnumber(data__port, custom_formats, (name_prefix or "data") + ".port") + else: data["port"] = 80 + if "targetPort" in data_keys: + data_keys.remove("targetPort") + data__targetPort = data["targetPort"] + 
validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['port', 'targetPort']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['port', 'targetPort'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'type': {'enum': ['external-https']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, rule='required') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['external-https']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['external-https']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['external-https']}, rule='enum') + if "port" in data_keys: + data_keys.remove("port") + data__port = data["port"] + validate___definitions_portnumber(data__port, custom_formats, (name_prefix or "data") + ".port") + else: data["port"] = 443 + if "targetPort" in data_keys: + data_keys.remove("targetPort") + data__targetPort = data["targetPort"] + validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['port', 'targetPort']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['port', 'targetPort'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, rule='required') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['external-tls-tcp']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['external-tls-tcp']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['external-tls-tcp']}, rule='enum') + if "port" in data_keys: + data_keys.remove("port") + data__port = data["port"] + validate___definitions_portnumber(data__port, custom_formats, (name_prefix or "data") + ".port") + else: data["port"] = 443 + if "targetPort" in data_keys: + data_keys.remove("targetPort") + data__targetPort = data["targetPort"] + validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['port', 'targetPort']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['port', 'targetPort'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 
'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, rule='required') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['internal-tcp']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['internal-tcp']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['internal-tcp']}, rule='enum') + if "port" in data_keys: + data_keys.remove("port") + data__port = data["port"] + validate___definitions_portnumber(data__port, custom_formats, (name_prefix or "data") + ".port") + else: data["port"] = 80 + if "targetPort" in data_keys: + data_keys.remove("targetPort") + data__targetPort = data["targetPort"] + validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['port', 'targetPort']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['port', 'targetPort'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'type': {'enum': ['internal-udp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, rule='required') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['internal-udp']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['internal-udp']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['internal-udp']}, rule='enum') + if "port" in data_keys: + data_keys.remove("port") + data__port = data["port"] + validate___definitions_portnumber(data__port, custom_formats, (name_prefix or "data") + ".port") + else: data["port"] = 80 + if "targetPort" in data_keys: + data_keys.remove("targetPort") + data__targetPort = data["targetPort"] + validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['portRange']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['portRange'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, rule='required') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['internal-tcp-range']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['internal-tcp-range']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['internal-tcp-range']}, rule='enum') + if "portRange" in data_keys: + data_keys.remove("portRange") + data__portRange = data["portRange"] + if not isinstance(data__portRange, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".portRange must be string", 
value=data__portRange, name="" + (name_prefix or "data") + ".portRange", definition={'type': 'string', 'default': '22,80, 1024-1030'}, rule='type') + else: data["portRange"] = '22,80, 1024-1030' + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['portRange']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['portRange'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}, rule='required') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['internal-udp-range']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['internal-udp-range']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['internal-udp-range']}, rule='enum') + if "portRange" in data_keys: + data_keys.remove("portRange") + data__portRange = data["portRange"] + if not isinstance(data__portRange, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".portRange must be string", value=data__portRange, name="" + (name_prefix or "data") + ".portRange", definition={'type': 'string', 'default': '53,1024-1025'}, rule='type') + else: data["portRange"] = '53,1024-1025' + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count4) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}, rule='oneOf') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", 
definition={'type': 'string'}, rule='type') + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if not isinstance(data__type, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be string", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}, rule='type') + if data__type not in ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}, rule='enum') + else: data["type"] = 'external-http' + return data + +def validate___definitions_portnumber(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (int)) and not (isinstance(data, float) and data.is_integer()) or isinstance(data, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be integer", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'integer', 'min': 1, 'max': 65531}, rule='type') + return data + +def validate___definitions_cloudexecutablespec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32678}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['type']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['type'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': 
{'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32678}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='required') + if "type" in data: + data_one_of_count5 = 0 + if data_one_of_count5 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['docker']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['docker']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['docker']}, rule='enum') + if "docker" in data_keys: + data_keys.remove("docker") + data__docker = data["docker"] + if not isinstance(data__docker, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker must be object", value=data__docker, name="" + (name_prefix or "data") + ".docker", definition={'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}, rule='type') + data__docker_is_dict = isinstance(data__docker, dict) + if data__docker_is_dict: + data__docker_len = len(data__docker) + if not all(prop in data__docker for prop in ['image']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker must contain ['image'] properties", value=data__docker, name="" + (name_prefix or "data") + ".docker", definition={'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}, rule='required') + data__docker_keys = set(data__docker.keys()) + if "image" in data__docker_keys: + data__docker_keys.remove("image") + data__docker__image = data__docker["image"] + if not isinstance(data__docker__image, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker.image must be string", value=data__docker__image, name="" + (name_prefix or "data") + ".docker.image", definition={'type': 'string'}, rule='type') + if "pullSecret" in data__docker_keys: + data__docker_keys.remove("pullSecret") + data__docker__pullSecret = data__docker["pullSecret"] + if not isinstance(data__docker__pullSecret, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker.pullSecret must be 
object", value=data__docker__pullSecret, name="" + (name_prefix or "data") + ".docker.pullSecret", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='type') + data__docker__pullSecret_is_dict = isinstance(data__docker__pullSecret, dict) + if data__docker__pullSecret_is_dict: + data__docker__pullSecret_len = len(data__docker__pullSecret) + if not all(prop in data__docker__pullSecret for prop in ['depends']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker.pullSecret must contain ['depends'] properties", value=data__docker__pullSecret, name="" + (name_prefix or "data") + ".docker.pullSecret", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='required') + data__docker__pullSecret_keys = set(data__docker__pullSecret.keys()) + if "depends" in data__docker__pullSecret_keys: + data__docker__pullSecret_keys.remove("depends") + data__docker__pullSecret__depends = data__docker__pullSecret["depends"] + validate___definitions_secretdepends(data__docker__pullSecret__depends, custom_formats, (name_prefix or "data") + ".docker.pullSecret.depends") + data_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data_one_of_count5 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['build']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['build']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['build']}, rule='enum') + if "build" in data_keys: + data_keys.remove("build") + data__build = data["build"] + if not isinstance(data__build, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build must be object", value=data__build, name="" + (name_prefix or "data") + ".build", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='type') + data__build_is_dict = isinstance(data__build, dict) + if data__build_is_dict: + data__build_len = len(data__build) + if not all(prop in data__build for prop in ['depends']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build must contain ['depends'] properties", value=data__build, name="" + (name_prefix or "data") + ".build", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='required') + data__build_keys = set(data__build.keys()) + if "depends" in data__build_keys: + data__build_keys.remove("depends") + data__build__depends = data__build["depends"] + validate___definitions_secretdepends(data__build__depends, custom_formats, (name_prefix or "data") + ".build.depends") + data_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data_one_of_count5 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + 
data__type = data["type"] + if data__type not in ['preInstalled']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['preInstalled']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['preInstalled']}, rule='enum') + data_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data_one_of_count5 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count5) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if not isinstance(data__type, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be string", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, rule='type') + if data__type not in ['docker', 'build']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['docker', 'build']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, rule='enum') + else: data["type"] = 'docker' + if "command" in data_keys: + data_keys.remove("command") + data__command = data["command"] + if not isinstance(data__command, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".command must be string", value=data__command, name="" + (name_prefix or "data") + ".command", definition={'type': 'string'}, rule='type') + if "runAsBash" in data_keys: + data_keys.remove("runAsBash") + data__runAsBash = data["runAsBash"] + if not isinstance(data__runAsBash, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runAsBash must be boolean", value=data__runAsBash, name="" + (name_prefix or "data") + ".runAsBash", definition={'type': 'boolean', 'default': True}, rule='type') + else: data["runAsBash"] = True + if "simulation" in data_keys: + data_keys.remove("simulation") + data__simulation = data["simulation"] + if not isinstance(data__simulation, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".simulation must be boolean", value=data__simulation, name="" + (name_prefix or "data") + ".simulation", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["simulation"] = False + if "limits" in 
data_keys: + data_keys.remove("limits") + data__limits = data["limits"] + if not isinstance(data__limits, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".limits must be object", value=data__limits, name="" + (name_prefix or "data") + ".limits", definition={'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32678}}}, rule='type') + data__limits_is_dict = isinstance(data__limits, dict) + if data__limits_is_dict: + data__limits_keys = set(data__limits.keys()) + if "cpu" in data__limits_keys: + data__limits_keys.remove("cpu") + data__limits__cpu = data__limits["cpu"] + if not isinstance(data__limits__cpu, (int, float)) or isinstance(data__limits__cpu, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".limits.cpu must be number", value=data__limits__cpu, name="" + (name_prefix or "data") + ".limits.cpu", definition={'type': 'number', 'min': 0.1, 'max': 8}, rule='type') + if "memory" in data__limits_keys: + data__limits_keys.remove("memory") + data__limits__memory = data__limits["memory"] + if not isinstance(data__limits__memory, (int, float)) or isinstance(data__limits__memory, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".limits.memory must be number", value=data__limits__memory, name="" + (name_prefix or "data") + ".limits.memory", definition={'type': 'number', 'min': 256, 'max': 32678}, rule='type') + return data + +def validate___definitions_secretdepends(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "secret": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: secret", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'secret', 'default': 'secret'}, rule='const') + else: data["kind"] = 'secret' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_cloudcomponentinfospec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'replicas': {'type': 'number', 'default': 1}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "replicas" in data_keys: + data_keys.remove("replicas") + data__replicas = data["replicas"] + if not isinstance(data__replicas, (int, float)) or isinstance(data__replicas, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".replicas must be number", value=data__replicas, name="" + (name_prefix or "data") + 
".replicas", definition={'type': 'number', 'default': 1}, rule='type') + else: data["replicas"] = 1 + return data + +def validate___definitions_environmentspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}, rule='required') + if "exposed" in data: + data_one_of_count6 = 0 + if data_one_of_count6 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['exposedName']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['exposedName'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, rule='required') + data_keys = set(data.keys()) + if "exposed" in data_keys: + data_keys.remove("exposed") + data__exposed = data["exposed"] + if data__exposed not in [True]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exposed must be one of [True]", value=data__exposed, name="" + (name_prefix or "data") + ".exposed", definition={'enum': [True]}, rule='enum') + if "exposedName" in data_keys: + data_keys.remove("exposedName") + data__exposedName = data["exposedName"] + if not isinstance(data__exposedName, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exposedName must be string", value=data__exposedName, name="" + (name_prefix or "data") + ".exposedName", definition={'type': 'string'}, rule='type') + data_one_of_count6 += 1 + except JsonSchemaValueException: pass + if data_one_of_count6 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "exposed" in data_keys: + data_keys.remove("exposed") + data__exposed = data["exposed"] + if data__exposed not in [False]: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exposed must be one of [False]", value=data__exposed, name="" + (name_prefix or "data") + ".exposed", definition={'enum': [False]}, rule='enum') + data_one_of_count6 += 1 + except JsonSchemaValueException: pass + if data_one_of_count6 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count6) + " 
matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "description" in data_keys: + data_keys.remove("description") + data__description = data["description"] + if not isinstance(data__description, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".description must be string", value=data__description, name="" + (name_prefix or "data") + ".description", definition={'type': 'string'}, rule='type') + if "default" in data_keys: + data_keys.remove("default") + data__default = data["default"] + if not isinstance(data__default, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".default must be string", value=data__default, name="" + (name_prefix or "data") + ".default", definition={'type': 'string'}, rule='type') + if "exposed" in data_keys: + data_keys.remove("exposed") + data__exposed = data["exposed"] + if not isinstance(data__exposed, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".exposed must be boolean", value=data__exposed, name="" + (name_prefix or "data") + ".exposed", definition={'type': 'boolean', 'default': False}, rule='type') + else: data["exposed"] = False + return data + +def validate___definitions_deviceexecutablespec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['type']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['type'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': 
['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='required') + if "type" in data: + data_one_of_count7 = 0 + if data_one_of_count7 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['docker']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['docker']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['docker']}, rule='enum') + if "docker" in data_keys: + data_keys.remove("docker") + data__docker = data["docker"] + if not isinstance(data__docker, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker must be object", value=data__docker, name="" + (name_prefix or "data") + ".docker", definition={'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['image']}, rule='type') + data__docker_is_dict = isinstance(data__docker, dict) + if data__docker_is_dict: + data__docker_len = len(data__docker) + if not all(prop in data__docker for prop in ['image']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker must contain ['image'] properties", value=data__docker, name="" + (name_prefix or "data") + ".docker", definition={'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['image']}, rule='required') + data__docker_keys = set(data__docker.keys()) + if "image" in data__docker_keys: + data__docker_keys.remove("image") + data__docker__image = data__docker["image"] + if not isinstance(data__docker__image, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".docker.image must be string", value=data__docker__image, name="" + (name_prefix or "data") + ".docker.image", definition={'type': 'string'}, rule='type') + if "pullSecret" in data__docker_keys: + data__docker_keys.remove("pullSecret") + data__docker__pullSecret = data__docker["pullSecret"] + validate___definitions_secretdepends(data__docker__pullSecret, custom_formats, (name_prefix or "data") + ".docker.pullSecret") + data_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data_one_of_count7 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['build']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['build']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['build']}, rule='enum') + if "build" in data_keys: + data_keys.remove("build") + data__build = data["build"] + if not 
isinstance(data__build, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build must be object", value=data__build, name="" + (name_prefix or "data") + ".build", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='type') + data__build_is_dict = isinstance(data__build, dict) + if data__build_is_dict: + data__build_len = len(data__build) + if not all(prop in data__build for prop in ['depends']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build must contain ['depends'] properties", value=data__build, name="" + (name_prefix or "data") + ".build", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='required') + data__build_keys = set(data__build.keys()) + if "depends" in data__build_keys: + data__build_keys.remove("depends") + data__build__depends = data__build["depends"] + validate___definitions_builddepends(data__build__depends, custom_formats, (name_prefix or "data") + ".build.depends") + data_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data_one_of_count7 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['preInstalled']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['preInstalled']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['preInstalled']}, rule='enum') + data_one_of_count7 += 1 + except JsonSchemaValueException: pass + if data_one_of_count7 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count7) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if not isinstance(data__type, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be string", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, rule='type') + if data__type not in ['docker', 'build', 'preInstalled']: + 
raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['docker', 'build', 'preInstalled']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, rule='enum') + else: data["type"] = 'docker' + if "command" in data_keys: + data_keys.remove("command") + data__command = data["command"] + if not isinstance(data__command, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".command must be string", value=data__command, name="" + (name_prefix or "data") + ".command", definition={'type': 'string'}, rule='type') + if "runAsBash" in data_keys: + data_keys.remove("runAsBash") + data__runAsBash = data["runAsBash"] + if not isinstance(data__runAsBash, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".runAsBash must be boolean", value=data__runAsBash, name="" + (name_prefix or "data") + ".runAsBash", definition={'type': 'boolean', 'default': True}, rule='type') + else: data["runAsBash"] = True + return data + +def validate___definitions_builddepends(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "build": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: build", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'build', 'default': 'build'}, rule='const') + else: data["kind"] = 'build' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_devicecomponentinfospec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'arch': {'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, 'restart': {'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "arch" in data_keys: + data_keys.remove("arch") + data__arch = data["arch"] + if not isinstance(data__arch, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".arch must be string", value=data__arch, name="" + (name_prefix or "data") + ".arch", definition={'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, rule='type') + if data__arch not in ['arm32v7', 'arm64v8', 'amd64']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".arch must be one of ['arm32v7', 'arm64v8', 'amd64']", value=data__arch, name="" + (name_prefix or "data") + 
".arch", definition={'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, rule='enum') + else: data["arch"] = 'amd64' + if "restart" in data_keys: + data_keys.remove("restart") + data__restart = data["restart"] + if not isinstance(data__restart, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".restart must be string", value=data__restart, name="" + (name_prefix or "data") + ".restart", definition={'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}, rule='type') + if data__restart not in ['always', 'never', 'onfailure']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".restart must be one of ['always', 'never', 'onfailure']", value=data__restart, name="" + (name_prefix or "data") + ".restart", definition={'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}, rule='enum') + else: data["restart"] = 'always' + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'version': {'type': 'string'}, 'tag': {'type': 'string'}, 'description': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^pkg-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name', 'version']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name', 'version']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name', 'version'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'version': {'type': 'string'}, 'tag': {'type': 'string'}, 'description': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^pkg-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name', 'version']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "version" in data_keys: + data_keys.remove("version") + data__version = data["version"] + if not isinstance(data__version, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".version must be string", value=data__version, name="" + (name_prefix or "data") + ".version", definition={'type': 'string'}, rule='type') + if "tag" in data_keys: + data_keys.remove("tag") + data__tag = data["tag"] + if not isinstance(data__tag, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".tag must be string", value=data__tag, name="" + (name_prefix or "data") + ".tag", 
definition={'type': 'string'}, rule='type') + if "description" in data_keys: + data_keys.remove("description") + data__description = data["description"] + if not isinstance(data__description, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".description must be string", value=data__description, name="" + (name_prefix or "data") + ".description", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_packageguid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_projectguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_uuid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': 
'^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern')
+    return data
+
+def validate___definitions_packageguid(data, custom_formats={}, name_prefix=None):
+    if not isinstance(data, (str)):
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^pkg-[a-z]{24}$'}, rule='type')
+    if isinstance(data, str):
+        if not REGEX_PATTERNS['^pkg-[a-z]{24}$'].search(data):
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^pkg-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^pkg-[a-z]{24}$'}, rule='pattern')
+    return data
\ No newline at end of file
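For reference, these generated helpers can be called directly; a minimal sketch against the metadata validator above, assuming the generated module lands at riocli/package/validation.py (that import path is not shown in this patch):

    from fastjsonschema import JsonSchemaValueException
    from riocli.package.validation import validate___definitions_metadata  # assumed module path

    metadata = {'name': 'demo', 'version': 'v1.0.0', 'labels': {'app': 'demo'}}

    try:
        # returns the dict on success, raises JsonSchemaValueException on failure
        validate___definitions_metadata(metadata)
    except JsonSchemaValueException as e:
        print(e.message)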
diff --git a/riocli/parameter/__init__.py b/riocli/parameter/__init__.py
new file mode 100644
index 00000000..0b3a95bc
--- /dev/null
+++ b/riocli/parameter/__init__.py
@@ -0,0 +1,44 @@
+# Copyright 2021 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import click
+from click_help_colors import HelpColorsGroup
+
+from riocli.parameter.diff import diff_configurations
+from riocli.parameter.download import download_configurations
+from riocli.parameter.upload import upload_configurations
+
+
+@click.group(
+    invoke_without_command=False,
+    cls=HelpColorsGroup,
+    help_headers_color='yellow',
+    help_options_color='green',
+)
+def parameter() -> None:
+    """
+    Manage the configuration parameter trees
+    """
+    pass
+
+
+parameter.add_command(upload_configurations)
+parameter.add_command(download_configurations)
+parameter.add_command(diff_configurations)
diff --git a/riocli/parameter/delete.py b/riocli/parameter/delete.py
new file mode 100644
index 00000000..5128a5b2
--- /dev/null
+++ b/riocli/parameter/delete.py
@@ -0,0 +1,46 @@
+# Copyright 2021 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from typing import Tuple
+
+import click
+
+from riocli.config import new_client
+
+
+@click.command('delete')
+@click.option('--tree-names', type=click.STRING, multiple=True, default=None,
+              help='Tree names to delete')
+def delete_configurations(tree_names: Tuple = None) -> None:
+    """
+    Delete a set of configuration trees from rapyuta.io.
+    """
+    try:
+        client = new_client()
+        # NOTE: deletion is not wired up yet; the client is created so the
+        # command fails early when credentials are missing.
+        pass
+    except IOError as e:
+        click.secho(str(e), fg='red')
+        raise SystemExit(1)
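The diff command that follows unpacks dictdiffer's three-tuples; a small self-contained illustration of the shapes the library yields:

    from dictdiffer import diff

    local = {'robot': {'speed': 1.0}, 'zones': ['a']}
    remote = {'robot': {'speed': 2.0}, 'zones': ['a', 'b']}

    for action, key_path, change in diff(local, remote):
        # yields e.g. ('change', 'robot.speed', (1.0, 2.0))
        #         and ('add', 'zones', [(1, 'b')])
        print(action, key_path, change)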
diff --git a/riocli/parameter/diff.py b/riocli/parameter/diff.py
new file mode 100644
index 00000000..0b2db8ee
--- /dev/null
+++ b/riocli/parameter/diff.py
@@ -0,0 +1,87 @@
+# Copyright 2021 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from tempfile import mkdtemp
+from typing import Tuple
+
+import click
+from click_spinner import spinner
+from dictdiffer import diff
+from rapyuta_io.utils.error import APIError, InternalServerError
+
+from riocli.config import new_client
+from riocli.parameter.utils import parse_configurations
+
+
+@click.command('diff')
+@click.option('--path', type=click.Path(dir_okay=True, file_okay=False, writable=True, exists=True, resolve_path=True), default='.',
+              help='Root path of the local parameter trees to diff')
+@click.option('--tree-names', type=click.STRING, multiple=True, default=None,
+              help='Tree names to fetch')
+@click.option('--delete-existing', is_flag=True,
+              help='Overwrite existing parameter tree')
+def diff_configurations(path: click.Path, tree_names: Tuple = None, delete_existing: bool = False) -> None:
+    """
+    Diff the local configurations against the remote parameter trees.
+    """
+    if path is None:
+        click.secho("Base path missing: cannot diff without a local path to compare with the remote tree", fg='red')
+        raise SystemExit(1)
+
+    try:
+        client = new_client()
+        with spinner():
+            tmppath = mkdtemp()  # temporary directory to hold the remote configurations
+            client.download_configurations(tmppath, tree_names=list(tree_names), delete_existing_trees=delete_existing)
+            remote_configuration = parse_configurations(tmppath, tree_names=tree_names)
+            local_configuration = parse_configurations(path, tree_names=tree_names)
+
+        for action, key_path, value_mutation in diff(local_configuration, remote_configuration):
+            color, action_symbol = 'yellow', '~'
+            if action == 'add':
+                color, action_symbol = 'green', '+'
+            elif action == 'remove':
+                color, action_symbol = 'red', '-'
+            # dictdiffer reports nested paths as a dotted string or a list of keys
+            key = '.'.join(str(k) for k in key_path) if isinstance(key_path, (list, tuple)) else key_path
+            click.secho(f"{action_symbol}{action} {key} {value_mutation}", fg=color)
+    except (APIError, InternalServerError) as e:
+        click.secho(f"failed API request {str(e)}", fg='red')
+        raise SystemExit(1)
+    except (IOError, OSError) as e:
+        click.secho(f"failed file/directory creation {str(e)}", fg='red')
+        raise SystemExit(1)
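A usage sketch for the diff command through click's test runner; this assumes rio credentials are already configured, since the command talks to the remote parameter store:

    from click.testing import CliRunner
    from riocli.parameter.diff import diff_configurations

    runner = CliRunner()
    result = runner.invoke(diff_configurations, ['--path', '.', '--tree-names', 'warehouse'])
    print(result.output)  # lines prefixed with +add / -remove / ~change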
diff --git a/riocli/parameter/download.py b/riocli/parameter/download.py
new file mode 100644
index 00000000..e2d722be
--- /dev/null
+++ b/riocli/parameter/download.py
@@ -0,0 +1,69 @@
+# Copyright 2021 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from tempfile import mkdtemp
+from typing import Tuple
+
+import click
+from rapyuta_io.utils.error import APIError, InternalServerError
+
+from riocli.config import new_client
+
+
+@click.command('download')
+@click.option('--path', type=click.Path(dir_okay=True, file_okay=False, writable=True, exists=True, resolve_path=True), default='.',
+              help='Root path under which the parameters are downloaded')
+@click.option('--tree-names', type=click.STRING, multiple=True, default=None,
+              help='Tree names to fetch')
+@click.option('--delete-existing', is_flag=True,
+              help='Overwrite existing parameter tree')
+def download_configurations(path: click.Path, tree_names: Tuple = None, delete_existing: bool = False) -> None:
+    """
+    Download the configuration parameter trees.
+    """
+    if path is None:
+        path = mkdtemp()  # temporary directory to hold the configurations
+
+    tree_names = list(tree_names)
+
+    try:
+        client = new_client()
+        client.download_configurations(str(path), tree_names=tree_names, delete_existing_trees=delete_existing)
+    except (APIError, InternalServerError) as e:
+        click.secho(f"failed API request {str(e)}", fg='red')
+        raise SystemExit(1)
+    except (IOError, OSError) as e:
+        click.secho(f"failed file/directory creation {str(e)}", fg='red')
+        raise SystemExit(1)
+
+    click.secho("Downloaded IO configurations to '{}'".format(path), fg='green')
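For the upload command in the next file (and parse_configurations in utils.py further below), the on-disk layout matters: the top-level directory name is the tree name, .yaml files are parsed, and anything else is copied verbatim. A hypothetical fixture:

    import os
    import yaml

    # hypothetical tree named 'warehouse'; only the layout matters here
    os.makedirs('params/warehouse', exist_ok=True)
    with open('params/warehouse/config.yaml', 'w') as fp:
        fp.write(yaml.safe_dump({'speed': 2.5}))   # parsed as YAML
    with open('params/warehouse/motd.txt', 'w') as fp:
        fp.write('hello\n')                        # copied verbatim on upload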
diff --git a/riocli/parameter/upload.py b/riocli/parameter/upload.py
new file mode 100644
index 00000000..26be47b2
--- /dev/null
+++ b/riocli/parameter/upload.py
@@ -0,0 +1,95 @@
+# Copyright 2021 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import os
+from shutil import copyfile
+from tempfile import mkdtemp
+from typing import Tuple
+
+import click
+import yaml
+from click_spinner import spinner
+
+from riocli.config import new_client
+from riocli.parameter.utils import compile_local_configurations
+
+
+@click.command('upload')
+@click.option('--paths', type=click.Path(), default=["."], multiple=True,
+              help='Paths of the parameter directories to upload')
+@click.option('--tree-names', type=click.STRING, multiple=True, default=None,
+              help='Directory names to upload')
+@click.option('--delete-existing', is_flag=True,
+              help='Overwrite existing parameter tree')
+def upload_configurations(paths: click.Path, tree_names: Tuple = None, delete_existing: bool = False) -> None:
+    """
+    Upload a set of configurations to IO.
+
+    Compile the IO configurations from the paths provided, write them to a
+    temporary directory and upload that directory.
+    """
+    try:
+        client = new_client()
+
+        with spinner():
+            paths = list(paths)
+            configurations = compile_local_configurations(paths, tree_names=tree_names)
+            d_tmp = mkdtemp()  # temporary directory to hold the merged configurations
+            rev_paths = list(reversed(paths))  # path list in reverse order
+            for rel_file_path, configuration in configurations.items():
+                file_path = os.path.join(d_tmp, rel_file_path)
+                file_name, file_extension = os.path.splitext(file_path)
+                try:
+                    os.makedirs(os.path.dirname(file_path))
+                except OSError:
+                    pass  # the directory already exists
+
+                if file_extension == '.yaml':
+                    with open(file_path, 'w') as fp:
+                        fp.write(yaml.safe_dump(configuration, indent=4))
+                    click.secho("Wrote file '{}'".format(file_path))
+                else:
+                    # non-YAML files are copied verbatim from the last path
+                    # that contains them
+                    for src_path in rev_paths:
+                        src = os.path.abspath(os.path.join(src_path, rel_file_path))
+                        try:
+                            copyfile(src, file_path)
+                        except IOError:
+                            continue  # file not found in this directory, try the next
+                        else:
+                            click.secho("Copied file '{}' to '{}'".format(src, file_path))
+                            break
+
+            uploaded_configuration = client.upload_configurations(d_tmp, delete_existing_trees=delete_existing)
+
+        if uploaded_configuration:
+            click.secho('Parameters uploaded successfully!', fg='green')
+        else:
+            click.secho('failed to upload configurations', fg='red')
+            raise SystemExit(1)
+
+    except IOError as e:
+        click.secho(str(e), fg='red')
+        raise SystemExit(1)
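upload_configurations leans on deep_merge from utils.py (next file); its semantics in brief — nested dicts merge recursively while lists are replaced wholesale:

    from riocli.parameter.utils import deep_merge

    defaults = {'robot': {'speed': 1.0, 'name': 'base'}, 'tags': ['x']}
    site = {'robot': {'speed': 2.5}, 'tags': ['y']}

    merged = deep_merge(defaults, site)
    # dicts merge key by key, the list is replaced outright:
    assert merged == {'robot': {'speed': 2.5, 'name': 'base'}, 'tags': ['y']}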
diff --git a/riocli/parameter/utils.py b/riocli/parameter/utils.py
new file mode 100644
index 00000000..05e00fff
--- /dev/null
+++ b/riocli/parameter/utils.py
@@ -0,0 +1,110 @@
+# Copyright 2021 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import os
+from copy import copy, deepcopy
+
+import six
+import yaml
+
+
+# To run the docstring tests:
+#   python -m doctest -v utils.py
+
+# Copyright Ferry Boender, released under the MIT license.
+def deep_merge(tgt, src):
+    """Deep update target dict with src.
+
+    For each k,v in src: if k doesn't exist in target, it is deep copied from
+    src to target. Otherwise, if v is a list, target[k] is replaced with
+    src[k]. If v is a set, target[k] is updated with v. If v is a dict,
+    recursively deep-update it.
+
+    Examples:
+    >>> t = {'name': 'Ferry', 'hobbies': ['programming', 'sci-fi']}
+    >>> deep_merge(t, {'hobbies': ['gaming']})
+    {'name': 'Ferry', 'hobbies': ['gaming']}
+    """
+    target = deepcopy(tgt)
+    for k, v in six.iteritems(src):
+        if isinstance(v, list):
+            target[k] = deepcopy(v)
+        elif isinstance(v, dict):
+            if k not in target:
+                target[k] = deepcopy(v)
+            else:
+                target[k] = deep_merge(target[k], v)
+        elif isinstance(v, set):
+            if k not in target:
+                target[k] = v.copy()
+            else:
+                target[k].update(v.copy())
+        else:
+            target[k] = copy(v)
+    return target
+
+
+def compile_local_configurations(paths, tree_names=None):
+    """
+    Iterate over each path in "paths" and merge the trees found there.
+
+    Read the configuration files from the repository, merging the
+    warehouse-specific configs over the top of the default configurations.
+    Returns a single dict whose root keys are the relative file path of
+    each config.
+    """
+    configurations = {}
+    for path in paths:
+        abs_path = os.path.abspath(path)
+        cfg = parse_configurations(abs_path, tree_names)
+        configurations = deep_merge(configurations, cfg)
+
+    return configurations
+
+
+def parse_configurations(root_dir, tree_names=None):
+    """
+    Parse the configurations and return them as a dict
+    """
+    configurations = {}
+
+    for root, dirs, files in os.walk(root_dir, followlinks=True):
+        for f in files:
+            file_name, file_extension = os.path.splitext(f)  # f is a file name with extension
+
+            relpath = root[len(root_dir)+1:]  # relative path without the leading separator
+
+            # Only upload certain sub-directories
+            if tree_names:
+                # Check the top-level directory against the list
+                if relpath.split(os.sep)[0] not in tree_names:
+                    continue
+
+            contents = ""
+            if file_extension == '.yaml':
+                with open(os.path.join(root, f), 'r') as fp:
+                    contents = yaml.safe_load(fp)
+
+            configurations[os.path.join(relpath, f)] = contents
+
+    return configurations
+
+
+def show_configurations(args):
+    """
+    Show the local IO configuration
+    """
+    return compile_local_configurations(paths=args.paths, tree_names=args.configurations)
diff --git a/riocli/project/create.py b/riocli/project/create.py
index ead9485f..2fd66c2b 100644
--- a/riocli/project/create.py
+++ b/riocli/project/create.py
@@ -34,4 +34,4 @@ def create_project(project_name: str) -> None:
                     fg='green')
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
diff --git a/riocli/project/delete.py b/riocli/project/delete.py
index a4f306a2..21ad9359 100644
--- a/riocli/project/delete.py
+++ b/riocli/project/delete.py
@@ -36,4 +36,4 @@ def delete_project(force: bool, project_name: str, project_guid: str) -> None:
         click.secho('Project deleted successfully!', fg='green')
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
diff --git a/riocli/project/inspect.py b/riocli/project/inspect.py
index a0f8b576..3e1f0e92 100644
--- a/riocli/project/inspect.py
+++ b/riocli/project/inspect.py
@@ -36,7 +36,7 @@ def inspect_project(format_type: str, project_name: str, project_guid: str) -> None:
         inspect_with_format(data, format_type)
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
 
 
 def make_project_inspectable(obj: Project) -> dict:
diff --git a/riocli/project/list.py b/riocli/project/list.py
index 46b3c171..9c48518f 100644
--- a/riocli/project/list.py
+++ b/riocli/project/list.py
@@ -20,25 +20,30 @@
 
 @click.command('list')
-def list_project() -> 
None: +@click.pass_context +def list_project(ctx: click.Context) -> None: """ List all the projects you are part of """ try: client = new_client(with_project=False) projects = client.list_projects() - _display_project_list(projects, show_header=True) + current = ctx.obj.data.get('project_id', None) + _display_project_list(projects, current, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) -def _display_project_list(projects: typing.List[Project], show_header: bool = True) -> None: +def _display_project_list(projects: typing.List[Project], current: str = None, show_header: bool = True) -> None: if show_header: click.secho('{:40} {:<25} {:<27} {:40}'. format('Project ID', 'Project Name', 'Created At', 'Creator'), fg='yellow') for project in projects: + fg = None + if project.guid == current: + fg = 'green' click.secho('{:40} {:<25} {:<24} {:40}'.format(project.guid, project.name, - project.created_at, project.creator)) + project.created_at, project.creator), fg=fg) diff --git a/riocli/project/model.py b/riocli/project/model.py new file mode 100644 index 00000000..dcdc9b21 --- /dev/null +++ b/riocli/project/model.py @@ -0,0 +1,55 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +import typing + +import click +from rapyuta_io import Project as v1Project, Client + +from riocli.model import Model +from riocli.project.util import find_project_guid, ProjectNotFound +from riocli.project.validation import validate + + +class Project(Model): + + def __init__(self, *args, **kwargs): + self.update(*args, **kwargs) + + def find_object(self, client: Client) -> bool: + guid, obj = self.rc.find_depends({"kind": "project", "nameOrGUID": self.metadata.name}) + if not guid: + return False + + return obj + + def create_object(self, client: Client) -> v1Project: + project = client.create_project(self.to_v1()) + return project + + def update_object(self, client: Client, obj: typing.Any) -> typing.Any: + pass + + def delete_object(self, client: Client, obj: typing.Any) -> typing.Any: + obj.delete() + + def to_v1(self) -> v1Project: + return v1Project(self.metadata.name) + + @classmethod + def pre_process(cls, client: Client, d: typing.Dict) -> None: + pass + + @staticmethod + def validate(data) -> None: + validate(data) diff --git a/riocli/project/select.py b/riocli/project/select.py index aa8a2c41..256beb3c 100644 --- a/riocli/project/select.py +++ b/riocli/project/select.py @@ -13,18 +13,20 @@ # limitations under the License. import click -from riocli.config import Configuration from riocli.project.util import name_to_guid +from riocli.utils.context import get_root_context @click.command('select') @click.argument('project-name', type=str) @name_to_guid -def select_project(project_name: str, project_guid: str) -> None: +@click.pass_context +def select_project(ctx: click.Context, project_name: str, project_guid: str) -> None: """ Sets the given project in the CLI context. All other resources use this project to act upon. 
""" - config = Configuration() - config.data['project_id'] = project_guid - config.save() + ctx = get_root_context(ctx) + ctx.obj.data['project_id'] = project_guid + ctx.obj.data['project_name'] = project_name + ctx.obj.save() click.secho('Project {} ({}) is selected!'.format(project_name, project_guid), fg='green') diff --git a/riocli/project/util.py b/riocli/project/util.py index eda01035..10ebfe0f 100644 --- a/riocli/project/util.py +++ b/riocli/project/util.py @@ -35,7 +35,11 @@ def decorated(**kwargs: typing.Any): name = get_project_name(client, guid) if guid is None: - guid = find_project_guid(client, name) + try: + guid = find_project_guid(client, name) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) kwargs['project_name'] = name kwargs['project_guid'] = guid @@ -50,10 +54,15 @@ def find_project_guid(client: Client, name: str) -> str: if project.name == name: return project.guid - click.secho("project not found", fg='red') - exit(1) + raise ProjectNotFound() def get_project_name(client: Client, guid: str) -> str: project = client.get_project(guid) return project.name + + +class ProjectNotFound(Exception): + def __init__(self, message='project not found'): + self.message = message + super().__init__(self.message) diff --git a/riocli/project/validation.py b/riocli/project/validation.py new file mode 100644 index 00000000..7961d80e --- /dev/null +++ b/riocli/project/validation.py @@ -0,0 +1,191 @@ +VERSION = "2.16.1" +import re +from fastjsonschema import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z'), + '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_project(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_project(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Project'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/projectGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'users': {'type': 'array', 'items': {'type': 'string'}}}, 'required': ['users']}, 'status': {'type': 'object', 'properties': {'users': {'type': 'array', 'items': {'$ref': '#/definitions/user'}, 'uniqueItems': True}}, 'required': ['users']}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 
'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Project'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/projectGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'users': {'type': 'array', 'items': {'type': 'string'}}}, 'required': ['users']}, 'status': {'type': 'object', 'properties': {'users': {'type': 'array', 'items': {'$ref': '#/definitions/user'}, 'uniqueItems': True}}, 'required': ['users']}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + data_keys = set(data.keys()) + if "apiVersion" in data_keys: + data_keys.remove("apiVersion") + data__apiVersion = data["apiVersion"] + if data__apiVersion != "apiextensions.rapyuta.io/v1": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, rule='const') + else: data["apiVersion"] = 'apiextensions.rapyuta.io/v1' + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "Project": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: Project", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'Project'}, rule='const') + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + if "spec" in data_keys: + data_keys.remove("spec") + data__spec = data["spec"] + validate___definitions_projectspec(data__spec, custom_formats, (name_prefix or "data") + ".spec") + if "status" in data_keys: + data_keys.remove("status") + data__status = data["status"] + validate___definitions_projectstatus(data__status, custom_formats, (name_prefix or "data") + ".status") + return data + +def validate___definitions_projectstatus(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'users': {'type': 'array', 'items': {'type': 'object', 'properties': {'email_id': {'type': 'string'}, 'first_name': {'type': 'string'}, 'last_name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/uuid'}, 'state': {'type': 'string'}}, 'required': ['guid', 'state', 'email_id']}, 'uniqueItems': True}}, 'required': ['users']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['users']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['users'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'users': {'type': 'array', 'items': {'type': 'object', 'properties': {'email_id': {'type': 'string'}, 'first_name': {'type': 'string'}, 'last_name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/uuid'}, 'state': {'type': 'string'}}, 'required': ['guid', 'state', 'email_id']}, 'uniqueItems': True}}, 'required': ['users']}, rule='required') 
+ data_keys = set(data.keys()) + if "users" in data_keys: + data_keys.remove("users") + data__users = data["users"] + if not isinstance(data__users, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".users must be array", value=data__users, name="" + (name_prefix or "data") + ".users", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'email_id': {'type': 'string'}, 'first_name': {'type': 'string'}, 'last_name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/uuid'}, 'state': {'type': 'string'}}, 'required': ['guid', 'state', 'email_id']}, 'uniqueItems': True}, rule='type') + data__users_is_list = isinstance(data__users, (list, tuple)) + if data__users_is_list: + def fn(var): return frozenset(dict((k, fn(v)) for k, v in var.items()).items()) if hasattr(var, "items") else tuple(fn(v) for v in var) if isinstance(var, (dict, list)) else str(var) if isinstance(var, bool) else var + data__users_len = len(data__users) + if data__users_len > len(set(fn(data__users_x) for data__users_x in data__users)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".users must contain unique items", value=data__users, name="" + (name_prefix or "data") + ".users", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'email_id': {'type': 'string'}, 'first_name': {'type': 'string'}, 'last_name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/uuid'}, 'state': {'type': 'string'}}, 'required': ['guid', 'state', 'email_id']}, 'uniqueItems': True}, rule='uniqueItems') + for data__users_x, data__users_item in enumerate(data__users): + validate___definitions_user(data__users_item, custom_formats, (name_prefix or "data") + ".users[{data__users_x}]") + return data + +def validate___definitions_user(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'email_id': {'type': 'string'}, 'first_name': {'type': 'string'}, 'last_name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'state': {'type': 'string'}}, 'required': ['guid', 'state', 'email_id']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['guid', 'state', 'email_id']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['guid', 'state', 'email_id'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'email_id': {'type': 'string'}, 'first_name': {'type': 'string'}, 'last_name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'state': {'type': 'string'}}, 'required': ['guid', 'state', 'email_id']}, rule='required') + data_keys = set(data.keys()) + if "email_id" in data_keys: + data_keys.remove("email_id") + data__emailid = data["email_id"] + if not isinstance(data__emailid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".email_id must be string", value=data__emailid, name="" + (name_prefix or "data") + ".email_id", definition={'type': 'string'}, rule='type') + if "first_name" in data_keys: + data_keys.remove("first_name") + data__firstname = data["first_name"] + if not isinstance(data__firstname, 
(str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".first_name must be string", value=data__firstname, name="" + (name_prefix or "data") + ".first_name", definition={'type': 'string'}, rule='type') + if "last_name" in data_keys: + data_keys.remove("last_name") + data__lastname = data["last_name"] + if not isinstance(data__lastname, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".last_name must be string", value=data__lastname, name="" + (name_prefix or "data") + ".last_name", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_uuid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "state" in data_keys: + data_keys.remove("state") + data__state = data["state"] + if not isinstance(data__state, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".state must be string", value=data__state, name="" + (name_prefix or "data") + ".state", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_uuid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern') + return data + +def validate___definitions_projectspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'users': {'type': 'array', 'items': {'type': 'string'}}}, 'required': ['users']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['users']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['users'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'users': {'type': 'array', 'items': {'type': 'string'}}}, 'required': ['users']}, rule='required') + data_keys = set(data.keys()) + if "users" in data_keys: + data_keys.remove("users") + data__users = data["users"] + if not isinstance(data__users, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".users must be array", value=data__users, name="" + (name_prefix or "data") + ".users", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__users_is_list = isinstance(data__users, (list, tuple)) + if data__users_is_list: + data__users_len = len(data__users) + for data__users_x, data__users_item in enumerate(data__users): + if not isinstance(data__users_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".users[{data__users_x}]".format(**locals()) + " 
must be string", value=data__users_item, name="" + (name_prefix or "data") + ".users[{data__users_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_projectguid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def 
validate___definitions_projectguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern') + return data \ No newline at end of file diff --git a/riocli/rosbag/blob.py b/riocli/rosbag/blob.py index 108a2f0c..335592cb 100644 --- a/riocli/rosbag/blob.py +++ b/riocli/rosbag/blob.py @@ -49,7 +49,7 @@ def blob_delete(guid: str) -> None: click.secho('Rosbag Blob deleted successfully', fg='green') except ResourceNotFoundError as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @rosbag_blob.command('download') @@ -67,7 +67,7 @@ def blob_download(guid: str, filename: str, download_dir: str) -> None: click.secho('Rosbag Blob downloaded successfully', fg='green') except ResourceNotFoundError as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @rosbag_blob.command('list') @@ -96,7 +96,7 @@ def blob_list(guids: typing.List[str], deployment_ids: typing.List[str], compone _display_rosbag_blob_list(rosbag_blobs, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_rosbag_blob_list(blobs: typing.List[ROSBagBlob], show_header: bool = True) -> None: diff --git a/riocli/rosbag/job.py b/riocli/rosbag/job.py index 4a2c2891..727858ef 100644 --- a/riocli/rosbag/job.py +++ b/riocli/rosbag/job.py @@ -73,7 +73,7 @@ def job_create(name: str, deployment_id: str, component_instance_id: str, all_to click.secho('Rosbag Job created successfully', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @rosbag_job.command('stop') @@ -94,7 +94,7 @@ def job_stop(deployment_guid: str, deployment_name: str, component_instance_ids: click.secho('Rosbag Job stopped successfully', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) @rosbag_job.command('list') @@ -120,7 +120,7 @@ def job_list(deployment_guid: str, deployment_name: str, _display_rosbag_job_list(rosbag_jobs, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_rosbag_job_list(jobs: typing.List[ROSBagJob], show_header: bool = True) -> None: diff --git a/riocli/secret/create.py b/riocli/secret/create.py index 9eef837d..e1b9502c 100644 --- a/riocli/secret/create.py +++ b/riocli/secret/create.py @@ -47,4 +47,4 @@ def create_secret(secret_type: str, username: str, password: str, email: str, re ssh_key=ssh_priv_key) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/secret/delete.py b/riocli/secret/delete.py index c6c92aee..2f84177b 100644 --- a/riocli/secret/delete.py +++ b/riocli/secret/delete.py @@ -36,4 +36,4 @@ def delete_secret(force: str, secret_name: str, secret_guid: str) -> None: click.secho('Secret deleted successfully!', fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) diff --git a/riocli/secret/import_secret.py b/riocli/secret/import_secret.py index 633dc4e3..21b2a5ba 
100644 --- a/riocli/secret/import_secret.py +++ b/riocli/secret/import_secret.py @@ -65,7 +65,7 @@ def create_secret(secret: Secret) -> None: click.secho("Secret created successfully!", fg='green') except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def secret_from_docker_config() -> Secret: @@ -75,7 +75,7 @@ def secret_from_docker_config() -> Secret: if not config or 'auths' not in config: click.secho("docker config not found!", fg='red') - exit(1) + raise SystemExit(1) registries = list(filter(lambda x: 'rapyuta.io' not in x, config['auths'].keys())) choice = show_selection(registries, header='Found these registries in the docker config') diff --git a/riocli/secret/inspect.py b/riocli/secret/inspect.py index 2e8d90f1..ee02ac4b 100644 --- a/riocli/secret/inspect.py +++ b/riocli/secret/inspect.py @@ -35,7 +35,7 @@ def inspect_secret(format_type: str, secret_name: str, secret_guid: str) -> None inspect_with_format(data, format_type) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def make_secret_inspectable(obj: Secret) -> dict: diff --git a/riocli/secret/list.py b/riocli/secret/list.py index 564fd8b3..f9209032 100644 --- a/riocli/secret/list.py +++ b/riocli/secret/list.py @@ -32,7 +32,7 @@ def list_secrets(secret_type: typing.Union[str, typing.Tuple[str]]) -> None: _display_secret_list(secrets, secret_type, show_header=True) except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) def _display_secret_list( diff --git a/riocli/secret/model.py b/riocli/secret/model.py new file mode 100644 index 00000000..e7e97a96 --- /dev/null +++ b/riocli/secret/model.py @@ -0,0 +1,77 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
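+# NOTE: a minimal sketch of the manifest this model consumes (all values are
+# illustrative); it satisfies the generated checks in
+# riocli/secret/validation.py:
+#
+#   apiVersion: apiextensions.rapyuta.io/v1
+#   kind: Secret
+#   metadata:
+#     name: docker-pull
+#   spec:
+#     type: Docker
+#     docker:
+#       username: example-user
+#       password: example-password
+#       email: dev@example.com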
+import typing
+
+import click
+from rapyuta_io import Secret as v1Secret, SecretConfigDocker, SecretConfigSourceBasicAuth, \
+    SecretConfigSourceSSHAuth, Client
+
+from riocli.model import Model
+from riocli.secret.util import find_secret_guid, SecretNotFound
+from riocli.secret.validation import validate
+
+
+class Secret(Model):
+
+    def __init__(self, *args, **kwargs):
+        self.update(*args, **kwargs)
+
+    def find_object(self, client: Client) -> typing.Any:  # False when absent, else the found secret
+        _, secret = self.rc.find_depends({'kind': 'secret', 'nameOrGUID': self.metadata.name})
+        if not secret:
+            return False
+
+        return secret
+
+    def create_object(self, client: Client) -> v1Secret:
+        secret = client.create_secret(self.to_v1())
+        return secret
+
+    def update_object(self, client: Client, obj: typing.Any) -> None:
+        pass
+
+    def delete_object(self, client: Client, obj: typing.Any) -> typing.Any:
+        client.delete_secret(obj.guid)
+
+    def to_v1(self) -> v1Secret:
+        if self.spec.type == 'Docker':
+            return self._docker_secret_to_v1()
+        else:
+            return self._git_secret_to_v1()
+
+    def _docker_secret_to_v1(self) -> v1Secret:
+        config = SecretConfigDocker(self.spec.docker.username, self.spec.docker.password, self.spec.docker.email, self.spec.docker.registry)
+        return v1Secret(self.metadata.name, config)
+
+    def _git_secret_to_v1(self) -> v1Secret:
+        if self.spec.git.authMethod == 'SSH Auth':
+            config = SecretConfigSourceSSHAuth(self.spec.git.privateKey)
+        elif self.spec.git.authMethod == 'HTTP/S Basic Auth':
+            ca_cert = self.spec.git.get('caCert', None)  # the manifest key is camelCase, per the schema
+            config = SecretConfigSourceBasicAuth(self.spec.git.username, self.spec.git.password, ca_cert=ca_cert)
+        elif self.spec.git.authMethod == 'HTTP/S Token Auth':
+            # TODO(ankit): Implement it once SDK has support for it.
+            raise Exception('token-based secret is not supported yet!')
+        else:
+            raise Exception('invalid gitAuthMethod for secret!')
+
+        return v1Secret(self.metadata.name, config)
+
+    @classmethod
+    def pre_process(cls, client: Client, d: typing.Dict) -> None:
+        pass
+
+    @staticmethod
+    def validate(data) -> None:
+        validate(data)
diff --git a/riocli/secret/source_secret.py b/riocli/secret/source_secret.py
index ee926b75..eb10858d 100644
--- a/riocli/secret/source_secret.py
+++ b/riocli/secret/source_secret.py
@@ -52,7 +52,7 @@ def create_basic_auth_secret(
 
         if not ca_cert_data:
             click.secho("Empty CA Cert file. Try again with correct file", fg='red')
-            exit(1)
+            raise SystemExit(1)
 
     secret_config = SecretConfigSourceBasicAuth(username=username, password=password,
                                                 ca_cert=ca_cert_data)
@@ -70,7 +70,7 @@ def create_ssh_secret(secret_name: str, ssh_key: click.File = None) -> None:
     data = ssh_key.read()
     if not data:
        click.secho("Empty key file.
Try again with correct key file", fg='red') - exit(1) + raise SystemExit(1) secret_config = SecretConfigSourceSSHAuth(ssh_key=data) client = new_client() diff --git a/riocli/secret/util.py b/riocli/secret/util.py index af40e2f6..c939ec6f 100644 --- a/riocli/secret/util.py +++ b/riocli/secret/util.py @@ -27,12 +27,12 @@ def decorated(**kwargs: typing.Any): client = new_client() except Exception as e: click.secho(str(e), fg='red') - exit(1) + raise SystemExit(1) name = kwargs.pop('secret_name') guid = None - if name.startswith('secret-'): + if name.startswith('secret-') and len(name) == 31: guid = name name = None @@ -40,7 +40,11 @@ def decorated(**kwargs: typing.Any): name = get_secret_name(client, guid) if guid is None: - guid = find_secret_guid(client, name) + try: + guid = find_secret_guid(client, name) + except Exception as e: + click.secho(str(e), fg='red') + raise SystemExit(1) kwargs['secret_name'] = name kwargs['secret_guid'] = guid @@ -55,10 +59,15 @@ def find_secret_guid(client: Client, name: str) -> str: if secret.name == name: return secret.guid - click.secho("secret not found", fg='red') - exit(1) + raise SecretNotFound() def get_secret_name(client: Client, guid: str) -> str: secret = client.get_secret(guid) return secret.name + + +class SecretNotFound(Exception): + def __init__(self, message='secret not found'): + self.message = message + super().__init__(self.message) diff --git a/riocli/secret/validation.py b/riocli/secret/validation.py new file mode 100644 index 00000000..4a0c1c4b --- /dev/null +++ b/riocli/secret/validation.py @@ -0,0 +1,303 @@ +VERSION = "2.16.1" +import re +from fastjsonschema import JsonSchemaValueException + + +REGEX_PATTERNS = { + '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z'), + '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z'), + '^secret-[a-z]{24}$': re.compile('^secret-[a-z]{24}\\Z') +} + +NoneType = type(None) + +def validate(data, custom_formats={}, name_prefix=None): + validate___definitions_secret(data, custom_formats, (name_prefix or "data") + "") + return data + +def validate___definitions_secret(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Secret'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/secretGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'Docker', 'enum': ['Docker', 'Git']}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['Docker']}, 'docker': {'type': 'object', '$ref': '#/definitions/docker', 'required': None}}}, {'properties': {'type': {'enum': ['Git']}, 'git': {'type': 'object', '$ref': '#/definitions/git'}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): 
+ raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Secret'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/secretGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}, 'spec': {'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'Docker', 'enum': ['Docker', 'Git']}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['Docker']}, 'docker': {'type': 'object', '$ref': '#/definitions/docker', 'required': None}}}, {'properties': {'type': {'enum': ['Git']}, 'git': {'type': 'object', '$ref': '#/definitions/git'}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + data_keys = set(data.keys()) + if "apiVersion" in data_keys: + data_keys.remove("apiVersion") + data__apiVersion = data["apiVersion"] + if data__apiVersion != "apiextensions.rapyuta.io/v1": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, rule='const') + else: data["apiVersion"] = 'apiextensions.rapyuta.io/v1' + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "Secret": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: Secret", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'Secret'}, rule='const') + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + if "spec" in data_keys: + data_keys.remove("spec") + data__spec = data["spec"] + validate___definitions_secretspec(data__spec, custom_formats, (name_prefix or "data") + ".spec") + return data + +def validate___definitions_secretspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'Docker', 'enum': ['Docker', 'Git']}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['Docker']}, 'docker': {'type': 'object', 'properties': {'registry': {'type': 'string', 'default': 'https://index.docker.io/v1/'}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'email': {'type': 'string'}}, 'required': ['username', 'password', 'email']}}}, {'properties': {'type': {'enum': ['Git']}, 'git': {'type': 'object', 'properties': {'authMethod': {'type': 'string', 'default': 'HTTP/S Basic Auth', 'enum': ['HTTP/S Basic Auth', 'HTTP/S Token Auth', 'SSH Auth']}}, 'dependencies': {'authMethod': {'oneOf': [{'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Basic Auth']}, 'username': 
{'type': 'string'}, 'password': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['username', 'password']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Token Auth']}, 'token': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['token']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['SSH Auth']}, 'privateKey': {'type': 'string'}}, 'required': ['privateKey']}]}}}}}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['type']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['type'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'type': {'type': 'string', 'default': 'Docker', 'enum': ['Docker', 'Git']}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['Docker']}, 'docker': {'type': 'object', 'properties': {'registry': {'type': 'string', 'default': 'https://index.docker.io/v1/'}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'email': {'type': 'string'}}, 'required': ['username', 'password', 'email']}}}, {'properties': {'type': {'enum': ['Git']}, 'git': {'type': 'object', 'properties': {'authMethod': {'type': 'string', 'default': 'HTTP/S Basic Auth', 'enum': ['HTTP/S Basic Auth', 'HTTP/S Token Auth', 'SSH Auth']}}, 'dependencies': {'authMethod': {'oneOf': [{'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Basic Auth']}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['username', 'password']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Token Auth']}, 'token': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['token']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['SSH Auth']}, 'privateKey': {'type': 'string'}}, 'required': ['privateKey']}]}}}}}]}}}, rule='required') + if "type" in data: + data_one_of_count1 = 0 + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['Docker']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['Docker']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['Docker']}, rule='enum') + if "docker" in data_keys: + data_keys.remove("docker") + data__docker = data["docker"] + validate___definitions_docker(data__docker, custom_formats, (name_prefix or "data") + ".docker") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if data__type not in ['Git']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['Git']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['Git']}, rule='enum') + if "git" in data_keys: + data_keys.remove("git") + data__git = data["git"] + validate___definitions_git(data__git, custom_formats, (name_prefix or "data") + ".git") + data_one_of_count1 += 1 + except JsonSchemaValueException: pass + if data_one_of_count1 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid 
exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['Docker']}, 'docker': {'type': 'object', 'properties': {'registry': {'type': 'string', 'default': 'https://index.docker.io/v1/'}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'email': {'type': 'string'}}, 'required': ['username', 'password', 'email']}}}, {'properties': {'type': {'enum': ['Git']}, 'git': {'type': 'object', 'properties': {'authMethod': {'type': 'string', 'default': 'HTTP/S Basic Auth', 'enum': ['HTTP/S Basic Auth', 'HTTP/S Token Auth', 'SSH Auth']}}, 'dependencies': {'authMethod': {'oneOf': [{'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Basic Auth']}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['username', 'password']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Token Auth']}, 'token': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['token']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['SSH Auth']}, 'privateKey': {'type': 'string'}}, 'required': ['privateKey']}]}}}}}]}, rule='oneOf') + data_keys = set(data.keys()) + if "type" in data_keys: + data_keys.remove("type") + data__type = data["type"] + if not isinstance(data__type, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be string", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'Docker', 'enum': ['Docker', 'Git']}, rule='type') + if data__type not in ['Docker', 'Git']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['Docker', 'Git']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'type': 'string', 'default': 'Docker', 'enum': ['Docker', 'Git']}, rule='enum') + else: data["type"] = 'Docker' + return data + +def validate___definitions_git(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'authMethod': {'type': 'string', 'default': 'HTTP/S Basic Auth', 'enum': ['HTTP/S Basic Auth', 'HTTP/S Token Auth', 'SSH Auth']}}, 'dependencies': {'authMethod': {'oneOf': [{'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Basic Auth']}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['username', 'password']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Token Auth']}, 'token': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['token']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['SSH Auth']}, 'privateKey': {'type': 'string'}}, 'required': ['privateKey']}]}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + if "authMethod" in data: + data_one_of_count2 = 0 + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['username', 'password']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['username', 'password'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Basic 
Auth']}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['username', 'password']}, rule='required') + data_keys = set(data.keys()) + if "authMethod" in data_keys: + data_keys.remove("authMethod") + data__authMethod = data["authMethod"] + if not isinstance(data__authMethod, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authMethod must be string", value=data__authMethod, name="" + (name_prefix or "data") + ".authMethod", definition={'type': 'string', 'enum': ['HTTP/S Basic Auth']}, rule='type') + if data__authMethod not in ['HTTP/S Basic Auth']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authMethod must be one of ['HTTP/S Basic Auth']", value=data__authMethod, name="" + (name_prefix or "data") + ".authMethod", definition={'type': 'string', 'enum': ['HTTP/S Basic Auth']}, rule='enum') + if "username" in data_keys: + data_keys.remove("username") + data__username = data["username"] + if not isinstance(data__username, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".username must be string", value=data__username, name="" + (name_prefix or "data") + ".username", definition={'type': 'string'}, rule='type') + if "password" in data_keys: + data_keys.remove("password") + data__password = data["password"] + if not isinstance(data__password, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".password must be string", value=data__password, name="" + (name_prefix or "data") + ".password", definition={'type': 'string'}, rule='type') + if "caCert" in data_keys: + data_keys.remove("caCert") + data__caCert = data["caCert"] + if not isinstance(data__caCert, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".caCert must be string", value=data__caCert, name="" + (name_prefix or "data") + ".caCert", definition={'type': 'string'}, rule='type') + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['token']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['token'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Token Auth']}, 'token': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['token']}, rule='required') + data_keys = set(data.keys()) + if "authMethod" in data_keys: + data_keys.remove("authMethod") + data__authMethod = data["authMethod"] + if not isinstance(data__authMethod, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authMethod must be string", value=data__authMethod, name="" + (name_prefix or "data") + ".authMethod", definition={'type': 'string', 'enum': ['HTTP/S Token Auth']}, rule='type') + if data__authMethod not in ['HTTP/S Token Auth']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authMethod must be one of ['HTTP/S Token Auth']", value=data__authMethod, name="" + (name_prefix or "data") + ".authMethod", definition={'type': 'string', 'enum': ['HTTP/S Token Auth']}, rule='enum') + if "token" in data_keys: + data_keys.remove("token") + data__token = data["token"] + if not isinstance(data__token, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".token must be string", value=data__token, name="" + (name_prefix or "data") + ".token", 
definition={'type': 'string'}, rule='type') + if "caCert" in data_keys: + data_keys.remove("caCert") + data__caCert = data["caCert"] + if not isinstance(data__caCert, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".caCert must be string", value=data__caCert, name="" + (name_prefix or "data") + ".caCert", definition={'type': 'string'}, rule='type') + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['privateKey']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['privateKey'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'properties': {'authMethod': {'type': 'string', 'enum': ['SSH Auth']}, 'privateKey': {'type': 'string'}}, 'required': ['privateKey']}, rule='required') + data_keys = set(data.keys()) + if "authMethod" in data_keys: + data_keys.remove("authMethod") + data__authMethod = data["authMethod"] + if not isinstance(data__authMethod, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authMethod must be string", value=data__authMethod, name="" + (name_prefix or "data") + ".authMethod", definition={'type': 'string', 'enum': ['SSH Auth']}, rule='type') + if data__authMethod not in ['SSH Auth']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authMethod must be one of ['SSH Auth']", value=data__authMethod, name="" + (name_prefix or "data") + ".authMethod", definition={'type': 'string', 'enum': ['SSH Auth']}, rule='enum') + if "privateKey" in data_keys: + data_keys.remove("privateKey") + data__privateKey = data["privateKey"] + if not isinstance(data__privateKey, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".privateKey must be string", value=data__privateKey, name="" + (name_prefix or "data") + ".privateKey", definition={'type': 'string'}, rule='type') + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count2) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Basic Auth']}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['username', 'password']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['HTTP/S Token Auth']}, 'token': {'type': 'string'}, 'caCert': {'type': 'string'}}, 'required': ['token']}, {'properties': {'authMethod': {'type': 'string', 'enum': ['SSH Auth']}, 'privateKey': {'type': 'string'}}, 'required': ['privateKey']}]}, rule='oneOf') + data_keys = set(data.keys()) + if "authMethod" in data_keys: + data_keys.remove("authMethod") + data__authMethod = data["authMethod"] + if not isinstance(data__authMethod, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authMethod must be string", value=data__authMethod, name="" + (name_prefix or "data") + ".authMethod", definition={'type': 'string', 'default': 'HTTP/S Basic Auth', 'enum': ['HTTP/S Basic Auth', 'HTTP/S Token Auth', 'SSH Auth']}, rule='type') + if data__authMethod not in ['HTTP/S Basic Auth', 'HTTP/S Token Auth', 'SSH Auth']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".authMethod must be one of ['HTTP/S 
Basic Auth', 'HTTP/S Token Auth', 'SSH Auth']", value=data__authMethod, name="" + (name_prefix or "data") + ".authMethod", definition={'type': 'string', 'default': 'HTTP/S Basic Auth', 'enum': ['HTTP/S Basic Auth', 'HTTP/S Token Auth', 'SSH Auth']}, rule='enum') + else: data["authMethod"] = 'HTTP/S Basic Auth' + return data + +def validate___definitions_docker(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'registry': {'type': 'string', 'default': 'https://index.docker.io/v1/'}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'email': {'type': 'string'}}, 'required': ['username', 'password', 'email']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['username', 'password', 'email']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['username', 'password', 'email'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'registry': {'type': 'string', 'default': 'https://index.docker.io/v1/'}, 'username': {'type': 'string'}, 'password': {'type': 'string'}, 'email': {'type': 'string'}}, 'required': ['username', 'password', 'email']}, rule='required') + data_keys = set(data.keys()) + if "registry" in data_keys: + data_keys.remove("registry") + data__registry = data["registry"] + if not isinstance(data__registry, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".registry must be string", value=data__registry, name="" + (name_prefix or "data") + ".registry", definition={'type': 'string', 'default': 'https://index.docker.io/v1/'}, rule='type') + else: data["registry"] = 'https://index.docker.io/v1/' + if "username" in data_keys: + data_keys.remove("username") + data__username = data["username"] + if not isinstance(data__username, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".username must be string", value=data__username, name="" + (name_prefix or "data") + ".username", definition={'type': 'string'}, rule='type') + if "password" in data_keys: + data_keys.remove("password") + data__password = data["password"] + if not isinstance(data__password, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".password must be string", value=data__password, name="" + (name_prefix or "data") + ".password", definition={'type': 'string'}, rule='type') + if "email" in data_keys: + data_keys.remove("email") + data__email = data["email"] + if not isinstance(data__email, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".email must be string", value=data__email, name="" + (name_prefix or "data") + ".email", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^secret-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 
'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^secret-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_secretguid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + ".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_projectguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern') + return data + +def validate___definitions_uuid(data, custom_formats={}, name_prefix=None): + if not 
isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern') + return data + +def validate___definitions_secretguid(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^secret-[a-z]{24}$'}, rule='type') + if isinstance(data, str): + if not REGEX_PATTERNS['^secret-[a-z]{24}$'].search(data): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^secret-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^secret-[a-z]{24}$'}, rule='pattern') + return data \ No newline at end of file diff --git a/riocli/shell/__init__.py b/riocli/shell/__init__.py new file mode 100644 index 00000000..7284ce18 --- /dev/null +++ b/riocli/shell/__init__.py @@ -0,0 +1,74 @@ +# Copyright 2022 Rapyuta Robotics +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
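+# NOTE: the shell is a thin wrapper over click-repl. _parse_config() below
+# seeds the prompt with a persistent history file, the project-aware prompt
+# message, and suspend support; any keys under an optional "shell" section of
+# the CLI configuration override these defaults, e.g. (hypothetical override):
+# {"shell": {"enable_suspend": false}}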
+import os
+
+import click
+from click_help_colors import HelpColorsCommand
+from click_repl import repl
+from prompt_toolkit.history import FileHistory, ThreadedHistory
+
+from riocli.config import Configuration
+from riocli.shell.prompt import prompt_callback
+
+
+@click.command(
+    cls=HelpColorsCommand,
+    help_headers_color='yellow',
+    help_options_color='green',
+)
+@click.pass_context
+def shell(ctx: click.Context):
+    """
+    Interactive Shell for Rapyuta.io
+    """
+    start_shell(ctx)
+
+
+@click.command(
+    'repl',
+    cls=HelpColorsCommand,
+    help_headers_color='yellow',
+    help_options_color='green',
+    hidden=True
+)
+@click.pass_context
+def deprecated_repl(ctx: click.Context):
+    """
+    [Deprecated] Use "rio shell" instead
+    """
+    start_shell(ctx)
+
+
+def start_shell(ctx: click.Context):
+    prompt_config = _parse_config(ctx.obj)
+    while True:
+        try:
+            repl(click.get_current_context(), prompt_kwargs=prompt_config)
+        except Exception as e:
+            click.secho(str(e), fg='red')
+        else:
+            break
+
+
+def _parse_config(config: Configuration) -> dict:
+    history_path = os.path.join(click.get_app_dir(config.APP_NAME), "history")
+    default_prompt_kwargs = {
+        'history': ThreadedHistory(FileHistory(history_path)),
+        'message': prompt_callback,
+        'enable_suspend': True
+    }
+
+    shell_config = config.data.get('shell', {})
+
+    return {**default_prompt_kwargs, **shell_config}
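The `_parse_config` helper above merges the built-in prompt defaults with an optional `shell` section from the user's CLI config, with user-supplied keys winning. A minimal sketch of that resolution; the override values below are illustrative, not taken from this diff:

# Defaults merged with a (hypothetical) user "shell" config section;
# keys from the user's config take precedence via dict unpacking order.
default_prompt_kwargs = {
    'message': '> ',
    'enable_suspend': True,
}
shell_config = {
    'message': 'rio> ',  # hypothetical user override
}
merged = {**default_prompt_kwargs, **shell_config}
assert merged == {'message': 'rio> ', 'enable_suspend': True}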
diff --git a/riocli/shell/prompt.py b/riocli/shell/prompt.py
new file mode 100644
index 00000000..f8c122c3
--- /dev/null
+++ b/riocli/shell/prompt.py
@@ -0,0 +1,19 @@
+# Copyright 2022 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import click
+
+
+@click.pass_context
+def prompt_callback(ctx: click.Context) -> str:
+    return '{} > '.format(ctx.obj.data['project_name'])
diff --git a/riocli/static_route/create.py b/riocli/static_route/create.py
index 0e904ceb..2acfb9e9 100644
--- a/riocli/static_route/create.py
+++ b/riocli/static_route/create.py
@@ -30,4 +30,4 @@ def create_static_route(prefix: str) -> None:
         click.secho("Static Route created successfully for URL {}".format(route.urlString), fg='green')
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
diff --git a/riocli/static_route/delete.py b/riocli/static_route/delete.py
index 01ace215..a858ae92 100644
--- a/riocli/static_route/delete.py
+++ b/riocli/static_route/delete.py
@@ -38,4 +38,4 @@ def delete_static_route(static_route: str, static_route_guid: str, force: bool)
         click.secho('Static Route deleted successfully!', fg='green')
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
diff --git a/riocli/static_route/inspect.py b/riocli/static_route/inspect.py
index 925c999d..dae7d39f 100644
--- a/riocli/static_route/inspect.py
+++ b/riocli/static_route/inspect.py
@@ -35,7 +35,7 @@ def inspect_static_route(format_type: str, static_route: str, static_route_guid:
         inspect_with_format(data, format_type)
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
 
 
 def make_static_route_inspectable(static_route_data: StaticRoute) -> dict:
diff --git a/riocli/static_route/list.py b/riocli/static_route/list.py
index f18bed09..4c2b4d12 100644
--- a/riocli/static_route/list.py
+++ b/riocli/static_route/list.py
@@ -28,6 +28,6 @@ def list_static_routes() -> None:
         repr_static_routes(routes)
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
diff --git a/riocli/static_route/model.py b/riocli/static_route/model.py
new file mode 100644
index 00000000..96127837
--- /dev/null
+++ b/riocli/static_route/model.py
@@ -0,0 +1,51 @@
+# Copyright 2022 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+import typing
+
+from rapyuta_io import Client
+from rapyuta_io.clients.static_route import StaticRoute as v1StaticRoute
+
+from riocli.model import Model
+from riocli.static_route.validation import validate
+
+
+class StaticRoute(Model):
+    def __init__(self, *args, **kwargs):
+        self.update(*args, **kwargs)
+
+    def find_object(self, client: Client) -> typing.Any:
+        # find_depends returns a (guid, obj) pair; obj is empty when no route matches.
+        _, static_route = self.rc.find_depends({"kind": "staticroute", "nameOrGUID": self.metadata.name})
+        if not static_route:
+            return False
+
+        return static_route
+
+    def create_object(self, client: Client) -> v1StaticRoute:
+        static_route = client.create_static_route(self.metadata.name)
+        return static_route
+
+    def update_object(self, client: Client, obj: typing.Any) -> None:
+        pass
+
+    def delete_object(self, client: Client, obj: typing.Any):
+        client.delete_static_route(obj.guid)
+
+    @classmethod
+    def pre_process(cls, client: Client, d: typing.Dict) -> None:
+        pass
+
+    @staticmethod
+    def validate(data) -> None:
+        validate(data)
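The model above implements a find-then-create contract: `find_object` returns a falsy value when the route is absent, and `create_object` provisions it. A sketch of the reconcile step this contract implies; the `apply_model` driver below is hypothetical and not part of this diff:

# Hypothetical driver exercising the Model contract: look the object up
# first and only create it when it does not already exist.
def apply_model(model, client):
    obj = model.find_object(client)   # False when the route does not exist yet
    if not obj:
        return model.create_object(client)
    return obj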
diff --git a/riocli/static_route/open.py b/riocli/static_route/open.py
index 949de96d..348596b4 100644
--- a/riocli/static_route/open.py
+++ b/riocli/static_route/open.py
@@ -30,4 +30,4 @@ def open_static_route(static_route, static_route_guid) -> None:
         click.launch(url='https://{}'.format(route.urlString), wait=False)
     except Exception as e:
         click.secho(str(e), fg='red')
-        exit(1)
+        raise SystemExit(1)
diff --git a/riocli/static_route/util.py b/riocli/static_route/util.py
index 8c79d03d..e4f84929 100644
--- a/riocli/static_route/util.py
+++ b/riocli/static_route/util.py
@@ -56,8 +56,7 @@ def find_static_route_guid(client: Client, name: str) -> str:
         if route.urlPrefix == name or route.urlString == name:
             return route.guid
 
-    click.secho("Static route not found", fg='red')
-    exit(1)
+    raise StaticRouteNotFound()
 
 
 def repr_static_routes(routes: typing.List[StaticRoute]) -> None:
@@ -74,3 +73,8 @@ def repr_static_routes(routes: typing.List[StaticRoute]) -> None:
             '{:<36} {:<25} {:36} {:36} {:32}'.
             format(route.guid, route.urlPrefix, route.urlString, route.creator, route.CreatedAt))
+
+class StaticRouteNotFound(Exception):
+    def __init__(self, message='static route not found'):
+        self.message = message
+        super().__init__(self.message)
diff --git a/riocli/static_route/validation.py b/riocli/static_route/validation.py
new file mode 100644
index 00000000..554aab27
--- /dev/null
+++ b/riocli/static_route/validation.py
@@ -0,0 +1,102 @@
+VERSION = "2.16.1"
+import re
+from fastjsonschema import JsonSchemaValueException
+
+
+REGEX_PATTERNS = {
+    '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$': re.compile('^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}\\Z'),
+    '^project-[a-z]{24}$': re.compile('^project-[a-z]{24}\\Z')
+}
+
+NoneType = type(None)
+
+def validate(data, custom_formats={}, name_prefix=None):
+    validate___definitions_staticroute(data, custom_formats, (name_prefix or "data") + "")
+    return data
+
+def validate___definitions_staticroute(data, custom_formats={}, name_prefix=None):
+    if not isinstance(data, (dict)):
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'StaticRoute'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/projectGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}}, 'required': ['apiVersion', 'kind', 'metadata']}, rule='type')
+    data_is_dict = isinstance(data, dict)
+    if data_is_dict:
+        data_len = len(data)
+        if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata']):
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'StaticRoute'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'$ref': '#/definitions/projectGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name']}}, 'required': ['apiVersion', 'kind', 'metadata']}, rule='required')
+        data_keys = set(data.keys())
+        if "apiVersion" in data_keys:
+            data_keys.remove("apiVersion")
+            data__apiVersion = data["apiVersion"]
+            if data__apiVersion != "apiextensions.rapyuta.io/v1":
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".apiVersion must be same as const definition: apiextensions.rapyuta.io/v1", value=data__apiVersion, name="" + (name_prefix or "data") + ".apiVersion", definition={'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, rule='const')
+        else: data["apiVersion"] = 'apiextensions.rapyuta.io/v1'
+        if "kind" in data_keys:
+            data_keys.remove("kind")
+            data__kind = data["kind"]
+            if data__kind != "StaticRoute":
+                raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: StaticRoute", value=data__kind, name="" + (name_prefix or "data") + ".kind",
definition={'const': 'StaticRoute'}, rule='const') + if "metadata" in data_keys: + data_keys.remove("metadata") + data__metadata = data["metadata"] + validate___definitions_metadata(data__metadata, custom_formats, (name_prefix or "data") + ".metadata") + return data + +def validate___definitions_metadata(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'guid': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'creator': {'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, 'project': {'type': 'string', 'pattern': '^project-[a-z]{24}$'}, 'labels': {'type': 'object', 'additionalProperties': {'type': 'string'}}}, 'required': ['name']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + validate___definitions_projectguid(data__guid, custom_formats, (name_prefix or "data") + ".guid") + if "creator" in data_keys: + data_keys.remove("creator") + data__creator = data["creator"] + validate___definitions_uuid(data__creator, custom_formats, (name_prefix or "data") + ".creator") + if "project" in data_keys: + data_keys.remove("project") + data__project = data["project"] + validate___definitions_projectguid(data__project, custom_formats, (name_prefix or "data") + ".project") + if "labels" in data_keys: + data_keys.remove("labels") + data__labels = data["labels"] + validate___definitions_stringmap(data__labels, custom_formats, (name_prefix or "data") + ".labels") + return data + +def validate___definitions_stringmap(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'additionalProperties': {'type': 'string'}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + for data_key in data_keys: + if data_key not in []: + data_value = data.get(data_key) + if not isinstance(data_value, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".{data_key}".format(**locals()) + " must be string", value=data_value, name="" + (name_prefix or "data") + 
".{data_key}".format(**locals()) + "", definition={'type': 'string'}, rule='type')
+    return data
+
+def validate___definitions_uuid(data, custom_formats={}, name_prefix=None):
+    if not isinstance(data, (str)):
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='type')
+    if isinstance(data, str):
+        if not REGEX_PATTERNS['^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'].search(data):
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^[a-fA-F0-9]{8}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{4}-[a-fA-F0-9]{12}$'}, rule='pattern')
+    return data
+
+def validate___definitions_projectguid(data, custom_formats={}, name_prefix=None):
+    if not isinstance(data, (str)):
+        raise JsonSchemaValueException("" + (name_prefix or "data") + " must be string", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='type')
+    if isinstance(data, str):
+        if not REGEX_PATTERNS['^project-[a-z]{24}$'].search(data):
+            raise JsonSchemaValueException("" + (name_prefix or "data") + " must match pattern ^project-[a-z]{24}$", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'string', 'pattern': '^project-[a-z]{24}$'}, rule='pattern')
+    return data
\ No newline at end of file
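The generated module is used like any fastjsonschema validator: call `validate` on a parsed manifest and catch `JsonSchemaValueException` on failure. A minimal sketch; the manifest values are illustrative:

# Validate a StaticRoute manifest against the generated schema checks.
from fastjsonschema import JsonSchemaValueException
from riocli.static_route.validation import validate

manifest = {
    'apiVersion': 'apiextensions.rapyuta.io/v1',
    'kind': 'StaticRoute',
    'metadata': {'name': 'my-route'},  # illustrative name
}

try:
    validate(manifest)
except JsonSchemaValueException as e:
    print(e.message)  # e.g. when metadata.name is missing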
diff --git a/riocli/usergroup/__init__.py b/riocli/usergroup/__init__.py
new file mode 100644
index 00000000..e69de29b
diff --git a/riocli/usergroup/list.py b/riocli/usergroup/list.py
new file mode 100644
index 00000000..e69de29b
diff --git a/riocli/utils/context.py b/riocli/utils/context.py
new file mode 100644
index 00000000..3e7787ca
--- /dev/null
+++ b/riocli/utils/context.py
@@ -0,0 +1,27 @@
+# Copyright 2022 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+from click import Context
+
+
+def get_root_context(ctx: Context) -> Context:
+    """
+    get_root_context figures out the top-level Context from the given context by walking up the chain of parent contexts.
+
+    https://click.palletsprojects.com/en/8.0.x/complex/#contexts
+    """
+    while True:
+        if ctx.parent is None:
+            return ctx
+
+        ctx = ctx.parent
diff --git a/riocli/utils/mermaid.py b/riocli/utils/mermaid.py
new file mode 100644
index 00000000..11b0ccc8
--- /dev/null
+++ b/riocli/utils/mermaid.py
@@ -0,0 +1,36 @@
+import base64
+import json
+
+
+def mermaid_safe(s: str):
+    return s.replace(" ", "_")
+
+
+def js_string_to_byte(data):
+    return bytes(data, 'iso-8859-1')
+
+
+def js_bytes_to_string(data):
+    return data.decode('iso-8859-1')
+
+
+def js_btoa(data):
+    return base64.b64encode(data)
+
+
+def mermaid_link(diagram):
+    # mermaid.live renders a diagram from the base64-encoded JSON state of
+    # its live editor, passed in the URL fragment.
+    obj = {
+        "code": diagram,
+        "mermaid": {
+            "theme": "default"
+        },
+        "updateEditor": False,
+        "autoSync": True,
+        "updateDiagram": False
+    }
+    json_str = json.dumps(obj)
+    json_bytes = js_string_to_byte(json_str)
+    encoded_uri = js_btoa(json_bytes)
+    return "https://mermaid.live/view#base64:{}".format(js_bytes_to_string(encoded_uri))
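`mermaid_link` packs the live-editor state (diagram source plus options) into a JSON document and base64-encodes it into the URL fragment, so the link reproduces the diagram without any server-side storage. A usage sketch; the exact base64 payload depends on `json.dumps` formatting, only the `#base64:` URL shape is fixed:

from riocli.utils.mermaid import mermaid_link

# Turn a two-node flowchart into a shareable mermaid.live URL.
diagram = "graph LR\n  A --> B"
url = mermaid_link(diagram)
print(url)  # https://mermaid.live/view#base64:eyJjb2RlIjog...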
diff --git a/scripts/generate-validation.py b/scripts/generate-validation.py
new file mode 100644
index 00000000..bf04f03a
--- /dev/null
+++ b/scripts/generate-validation.py
@@ -0,0 +1,43 @@
+# Copyright 2022 Rapyuta Robotics
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Usage: python generate-validation.py
+# Run it from the root of the repository inside a virtual environment.
+import json
+from pathlib import Path
+
+from fastjsonschema import compile_to_code
+from yaml import safe_load
+
+schema_dir = Path('jsonschema')
+json_schema_path = schema_dir.joinpath('json')
+json_schema_path.mkdir(parents=True, exist_ok=True)
+
+for schema in schema_dir.glob('*-schema.yaml'):
+    print("processing {}".format(schema))
+
+    module_name = schema.stem.replace("-schema", "")
+    module = Path('riocli').joinpath(module_name).joinpath('validation.py')
+
+    with open(schema) as f:
+        body = safe_load(f.read())
+
+    # Write the plain JSON version of the schema alongside the YAML source.
+    json_output = json_schema_path.joinpath(module_name + ".schema.json")
+    with open(json_output, 'w') as outfile:
+        outfile.write(json.dumps(body))
+
+    # Compile the schema into a standalone validation module for the CLI.
+    code = compile_to_code(body)
+    module.write_text(code)
diff --git a/setup.py b/setup.py
index 441ce6e8..28307318 100644
--- a/setup.py
+++ b/setup.py
@@ -20,6 +20,11 @@ setup(
     name="rapyuta-io-cli",
     packages=find_packages(),
+    package_data={
+        'riocli': [
+            'apply/manifests/*.yaml'
+        ]
+    },
     include_package_data=True,
     entry_points={"console_scripts": ["rio = riocli.bootstrap:cli"]},
     version=version,
@@ -30,43 +35,28 @@
     author_email="opensource@rapyuta-robotics.com",
     url="http://docs.rapyuta.io",
     install_requires=[
-        "argparse==1.4.0",
-        "certifi==2019.9.11",
-        "chardet==3.0.4",
-        "click==8.0.1",
-        "click-completion==0.5.2",
-        "click-help-colors==0.9.1",
-        "click-repl==0.2.0",
-        "click-plugins==1.1.1",
-        "click-spinner==0.1.10",
-        "concurrencytest==0.1.2",
-        "enum34==1.1.6",
-        "extras==1.0.0",
-        "fixtures==3.0.0",
-        "funcsigs==1.0.2",
-        "idna==2.6",
-        "jinja2==3.0.1; python_version >= '3.6'",
-        "linecache2==1.0.0",
-        "markupsafe==2.0.1; python_version >= '3.6'",
-        "mock==2.0.0",
-        "nose==1.3.1",
-        "pbr==5.4.4",
-        "prompt-toolkit==3.0.20; python_full_version >= '3.6.2'",
-        "pyfakefs==3.7",
-        "python-dateutil==2.8.1; python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'",
-        "python-mimeparse==1.6.0",
-        "python-subunit==1.4.0",
-        "pytz==2021.1",
-        "pyyaml==5.4.1",
-        "rapyuta-io==0.32.0",
-        "requests==2.18.4",
-        "shellingham==1.4.0; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
-        "six==1.13.0; python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'",
-        "testtools==2.4.0",
-        "traceback2==1.4.0",
-        "unittest2==1.1.0",
-        "urllib3==1.22",
-        "wcwidth==0.2.5",
+        "pretty-traceback>=2022.1018",
+        "argparse>=1.4.0",
+        "click-completion>=0.5.2",
+        "click-help-colors>=0.9.1",
+        "click-repl>=0.2.0",
+        "click-spinner>=0.1.10",
+        "click-plugins>=1.1.1",
+        "click>=8.0.1",
+        "dictdiffer>=0.9.0",
+        "fastjsonschema>=2.16.1",
+        "graphlib-backport>=1.0.3",
+        "jinja2>=3.0.1",
+        "munch>=2.4.0",
+        "python-dateutil>=2.8.2",
+        "pytz",
+        "pyyaml>=5.4.1",
+        "rapyuta-io==1.5.0",
+        "requests>=2.20.0",
+        "setuptools",
+        "six>=1.13.0",
+        "tabulate>=0.8.0",
+        "urllib3>=1.23",
     ],
     setup_requires=["flake8"],
 )
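With dependencies loosened to version floors and the console script unchanged, a quick sanity check (a sketch, not part of this diff) that the `rio` entry point declared in setup.py still resolves after an editable install:

# Confirm the "rio" console script resolves to riocli.bootstrap:cli.
from importlib.metadata import entry_points

eps = entry_points()
console = eps.select(group='console_scripts') if hasattr(eps, 'select') else eps.get('console_scripts', [])
print([f"{ep.name} = {ep.value}" for ep in console if ep.name == 'rio'])
# expected: ['rio = riocli.bootstrap:cli']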