From 875d50fd4486dd93514f36a3871a3bf9f7841344 Mon Sep 17 00:00:00 2001 From: Shiv Mohith <95404550+shiv-mohith@users.noreply.github.com> Date: Wed, 14 Dec 2022 14:17:56 +0545 Subject: [PATCH] feat(rosbags): adds support in apply packages and deployment, and adds rosbag job update and trigger upload. * Update package-schema.yaml * implements the model * separates out device rosbag spec and cloud rosbags spec * adds rosbag to deployment schema * Update model.py * adds rosbag patch command * rosbag: support only minutes for split by duration * rosbag: support float values for max split duration * Revert "rosbag: support float values for max split duration" This reverts commit 17d231cc6db8cbccfb9bbf430120e2c61d243003. * Update model.py * Update validation.py * minor changes * fixes version in pipfile * minor changes to deployment schema * improves error handling * adds rosbags examples * reverts auth environment changes * reverts auth environment changes * fixes some review comments * separated job trigger and update commands * reverts build schema changes * updates rosbags in package schema * updates schema validation --- Pipfile | 1 + jsonschema/deployment-schema.yaml | 147 ++++++ jsonschema/package-schema.yaml | 148 +++++- .../13-device-package-with-rosbag.yaml | 32 ++ .../14-cloud-package-with-rosbag.yaml | 40 ++ ...device-deployment-no-override-options.yaml | 25 + ...vice-deployment-with-override-options.yaml | 33 ++ ...ice-deployment-with-on-demand-options.yaml | 27 + riocli/apply/resolver.py | 2 +- riocli/deployment/model.py | 95 +++- riocli/deployment/validation.py | 335 +++++++++++- riocli/package/model.py | 72 ++- riocli/package/validation.py | 475 +++++++++++++++--- riocli/rosbag/job.py | 111 +++- riocli/rosbag/util.py | 4 + setup.py | 1 + 16 files changed, 1417 insertions(+), 131 deletions(-) create mode 100644 riocli/apply/manifests/13-device-package-with-rosbag.yaml create mode 100644 riocli/apply/manifests/14-cloud-package-with-rosbag.yaml create mode 100644 riocli/apply/manifests/15-device-deployment-no-override-options.yaml create mode 100644 riocli/apply/manifests/16-device-deployment-with-override-options.yaml create mode 100644 riocli/apply/manifests/17-device-deployment-with-on-demand-options.yaml create mode 100644 riocli/rosbag/util.py diff --git a/Pipfile b/Pipfile index 103f6143..698cf76f 100644 --- a/Pipfile +++ b/Pipfile @@ -27,6 +27,7 @@ munch = ">=2.4.0" pyyaml = ">=5.4.1" rapyuta-io = ">=1.5.0" tabulate = ">=0.8.0" +pyrfc3339 = ">=1.1" [requires] python_version = "3" diff --git a/jsonschema/deployment-schema.yaml b/jsonschema/deployment-schema.yaml index fcf4fbda..4dd115bf 100644 --- a/jsonschema/deployment-schema.yaml +++ b/jsonschema/deployment-schema.yaml @@ -156,6 +156,11 @@ definitions: items: "$ref": "#/definitions/deviceNetworkAttachSpec" + rosBagJobs: + type: array + items: + "$ref": "#/definitions/deviceROSBagJobSpec" + - properties: runtime: type: string @@ -187,6 +192,148 @@ definitions: items: "$ref": "#/definitions/managedServiceSpec" + rosBagJobs: + type: array + items: + "$ref": "#/definitions/cloudROSBagJobSpec" + + cloudROSBagJobSpec: + type: object + properties: + name: + type: string + recordOptions: + "$ref": "#/definitions/rosbagRecordOptionsSpec" + overrideOptions: + "$ref": "#/definitions/rosbagOverrideOptionsSpec" + required: + - name + - recordOptions + + deviceROSBagJobSpec: + type: object + properties: + name: + type: string + recordOptions: + "$ref": "#/definitions/rosbagRecordOptionsSpec" + uploadOptions: + "$ref": 
"#/definitions/rosbagUploadOptionsSpec" + overrideOptions: + "$ref": "#/definitions/rosbagOverrideOptionsSpec" + required: + - name + - recordOptions + + rosbagRecordOptionsSpec: + type: object + oneOf: + - required: + - allTopics + - anyOf: + - required: + - topics + - required: + - topicIncludeRegex + properties: + allTopics: + type: boolean + topics: + type: array + items: + type: string + topicIncludeRegex: + type: array + items: + type: string + topicExcludeRegex: + type: string + maxMessageCount: + type: integer + node: + type: string + compression: + type: string + enum: + - BZ2 + - LZ4 + maxSplits: + type: integer + maxSplitSize: + type: integer + chunkSize: + type: integer + prefix: + type: string + maxSplitDuration: + type: integer + + rosbagUploadOptionsSpec: + type: object + properties: + maxUploadRate: + type: integer + default: 1048576 + purgeAfter: + type: boolean + uploadType: + type: string + enum: + - OnStop + - Continuous + - OnDemand + default: OnDemand + onDemandOpts: + type: object + "$ref": "#/definitions/rosbagOnDemandUploadOptionsSpec" + + rosbagOnDemandUploadOptionsSpec: + type: object + properties: + timeRange: + type: object + properties: + from: + type: integer + default: 0 + to: + type: integer + default: 0 + required: + - from + - to + required: + - timeRange + + rosbagOverrideOptionsSpec: + type: object + properties: + topicOverrideInfo: + type: array + items: + "$ref": "#/definitions/rosbagTopicOverrideInfoSpec" + excludeTopics: + type: array + items: + type: string + + rosbagTopicOverrideInfoSpec: + type: object + oneOf: + - required: + - topicName + - recordFrequency + - required: + - topicName + - latched + properties: + topicName: + type: string + recordFrequency: + type: integer + latched: + type: boolean + stringMap: type: object additionalProperties: diff --git a/jsonschema/package-schema.yaml b/jsonschema/package-schema.yaml index 826be0d1..523ed097 100644 --- a/jsonschema/package-schema.yaml +++ b/jsonschema/package-schema.yaml @@ -60,6 +60,7 @@ definitions: ros: type: object "$ref": "#/definitions/rosComponentSpec" + dependencies: runtime: oneOf: @@ -74,11 +75,14 @@ definitions: type: array items: "$ref": "#/definitions/deviceExecutableSpec" - environmentArgs: type: array items: "$ref": "#/definitions/environmentSpec" + rosBagJobs: + type: array + items: + "$ref": "#/definitions/deviceROSBagJobSpec" - properties: runtime: @@ -103,6 +107,11 @@ definitions: items: "$ref": "#/definitions/endpointSpec" + rosBagJobs: + type: array + items: + "$ref": "#/definitions/cloudROSBagJobSpec" + deviceExecutableSpec: type: object properties: @@ -215,7 +224,7 @@ definitions: type: object properties: depends: - "$ref": "#/definitions/secretDepends" + "$ref": "#/definitions/buildDepends" required: - depends - properties: @@ -473,6 +482,141 @@ definitions: enum: - action + cloudROSBagJobSpec: + type: object + properties: + name: + type: string + recordOptions: + "$ref": "#/definitions/rosbagRecordOptionsSpec" + overrideOptions: + "$ref": "#/definitions/rosbagOverrideOptionsSpec" + required: + - name + - recordOptions + + deviceROSBagJobSpec: + type: object + properties: + name: + type: string + recordOptions: + "$ref": "#/definitions/rosbagRecordOptionsSpec" + uploadOptions: + "$ref": "#/definitions/rosbagUploadOptionsSpec" + overrideOptions: + "$ref": "#/definitions/rosbagOverrideOptionsSpec" + required: + - name + - recordOptions + + rosbagRecordOptionsSpec: + type: object + oneOf: + - required: + - allTopics + - anyOf: + - required: + - topics + - required: + - 
topicIncludeRegex + properties: + allTopics: + type: boolean + topics: + type: array + items: + type: string + topicIncludeRegex: + type: array + items: + type: string + topicExcludeRegex: + type: string + maxMessageCount: + type: integer + node: + type: string + compression: + type: string + enum: + - BZ2 + - LZ4 + maxSplits: + type: integer + maxSplitSize: + type: integer + chunkSize: + type: integer + prefix: + type: string + maxSplitDuration: + type: integer + + rosbagUploadOptionsSpec: + type: object + properties: + maxUploadRate: + type: integer + default: 1048576 + purgeAfter: + type: boolean + uploadType: + type: string + enum: + - OnStop + - Continuous + - OnDemand + default: OnDemand + onDemandOpts: + type: object + "$ref": "#/definitions/rosbagOnDemandUploadOptionsSpec" + + rosbagOnDemandUploadOptionsSpec: + type: object + properties: + timeRange: + type: object + properties: + from: + type: integer + to: + type: integer + required: + - from + - to + required: + - timeRange + + rosbagOverrideOptionsSpec: + type: object + properties: + topicOverrideInfo: + type: array + items: + "$ref": "#/definitions/rosbagTopicOverrideInfoSpec" + excludeTopics: + type: array + items: + type: string + + rosbagTopicOverrideInfoSpec: + type: object + oneOf: + - required: + - topicName + - recordFrequency + - required: + - topicName + - latched + properties: + topicName: + type: string + recordFrequency: + type: integer + latched: + type: boolean + stringMap: type: object additionalProperties: diff --git a/riocli/apply/manifests/13-device-package-with-rosbag.yaml b/riocli/apply/manifests/13-device-package-with-rosbag.yaml new file mode 100644 index 00000000..8bcd55c6 --- /dev/null +++ b/riocli/apply/manifests/13-device-package-with-rosbag.yaml @@ -0,0 +1,32 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" #We will create a package +metadata: + name: "rosbag-dev" + version: "1.0.0" + labels: + app: test +spec: + runtime: "device" + device: + arch: "amd64" + restart: "always" + ros: + enabled: True + rosBagJobs: + - name: "testbag" + recordOptions: + allTopics: True + maxSplits: 5 + compression: LZ4 + maxSplitDuration: 1 + uploadOptions: + uploadType: "OnDemand" + executables: + - name: "exec" + type: build + command: "roslaunch talker talker.launch" + runAsBash: False + build: + depends: + kind: build + nameOrGUID: "build-jtmnspmxafziwbrfjknnvyaf" diff --git a/riocli/apply/manifests/14-cloud-package-with-rosbag.yaml b/riocli/apply/manifests/14-cloud-package-with-rosbag.yaml new file mode 100644 index 00000000..741b7b30 --- /dev/null +++ b/riocli/apply/manifests/14-cloud-package-with-rosbag.yaml @@ -0,0 +1,40 @@ +apiVersion: "apiextensions.rapyuta.io/v1" +kind: "Package" #We will create a package +metadata: + name: "rosbag-cloud" + version: "1.0.0" + labels: + app: test +spec: + runtime: cloud + cloud: + replicas: 1 + ros: + enabled: True + rosBagJobs: + - name: "testbag" + recordOptions: + topicIncludeRegex: + - "/telemetry" + maxSplits: 5 + maxSplitSize: 10 + uploadOptions: + uploadType: "OnDemand" + overrideOptions: + topicOverrideInfo: + - topicName: "/topic1" + recordFrequency: 10 + - topicName: "/topic2" + latched: True + excludeTopics: + - "/topic3" + - "/topic4" + executables: + - name: "exec" + type: build + command: "roslaunch talker talker.launch" + runAsBash: False + build: + depends: + kind: build + nameOrGUID: "build-fshjqzkbqlqwvgsksiioweuk" \ No newline at end of file diff --git a/riocli/apply/manifests/15-device-deployment-no-override-options.yaml 
b/riocli/apply/manifests/15-device-deployment-no-override-options.yaml new file mode 100644 index 00000000..0fd554bf --- /dev/null +++ b/riocli/apply/manifests/15-device-deployment-no-override-options.yaml @@ -0,0 +1,25 @@ +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: device-no-override-options + depends: + kind: package + nameOrGUID: "pkg-epzkouhcwgqkbmibexugvlob" + version: "v1.0.0" +spec: + runtime: device + device: + depends: + kind: device + nameOrGUID: device1 + rosBagJobs: + - name: "testbag" + recordOptions: + topicIncludeRegex: + - "/telemetry" + maxSplits: 5 + compression: LZ4 + maxSplitDuration: 3 + uploadOptions: + uploadType: "Continuous" + diff --git a/riocli/apply/manifests/16-device-deployment-with-override-options.yaml b/riocli/apply/manifests/16-device-deployment-with-override-options.yaml new file mode 100644 index 00000000..e8654912 --- /dev/null +++ b/riocli/apply/manifests/16-device-deployment-with-override-options.yaml @@ -0,0 +1,33 @@ +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: device-with-override-options + depends: + kind: package + nameOrGUID: "pkg-epzkouhcwgqkbmibexugvlob" + version: "v1.0.0" +spec: + runtime: device + device: + depends: + kind: device + nameOrGUID: device1 + rosBagJobs: + - name: "testbag" + recordOptions: + topicIncludeRegex: + - "/telemetry" + maxSplits: 5 + compression: LZ4 + maxSplitDuration: 180 + uploadOptions: + uploadType: "Continuous" + overrideOptions: + topicOverrideInfo: + - topicName: "/topic1" + recordFrequency: 10 + - topicName: "/topic2" + latched: True + excludeTopics: + - "/topic3" + - "/topic4" \ No newline at end of file diff --git a/riocli/apply/manifests/17-device-deployment-with-on-demand-options.yaml b/riocli/apply/manifests/17-device-deployment-with-on-demand-options.yaml new file mode 100644 index 00000000..0ae658ee --- /dev/null +++ b/riocli/apply/manifests/17-device-deployment-with-on-demand-options.yaml @@ -0,0 +1,27 @@ +apiVersion: apiextensions.rapyuta.io/v1 +kind: Deployment +metadata: + name: device-with-on-demand-opts + depends: + kind: package + nameOrGUID: "pkg-zofyhzjewrdlxpykebzbrzrb" + version: "v1.0.0" +spec: + runtime: device + device: + depends: + kind: device + nameOrGUID: device1 + rosBagJobs: + - name: "testbag" + recordOptions: + allTopics: True + maxSplits: 10 + compression: LZ4 + maxSplitDuration: 1 + uploadOptions: + uploadType: "OnDemand" + onDemandOpts: + timeRange: + from: 10 + to: 11 \ No newline at end of file diff --git a/riocli/apply/resolver.py b/riocli/apply/resolver.py index ef389cd5..0e508667 100644 --- a/riocli/apply/resolver.py +++ b/riocli/apply/resolver.py @@ -73,7 +73,7 @@ class ResolverCache(object, metaclass=_Singleton): } GUID_KEYS = ['guid', 'GUID', 'uuid', 'ID', 'Id', 'id'] - NAME_KEYS = ['name', 'urlPrefix'] + NAME_KEYS = ['name', 'urlPrefix', 'buildName'] def __init__(self, client): self.client = client diff --git a/riocli/deployment/model.py b/riocli/deployment/model.py index e5a14ac1..a0ee25d4 100644 --- a/riocli/deployment/model.py +++ b/riocli/deployment/model.py @@ -20,6 +20,8 @@ from rapyuta_io.clients.native_network import NativeNetwork from rapyuta_io.clients.package import ProvisionConfiguration, RestartPolicy, ExecutableMount from rapyuta_io.clients.routed_network import RoutedNetwork +from rapyuta_io.clients.rosbag import ROSBagJob, ROSBagOptions, ROSBagCompression, UploadOptions, \ + ROSBagOnDemandUploadOptions, ROSBagTimeRange, ROSBagUploadTypes, OverrideOptions, TopicOverrideInfo from 
riocli.deployment.util import add_mount_volume_provision_config from riocli.deployment.validation import validate @@ -44,14 +46,14 @@ def find_object(self, client: Client) -> typing.Any: def create_object(self, client: Client) -> typing.Any: pkg_guid, pkg = self.rc.find_depends(self.metadata.depends, self.metadata.depends.version) - + if pkg_guid: pkg = client.get_package(pkg_guid) pkg.update() default_plan = pkg['plans'][0] internal_component = default_plan['internalComponents'][0] - + __planId = default_plan['planId'] __componentName = internal_component.componentName runtime = internal_component['runtime'] @@ -88,12 +90,18 @@ def create_object(self, client: Client) -> typing.Any: if 'rosNetworks' in self.spec: for network_depends in self.spec.rosNetworks: network_guid, network_obj = self.rc.find_depends(network_depends.depends) - + if type(network_obj) == RoutedNetwork: - provision_config.add_routed_network(network_obj, network_interface=network_depends.get('interface', None)) + provision_config.add_routed_network(network_obj, + network_interface=network_depends.get('interface', None)) if type(network_obj) == NativeNetwork: - provision_config.add_native_network(network_obj, network_interface=network_depends.get('interface', None)) - + provision_config.add_native_network(network_obj, + network_interface=network_depends.get('interface', None)) + + if 'rosBagJobs' in self.spec: + for req_job in self.spec.rosBagJobs: + provision_config.add_rosbag_job(__componentName, self._form_rosbag_job(req_job)) + if self.spec.runtime == 'cloud': if 'staticRoutes' in self.spec: for stroute in self.spec.staticRoutes: @@ -220,3 +228,78 @@ def _configure_static_routes(self, client: Client, prov_config: ProvisionConfigu guid = find_static_route_guid(client, name) static_route = client.get_static_route(route_guid=guid) prov_config.add_static_route(component_name=component, endpoint_name=route.name, static_route=static_route) + + def _form_rosbag_job(self, req_job): + rosbag_job_kw_args = { + 'name': req_job.name, + 'rosbag_options': ROSBagOptions( + all_topics=req_job.recordOptions.get('allTopics'), + topics=req_job.recordOptions.get('topics'), + topic_include_regex=req_job.recordOptions.get('topicIncludeRegex'), + topic_exclude_regex=req_job.recordOptions.get('topicExcludeRegex'), + max_message_count=req_job.recordOptions.get('maxMessageCount'), + node=req_job.recordOptions.get('node'), + compression=ROSBagCompression(req_job.recordOptions.compression) if hasattr( + req_job.recordOptions, 'compression' + ) else None, + max_splits=req_job.recordOptions.get('maxSplits'), + max_split_size=req_job.recordOptions.get('maxSplitSize'), + chunk_size=req_job.recordOptions.get('chunkSize'), + max_split_duration=req_job.recordOptions.get('maxSplitDuration') + )} + + if 'uploadOptions' in req_job: + rosbag_job_kw_args['upload_options'] = self._form_rosbag_upload_options(req_job.uploadOptions) + + if 'overrideOptions' in req_job: + rosbag_job_kw_args['override_options'] = self._form_rosbag_override_options(req_job.overrideOptions) + + return ROSBagJob(**rosbag_job_kw_args) + + @staticmethod + def _form_rosbag_upload_options(upload_options): + upload_options_kw_args = { + 'max_upload_rate': upload_options.maxUploadRate, + 'upload_type': ROSBagUploadTypes(upload_options.uploadType), + } + + if 'purgeAfter' in upload_options: + upload_options_kw_args['purge_after'] = upload_options.purgeAfter + + if 'onDemandOpts' in upload_options: + time_range = ROSBagTimeRange( + from_time=upload_options.onDemandOpts.timeRange['from'], + 
to_time=upload_options.onDemandOpts.timeRange['to'] + ) + + upload_options_kw_args['on_demand_options'] = ROSBagOnDemandUploadOptions(time_range) + + return UploadOptions(**upload_options_kw_args) + + @staticmethod + def _form_rosbag_override_options(override_options): + override_options_kw_args = {} + + if 'topicOverrideInfo' in override_options: + override_infos = [] + for info in override_options.topicOverrideInfo: + topic_override_info_kw_args = { + 'topic_name': info.topicName + } + + if 'recordFrequency' in info: + topic_override_info_kw_args['record_frequency'] = info.recordFrequency + + if 'latched' in info: + topic_override_info_kw_args['latched'] = info.latched + + override_info = TopicOverrideInfo(**topic_override_info_kw_args) + + override_infos.append(override_info) + + override_options_kw_args['topic_override_info'] = override_infos + + if 'excludeTopics' in override_options: + override_options_kw_args['exclude_topics'] = override_options.excludeTopics + + return OverrideOptions(**override_options_kw_args) diff --git a/riocli/deployment/validation.py b/riocli/deployment/validation.py index ea3e0d49..4a507eb6 100644 --- a/riocli/deployment/validation.py +++ b/riocli/deployment/validation.py @@ -17,12 +17,12 @@ def validate(data, custom_formats={}, name_prefix=None): def validate___definitions_deployment(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}, 'managedServices': {'type': 'array', 'items': {'$ref': '#/definitions/managedServiceSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + 
"", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}, 'rosBagJobs': {'type': 'array', 'items': {'$ref': '#/definitions/deviceROSBagJobSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}, 'managedServices': {'type': 'array', 'items': {'$ref': '#/definitions/managedServiceSpec'}}, 'rosBagJobs': {'type': 'array', 'items': {'$ref': '#/definitions/cloudROSBagJobSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') data_is_dict = isinstance(data, dict) if data_is_dict: data_len = len(data) if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': 
'#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}, 'managedServices': {'type': 'array', 'items': {'$ref': '#/definitions/managedServiceSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Deployment', 'default': 'Deployment'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'depends': {'$ref': '#/definitions/packageDepends'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}}, 'required': ['name', 'depends']}, 'spec': {'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'depends': {'type': 'array', 'items': {'$ref': '#/definitions/deploymentDepends'}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'type': 'object', '$ref': '#/definitions/deviceDepends'}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/deviceVolumeAttachSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/deviceNetworkAttachSpec'}}, 'rosBagJobs': {'type': 'array', 'items': {'$ref': '#/definitions/deviceROSBagJobSpec'}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'$ref': '#/definitions/envArgsSpec'}}, 'volumes': {'type': 'array', 'items': {'$ref': '#/definitions/cloudVolumeAttachSpec'}}, 'staticRoutes': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosNetworks': {'type': 'array', 'items': {'$ref': '#/definitions/cloudNetworkAttachSpec'}}, 'managedServices': {'type': 'array', 'items': {'$ref': '#/definitions/managedServiceSpec'}}, 'rosBagJobs': {'type': 'array', 'items': {'$ref': '#/definitions/cloudROSBagJobSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') data_keys = set(data.keys()) if "apiVersion" in data_keys: data_keys.remove("apiVersion") @@ -105,6 +105,16 @@ def validate___definitions_componentspec(data, custom_formats={}, name_prefix=No data__rosNetworks_len = len(data__rosNetworks) for data__rosNetworks_x, data__rosNetworks_item in enumerate(data__rosNetworks): validate___definitions_devicenetworkattachspec(data__rosNetworks_item, custom_formats, (name_prefix or "data") + ".rosNetworks[{data__rosNetworks_x}]") + if "rosBagJobs" in data_keys: + data_keys.remove("rosBagJobs") + data__rosBagJobs = data["rosBagJobs"] + if not 
isinstance(data__rosBagJobs, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosBagJobs must be array", value=data__rosBagJobs, name="" + (name_prefix or "data") + ".rosBagJobs", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'uploadOptions': {'$ref': '#/definitions/rosbagUploadOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}, rule='type') + data__rosBagJobs_is_list = isinstance(data__rosBagJobs, (list, tuple)) + if data__rosBagJobs_is_list: + data__rosBagJobs_len = len(data__rosBagJobs) + for data__rosBagJobs_x, data__rosBagJobs_item in enumerate(data__rosBagJobs): + validate___definitions_devicerosbagjobspec(data__rosBagJobs_item, custom_formats, (name_prefix or "data") + ".rosBagJobs[{data__rosBagJobs_x}]") data_one_of_count1 += 1 except JsonSchemaValueException: pass if data_one_of_count1 < 2: @@ -169,10 +179,20 @@ def validate___definitions_componentspec(data, custom_formats={}, name_prefix=No data__managedServices_len = len(data__managedServices) for data__managedServices_x, data__managedServices_item in enumerate(data__managedServices): validate___definitions_managedservicespec(data__managedServices_item, custom_formats, (name_prefix or "data") + ".managedServices[{data__managedServices_x}]") + if "rosBagJobs" in data_keys: + data_keys.remove("rosBagJobs") + data__rosBagJobs = data["rosBagJobs"] + if not isinstance(data__rosBagJobs, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosBagJobs must be array", value=data__rosBagJobs, name="" + (name_prefix or "data") + ".rosBagJobs", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}, rule='type') + data__rosBagJobs_is_list = isinstance(data__rosBagJobs, (list, tuple)) + if data__rosBagJobs_is_list: + data__rosBagJobs_len = len(data__rosBagJobs) + for data__rosBagJobs_x, data__rosBagJobs_item in enumerate(data__rosBagJobs): + validate___definitions_cloudrosbagjobspec(data__rosBagJobs_item, custom_formats, (name_prefix or "data") + ".rosBagJobs[{data__rosBagJobs_x}]") data_one_of_count1 += 1 except JsonSchemaValueException: pass if data_one_of_count1 != 1: - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'properties': {'kind': {'const': 'device', 'default': 'device'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'interface': {'type': 'string'}, 'topics': {'type': 
'array', 'items': {'type': 'string'}}}}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}, 'depends': {'$ref': '#/definitions/diskDepends'}}}}, 'staticRoutes': {'type': 'array', 'items': {'properties': {'name': {'type': 'string'}, 'depends': {'properties': {'kind': {'const': 'staticroute', 'default': 'staticroute'}, 'nameOrGUID': {'type': 'string'}}}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'managedServices': {'type': 'array', 'items': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/managedServiceDepends'}}}}}}]}, rule='oneOf') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'type': 'string', 'enum': ['device']}, 'depends': {'properties': {'kind': {'const': 'device', 'default': 'device'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}, 'restart': {'type': 'string', 'enum': ['always', 'onfailure', 'never'], 'default': 'always'}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'interface': {'type': 'string'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'rosBagJobs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'uploadOptions': {'$ref': '#/definitions/rosbagUploadOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}}}, {'properties': {'runtime': {'type': 'string', 'enum': ['cloud']}, 'envArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'value': {'type': 'string'}}}}, 'volumes': {'type': 'array', 'items': {'type': 'object', 'properties': {'execName': {'type': 'string'}, 'mountPath': {'type': 'string'}, 'subPath': {'type': 'string'}, 'depends': {'$ref': '#/definitions/diskDepends'}}}}, 'staticRoutes': {'type': 'array', 'items': {'properties': {'name': {'type': 'string'}, 'depends': {'properties': {'kind': {'const': 'staticroute', 'default': 'staticroute'}, 'nameOrGUID': {'type': 'string'}}}}}}, 'rosNetworks': {'type': 'array', 'items': {'properties': {'depends': {'$ref': '#/definitions/networkDepends'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'managedServices': {'type': 'array', 'items': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/managedServiceDepends'}}}}, 'rosBagJobs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'overrideOptions': 
{'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}}}]}, rule='oneOf') data_keys = set(data.keys()) if "runtime" in data_keys: data_keys.remove("runtime") @@ -216,6 +236,222 @@ def validate___definitions_deploymentdepends(data, custom_formats={}, name_prefi raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') return data +def validate___definitions_cloudrosbagjobspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, 'overrideOptions': {'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'$ref': '#/definitions/rosbagTopicOverrideInfoSpec'}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'required': ['name', 'recordOptions']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name', 'recordOptions']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name', 'recordOptions'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, 'overrideOptions': {'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'$ref': '#/definitions/rosbagTopicOverrideInfoSpec'}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'required': ['name', 'recordOptions']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "recordOptions" in data_keys: + 
data_keys.remove("recordOptions") + data__recordOptions = data["recordOptions"] + validate___definitions_rosbagrecordoptionsspec(data__recordOptions, custom_formats, (name_prefix or "data") + ".recordOptions") + if "overrideOptions" in data_keys: + data_keys.remove("overrideOptions") + data__overrideOptions = data["overrideOptions"] + validate___definitions_rosbagoverrideoptionsspec(data__overrideOptions, custom_formats, (name_prefix or "data") + ".overrideOptions") + return data + +def validate___definitions_rosbagoverrideoptionsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'type': 'object', 'oneOf': [{'required': ['topicName', 'recordFrequency']}, {'required': ['topicName', 'latched']}], 'properties': {'topicName': {'type': 'string'}, 'recordFrequency': {'type': 'integer'}, 'latched': {'type': 'boolean'}}}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "topicOverrideInfo" in data_keys: + data_keys.remove("topicOverrideInfo") + data__topicOverrideInfo = data["topicOverrideInfo"] + if not isinstance(data__topicOverrideInfo, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicOverrideInfo must be array", value=data__topicOverrideInfo, name="" + (name_prefix or "data") + ".topicOverrideInfo", definition={'type': 'array', 'items': {'type': 'object', 'oneOf': [{'required': ['topicName', 'recordFrequency']}, {'required': ['topicName', 'latched']}], 'properties': {'topicName': {'type': 'string'}, 'recordFrequency': {'type': 'integer'}, 'latched': {'type': 'boolean'}}}}, rule='type') + data__topicOverrideInfo_is_list = isinstance(data__topicOverrideInfo, (list, tuple)) + if data__topicOverrideInfo_is_list: + data__topicOverrideInfo_len = len(data__topicOverrideInfo) + for data__topicOverrideInfo_x, data__topicOverrideInfo_item in enumerate(data__topicOverrideInfo): + validate___definitions_rosbagtopicoverrideinfospec(data__topicOverrideInfo_item, custom_formats, (name_prefix or "data") + ".topicOverrideInfo[{data__topicOverrideInfo_x}]") + if "excludeTopics" in data_keys: + data_keys.remove("excludeTopics") + data__excludeTopics = data["excludeTopics"] + if not isinstance(data__excludeTopics, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".excludeTopics must be array", value=data__excludeTopics, name="" + (name_prefix or "data") + ".excludeTopics", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__excludeTopics_is_list = isinstance(data__excludeTopics, (list, tuple)) + if data__excludeTopics_is_list: + data__excludeTopics_len = len(data__excludeTopics) + for data__excludeTopics_x, data__excludeTopics_item in enumerate(data__excludeTopics): + if not isinstance(data__excludeTopics_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".excludeTopics[{data__excludeTopics_x}]".format(**locals()) + " must be string", value=data__excludeTopics_item, name="" + (name_prefix or "data") + ".excludeTopics[{data__excludeTopics_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_rosbagtopicoverrideinfospec(data, custom_formats={}, 
name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'oneOf': [{'required': ['topicName', 'recordFrequency']}, {'required': ['topicName', 'latched']}], 'properties': {'topicName': {'type': 'string'}, 'recordFrequency': {'type': 'integer'}, 'latched': {'type': 'boolean'}}}, rule='type') + data_one_of_count2 = 0 + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['topicName', 'recordFrequency']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['topicName', 'recordFrequency'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['topicName', 'recordFrequency']}, rule='required') + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['topicName', 'latched']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['topicName', 'latched'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['topicName', 'latched']}, rule='required') + data_one_of_count2 += 1 + except JsonSchemaValueException: pass + if data_one_of_count2 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count2) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'oneOf': [{'required': ['topicName', 'recordFrequency']}, {'required': ['topicName', 'latched']}], 'properties': {'topicName': {'type': 'string'}, 'recordFrequency': {'type': 'integer'}, 'latched': {'type': 'boolean'}}}, rule='oneOf') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "topicName" in data_keys: + data_keys.remove("topicName") + data__topicName = data["topicName"] + if not isinstance(data__topicName, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicName must be string", value=data__topicName, name="" + (name_prefix or "data") + ".topicName", definition={'type': 'string'}, rule='type') + if "recordFrequency" in data_keys: + data_keys.remove("recordFrequency") + data__recordFrequency = data["recordFrequency"] + if not isinstance(data__recordFrequency, (int)) and not (isinstance(data__recordFrequency, float) and data__recordFrequency.is_integer()) or isinstance(data__recordFrequency, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".recordFrequency must be integer", value=data__recordFrequency, name="" + (name_prefix or "data") + ".recordFrequency", definition={'type': 'integer'}, rule='type') + if "latched" in data_keys: + data_keys.remove("latched") + data__latched = data["latched"] + if not isinstance(data__latched, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".latched must be boolean", value=data__latched, name="" + (name_prefix or "data") + ".latched", definition={'type': 'boolean'}, rule='type') + return data + +def validate___definitions_rosbagrecordoptionsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, 
name="" + (name_prefix or "data") + "", definition={'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, rule='type') + data_one_of_count3 = 0 + if data_one_of_count3 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['allTopics']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['allTopics'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['allTopics']}, rule='required') + data_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data_one_of_count3 < 2: + try: + data_any_of_count4 = 0 + if not data_any_of_count4: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['topics']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['topics'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['topics']}, rule='required') + data_any_of_count4 += 1 + except JsonSchemaValueException: pass + if not data_any_of_count4: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['topicIncludeRegex']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['topicIncludeRegex'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['topicIncludeRegex']}, rule='required') + data_any_of_count4 += 1 + except JsonSchemaValueException: pass + if not data_any_of_count4: + raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}, rule='anyOf') + data_one_of_count3 += 1 + except JsonSchemaValueException: pass + if data_one_of_count3 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count3) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, rule='oneOf') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + 
if "allTopics" in data_keys: + data_keys.remove("allTopics") + data__allTopics = data["allTopics"] + if not isinstance(data__allTopics, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".allTopics must be boolean", value=data__allTopics, name="" + (name_prefix or "data") + ".allTopics", definition={'type': 'boolean'}, rule='type') + if "topics" in data_keys: + data_keys.remove("topics") + data__topics = data["topics"] + if not isinstance(data__topics, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topics must be array", value=data__topics, name="" + (name_prefix or "data") + ".topics", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__topics_is_list = isinstance(data__topics, (list, tuple)) + if data__topics_is_list: + data__topics_len = len(data__topics) + for data__topics_x, data__topics_item in enumerate(data__topics): + if not isinstance(data__topics_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topics[{data__topics_x}]".format(**locals()) + " must be string", value=data__topics_item, name="" + (name_prefix or "data") + ".topics[{data__topics_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "topicIncludeRegex" in data_keys: + data_keys.remove("topicIncludeRegex") + data__topicIncludeRegex = data["topicIncludeRegex"] + if not isinstance(data__topicIncludeRegex, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicIncludeRegex must be array", value=data__topicIncludeRegex, name="" + (name_prefix or "data") + ".topicIncludeRegex", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__topicIncludeRegex_is_list = isinstance(data__topicIncludeRegex, (list, tuple)) + if data__topicIncludeRegex_is_list: + data__topicIncludeRegex_len = len(data__topicIncludeRegex) + for data__topicIncludeRegex_x, data__topicIncludeRegex_item in enumerate(data__topicIncludeRegex): + if not isinstance(data__topicIncludeRegex_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicIncludeRegex[{data__topicIncludeRegex_x}]".format(**locals()) + " must be string", value=data__topicIncludeRegex_item, name="" + (name_prefix or "data") + ".topicIncludeRegex[{data__topicIncludeRegex_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "topicExcludeRegex" in data_keys: + data_keys.remove("topicExcludeRegex") + data__topicExcludeRegex = data["topicExcludeRegex"] + if not isinstance(data__topicExcludeRegex, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicExcludeRegex must be string", value=data__topicExcludeRegex, name="" + (name_prefix or "data") + ".topicExcludeRegex", definition={'type': 'string'}, rule='type') + if "maxMessageCount" in data_keys: + data_keys.remove("maxMessageCount") + data__maxMessageCount = data["maxMessageCount"] + if not isinstance(data__maxMessageCount, (int)) and not (isinstance(data__maxMessageCount, float) and data__maxMessageCount.is_integer()) or isinstance(data__maxMessageCount, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxMessageCount must be integer", value=data__maxMessageCount, name="" + (name_prefix or "data") + ".maxMessageCount", definition={'type': 'integer'}, rule='type') + if "node" in data_keys: + data_keys.remove("node") + data__node = data["node"] + if not isinstance(data__node, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + 
".node must be string", value=data__node, name="" + (name_prefix or "data") + ".node", definition={'type': 'string'}, rule='type') + if "compression" in data_keys: + data_keys.remove("compression") + data__compression = data["compression"] + if not isinstance(data__compression, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".compression must be string", value=data__compression, name="" + (name_prefix or "data") + ".compression", definition={'type': 'string', 'enum': ['BZ2', 'LZ4']}, rule='type') + if data__compression not in ['BZ2', 'LZ4']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".compression must be one of ['BZ2', 'LZ4']", value=data__compression, name="" + (name_prefix or "data") + ".compression", definition={'type': 'string', 'enum': ['BZ2', 'LZ4']}, rule='enum') + if "maxSplits" in data_keys: + data_keys.remove("maxSplits") + data__maxSplits = data["maxSplits"] + if not isinstance(data__maxSplits, (int)) and not (isinstance(data__maxSplits, float) and data__maxSplits.is_integer()) or isinstance(data__maxSplits, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxSplits must be integer", value=data__maxSplits, name="" + (name_prefix or "data") + ".maxSplits", definition={'type': 'integer'}, rule='type') + if "maxSplitSize" in data_keys: + data_keys.remove("maxSplitSize") + data__maxSplitSize = data["maxSplitSize"] + if not isinstance(data__maxSplitSize, (int)) and not (isinstance(data__maxSplitSize, float) and data__maxSplitSize.is_integer()) or isinstance(data__maxSplitSize, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxSplitSize must be integer", value=data__maxSplitSize, name="" + (name_prefix or "data") + ".maxSplitSize", definition={'type': 'integer'}, rule='type') + if "chunkSize" in data_keys: + data_keys.remove("chunkSize") + data__chunkSize = data["chunkSize"] + if not isinstance(data__chunkSize, (int)) and not (isinstance(data__chunkSize, float) and data__chunkSize.is_integer()) or isinstance(data__chunkSize, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".chunkSize must be integer", value=data__chunkSize, name="" + (name_prefix or "data") + ".chunkSize", definition={'type': 'integer'}, rule='type') + if "prefix" in data_keys: + data_keys.remove("prefix") + data__prefix = data["prefix"] + if not isinstance(data__prefix, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".prefix must be string", value=data__prefix, name="" + (name_prefix or "data") + ".prefix", definition={'type': 'string'}, rule='type') + if "maxSplitDuration" in data_keys: + data_keys.remove("maxSplitDuration") + data__maxSplitDuration = data["maxSplitDuration"] + if not isinstance(data__maxSplitDuration, (int)) and not (isinstance(data__maxSplitDuration, float) and data__maxSplitDuration.is_integer()) or isinstance(data__maxSplitDuration, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxSplitDuration must be integer", value=data__maxSplitDuration, name="" + (name_prefix or "data") + ".maxSplitDuration", definition={'type': 'integer'}, rule='type') + return data + def validate___definitions_managedservicespec(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'managedservice', 'default': 
'managedservice'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}}, rule='type') @@ -370,6 +606,99 @@ def validate___definitions_diskdepends(data, custom_formats={}, name_prefix=None raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') return data +def validate___definitions_devicerosbagjobspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, 'uploadOptions': {'type': 'object', 'properties': {'maxUploadRate': {'type': 'integer', 'default': 1048576}, 'purgeAfter': {'type': 'boolean'}, 'uploadType': {'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, 'onDemandOpts': {'type': 'object', '$ref': '#/definitions/rosbagOnDemandUploadOptionsSpec'}}}, 'overrideOptions': {'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'$ref': '#/definitions/rosbagTopicOverrideInfoSpec'}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'required': ['name', 'recordOptions']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name', 'recordOptions']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name', 'recordOptions'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, 'uploadOptions': {'type': 'object', 'properties': {'maxUploadRate': {'type': 'integer', 'default': 1048576}, 'purgeAfter': {'type': 'boolean'}, 'uploadType': {'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, 'onDemandOpts': {'type': 'object', '$ref': '#/definitions/rosbagOnDemandUploadOptionsSpec'}}}, 'overrideOptions': {'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'$ref': 
'#/definitions/rosbagTopicOverrideInfoSpec'}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'required': ['name', 'recordOptions']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "recordOptions" in data_keys: + data_keys.remove("recordOptions") + data__recordOptions = data["recordOptions"] + validate___definitions_rosbagrecordoptionsspec(data__recordOptions, custom_formats, (name_prefix or "data") + ".recordOptions") + if "uploadOptions" in data_keys: + data_keys.remove("uploadOptions") + data__uploadOptions = data["uploadOptions"] + validate___definitions_rosbaguploadoptionsspec(data__uploadOptions, custom_formats, (name_prefix or "data") + ".uploadOptions") + if "overrideOptions" in data_keys: + data_keys.remove("overrideOptions") + data__overrideOptions = data["overrideOptions"] + validate___definitions_rosbagoverrideoptionsspec(data__overrideOptions, custom_formats, (name_prefix or "data") + ".overrideOptions") + return data + +def validate___definitions_rosbaguploadoptionsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'maxUploadRate': {'type': 'integer', 'default': 1048576}, 'purgeAfter': {'type': 'boolean'}, 'uploadType': {'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, 'onDemandOpts': {'type': 'object', 'properties': {'timeRange': {'type': 'object', 'properties': {'from': {'type': 'integer', 'default': 0}, 'to': {'type': 'integer', 'default': 0}}, 'required': ['from', 'to']}}, 'required': ['timeRange']}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "maxUploadRate" in data_keys: + data_keys.remove("maxUploadRate") + data__maxUploadRate = data["maxUploadRate"] + if not isinstance(data__maxUploadRate, (int)) and not (isinstance(data__maxUploadRate, float) and data__maxUploadRate.is_integer()) or isinstance(data__maxUploadRate, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxUploadRate must be integer", value=data__maxUploadRate, name="" + (name_prefix or "data") + ".maxUploadRate", definition={'type': 'integer', 'default': 1048576}, rule='type') + else: data["maxUploadRate"] = 1048576 + if "purgeAfter" in data_keys: + data_keys.remove("purgeAfter") + data__purgeAfter = data["purgeAfter"] + if not isinstance(data__purgeAfter, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".purgeAfter must be boolean", value=data__purgeAfter, name="" + (name_prefix or "data") + ".purgeAfter", definition={'type': 'boolean'}, rule='type') + if "uploadType" in data_keys: + data_keys.remove("uploadType") + data__uploadType = data["uploadType"] + if not isinstance(data__uploadType, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".uploadType must be string", value=data__uploadType, name="" + (name_prefix or "data") + ".uploadType", definition={'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, rule='type') + if data__uploadType not in 
['OnStop', 'Continuous', 'OnDemand']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".uploadType must be one of ['OnStop', 'Continuous', 'OnDemand']", value=data__uploadType, name="" + (name_prefix or "data") + ".uploadType", definition={'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, rule='enum') + else: data["uploadType"] = 'OnDemand' + if "onDemandOpts" in data_keys: + data_keys.remove("onDemandOpts") + data__onDemandOpts = data["onDemandOpts"] + validate___definitions_rosbagondemanduploadoptionsspec(data__onDemandOpts, custom_formats, (name_prefix or "data") + ".onDemandOpts") + return data + +def validate___definitions_rosbagondemanduploadoptionsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'timeRange': {'type': 'object', 'properties': {'from': {'type': 'integer', 'default': 0}, 'to': {'type': 'integer', 'default': 0}}, 'required': ['from', 'to']}}, 'required': ['timeRange']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['timeRange']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['timeRange'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'timeRange': {'type': 'object', 'properties': {'from': {'type': 'integer', 'default': 0}, 'to': {'type': 'integer', 'default': 0}}, 'required': ['from', 'to']}}, 'required': ['timeRange']}, rule='required') + data_keys = set(data.keys()) + if "timeRange" in data_keys: + data_keys.remove("timeRange") + data__timeRange = data["timeRange"] + if not isinstance(data__timeRange, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeRange must be object", value=data__timeRange, name="" + (name_prefix or "data") + ".timeRange", definition={'type': 'object', 'properties': {'from': {'type': 'integer', 'default': 0}, 'to': {'type': 'integer', 'default': 0}}, 'required': ['from', 'to']}, rule='type') + data__timeRange_is_dict = isinstance(data__timeRange, dict) + if data__timeRange_is_dict: + data__timeRange_len = len(data__timeRange) + if not all(prop in data__timeRange for prop in ['from', 'to']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeRange must contain ['from', 'to'] properties", value=data__timeRange, name="" + (name_prefix or "data") + ".timeRange", definition={'type': 'object', 'properties': {'from': {'type': 'integer', 'default': 0}, 'to': {'type': 'integer', 'default': 0}}, 'required': ['from', 'to']}, rule='required') + data__timeRange_keys = set(data__timeRange.keys()) + if "from" in data__timeRange_keys: + data__timeRange_keys.remove("from") + data__timeRange__from = data__timeRange["from"] + if not isinstance(data__timeRange__from, (int)) and not (isinstance(data__timeRange__from, float) and data__timeRange__from.is_integer()) or isinstance(data__timeRange__from, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeRange.from must be integer", value=data__timeRange__from, name="" + (name_prefix or "data") + ".timeRange.from", definition={'type': 'integer', 'default': 0}, rule='type') + else: data__timeRange["from"] = 0 + if "to" in data__timeRange_keys: + data__timeRange_keys.remove("to") + data__timeRange__to = 
data__timeRange["to"] + if not isinstance(data__timeRange__to, (int)) and not (isinstance(data__timeRange__to, float) and data__timeRange__to.is_integer()) or isinstance(data__timeRange__to, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeRange.to must be integer", value=data__timeRange__to, name="" + (name_prefix or "data") + ".timeRange.to", definition={'type': 'integer', 'default': 0}, rule='type') + else: data__timeRange["to"] = 0 + return data + def validate___definitions_devicenetworkattachspec(data, custom_formats={}, name_prefix=None): data_is_dict = isinstance(data, dict) if data_is_dict: diff --git a/riocli/package/model.py b/riocli/package/model.py index 7396248a..5fcca4a1 100644 --- a/riocli/package/model.py +++ b/riocli/package/model.py @@ -32,11 +32,12 @@ class Package(Model): 'never': RestartPolicy.Never, 'onfailure': RestartPolicy.OnFailure } + def __init__(self, *args, **kwargs): self.update(*args, **kwargs) def find_object(self, client: Client): - guid, obj = self.rc.find_depends({"kind": self.kind.lower(), "nameOrGUID": self.metadata.name}, + guid, obj = self.rc.find_depends({"kind": self.kind.lower(), "nameOrGUID": self.metadata.name}, self.metadata.version) if not guid: return False @@ -44,7 +45,6 @@ def find_object(self, client: Client): return obj def create_object(self, client: Client): - # click.secho('{}/{} {} created'.format(self.apiVersion, self.kind, self.metadata.name), fg='green') pkg_object = munchify({ 'name': 'default', 'packageVersion': 'v1.0.0', @@ -53,7 +53,7 @@ def create_object(self, client: Client): 'bindable': True, 'plans': [ { - "inboundROSInterfaces": { + "inboundROSInterfaces": { "anyIncomingScopedOrTargetedRosConfig": False }, 'singleton': False, @@ -80,26 +80,23 @@ def create_object(self, client: Client): # metadata # ✓ name, ✓ description, ✓ version - + pkg_object.name = self.metadata.name pkg_object.packageVersion = self.metadata.version - + if 'description' in self.metadata: pkg_object.description = self.metadata.description - - + # spec # executables - component_obj.name = 'default' #self.metadata.name #package == component in the single component model - + component_obj.name = 'default' # self.metadata.name #package == component in the single component model + # TODO validate transform. specially nested secret. component_obj.executables = list(map(self._map_executable, self.spec.executables)) for exec in component_obj.executables: if hasattr(exec, 'cmd') is False: setattr(exec, 'cmd', []) component_obj.requiredRuntime = self.spec.runtime - - # ✓ parameters # TODO validate transform. 
@@ -118,9 +115,10 @@ def create_object(self, client: Client): for entry in filter(lambda x: 'exposed' in x and x.exposed, self.spec.environmentVars): if os.environ.get('DEBUG'): print(entry.name) - exposed_parameters.append({'component': component_obj.name, 'param': entry.name, 'targetParam': entry.exposedName}) + exposed_parameters.append( + {'component': component_obj.name, 'param': entry.name, 'targetParam': entry.exposedName}) pkg_object.plans[0].exposedParameters = exposed_parameters - + # device # ✓ arch, ✓ restart if self.spec.runtime == 'device': @@ -128,7 +126,7 @@ def create_object(self, client: Client): component_obj.architecture = self.spec.device.arch if 'restart' in self.spec.device: component_obj.restart_policy = self.RESTART_POLICY[self.spec.device.restart.lower()] - + # cloud # ✓ replicas # ✓ endpoints @@ -140,31 +138,33 @@ def create_object(self, client: Client): component_obj.cloudInfra.replicas = 1 if 'endpoints' in self.spec: - endpoints = list(map(self._map_endpoints, self.spec.endpoints)) + endpoints = list(map(self._map_endpoints, self.spec.endpoints)) component_obj.cloudInfra.endpoints = endpoints - + # ros: # ✓ isros # ✓ topic # ✓ service # ✓ action # rosbagjob - if 'ros' in self.spec: + if 'ros' in self.spec and self.spec.ros.enabled: component_obj.ros.isROS = True - component_obj.ros.ros_distro = self.spec.ros.version + component_obj.ros.ros_distro = self.spec.ros.version pkg_object.plans[0].inboundROSInterfaces = munchify({}) - + pkg_object.plans[0].inboundROSInterfaces.anyIncomingScopedOrTargetedRosConfig = self.spec.ros.inboundScopedTargeted if 'inboundScopedTargeted' in self.spec.ros else False if 'rosEndpoints' in self.spec.ros: component_obj.ros.topics = list(self._get_rosendpoint_struct(self.spec.ros.rosEndpoints, 'topic')) component_obj.ros.services = list(self._get_rosendpoint_struct(self.spec.ros.rosEndpoints, 'service')) component_obj.ros.actions = list(self._get_rosendpoint_struct(self.spec.ros.rosEndpoints, 'action')) - + + if 'rosBagJobs' in self.spec: + component_obj.rosBagJobDefs = self.spec.rosBagJobs + pkg_object.plans[0].components = [component_obj] # return package # print(json.dumps(pkg_object)) return client.create_package(pkg_object) - def update_object(self, client: Client, obj: typing.Any) -> typing.Any: pass @@ -188,43 +188,42 @@ def _get_rosendpoint_struct(self, rosEndpoints, filter_type): return return_list def _map_executable(self, exec): - + exec_object = munchify({ "name": exec.name, "simulationOptions": { - "simulation": exec.simulation if 'simulation' in exec else False + "simulation": exec.simulation if 'simulation' in exec else False } }) - + if 'limits' in exec: exec_object.limits = { "cpu": exec.limits.cpu, "memory": exec.limits.memory } - + if exec.runAsBash: if 'command' in exec: exec_object.cmd = ['/bin/bash', '-c', exec.command] else: - #TODO verify this is right for secret? + # TODO verify this is right for secret? if 'command' in exec: exec_object.cmd = [exec.command] - if exec.type == 'docker': exec_object.docker = exec.docker.image if 'pullSecret' in exec.docker and exec.docker.pullSecret.depends: - secret_guid, secret = self.rc.find_depends(exec.docker.pullSecret.depends) + secret_guid, secret = self.rc.find_depends(exec.docker.pullSecret.depends) exec_object.secret = secret_guid - + if exec.type == 'build': exec_object.buildGUID = exec.build.depends.guid - #TODO verify this is right for secret? + # TODO verify this is right for secret? 
# if exec.docker.pullSecret and exec.docker.pullSecret.depends and exec.docker.pullSecret.depends.guid: - # exec_object.secret = exec.docker.pullSecret.depends.guid - - #TODO handle preinstalled - + # exec_object.secret = exec.docker.pullSecret.depends.guid + + # TODO handle preinstalled + return exec_object def _map_endpoints(self, endpoint): @@ -233,15 +232,14 @@ def _map_endpoints(self, endpoint): if 'tls-tcp' in proto: proto = 'tcp' - if 'range' in endpoint.type: proto = proto.replace("-range", '') return { - "name": endpoint.name, "exposeExternally": exposedExternally, + "name": endpoint.name, "exposeExternally": exposedExternally, "portRange": endpoint.portRange, "proto": proto.upper()} else: return { - "name": endpoint.name, "exposeExternally": exposedExternally, + "name": endpoint.name, "exposeExternally": exposedExternally, "port": endpoint.port, "targetPort": endpoint.targetPort, "proto": proto.upper()} @classmethod diff --git a/riocli/package/validation.py b/riocli/package/validation.py index 30ddf734..6574bb00 100644 --- a/riocli/package/validation.py +++ b/riocli/package/validation.py @@ -17,12 +17,12 @@ def validate(data, custom_formats={}, name_prefix=None): def validate___definitions_package(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Package', 'default': 'Package'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'version': {'type': 'string'}, 'tag': {'type': 'string'}, 'description': {'type': 'string'}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name', 'version']}, 'spec': {'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', '$ref': '#/definitions/rosComponentSpec'}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', '$ref': '#/definitions/deviceComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/deviceExecutableSpec'}}, 'environmentArgs': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', '$ref': '#/definitions/cloudComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/cloudExecutableSpec'}}, 'environmentVars': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}, 'endpoints': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Package', 'default': 'Package'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'version': {'type': 'string'}, 'tag': {'type': 'string'}, 'description': {'type': 'string'}, 'guid': 
{'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name', 'version']}, 'spec': {'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', '$ref': '#/definitions/rosComponentSpec'}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', '$ref': '#/definitions/deviceComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/deviceExecutableSpec'}}, 'environmentArgs': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}, 'rosBagJobs': {'type': 'array', 'items': {'$ref': '#/definitions/deviceROSBagJobSpec'}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', '$ref': '#/definitions/cloudComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/cloudExecutableSpec'}}, 'environmentVars': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}, 'endpoints': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosBagJobs': {'type': 'array', 'items': {'$ref': '#/definitions/cloudROSBagJobSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='type') data_is_dict = isinstance(data, dict) if data_is_dict: data_len = len(data) if not all(prop in data for prop in ['apiVersion', 'kind', 'metadata', 'spec']): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Package', 'default': 'Package'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'version': {'type': 'string'}, 'tag': {'type': 'string'}, 'description': {'type': 'string'}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name', 'version']}, 'spec': {'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', '$ref': '#/definitions/rosComponentSpec'}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', '$ref': '#/definitions/deviceComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/deviceExecutableSpec'}}, 'environmentArgs': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', '$ref': '#/definitions/cloudComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/cloudExecutableSpec'}}, 'environmentVars': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}, 'endpoints': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['apiVersion', 'kind', 'metadata', 'spec'] properties", value=data, name="" + (name_prefix or 
"data") + "", definition={'type': 'object', 'properties': {'apiVersion': {'const': 'apiextensions.rapyuta.io/v1', 'default': 'apiextensions.rapyuta.io/v1'}, 'kind': {'const': 'Package', 'default': 'Package'}, 'metadata': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'version': {'type': 'string'}, 'tag': {'type': 'string'}, 'description': {'type': 'string'}, 'guid': {'$ref': '#/definitions/packageGUID'}, 'creator': {'$ref': '#/definitions/uuid'}, 'project': {'$ref': '#/definitions/projectGUID'}, 'labels': {'$ref': '#/definitions/stringMap', 'uniqueItems': True}}, 'required': ['name', 'version']}, 'spec': {'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', '$ref': '#/definitions/rosComponentSpec'}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', '$ref': '#/definitions/deviceComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/deviceExecutableSpec'}}, 'environmentArgs': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}, 'rosBagJobs': {'type': 'array', 'items': {'$ref': '#/definitions/deviceROSBagJobSpec'}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', '$ref': '#/definitions/cloudComponentInfoSpec'}, 'executables': {'type': 'array', 'items': {'$ref': '#/definitions/cloudExecutableSpec'}}, 'environmentVars': {'type': 'array', 'items': {'$ref': '#/definitions/environmentSpec'}}, 'endpoints': {'type': 'array', 'items': {'$ref': '#/definitions/endpointSpec'}}, 'rosBagJobs': {'type': 'array', 'items': {'$ref': '#/definitions/cloudROSBagJobSpec'}}}}]}}}}, 'required': ['apiVersion', 'kind', 'metadata', 'spec']}, rule='required') data_keys = set(data.keys()) if "apiVersion" in data_keys: data_keys.remove("apiVersion") @@ -48,7 +48,7 @@ def validate___definitions_package(data, custom_formats={}, name_prefix=None): def validate___definitions_componentspec(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', 'properties': {'enabled': {'type': 'boolean', 'default': False}}, 'dependencies': {'enabled': {'oneOf': [{'properties': {'enabled': {'enum': [False]}}}, {'properties': {'enabled': {'type': 'boolean', 'enum': [True]}, 'version': {'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, 'inboundScopedTargeted': {'type': 'boolean', 'default': False}, 'rosEndpoints': {'type': 'array', 'items': {'$ref': '#/definitions/rosEndpointSpec'}}}}]}}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', 'properties': {'arch': {'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, 'restart': {'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': 
['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'$ref': '#/definitions/secretDepends'}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', 'properties': {'replicas': {'type': 'number', 'default': 1}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentVars': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, 'endpoints': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 
'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}}}}]}}}, rule='type') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'runtime': {'type': 'string', 'enum': ['device', 'cloud'], 'default': 'cloud'}, 'ros': {'type': 'object', 'properties': {'enabled': {'type': 'boolean', 'default': False}}, 'dependencies': {'enabled': {'oneOf': [{'properties': {'enabled': {'enum': [False]}}}, {'properties': {'enabled': {'type': 'boolean', 'enum': [True]}, 'version': {'type': 'string', 'enum': ['kinetic', 'melodic', 'noetic'], 'default': 'melodic'}, 'inboundScopedTargeted': {'type': 'boolean', 'default': False}, 'rosEndpoints': {'type': 'array', 'items': {'$ref': '#/definitions/rosEndpointSpec'}}}}]}}}}, 'dependencies': {'runtime': {'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', 'properties': {'arch': {'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, 'restart': {'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'$ref': '#/definitions/secretDepends'}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, 'rosBagJobs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'uploadOptions': {'$ref': '#/definitions/rosbagUploadOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', 'properties': {'replicas': {'type': 'number', 'default': 1}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 
'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentVars': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, 'endpoints': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}}, 'rosBagJobs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}}}]}}}, rule='type') data_is_dict = isinstance(data, dict) if data_is_dict: if "runtime" in data: @@ -87,6 +87,16 @@ def validate___definitions_componentspec(data, custom_formats={}, name_prefix=No data__environmentArgs_len = 
len(data__environmentArgs) for data__environmentArgs_x, data__environmentArgs_item in enumerate(data__environmentArgs): validate___definitions_environmentspec(data__environmentArgs_item, custom_formats, (name_prefix or "data") + ".environmentArgs[{data__environmentArgs_x}]") + if "rosBagJobs" in data_keys: + data_keys.remove("rosBagJobs") + data__rosBagJobs = data["rosBagJobs"] + if not isinstance(data__rosBagJobs, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosBagJobs must be array", value=data__rosBagJobs, name="" + (name_prefix or "data") + ".rosBagJobs", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'uploadOptions': {'$ref': '#/definitions/rosbagUploadOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}, rule='type') + data__rosBagJobs_is_list = isinstance(data__rosBagJobs, (list, tuple)) + if data__rosBagJobs_is_list: + data__rosBagJobs_len = len(data__rosBagJobs) + for data__rosBagJobs_x, data__rosBagJobs_item in enumerate(data__rosBagJobs): + validate___definitions_devicerosbagjobspec(data__rosBagJobs_item, custom_formats, (name_prefix or "data") + ".rosBagJobs[{data__rosBagJobs_x}]") data_one_of_count1 += 1 except JsonSchemaValueException: pass if data_one_of_count1 < 2: @@ -107,7 +117,7 @@ def validate___definitions_componentspec(data, custom_formats={}, name_prefix=No data_keys.remove("executables") data__executables = data["executables"] if not isinstance(data__executables, (list, tuple)): - raise JsonSchemaValueException("" + (name_prefix or "data") + ".executables must be array", value=data__executables, name="" + (name_prefix or "data") + ".executables", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, rule='type') + raise JsonSchemaValueException("" + (name_prefix or "data") + ".executables must be array", value=data__executables, name="" + (name_prefix or "data") + ".executables", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': 
{'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, rule='type') data__executables_is_list = isinstance(data__executables, (list, tuple)) if data__executables_is_list: data__executables_len = len(data__executables) @@ -133,10 +143,20 @@ def validate___definitions_componentspec(data, custom_formats={}, name_prefix=No data__endpoints_len = len(data__endpoints) for data__endpoints_x, data__endpoints_item in enumerate(data__endpoints): validate___definitions_endpointspec(data__endpoints_item, custom_formats, (name_prefix or "data") + ".endpoints[{data__endpoints_x}]") + if "rosBagJobs" in data_keys: + data_keys.remove("rosBagJobs") + data__rosBagJobs = data["rosBagJobs"] + if not isinstance(data__rosBagJobs, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".rosBagJobs must be array", value=data__rosBagJobs, name="" + (name_prefix or "data") + ".rosBagJobs", definition={'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}, rule='type') + data__rosBagJobs_is_list = isinstance(data__rosBagJobs, (list, tuple)) + if data__rosBagJobs_is_list: + data__rosBagJobs_len = len(data__rosBagJobs) + for data__rosBagJobs_x, data__rosBagJobs_item in enumerate(data__rosBagJobs): + validate___definitions_cloudrosbagjobspec(data__rosBagJobs_item, custom_formats, (name_prefix or "data") + ".rosBagJobs[{data__rosBagJobs_x}]") data_one_of_count1 += 1 except JsonSchemaValueException: pass if data_one_of_count1 != 1: - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', 'properties': {'arch': {'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, 'restart': {'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'$ref': '#/definitions/secretDepends'}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': 
{'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', 'properties': {'replicas': {'type': 'number', 'default': 1}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentVars': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, 'endpoints': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 
'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}}}}]}, rule='oneOf') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count1) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'runtime': {'enum': ['device']}, 'device': {'type': 'object', 'properties': {'arch': {'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, 'restart': {'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'$ref': '#/definitions/secretDepends'}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentArgs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, 'rosBagJobs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'uploadOptions': {'$ref': '#/definitions/rosbagUploadOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}}}, {'properties': {'runtime': {'enum': ['cloud']}, 'cloud': {'type': 'object', 'properties': {'replicas': {'type': 'number', 'default': 1}}}, 'executables': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/secretDepends'}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'$ref': '#/definitions/buildDepends'}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}}, 'environmentVars': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': 
False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}}, 'endpoints': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 443}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'$ref': '#/definitions/portNumber', 'default': 80}, 'targetPort': {'$ref': '#/definitions/portNumber'}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}}, 'rosBagJobs': {'type': 'array', 'items': {'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'$ref': '#/definitions/rosbagRecordOptionsSpec'}, 'overrideOptions': {'$ref': '#/definitions/rosbagOverrideOptionsSpec'}}, 'required': ['name', 'recordOptions']}}}}]}, rule='oneOf') data_keys = set(data.keys()) if "runtime" in data_keys: data_keys.remove("runtime") @@ -316,6 +336,222 @@ def validate___definitions_rosendpointspec(data, custom_formats={}, name_prefix= else: data["targeted"] = False return data +def validate___definitions_cloudrosbagjobspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, 'overrideOptions': {'type': 'object', 'properties': {'topicOverrideInfo': {'type': 
'array', 'items': {'$ref': '#/definitions/rosbagTopicOverrideInfoSpec'}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'required': ['name', 'recordOptions']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name', 'recordOptions']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name', 'recordOptions'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, 'overrideOptions': {'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'$ref': '#/definitions/rosbagTopicOverrideInfoSpec'}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'required': ['name', 'recordOptions']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "recordOptions" in data_keys: + data_keys.remove("recordOptions") + data__recordOptions = data["recordOptions"] + validate___definitions_rosbagrecordoptionsspec(data__recordOptions, custom_formats, (name_prefix or "data") + ".recordOptions") + if "overrideOptions" in data_keys: + data_keys.remove("overrideOptions") + data__overrideOptions = data["overrideOptions"] + validate___definitions_rosbagoverrideoptionsspec(data__overrideOptions, custom_formats, (name_prefix or "data") + ".overrideOptions") + return data + +def validate___definitions_rosbagoverrideoptionsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'type': 'object', 'oneOf': [{'required': ['topicName', 'recordFrequency']}, {'required': ['topicName', 'latched']}], 'properties': {'topicName': {'type': 'string'}, 'recordFrequency': {'type': 'integer'}, 'latched': {'type': 'boolean'}}}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "topicOverrideInfo" in data_keys: + data_keys.remove("topicOverrideInfo") + data__topicOverrideInfo = data["topicOverrideInfo"] + if not isinstance(data__topicOverrideInfo, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicOverrideInfo must be array", value=data__topicOverrideInfo, name="" + (name_prefix or "data") + 
".topicOverrideInfo", definition={'type': 'array', 'items': {'type': 'object', 'oneOf': [{'required': ['topicName', 'recordFrequency']}, {'required': ['topicName', 'latched']}], 'properties': {'topicName': {'type': 'string'}, 'recordFrequency': {'type': 'integer'}, 'latched': {'type': 'boolean'}}}}, rule='type') + data__topicOverrideInfo_is_list = isinstance(data__topicOverrideInfo, (list, tuple)) + if data__topicOverrideInfo_is_list: + data__topicOverrideInfo_len = len(data__topicOverrideInfo) + for data__topicOverrideInfo_x, data__topicOverrideInfo_item in enumerate(data__topicOverrideInfo): + validate___definitions_rosbagtopicoverrideinfospec(data__topicOverrideInfo_item, custom_formats, (name_prefix or "data") + ".topicOverrideInfo[{data__topicOverrideInfo_x}]") + if "excludeTopics" in data_keys: + data_keys.remove("excludeTopics") + data__excludeTopics = data["excludeTopics"] + if not isinstance(data__excludeTopics, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".excludeTopics must be array", value=data__excludeTopics, name="" + (name_prefix or "data") + ".excludeTopics", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__excludeTopics_is_list = isinstance(data__excludeTopics, (list, tuple)) + if data__excludeTopics_is_list: + data__excludeTopics_len = len(data__excludeTopics) + for data__excludeTopics_x, data__excludeTopics_item in enumerate(data__excludeTopics): + if not isinstance(data__excludeTopics_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".excludeTopics[{data__excludeTopics_x}]".format(**locals()) + " must be string", value=data__excludeTopics_item, name="" + (name_prefix or "data") + ".excludeTopics[{data__excludeTopics_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + return data + +def validate___definitions_rosbagtopicoverrideinfospec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'oneOf': [{'required': ['topicName', 'recordFrequency']}, {'required': ['topicName', 'latched']}], 'properties': {'topicName': {'type': 'string'}, 'recordFrequency': {'type': 'integer'}, 'latched': {'type': 'boolean'}}}, rule='type') + data_one_of_count4 = 0 + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['topicName', 'recordFrequency']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['topicName', 'recordFrequency'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['topicName', 'recordFrequency']}, rule='required') + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['topicName', 'latched']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['topicName', 'latched'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['topicName', 'latched']}, rule='required') + data_one_of_count4 += 1 + except JsonSchemaValueException: pass + if data_one_of_count4 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + 
str(data_one_of_count4) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'oneOf': [{'required': ['topicName', 'recordFrequency']}, {'required': ['topicName', 'latched']}], 'properties': {'topicName': {'type': 'string'}, 'recordFrequency': {'type': 'integer'}, 'latched': {'type': 'boolean'}}}, rule='oneOf') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "topicName" in data_keys: + data_keys.remove("topicName") + data__topicName = data["topicName"] + if not isinstance(data__topicName, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicName must be string", value=data__topicName, name="" + (name_prefix or "data") + ".topicName", definition={'type': 'string'}, rule='type') + if "recordFrequency" in data_keys: + data_keys.remove("recordFrequency") + data__recordFrequency = data["recordFrequency"] + if not isinstance(data__recordFrequency, (int)) and not (isinstance(data__recordFrequency, float) and data__recordFrequency.is_integer()) or isinstance(data__recordFrequency, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".recordFrequency must be integer", value=data__recordFrequency, name="" + (name_prefix or "data") + ".recordFrequency", definition={'type': 'integer'}, rule='type') + if "latched" in data_keys: + data_keys.remove("latched") + data__latched = data["latched"] + if not isinstance(data__latched, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".latched must be boolean", value=data__latched, name="" + (name_prefix or "data") + ".latched", definition={'type': 'boolean'}, rule='type') + return data + +def validate___definitions_rosbagrecordoptionsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, rule='type') + data_one_of_count5 = 0 + if data_one_of_count5 < 2: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['allTopics']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['allTopics'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['allTopics']}, rule='required') + data_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data_one_of_count5 < 2: + try: + data_any_of_count6 = 0 + if not data_any_of_count6: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['topics']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['topics'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['topics']}, 
rule='required') + data_any_of_count6 += 1 + except JsonSchemaValueException: pass + if not data_any_of_count6: + try: + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['topicIncludeRegex']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['topicIncludeRegex'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'required': ['topicIncludeRegex']}, rule='required') + data_any_of_count6 += 1 + except JsonSchemaValueException: pass + if not data_any_of_count6: + raise JsonSchemaValueException("" + (name_prefix or "data") + " cannot be validated by any definition", value=data, name="" + (name_prefix or "data") + "", definition={'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}, rule='anyOf') + data_one_of_count5 += 1 + except JsonSchemaValueException: pass + if data_one_of_count5 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count5) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, rule='oneOf') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "allTopics" in data_keys: + data_keys.remove("allTopics") + data__allTopics = data["allTopics"] + if not isinstance(data__allTopics, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".allTopics must be boolean", value=data__allTopics, name="" + (name_prefix or "data") + ".allTopics", definition={'type': 'boolean'}, rule='type') + if "topics" in data_keys: + data_keys.remove("topics") + data__topics = data["topics"] + if not isinstance(data__topics, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topics must be array", value=data__topics, name="" + (name_prefix or "data") + ".topics", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__topics_is_list = isinstance(data__topics, (list, tuple)) + if data__topics_is_list: + data__topics_len = len(data__topics) + for data__topics_x, data__topics_item in enumerate(data__topics): + if not isinstance(data__topics_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topics[{data__topics_x}]".format(**locals()) + " must be string", value=data__topics_item, name="" + (name_prefix or "data") + ".topics[{data__topics_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "topicIncludeRegex" in data_keys: + data_keys.remove("topicIncludeRegex") + data__topicIncludeRegex = data["topicIncludeRegex"] + if not isinstance(data__topicIncludeRegex, (list, tuple)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicIncludeRegex must be array", value=data__topicIncludeRegex, name="" + (name_prefix or "data") + 
".topicIncludeRegex", definition={'type': 'array', 'items': {'type': 'string'}}, rule='type') + data__topicIncludeRegex_is_list = isinstance(data__topicIncludeRegex, (list, tuple)) + if data__topicIncludeRegex_is_list: + data__topicIncludeRegex_len = len(data__topicIncludeRegex) + for data__topicIncludeRegex_x, data__topicIncludeRegex_item in enumerate(data__topicIncludeRegex): + if not isinstance(data__topicIncludeRegex_item, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicIncludeRegex[{data__topicIncludeRegex_x}]".format(**locals()) + " must be string", value=data__topicIncludeRegex_item, name="" + (name_prefix or "data") + ".topicIncludeRegex[{data__topicIncludeRegex_x}]".format(**locals()) + "", definition={'type': 'string'}, rule='type') + if "topicExcludeRegex" in data_keys: + data_keys.remove("topicExcludeRegex") + data__topicExcludeRegex = data["topicExcludeRegex"] + if not isinstance(data__topicExcludeRegex, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".topicExcludeRegex must be string", value=data__topicExcludeRegex, name="" + (name_prefix or "data") + ".topicExcludeRegex", definition={'type': 'string'}, rule='type') + if "maxMessageCount" in data_keys: + data_keys.remove("maxMessageCount") + data__maxMessageCount = data["maxMessageCount"] + if not isinstance(data__maxMessageCount, (int)) and not (isinstance(data__maxMessageCount, float) and data__maxMessageCount.is_integer()) or isinstance(data__maxMessageCount, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxMessageCount must be integer", value=data__maxMessageCount, name="" + (name_prefix or "data") + ".maxMessageCount", definition={'type': 'integer'}, rule='type') + if "node" in data_keys: + data_keys.remove("node") + data__node = data["node"] + if not isinstance(data__node, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".node must be string", value=data__node, name="" + (name_prefix or "data") + ".node", definition={'type': 'string'}, rule='type') + if "compression" in data_keys: + data_keys.remove("compression") + data__compression = data["compression"] + if not isinstance(data__compression, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".compression must be string", value=data__compression, name="" + (name_prefix or "data") + ".compression", definition={'type': 'string', 'enum': ['BZ2', 'LZ4']}, rule='type') + if data__compression not in ['BZ2', 'LZ4']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".compression must be one of ['BZ2', 'LZ4']", value=data__compression, name="" + (name_prefix or "data") + ".compression", definition={'type': 'string', 'enum': ['BZ2', 'LZ4']}, rule='enum') + if "maxSplits" in data_keys: + data_keys.remove("maxSplits") + data__maxSplits = data["maxSplits"] + if not isinstance(data__maxSplits, (int)) and not (isinstance(data__maxSplits, float) and data__maxSplits.is_integer()) or isinstance(data__maxSplits, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxSplits must be integer", value=data__maxSplits, name="" + (name_prefix or "data") + ".maxSplits", definition={'type': 'integer'}, rule='type') + if "maxSplitSize" in data_keys: + data_keys.remove("maxSplitSize") + data__maxSplitSize = data["maxSplitSize"] + if not isinstance(data__maxSplitSize, (int)) and not (isinstance(data__maxSplitSize, float) and data__maxSplitSize.is_integer()) or isinstance(data__maxSplitSize, bool): + raise JsonSchemaValueException("" + 
(name_prefix or "data") + ".maxSplitSize must be integer", value=data__maxSplitSize, name="" + (name_prefix or "data") + ".maxSplitSize", definition={'type': 'integer'}, rule='type') + if "chunkSize" in data_keys: + data_keys.remove("chunkSize") + data__chunkSize = data["chunkSize"] + if not isinstance(data__chunkSize, (int)) and not (isinstance(data__chunkSize, float) and data__chunkSize.is_integer()) or isinstance(data__chunkSize, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".chunkSize must be integer", value=data__chunkSize, name="" + (name_prefix or "data") + ".chunkSize", definition={'type': 'integer'}, rule='type') + if "prefix" in data_keys: + data_keys.remove("prefix") + data__prefix = data["prefix"] + if not isinstance(data__prefix, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".prefix must be string", value=data__prefix, name="" + (name_prefix or "data") + ".prefix", definition={'type': 'string'}, rule='type') + if "maxSplitDuration" in data_keys: + data_keys.remove("maxSplitDuration") + data__maxSplitDuration = data["maxSplitDuration"] + if not isinstance(data__maxSplitDuration, (int)) and not (isinstance(data__maxSplitDuration, float) and data__maxSplitDuration.is_integer()) or isinstance(data__maxSplitDuration, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxSplitDuration must be integer", value=data__maxSplitDuration, name="" + (name_prefix or "data") + ".maxSplitDuration", definition={'type': 'integer'}, rule='type') + return data + def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}, rule='type') @@ -325,8 +561,8 @@ def validate___definitions_endpointspec(data, 
custom_formats={}, name_prefix=Non if not all(prop in data for prop in ['name', 'type']): raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name', 'type'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'external-http', 'enum': ['external-http', 'external-https', 'external-tls-tcp', 'internal-tcp', 'internal-udp', 'internal-tcp-range', 'internal-udp-range']}}, 'required': ['name', 'type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}}}, rule='required') if "type" in data: - data_one_of_count4 = 0 - if data_one_of_count4 < 2: + data_one_of_count7 = 0 + if data_one_of_count7 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -348,9 +584,9 @@ def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=Non data_keys.remove("targetPort") data__targetPort = data["targetPort"] validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") - data_one_of_count4 += 1 + data_one_of_count7 += 1 except JsonSchemaValueException: pass - if data_one_of_count4 < 2: + if data_one_of_count7 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -372,9 +608,9 @@ def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=Non data_keys.remove("targetPort") data__targetPort = data["targetPort"] validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") - data_one_of_count4 += 1 + data_one_of_count7 += 1 except JsonSchemaValueException: pass - if data_one_of_count4 < 2: + if data_one_of_count7 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -396,9 +632,9 @@ def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=Non data_keys.remove("targetPort") data__targetPort = data["targetPort"] validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") - data_one_of_count4 += 1 + data_one_of_count7 += 1 except JsonSchemaValueException: pass - if data_one_of_count4 < 2: + if data_one_of_count7 < 2: try: data_is_dict = isinstance(data, dict) 
if data_is_dict: @@ -420,9 +656,9 @@ def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=Non data_keys.remove("targetPort") data__targetPort = data["targetPort"] validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") - data_one_of_count4 += 1 + data_one_of_count7 += 1 except JsonSchemaValueException: pass - if data_one_of_count4 < 2: + if data_one_of_count7 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -444,9 +680,9 @@ def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=Non data_keys.remove("targetPort") data__targetPort = data["targetPort"] validate___definitions_portnumber(data__targetPort, custom_formats, (name_prefix or "data") + ".targetPort") - data_one_of_count4 += 1 + data_one_of_count7 += 1 except JsonSchemaValueException: pass - if data_one_of_count4 < 2: + if data_one_of_count7 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -465,9 +701,9 @@ def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=Non if not isinstance(data__portRange, (str)): raise JsonSchemaValueException("" + (name_prefix or "data") + ".portRange must be string", value=data__portRange, name="" + (name_prefix or "data") + ".portRange", definition={'type': 'string', 'default': '22,80, 1024-1030'}, rule='type') else: data["portRange"] = '22,80, 1024-1030' - data_one_of_count4 += 1 + data_one_of_count7 += 1 except JsonSchemaValueException: pass - if data_one_of_count4 < 2: + if data_one_of_count7 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -486,10 +722,10 @@ def validate___definitions_endpointspec(data, custom_formats={}, name_prefix=Non if not isinstance(data__portRange, (str)): raise JsonSchemaValueException("" + (name_prefix or "data") + ".portRange must be string", value=data__portRange, name="" + (name_prefix or "data") + ".portRange", definition={'type': 'string', 'default': '53,1024-1025'}, rule='type') else: data["portRange"] = '53,1024-1025' - data_one_of_count4 += 1 + data_one_of_count7 += 1 except JsonSchemaValueException: pass - if data_one_of_count4 != 1: - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count4) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 
'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}, rule='oneOf') + if data_one_of_count7 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count7) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['external-http']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-https']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['external-tls-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-udp']}, 'port': {'type': 'integer', 'min': 1, 'max': 65531}, 'targetPort': {'type': 'integer', 'min': 1, 'max': 65531}}, 'required': ['port', 'targetPort']}, {'properties': {'type': {'enum': ['internal-tcp-range']}, 'portRange': {'type': 'string', 'default': '22,80, 1024-1030'}}, 'required': ['portRange']}, {'properties': {'type': {'enum': ['internal-udp-range']}, 'portRange': {'type': 'string', 'default': '53,1024-1025'}}, 'required': ['portRange']}]}, rule='oneOf') data_keys = set(data.keys()) if "name" in data_keys: data_keys.remove("name") @@ -513,15 +749,15 @@ def validate___definitions_portnumber(data, custom_formats={}, name_prefix=None) def validate___definitions_cloudexecutablespec(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='type') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", 
definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='type') data_is_dict = isinstance(data, dict) if data_is_dict: data_len = len(data) if not all(prop in data for prop in ['type']): - raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['type'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='required') + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['type'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}, 'simulation': {'type': 'boolean', 'default': False}, 'limits': {'type': 'object', 'properties': {'cpu': {'type': 'number', 'min': 0.1, 'max': 8}, 'memory': {'type': 'number', 'min': 256, 'max': 32768}}}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 
'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='required') if "type" in data: - data_one_of_count5 = 0 - if data_one_of_count5 < 2: + data_one_of_count8 = 0 + if data_one_of_count8 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -562,9 +798,9 @@ def validate___definitions_cloudexecutablespec(data, custom_formats={}, name_pre data__docker__pullSecret_keys.remove("depends") data__docker__pullSecret__depends = data__docker__pullSecret["depends"] validate___definitions_secretdepends(data__docker__pullSecret__depends, custom_formats, (name_prefix or "data") + ".docker.pullSecret.depends") - data_one_of_count5 += 1 + data_one_of_count8 += 1 except JsonSchemaValueException: pass - if data_one_of_count5 < 2: + if data_one_of_count8 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -578,20 +814,20 @@ def validate___definitions_cloudexecutablespec(data, custom_formats={}, name_pre data_keys.remove("build") data__build = data["build"] if not isinstance(data__build, (dict)): - raise JsonSchemaValueException("" + (name_prefix or "data") + ".build must be object", value=data__build, name="" + (name_prefix or "data") + ".build", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='type') + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build must be object", value=data__build, name="" + (name_prefix or "data") + ".build", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='type') data__build_is_dict = isinstance(data__build, dict) if data__build_is_dict: data__build_len = len(data__build) if not all(prop in data__build for prop in ['depends']): - raise JsonSchemaValueException("" + (name_prefix or "data") + ".build must contain ['depends'] properties", value=data__build, name="" + (name_prefix or "data") + ".build", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='required') + raise JsonSchemaValueException("" + (name_prefix or "data") + ".build must contain ['depends'] properties", value=data__build, name="" + (name_prefix or "data") + ".build", definition={'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}, rule='required') data__build_keys = set(data__build.keys()) if "depends" in data__build_keys: data__build_keys.remove("depends") data__build__depends = data__build["depends"] - validate___definitions_secretdepends(data__build__depends, custom_formats, (name_prefix or "data") + ".build.depends") - data_one_of_count5 += 1 + validate___definitions_builddepends(data__build__depends, custom_formats, (name_prefix or "data") + 
".build.depends") + data_one_of_count8 += 1 except JsonSchemaValueException: pass - if data_one_of_count5 < 2: + if data_one_of_count8 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -601,10 +837,10 @@ def validate___definitions_cloudexecutablespec(data, custom_formats={}, name_pre data__type = data["type"] if data__type not in ['preInstalled']: raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['preInstalled']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['preInstalled']}, rule='enum') - data_one_of_count5 += 1 + data_one_of_count8 += 1 except JsonSchemaValueException: pass - if data_one_of_count5 != 1: - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count5) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}, rule='oneOf') + if data_one_of_count8 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count8) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}, rule='oneOf') data_keys = set(data.keys()) if "name" in data_keys: data_keys.remove("name") @@ -656,6 +892,28 @@ def validate___definitions_cloudexecutablespec(data, custom_formats={}, name_pre raise JsonSchemaValueException("" + (name_prefix or "data") + ".limits.memory must be number", value=data__limits__memory, name="" + (name_prefix or "data") + ".limits.memory", definition={'type': 'number', 'min': 256, 'max': 32768}, rule='type') return data +def validate___definitions_builddepends(data, custom_formats={}, name_prefix=None): + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "kind" in data_keys: + data_keys.remove("kind") + data__kind = data["kind"] + if data__kind != "build": + raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: build", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'build', 'default': 'build'}, rule='const') + 
else: data["kind"] = 'build' + if "nameOrGUID" in data_keys: + data_keys.remove("nameOrGUID") + data__nameOrGUID = data["nameOrGUID"] + if not isinstance(data__nameOrGUID, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') + if "guid" in data_keys: + data_keys.remove("guid") + data__guid = data["guid"] + if not isinstance(data__guid, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") + ".guid", definition={'type': 'string'}, rule='type') + return data + def validate___definitions_secretdepends(data, custom_formats={}, name_prefix=None): data_is_dict = isinstance(data, dict) if data_is_dict: @@ -692,6 +950,97 @@ def validate___definitions_cloudcomponentinfospec(data, custom_formats={}, name_ else: data["replicas"] = 1 return data +def validate___definitions_devicerosbagjobspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, 'uploadOptions': {'type': 'object', 'properties': {'maxUploadRate': {'type': 'integer', 'default': 1048576}, 'purgeAfter': {'type': 'boolean'}, 'uploadType': {'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, 'onDemandOpts': {'type': 'object', '$ref': '#/definitions/rosbagOnDemandUploadOptionsSpec'}}}, 'overrideOptions': {'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'$ref': '#/definitions/rosbagTopicOverrideInfoSpec'}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'required': ['name', 'recordOptions']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['name', 'recordOptions']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name', 'recordOptions'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'recordOptions': {'type': 'object', 'oneOf': [{'required': ['allTopics']}, {'anyOf': [{'required': ['topics']}, {'required': ['topicIncludeRegex']}]}], 'properties': {'allTopics': {'type': 'boolean'}, 'topics': {'type': 'array', 'items': {'type': 'string'}}, 'topicIncludeRegex': {'type': 'array', 'items': {'type': 'string'}}, 'topicExcludeRegex': {'type': 'string'}, 'maxMessageCount': {'type': 'integer'}, 'node': {'type': 'string'}, 'compression': {'type': 'string', 'enum': ['BZ2', 'LZ4']}, 'maxSplits': {'type': 
'integer'}, 'maxSplitSize': {'type': 'integer'}, 'chunkSize': {'type': 'integer'}, 'prefix': {'type': 'string'}, 'maxSplitDuration': {'type': 'integer'}}}, 'uploadOptions': {'type': 'object', 'properties': {'maxUploadRate': {'type': 'integer', 'default': 1048576}, 'purgeAfter': {'type': 'boolean'}, 'uploadType': {'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, 'onDemandOpts': {'type': 'object', '$ref': '#/definitions/rosbagOnDemandUploadOptionsSpec'}}}, 'overrideOptions': {'type': 'object', 'properties': {'topicOverrideInfo': {'type': 'array', 'items': {'$ref': '#/definitions/rosbagTopicOverrideInfoSpec'}}, 'excludeTopics': {'type': 'array', 'items': {'type': 'string'}}}}}, 'required': ['name', 'recordOptions']}, rule='required') + data_keys = set(data.keys()) + if "name" in data_keys: + data_keys.remove("name") + data__name = data["name"] + if not isinstance(data__name, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".name must be string", value=data__name, name="" + (name_prefix or "data") + ".name", definition={'type': 'string'}, rule='type') + if "recordOptions" in data_keys: + data_keys.remove("recordOptions") + data__recordOptions = data["recordOptions"] + validate___definitions_rosbagrecordoptionsspec(data__recordOptions, custom_formats, (name_prefix or "data") + ".recordOptions") + if "uploadOptions" in data_keys: + data_keys.remove("uploadOptions") + data__uploadOptions = data["uploadOptions"] + validate___definitions_rosbaguploadoptionsspec(data__uploadOptions, custom_formats, (name_prefix or "data") + ".uploadOptions") + if "overrideOptions" in data_keys: + data_keys.remove("overrideOptions") + data__overrideOptions = data["overrideOptions"] + validate___definitions_rosbagoverrideoptionsspec(data__overrideOptions, custom_formats, (name_prefix or "data") + ".overrideOptions") + return data + +def validate___definitions_rosbaguploadoptionsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'maxUploadRate': {'type': 'integer', 'default': 1048576}, 'purgeAfter': {'type': 'boolean'}, 'uploadType': {'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, 'onDemandOpts': {'type': 'object', 'properties': {'timeRange': {'type': 'object', 'properties': {'from': {'type': 'integer'}, 'to': {'type': 'integer'}}, 'required': ['from', 'to']}}, 'required': ['timeRange']}}}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_keys = set(data.keys()) + if "maxUploadRate" in data_keys: + data_keys.remove("maxUploadRate") + data__maxUploadRate = data["maxUploadRate"] + if not isinstance(data__maxUploadRate, (int)) and not (isinstance(data__maxUploadRate, float) and data__maxUploadRate.is_integer()) or isinstance(data__maxUploadRate, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".maxUploadRate must be integer", value=data__maxUploadRate, name="" + (name_prefix or "data") + ".maxUploadRate", definition={'type': 'integer', 'default': 1048576}, rule='type') + else: data["maxUploadRate"] = 1048576 + if "purgeAfter" in data_keys: + data_keys.remove("purgeAfter") + data__purgeAfter = data["purgeAfter"] + if not isinstance(data__purgeAfter, (bool)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".purgeAfter must be boolean", 
value=data__purgeAfter, name="" + (name_prefix or "data") + ".purgeAfter", definition={'type': 'boolean'}, rule='type') + if "uploadType" in data_keys: + data_keys.remove("uploadType") + data__uploadType = data["uploadType"] + if not isinstance(data__uploadType, (str)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".uploadType must be string", value=data__uploadType, name="" + (name_prefix or "data") + ".uploadType", definition={'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, rule='type') + if data__uploadType not in ['OnStop', 'Continuous', 'OnDemand']: + raise JsonSchemaValueException("" + (name_prefix or "data") + ".uploadType must be one of ['OnStop', 'Continuous', 'OnDemand']", value=data__uploadType, name="" + (name_prefix or "data") + ".uploadType", definition={'type': 'string', 'enum': ['OnStop', 'Continuous', 'OnDemand'], 'default': 'OnDemand'}, rule='enum') + else: data["uploadType"] = 'OnDemand' + if "onDemandOpts" in data_keys: + data_keys.remove("onDemandOpts") + data__onDemandOpts = data["onDemandOpts"] + validate___definitions_rosbagondemanduploadoptionsspec(data__onDemandOpts, custom_formats, (name_prefix or "data") + ".onDemandOpts") + return data + +def validate___definitions_rosbagondemanduploadoptionsspec(data, custom_formats={}, name_prefix=None): + if not isinstance(data, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'timeRange': {'type': 'object', 'properties': {'from': {'type': 'integer'}, 'to': {'type': 'integer'}}, 'required': ['from', 'to']}}, 'required': ['timeRange']}, rule='type') + data_is_dict = isinstance(data, dict) + if data_is_dict: + data_len = len(data) + if not all(prop in data for prop in ['timeRange']): + raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['timeRange'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'timeRange': {'type': 'object', 'properties': {'from': {'type': 'integer'}, 'to': {'type': 'integer'}}, 'required': ['from', 'to']}}, 'required': ['timeRange']}, rule='required') + data_keys = set(data.keys()) + if "timeRange" in data_keys: + data_keys.remove("timeRange") + data__timeRange = data["timeRange"] + if not isinstance(data__timeRange, (dict)): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeRange must be object", value=data__timeRange, name="" + (name_prefix or "data") + ".timeRange", definition={'type': 'object', 'properties': {'from': {'type': 'integer'}, 'to': {'type': 'integer'}}, 'required': ['from', 'to']}, rule='type') + data__timeRange_is_dict = isinstance(data__timeRange, dict) + if data__timeRange_is_dict: + data__timeRange_len = len(data__timeRange) + if not all(prop in data__timeRange for prop in ['from', 'to']): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeRange must contain ['from', 'to'] properties", value=data__timeRange, name="" + (name_prefix or "data") + ".timeRange", definition={'type': 'object', 'properties': {'from': {'type': 'integer'}, 'to': {'type': 'integer'}}, 'required': ['from', 'to']}, rule='required') + data__timeRange_keys = set(data__timeRange.keys()) + if "from" in data__timeRange_keys: + data__timeRange_keys.remove("from") + data__timeRange__from = data__timeRange["from"] + if not isinstance(data__timeRange__from, (int)) and not 
(isinstance(data__timeRange__from, float) and data__timeRange__from.is_integer()) or isinstance(data__timeRange__from, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeRange.from must be integer", value=data__timeRange__from, name="" + (name_prefix or "data") + ".timeRange.from", definition={'type': 'integer'}, rule='type') + if "to" in data__timeRange_keys: + data__timeRange_keys.remove("to") + data__timeRange__to = data__timeRange["to"] + if not isinstance(data__timeRange__to, (int)) and not (isinstance(data__timeRange__to, float) and data__timeRange__to.is_integer()) or isinstance(data__timeRange__to, bool): + raise JsonSchemaValueException("" + (name_prefix or "data") + ".timeRange.to must be integer", value=data__timeRange__to, name="" + (name_prefix or "data") + ".timeRange.to", definition={'type': 'integer'}, rule='type') + return data + def validate___definitions_environmentspec(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}, rule='type') @@ -701,8 +1050,8 @@ def validate___definitions_environmentspec(data, custom_formats={}, name_prefix= if not all(prop in data for prop in ['name']): raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['name'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'description': {'type': 'string'}, 'default': {'type': 'string'}, 'exposed': {'type': 'boolean', 'default': False}}, 'required': ['name'], 'dependencies': {'exposed': {'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}}}, rule='required') if "exposed" in data: - data_one_of_count6 = 0 - if data_one_of_count6 < 2: + data_one_of_count9 = 0 + if data_one_of_count9 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -720,9 +1069,9 @@ def validate___definitions_environmentspec(data, custom_formats={}, name_prefix= data__exposedName = data["exposedName"] if not isinstance(data__exposedName, (str)): raise JsonSchemaValueException("" + (name_prefix or "data") + ".exposedName must be string", value=data__exposedName, name="" + (name_prefix or "data") + ".exposedName", definition={'type': 'string'}, rule='type') - data_one_of_count6 += 1 + data_one_of_count9 += 1 except JsonSchemaValueException: pass - if data_one_of_count6 < 2: + if data_one_of_count9 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -732,10 +1081,10 @@ def validate___definitions_environmentspec(data, custom_formats={}, name_prefix= data__exposed = data["exposed"] if data__exposed not in [False]: raise JsonSchemaValueException("" + (name_prefix or "data") + ".exposed must be one of [False]", value=data__exposed, name="" + (name_prefix or "data") + ".exposed", definition={'enum': [False]}, rule='enum') - data_one_of_count6 += 1 + data_one_of_count9 += 1 except 
JsonSchemaValueException: pass - if data_one_of_count6 != 1: - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count6) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}, rule='oneOf') + if data_one_of_count9 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count9) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'exposed': {'enum': [True]}, 'exposedName': {'type': 'string'}}, 'required': ['exposedName']}, {'properties': {'exposed': {'enum': [False]}}}]}, rule='oneOf') data_keys = set(data.keys()) if "name" in data_keys: data_keys.remove("name") @@ -769,8 +1118,8 @@ def validate___definitions_deviceexecutablespec(data, custom_formats={}, name_pr if not all(prop in data for prop in ['type']): raise JsonSchemaValueException("" + (name_prefix or "data") + " must contain ['type'] properties", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'name': {'type': 'string'}, 'type': {'type': 'string', 'default': 'docker', 'enum': ['docker', 'build', 'preInstalled']}, 'command': {'type': 'string'}, 'runAsBash': {'type': 'boolean', 'default': True}}, 'required': ['type'], 'dependencies': {'type': {'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}}}, rule='required') if "type" in data: - data_one_of_count7 = 0 - if data_one_of_count7 < 2: + data_one_of_count10 = 0 + if data_one_of_count10 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -800,9 +1149,9 @@ def validate___definitions_deviceexecutablespec(data, custom_formats={}, name_pr data__docker_keys.remove("pullSecret") data__docker__pullSecret = data__docker["pullSecret"] validate___definitions_secretdepends(data__docker__pullSecret, custom_formats, (name_prefix or "data") + ".docker.pullSecret") - data_one_of_count7 += 1 + data_one_of_count10 += 1 except JsonSchemaValueException: pass - if data_one_of_count7 < 2: + if data_one_of_count10 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -827,9 +1176,9 @@ def validate___definitions_deviceexecutablespec(data, custom_formats={}, name_pr data__build_keys.remove("depends") data__build__depends = data__build["depends"] validate___definitions_builddepends(data__build__depends, custom_formats, (name_prefix or "data") + ".build.depends") - data_one_of_count7 += 1 + data_one_of_count10 += 1 except JsonSchemaValueException: pass - if data_one_of_count7 < 2: + if data_one_of_count10 < 2: try: data_is_dict = isinstance(data, dict) if data_is_dict: @@ -839,10 +1188,10 @@ def validate___definitions_deviceexecutablespec(data, custom_formats={}, 
name_pr data__type = data["type"] if data__type not in ['preInstalled']: raise JsonSchemaValueException("" + (name_prefix or "data") + ".type must be one of ['preInstalled']", value=data__type, name="" + (name_prefix or "data") + ".type", definition={'enum': ['preInstalled']}, rule='enum') - data_one_of_count7 += 1 + data_one_of_count10 += 1 except JsonSchemaValueException: pass - if data_one_of_count7 != 1: - raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count7) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}, rule='oneOf') + if data_one_of_count10 != 1: + raise JsonSchemaValueException("" + (name_prefix or "data") + " must be valid exactly by one definition" + (" (" + str(data_one_of_count10) + " matches found)"), value=data, name="" + (name_prefix or "data") + "", definition={'oneOf': [{'properties': {'type': {'enum': ['docker']}, 'docker': {'type': 'object', 'properties': {'image': {'type': 'string'}, 'pullSecret': {'properties': {'kind': {'const': 'secret', 'default': 'secret'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['image']}}}, {'properties': {'type': {'enum': ['build']}, 'build': {'type': 'object', 'properties': {'depends': {'properties': {'kind': {'const': 'build', 'default': 'build'}, 'nameOrGUID': {'type': 'string'}, 'guid': {'type': 'string'}}}}, 'required': ['depends']}}}, {'properties': {'type': {'enum': ['preInstalled']}}}]}, rule='oneOf') data_keys = set(data.keys()) if "name" in data_keys: data_keys.remove("name") @@ -870,28 +1219,6 @@ def validate___definitions_deviceexecutablespec(data, custom_formats={}, name_pr else: data["runAsBash"] = True return data -def validate___definitions_builddepends(data, custom_formats={}, name_prefix=None): - data_is_dict = isinstance(data, dict) - if data_is_dict: - data_keys = set(data.keys()) - if "kind" in data_keys: - data_keys.remove("kind") - data__kind = data["kind"] - if data__kind != "build": - raise JsonSchemaValueException("" + (name_prefix or "data") + ".kind must be same as const definition: build", value=data__kind, name="" + (name_prefix or "data") + ".kind", definition={'const': 'build', 'default': 'build'}, rule='const') - else: data["kind"] = 'build' - if "nameOrGUID" in data_keys: - data_keys.remove("nameOrGUID") - data__nameOrGUID = data["nameOrGUID"] - if not isinstance(data__nameOrGUID, (str)): - raise JsonSchemaValueException("" + (name_prefix or "data") + ".nameOrGUID must be string", value=data__nameOrGUID, name="" + (name_prefix or "data") + ".nameOrGUID", definition={'type': 'string'}, rule='type') - if "guid" in data_keys: - data_keys.remove("guid") - data__guid = data["guid"] - if not isinstance(data__guid, (str)): - raise JsonSchemaValueException("" + (name_prefix or "data") + ".guid must be string", value=data__guid, name="" + (name_prefix or "data") 
+ ".guid", definition={'type': 'string'}, rule='type') - return data - def validate___definitions_devicecomponentinfospec(data, custom_formats={}, name_prefix=None): if not isinstance(data, (dict)): raise JsonSchemaValueException("" + (name_prefix or "data") + " must be object", value=data, name="" + (name_prefix or "data") + "", definition={'type': 'object', 'properties': {'arch': {'type': 'string', 'enum': ['arm32v7', 'arm64v8', 'amd64'], 'default': 'amd64'}, 'restart': {'type': 'string', 'default': 'always', 'enum': ['always', 'never', 'onfailure']}}}, rule='type') diff --git a/riocli/rosbag/job.py b/riocli/rosbag/job.py index 727858ef..e9bb5601 100644 --- a/riocli/rosbag/job.py +++ b/riocli/rosbag/job.py @@ -14,12 +14,15 @@ import typing import click +import pyrfc3339 from click_help_colors import HelpColorsGroup from click_spinner import spinner -from rapyuta_io.clients.rosbag import ROSBagOptions, ROSBagJob, ROSBagCompression, ROSBagJobStatus +from rapyuta_io.clients.rosbag import ROSBagOptions, ROSBagJob, ROSBagCompression, ROSBagJobStatus, ROSBagUploadTypes, \ + ROSBagOnDemandUploadOptions, ROSBagTimeRange from riocli.config import new_client -from riocli.deployment.util import name_to_guid as deployment_name_to_guid +from riocli.deployment.util import name_to_guid as deployment_name_to_guid +from riocli.rosbag.util import ROSBagJobNotFound @click.group( @@ -123,6 +126,104 @@ def job_list(deployment_guid: str, deployment_name: str, raise SystemExit(1) +@rosbag_job.command('trigger') +@click.argument('deployment-name') +@click.argument('job-guid') +@click.option('--upload-from', help='Rosbags recorded after or at this time are uploaded. Specify time in RFC 3339 ' + 'format (1985-04-12T23:20:50.52Z)', required=True) +@click.option('--upload-to', help='Rosbags recorded before or at this time are uploaded. Specify time in RFC 3339 ' + 'format (1985-04-12T23:20:50.52Z)', required=True) +@deployment_name_to_guid +def job_trigger_upload(deployment_guid: str, deployment_name: str, job_guid: str, + upload_from: str, upload_to: str) -> None: + """ + Trigger Rosbag Upload + + Here are some examples of RFC3339 date/time format that can be given to '--upload-from' & + '--upload-to' options + + 1. 2022-10-21T23:20:50.52Z + + This represents 20 minutes and 50.52 seconds after the 23rd hour of + October 21st, 2022 in UTC. + + 2. 2022-10-21T23:20:50.52+05:30 + + This represents 20 minutes and 50.52 seconds after the 23rd hour of + October 21st, 2022 with an offset of +05:30 from UTC (Indian Standard Time). + + Note that this is equivalent to 2022-10-21T17:50:50.52Z in UTC. + + 3. 2022-10-21T23:20:50.52+09:00 + + This represents 20 minutes and 50.52 seconds after the 23rd hour of + October 21st, 2022 with an offset of +09:00 from UTC (Japan Standard Time). + + Note that this is equivalent to 2022-10-21T14:20:50.52Z in UTC. + + + 4. 2022-10-21T23:20:50.52-07:00 + + This represents 20 minutes and 50.52 seconds after the 23rd hour of + October 21st, 2022 with an offset of -07:00 from UTC (Pacific Daylight Time). + + Note that this is equivalent to 2022-10-22T06:20:50.52Z in UTC. 
+
+    Ref: https://www.rfc-editor.org/rfc/rfc3339#section-5.8
+    """
+    try:
+        client = new_client()
+        with spinner():
+            rosbag_jobs = client.list_rosbag_jobs(deployment_id=deployment_guid, guids=[job_guid])
+            if len(rosbag_jobs) == 0:
+                raise ROSBagJobNotFound()
+
+            if rosbag_jobs[0].upload_options and \
+                    rosbag_jobs[0].upload_options.upload_type != ROSBagUploadTypes.ON_DEMAND:
+                click.secho(
+                    "Warning: this job's upload type is not OnDemand, so triggering an upload has no immediate effect;"
+                    " the time range will take effect once the job's upload type is changed to OnDemand", fg='yellow'
+                )
+
+            time_range = ROSBagTimeRange(
+                from_time=int(pyrfc3339.parse(upload_from).timestamp()),
+                to_time=int(pyrfc3339.parse(upload_to).timestamp())
+            )
+            on_demand_options = ROSBagOnDemandUploadOptions(time_range)
+
+            rosbag_jobs[0].patch(on_demand_options=on_demand_options)
+
+        click.secho('Rosbag upload triggered successfully', fg='green')
+    except Exception as e:
+        click.secho(str(e), fg='red')
+        raise SystemExit(1)
+
+
+@rosbag_job.command('update')
+@click.argument('deployment-name')
+@click.argument('job-guid')
+@click.option('--upload-mode', help='Change the upload mode of the rosbag job',
+              type=click.Choice([t for t in ROSBagUploadTypes]), required=True)
+@deployment_name_to_guid
+def update_job(deployment_guid: str, deployment_name: str, job_guid: str, upload_mode: str) -> None:
+    """
+    Update the Rosbag Job
+    """
+    try:
+        client = new_client()
+        with spinner():
+            rosbag_jobs = client.list_rosbag_jobs(deployment_id=deployment_guid, guids=[job_guid])
+            if len(rosbag_jobs) == 0:
+                raise ROSBagJobNotFound()
+
+            rosbag_jobs[0].patch(upload_type=upload_mode)
+
+        click.secho('Rosbag Job updated successfully', fg='green')
+    except Exception as e:
+        click.secho(str(e), fg='red')
+        raise SystemExit(1)
+
+
+
 def _display_rosbag_job_list(jobs: typing.List[ROSBagJob], show_header: bool = True) -> None:
     if show_header:
         header = '{:<35} {:<25} {:<15} {:20} {:40}'.format(
@@ -141,9 +242,3 @@ def _display_rosbag_job_list(jobs: typing.List[ROSBagJob], show_header: bool = T
             job.component_type.name,
             'None' if job.device_id is None else job.device_id,
         ))
-
-
-
-
-
-
diff --git a/riocli/rosbag/util.py b/riocli/rosbag/util.py
new file mode 100644
index 00000000..6b1ad35f
--- /dev/null
+++ b/riocli/rosbag/util.py
@@ -0,0 +1,4 @@
+class ROSBagJobNotFound(Exception):
+    def __init__(self, message='rosbag job not found'):
+        self.message = message
+        super().__init__(self.message)
diff --git a/setup.py b/setup.py
index 28307318..9c7cd25d 100644
--- a/setup.py
+++ b/setup.py
@@ -57,6 +57,7 @@
         "six>=1.13.0",
         "tabulate>=0.8.0",
         "urllib3>=1.23",
+        "pyrfc3339>=1.1"
     ],
     setup_requires=["flake8"],
 )
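
Note on usage: a hypothetical rosbag job entry that the generated deviceROSBagJobSpec validator above would accept is sketched below. The job name, topic names, and numeric values are illustrative only and are not taken from the shipped example manifests.

    # illustrative only: one element of a component's rosbag job list
    - name: "sensor-logs"
      recordOptions:
        # oneOf: either allTopics, or topics and/or topicIncludeRegex
        topics:
          - "/camera/image_raw"
        topicIncludeRegex:
          - "/odom.*"
        compression: "LZ4"        # allowed values: BZ2, LZ4
        maxSplits: 5
        maxSplitSize: 1024
      uploadOptions:
        uploadType: "OnDemand"    # allowed values: OnStop, Continuous, OnDemand (default)
        maxUploadRate: 1048576
        purgeAfter: false
        onDemandOpts:
          timeRange:
            from: 0               # epoch seconds; can be set later with the trigger command
            to: 0
      overrideOptions:
        topicOverrideInfo:
          - topicName: "/camera/image_raw"
            recordFrequency: 10   # either recordFrequency or latched, not both
        excludeTopics:
          - "/tf"

For on-demand uploads, the trigger command above parses the '--upload-from' and '--upload-to' RFC 3339 timestamps with pyrfc3339 and converts them to integer epoch seconds before building the ROSBagTimeRange, so only bags recorded within that window are uploaded.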