diff --git a/.vscode/launch-csharp.json b/.vscode/launch-csharp.json deleted file mode 100644 index d98da457e..000000000 --- a/.vscode/launch-csharp.json +++ /dev/null @@ -1,60 +0,0 @@ -{ - // Use IntelliSense to find out which attributes exist for C# debugging - // Use hover for the description of the existing attributes - // For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md - "version": "0.2.0", - "configurations": [ - { - "name": ".NET FWO Test", - "type": "coreclr", - "request": "launch", - "preLaunchTask": "build_test", - "program": "/usr/bin/dotnet", - // "program": "${workspaceFolder}/roles/test/files/FWO_Test/bin/Debug/net5.0/FWO.Test.dll", - "args": ["test"], - "cwd": "${workspaceFolder}/roles/test/files/FWO_Test", - "stopAtEntry": false, - "console": "internalConsole" - }, - { - "name": "Blazor (web)", - "type": "coreclr", - "request": "launch", - "preLaunchTask": "build_UI", - "program": "${workspaceFolder}/roles/ui/files/FWO_UI/bin/Debug/net5.0/FWO.Ui.dll", - "args": [], - "cwd": "${workspaceFolder}/roles/ui/files/FWO_UI", - "stopAtEntry": false, - "serverReadyAction": { - "action": "openExternally", - "pattern": "\\bNow listening on:\\s+(https?://\\S+)" - }, - "env": { - "ASPNETCORE_ENVIRONMENT": "Development" - }, - "sourceFileMap": { - "/Views": "${workspaceFolder}/Views" - } - }, - { - "name": ".NET Core Attach ", - "type": "coreclr", - "request": "attach", - "processId": "${command:pickProcess}" - }, - { - "name": "MiddlewareServer", - "type": "coreclr", - "request": "launch", - "preLaunchTask": "build_middleware", - "enableStepFiltering": false, - // If you have changed target frameworks, make sure to update the program path. - "program": "${workspaceFolder}/roles/middleware/files/FWO.Middleware.Server/bin/Debug/net5.0/FWO.Middleware.Server.dll", - "args": [], - "cwd": "${workspaceFolder}/roles/middleware/files/FWO.Middleware.Server", - // For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console - "console": "internalConsole", - "stopAtEntry": false - } - ] -} \ No newline at end of file diff --git a/.vscode/launch-python-compat.json b/.vscode/launch-python-compat.json deleted file mode 100644 index edad440bb..000000000 --- a/.vscode/launch-python-compat.json +++ /dev/null @@ -1,66 +0,0 @@ -{ - "version": "0.2.0", - "configurations": [ - - { - "name": "Python: Current File", - "type": "python", - "request": "launch", - "program": "${file}", - "console": "integratedTerminal", - "env": { "PYTHONPATH":"${PYTHONPATH}:${workspaceRoot}"}, - "args": ["-a localhost", "-u hugo", "-w secret", "-l layer1", "-c/home/tim/tmp/blb_mgm.cfg.anon"] - }, - { - "name": ".NET FWO Test", - "type": "coreclr", - "request": "launch", - "preLaunchTask": "build_test", - "program": "/usr/bin/dotnet", - "args": [ - "test" - ], - "cwd": "${workspaceFolder}/roles/test/files/FWO_Test", - "stopAtEntry": "false", - "console": "internalConsole" - }, - { - "name": "Blazor (web)", - "type": "coreclr", - "request": "launch", - "preLaunchTask": "build_UI", - "program": "${workspaceFolder}/roles/ui/files/FWO_UI/bin/Debug/net5.0/FWO.Ui.dll", - "args": [], - "cwd": "${workspaceFolder}/roles/ui/files/FWO_UI", - "stopAtEntry": "false", - "serverReadyAction": { - "action": "openExternally", - "pattern": "\\bNow listening on:\\s+(https?://\\S+)" - }, - "env": { - "ASPNETCORE_ENVIRONMENT": "Development" - }, - "sourceFileMap": { - "/Views": "${workspaceFolder}/Views" - } - }, - { - "name": ".NET Core Attach ", - "type": 
"coreclr", - "request": "attach", - "processId": "${command:pickProcess}" - }, - { - "name": "MiddlewareServer", - "type": "coreclr", - "request": "launch", - "preLaunchTask": "build_middleware", - "enableStepFiltering": "false", - "program": "${workspaceFolder}/roles/middleware/files/FWO.Middleware.Server/bin/Debug/net5.0/FWO.Middleware.Server.dll", - "args": [], - "cwd": "${workspaceFolder}/roles/middleware/files/FWO.Middleware.Server", - "console": "internalConsole", - "stopAtEntry": "false" - } - ] -} \ No newline at end of file diff --git a/.vscode/launch.json b/.vscode/launch.json index d98da457e..5a708c9ed 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,23 +1,101 @@ { - // Use IntelliSense to find out which attributes exist for C# debugging - // Use hover for the description of the existing attributes - // For further information visit https://github.com/OmniSharp/omnisharp-vscode/blob/master/debugger-launchjson.md - "version": "0.2.0", - "configurations": [ + "version": "0.2.0", + "configurations": [ { - "name": ".NET FWO Test", + "name": "py-cpr8x-get-config", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "env": { + "PYTHONPATH": "${PYTHONPATH}:${workspaceRoot}" + }, + "args": [ + "-asting-mgmt", + "-uitsecorg", + "-w/home/tim/tmp/passwd", + "-lFirstLayer shared with inline layer", + "-o/home/tim/tmp/sting.cfg" + ] + }, + { + "name": "py-cpr8x-enrich-config", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "env": { + "PYTHONPATH": "${PYTHONPATH}:${workspaceRoot}" + }, + "args": [ + "-asting-mgmt", + "-uitsecorg", + "-w/home/tim/tmp/passwd", + "-lFirstLayer shared with inline layer", + "-c/home/tim/tmp/sting.cfg" + ] + }, + { + "name": "py-cpr8x-parse-config", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "env": { + "PYTHONPATH": "${PYTHONPATH}:${workspaceRoot}" + }, + "args": [ + "-f/home/tim/tmp/sting.cfg", + "-rFirstLayer shared with inline layer" + ] + }, + { + "name": "py-fortimanager", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "env": { + "PYTHONPATH": "${PYTHONPATH}:${workspaceRoot}" + }, + "args": [ + "-a10.5.1.55", + "-uapiuser", + "-w/home/tim/tmp/passwd", + "-llayer1", + "-d1", + "-o/tmp/outfile" + ] + }, + { + "name": "py-import_mgm", + "type": "python", + "request": "launch", + "program": "${file}", + "console": "integratedTerminal", + "env": { + "PYTHONPATH": "${PYTHONPATH}:${workspaceRoot}" + }, + "args": [ + "-m4", + "-d1" + ] + }, + { + "name": "c#-FWO Test", "type": "coreclr", "request": "launch", "preLaunchTask": "build_test", "program": "/usr/bin/dotnet", - // "program": "${workspaceFolder}/roles/test/files/FWO_Test/bin/Debug/net5.0/FWO.Test.dll", - "args": ["test"], + "args": [ + "test" + ], "cwd": "${workspaceFolder}/roles/test/files/FWO_Test", "stopAtEntry": false, "console": "internalConsole" }, { - "name": "Blazor (web)", + "name": "c#-Blazor UI", "type": "coreclr", "request": "launch", "preLaunchTask": "build_UI", @@ -37,24 +115,26 @@ } }, { - "name": ".NET Core Attach ", + "name": "c#-.NET Core Attach ", "type": "coreclr", "request": "attach", "processId": "${command:pickProcess}" }, { - "name": "MiddlewareServer", + "name": "c#-MiddlewareServer", "type": "coreclr", "request": "launch", "preLaunchTask": "build_middleware", "enableStepFiltering": false, - // If you have changed target frameworks, make sure to 
update the program path.
            "program": "${workspaceFolder}/roles/middleware/files/FWO.Middleware.Server/bin/Debug/net5.0/FWO.Middleware.Server.dll",
            "args": [],
            "cwd": "${workspaceFolder}/roles/middleware/files/FWO.Middleware.Server",
-            // For more information about the 'console' field, see https://aka.ms/VSCode-CS-LaunchJson-Console
            "console": "internalConsole",
-            "stopAtEntry": false
+            "stopAtEntry": false,
+            "serverReadyAction": {
+                "action": "openExternally",
+                "pattern": "\\bNow listening on:\\s+(https?://\\S+)"
+            }
        }
    ]
}
\ No newline at end of file
diff --git a/ansible.cfg b/ansible.cfg
index 921acf2a7..f51e5ef6f 100644
--- a/ansible.cfg
+++ b/ansible.cfg
@@ -3,6 +3,7 @@
inventory = inventory
#timeout = 60
force_handlers = True
+stdout_callback = yaml
[ssh_connection]
pipelining = True
diff --git a/documentation/api/api_handling_documentation.md b/documentation/api/api_handling_documentation.md
new file mode 100644
index 000000000..724080406
--- /dev/null
+++ b/documentation/api/api_handling_documentation.md
@@ -0,0 +1,17 @@
+# Common API Helpers
+
+## How to convert a file from json to yaml
+
+    python -c 'import sys, yaml, json; yaml.safe_dump(json.load(sys.stdin), sys.stdout, default_flow_style=False)' < file.json > file.yaml
+
+## How to convert a yaml file to json
+
+    python -c 'import sys, yaml, json; json.dump(yaml.safe_load(sys.stdin), sys.stdout)' < meta.yaml > meta.json
+
+## How to convert JSON between pretty-print and compact form
+
+Note: redirect the output to a new file - redirecting to the input file would truncate it before python gets to read it.
+
+from pp to compact:
+
+    python -c 'import sys, json; json.dump(json.load(sys.stdin), sys.stdout)' < file.json > file_compact.json
+
+from compact to pp:
+
+    python -c 'import sys, json; json.dump(json.load(sys.stdin), sys.stdout, indent=3)' < file.json > file_pp.json
diff --git a/documentation/api/api_interface_docu.md b/documentation/api/fwo_api_doc_generation_interface.md
similarity index 100%
rename from documentation/api/api_interface_docu.md
rename to documentation/api/fwo_api_doc_generation_interface.md
diff --git a/documentation/api/api_documentation.md b/documentation/api/fwo_api_documentation.md
similarity index 63%
rename from documentation/api/api_documentation.md
rename to documentation/api/fwo_api_documentation.md
index 65a130b5f..4fdeaef65 100644
--- a/documentation/api/api_documentation.md
+++ b/documentation/api/fwo_api_documentation.md
@@ -55,3 +55,20 @@ mutation deleteIncompleteImport($mgmId: Int!)
{
 }
}
```
+
+## common helper doc
+### How to convert a file from json to yaml
+
+    python -c 'import sys, yaml, json; yaml.safe_dump(json.load(sys.stdin), sys.stdout, default_flow_style=False)' < file.json > file.yaml
+
+### How to convert a yaml file to json
+
+    python -c 'import sys, yaml, json; json.dump(yaml.safe_load(sys.stdin), sys.stdout)' < meta.yaml > meta.json
+
+### How to convert JSON between pretty-print and compact form
+
+Note: redirect the output to a new file - redirecting to the input file would truncate it before python gets to read it.
+
+from pp to compact:
+
+    python -c 'import sys, json; json.dump(json.load(sys.stdin), sys.stdout)' < file.json > file_compact.json
+
+from compact to pp:
+
+    python -c 'import sys, json; json.dump(json.load(sys.stdin), sys.stdout, indent=3)' < file.json > file_pp.json
diff --git a/documentation/api/hasura-howto.md b/documentation/api/hasura-howto.md
index 1e96d0e06..32be31a39 100644
--- a/documentation/api/hasura-howto.md
+++ b/documentation/api/hasura-howto.md
@@ -21,14 +21,6 @@ ## debugging hasura
using docker ps
docker logs c37388157052
-## How to convert hasura metadata file from json to yaml (for re-import)
-
-    python -c 'import sys, yaml, json; yaml.safe_dump(json.load(sys.stdin), sys.stdout, default_flow_style=False)' < file.json > file.yaml
-
-## How to convert a yaml file to json
-
-    python -c 'import sys, yaml, json; json.dump(yaml.safe_load(sys.stdin), sys.stdout)' < meta.yaml >meta.json
-
## Sending graphql queries
Method: POST
diff --git a/documentation/developer-docs/README.MD b/documentation/developer-docs/README.MD
index a92f3904e..3f1e6fdae 100644
--- a/documentation/developer-docs/README.MD
+++ b/documentation/developer-docs/README.MD
@@ -21,7 +21,11 @@ Edit architecture diagram (internal only) at https://xfer.cactus.de/index.php/f/
To manually rollback a hanging import of management with ID 1:
-`sudo -u postgres psql -d fworchdb -c "select * from rollback_import_of_mgm(1)"`
+`sudo -u postgres psql -d fworchdb -c "select * from rollback_import_of_mgm()"`
+
+old, more invasive version:
+
+`select * from rollback_import_of_mgm_without_cascade();`
## clean code
@@ -58,7 +62,7 @@ Code blocks must contain language in first line.
Either use three ~ or ` chars,
- Only one top level header in the same document
-  ```
+  ```console
  # Title text
@@ -72,7 +76,7 @@
- One or zero whitespaces after list marker
-  ```
+  ```console
  - List Item
  - List Item
  ```
@@ -81,13 +85,13 @@ Code blocks must contain language in first line.
Either use three ~ or ` chars,
- Example without text
-  ```
+  ```console
  ```
- Example with text:
-  ```
+  ```console
  [Link to whatever](https://github.com/CactuseSecurity/firewall-orchestrator/blob/master/whatever/whatever.md)
  ```
diff --git a/documentation/developer-docs/db-debugging/postgresql-debugging-using-pgadmin.md b/documentation/developer-docs/db-debugging/postgresql-debugging-using-pgadmin.md
new file mode 100644
index 000000000..3877be921
--- /dev/null
+++ b/documentation/developer-docs/db-debugging/postgresql-debugging-using-pgadmin.md
@@ -0,0 +1,34 @@
+
+# to debug postgresql stored procedures in plpgsql
+
+
+## using pgadmin
+
+the following was tested with Ubuntu 20.04
+
+- install pgadmin
+
+    sudo apt install pgadmin4
+
+- install debug package (here for postgresql v12)
+
+    sudo apt-get install postgresql-12-pldebugger
+
+- edit postgresql.conf to allow debugging by adding the following line:
+
+    shared_preload_libraries = 'plugin_debugger'
+
+
+- restart the postgresql service
+
+    sudo systemctl restart postgresql
+
+- to add the debug extension, start pgadmin and run in the query editor
+
+    CREATE EXTENSION pldbgapi;
+
+- select a stored procedure you wish to debug and select Object - Debugging - Debug
+
+## using vscode
+
+see
diff --git a/documentation/developer-docs/csharp/visual-studio/readme.md b/documentation/developer-docs/visual-studio/readme.md
similarity index 100%
rename from documentation/developer-docs/csharp/visual-studio/readme.md
rename to documentation/developer-docs/visual-studio/readme.md
diff --git a/documentation/developer-docs/csharp/visual-studio/remote-development-vs-code.md b/documentation/developer-docs/visual-studio/remote-development-vs-code.md
similarity index 100%
rename from documentation/developer-docs/csharp/visual-studio/remote-development-vs-code.md
rename to documentation/developer-docs/visual-studio/remote-development-vs-code.md
diff --git a/documentation/developer-docs/visual-studio/vscode-debug-python-importer.md b/documentation/developer-docs/visual-studio/vscode-debug-python-importer.md
new file mode 100644
index 000000000..10f336c3d
--- /dev/null
+++ b/documentation/developer-docs/visual-studio/vscode-debug-python-importer.md
@@ -0,0 +1,19 @@
+# debug python code in vscode
+
+debugging c# and python at the same time does not seem to be possible, so we need to change the .vscode/launch.json file.
+
+add the following for python debugging (replacing the parameters with sensible values):
+
+```json
+    "configurations": [
+
+        {
+            "name": "Python: Current File",
+            "type": "python",
+            "request": "launch",
+            "program": "${file}",
+            "console": "integratedTerminal",
+            "env": { "PYTHONPATH":"${PYTHONPATH}:${workspaceRoot}"},
+            "args": ["-a localhost", "-u hugo", "-w ~/api_pwd", "-l layer1", "-c/home/tim/tmp/mgm1.cfg.anon"]
+        },
+```
\ No newline at end of file
diff --git a/documentation/developer-docs/csharp/visual-studio/vscode-debugging.md b/documentation/developer-docs/visual-studio/vscode-debugging.md
similarity index 100%
rename from documentation/developer-docs/csharp/visual-studio/vscode-debugging.md
rename to documentation/developer-docs/visual-studio/vscode-debugging.md
diff --git a/documentation/importer/fortinet/fortiManager.md b/documentation/importer/fortinet/fortiManager.md
new file mode 100644
index 000000000..1bcc07333
--- /dev/null
+++ b/documentation/importer/fortinet/fortiManager.md
@@ -0,0 +1,166 @@
+# Integrating Fortinet FortiManager 7.x
+
+## create api user profile
+
+```console
+config system admin profile
+    edit "apiuserprofile"
+        set super-user-profile enable
+    next
+end
+```
+
+NB: the user will have full rw access via the UI but can be restricted to read-only via the API as shown below.
+We still need to find out if there is a more secure way to create an all-read-only api user.
+
+## Create read-only api user
+```console
+config system admin user
+    edit "apiuser"
+        set password xxx
+        set adom "all_adoms"
+        set profileid "apiuserprofile"
+        set rpc-permit read
+```
+
+## Create full access api user
+```console
+config system admin user
+    edit "apiuser"
+        set password xxx
+        set adom "all_adoms"
+        set profileid "apiuserprofile"
+        set rpc-permit read-write
+```
+
+## login
+```console
+curl --request POST \
+  --url https://10.5.1.55/jsonrpc \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "id": 1,
+    "method": "exec",
+    "params": [
+        {
+            "data": [
+                {
+                    "passwd": "xxx",
+                    "user": "apiuser"
+                }
+            ],
+            "url": "sys/login/user"
+        }
+    ]
+}'
+```
+
+gives
+```json
+{
+    "id": 1,
+    "result": [
+        {
+            "status": {
+                "code": 0,
+                "message": "OK"
+            },
+            "url": "sys\/login\/user"
+        }
+    ],
+    "session": "KCOuhqKTFt3ISXKntpIVO2kA5GJ+QcorMoxm8xLGru0HxrwwpgWuTtRcU8P9XCpbIRlDjjSv2+lzTYYIt1bSzw=="
+}
+```
+
+## logout
+
+```console
+curl --request POST \
+  --url https://10.5.1.55/jsonrpc \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "id": 1,
+    "jsonrpc": "1.0",
+    "method": "exec",
+    "params": [
+        {
+            "url": "sys/logout"
+        }
+    ],
+    "verbose": 1,
+    "session": "BJG0kh4qBopxgjJ+DwEyxJSWCl3MzHdeeympX4GJqw50EoLIjXoLH3+7W3e4N9EqtWb5IhGKBJugGKS6HQrDUg=="
+}'
+```
+
+## get a list of all ADOMs
+
+```console
+curl --request POST \
+  --url https://10.5.1.55/jsonrpc \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "method": "get",
+    "params": [
+        {
+            "fields": ["name", "oid", "uuid"],
+            "url": "/dvmdb/adom"
+        }
+    ],
+    "session": "lieHUJqiA0VldI45nVh8K2o0kP2XRm7NrrayIL1t977BG78\/wukwCgnFnpClbH9A6rAQbCPVjcGVFOw1VwULLQ=="
+}
+'
+```
+
+## get a list of fw rules
+
+```console
+curl --request POST \
+  --url https://10.5.1.55/jsonrpc \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "method": "get",
+    "params": [
+        {
+            "url": "/pm/config/adom/my_adom/pkg/mypkg/firewall/policy"
+        }
+    ],
+    "verbose": 1,
+    "id": 2,
+    "session": "++7z161rod0cGMaStLUWohDpyUnsyT030tNuLPyVYvIhd0GCLXwp9vCJRKnYV4I0Q\/di1bSL3Wf7o8oNnWu6cw=="
+}'
+```
+
+## get nat rules

+### get snat rules
+```console
+curl --request POST \
+  --url https://10.5.1.55/jsonrpc \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "method": "get",
+    "params": [
+        {
+            "url": "/pm/config/adom/my_adom/pkg/mypkg/firewall/central-snat-map"
+        }
+    ],
+    "verbose": 1,
+    "id": 2,
+    "session": "++7z161rod0cGMaStLUWohDpyUnsyT030tNuLPyVYvIhd0GCLXwp9vCJRKnYV4I0Q\/di1bSL3Wf7o8oNnWu6cw=="
+}'
+```
+### get dnat rules
+
+```console
+curl --request POST \
+  --url https://10.5.1.55/jsonrpc \
+  --header 'Content-Type: application/json' \
+  --data '{
+    "method": "get",
+    "params": [
+        {
+            "url": "/pm/config/adom/my_adom/pkg/mypkg/firewall/central/dnat"
+        }
+    ],
+    "verbose": 1,
+    "id": 2,
+    "session": "++7z161rod0cGMaStLUWohDpyUnsyT030tNuLPyVYvIhd0GCLXwp9vCJRKnYV4I0Q\/di1bSL3Wf7o8oNnWu6cw=="
+}'
+```
\ No newline at end of file
diff --git a/documentation/importer/import-via-fw-api.md b/documentation/importer/import-via-fw-api.md
new file mode 100644
index 000000000..9aebb3ef0
--- /dev/null
+++ b/documentation/importer/import-via-fw-api.md
@@ -0,0 +1,3 @@
+# Importing firewall configs via API
+
+Use a tool like Insomnia for testing.
diff --git a/documentation/importer/paloAlto/paloAltoApi.md b/documentation/importer/paloAlto/paloAltoApi.md
new file mode 100644
index 000000000..c257c99fa
--- /dev/null
+++ b/documentation/importer/paloAlto/paloAltoApi.md
@@ -0,0 +1,556 @@
+# Integrating Palo Alto firewall
+
+All examples here are given for PAN-OS 9.0. PAN-OS has two APIs: XML and REST. There is also an extra REST API for the central management server Panorama. The XML-API can be used to get the whole config in one go.
+
+## Create api user
+see
+
+## login
+```console
+curl --insecure --request GET --url 'https://PAN-IP/api/?type=keygen&user=fwo&password=xxx'
+```
+gets us a session key in XML format which seems to be valid indefinitely:
+```xml
+<response status="success">
+  <result>
+    <key>LUFRPT1Tb2xDZnk0R25WbDJONWJNMmlEMHNpS0Y2d1U9T3ZLZFhydER6SDZKYk9OQit2cmVTVUtEb2MyMVBDUkdBOGY3UzlDS0VrTT0=</key>
+  </result>
+</response>
+```
+
+More secure:
+see , but note: You cannot use basic authentication when you Get Your API Key.
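+
+For scripted access, key generation and a subsequent REST call can be chained. Below is a minimal Python sketch of that flow (assuming the `requests` package; host, user, password and vsys are the placeholders from the curl examples above, not real values):
+
+```python
+# minimal sketch: fetch an API key via the XML API (type=keygen)
+# and use it in the X-PAN-KEY header for a REST call
+import xml.etree.ElementTree as ET
+
+import requests
+
+PAN_HOST = "PAN-IP"  # placeholder, as in the curl examples above
+USER = "fwo"         # api user
+PASSWORD = "xxx"     # api user password
+
+session = requests.Session()
+session.verify = False  # equivalent of curl --insecure, only for self-signed lab certs
+
+# step 1: fetch the key from the XML API
+resp = session.get(
+    f"https://{PAN_HOST}/api/",
+    params={"type": "keygen", "user": USER, "password": PASSWORD},
+)
+key = ET.fromstring(resp.text).findtext("./result/key")
+
+# step 2: use the key for REST calls, e.g. listing the address objects of vsys1
+addresses = session.get(
+    f"https://{PAN_HOST}/restapi/v9.1/Objects/Addresses",
+    params={"location": "vsys", "vsys": "vsys1"},
+    headers={"X-PAN-KEY": key},
+).json()
+print(addresses["result"]["@count"])
+```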
+ + +## get API version + +`curl -X GET "https:///api/?type=version&key="' + +## Get all network objects +The session key can be used to get objects as follows (for single fw, the name of the vsys seems to be vsys1): +```console +curl --insecure --request GET \ + --url 'https://PAN-IP/restapi/v9.1/Objects/Addresses?location=vsys&vsys=vsys1' \ + --header 'X-PAN-KEY: LUFRPT1JdHF6SnVndXNEU2VxVFIvNnZ1bG1yeFk0S2c9clVWeGhkdnNQNTBRK1BzNXBCeEMvNzdTSks1NWVDdzJLSmZXa1JsUkYzdW9OUnJSb1pDREdseitlVUtNc1VKSw==' +``` +Gives us the network objects in JSON format: + +```json +{ + "@status": "success", + "@code": "19", + "result": { + "@total-count": "45", + "@count": "45", + "entry": [ + { + "@name": "ext-interface-ip-10.9.8.2", + "@location": "vsys", + "@vsys": "vsys1", + "ip-netmask": "10.9.8.2" + }, + { + "@name": "cactus-da", + "@location": "vsys", + "@vsys": "vsys1", + "ip-netmask": "85.182.155.96\/27", + "tag": { + "member": [ + "cactus-DA" + ] + } + }, + { + "@name": "fb_inet_10.9.8.1", + "@location": "vsys", + "@vsys": "vsys1", + "ip-netmask": "10.9.8.1" + }, + { + "@name": "gware.cactus.de_85.182.155.108", + "@location": "vsys", + "@vsys": "vsys1", + "ip-netmask": "85.182.155.108\/32", + "tag": { + "member": [ + "cactus-DA" + ] + } + } + ] + } +} +``` +To get address groups: + +```console +curl --request GET \ + --url 'https://10.8.6.3/restapi/v9.1/Objects/AddressGroups?location=vsys&vsys=vsys1' \ + --header 'X-PAN-KEY: LUFRPT1zUmdXTlZjUFZPaWxmc0R2eHRPa1FvdmtlV009T3ZLZFhydER6SDZKYk9OQit2cmVTZHNYWDJrdHREWDVyN1VnZG01VXNKWT0=' \ +``` + +Retrieves tag-based filters: +```json +{ + "@status": "success", + "@code": "19", + "result": { + "@total-count": "3", + "@count": "3", + "entry": [ + { + "@name": "GRP_tims-ip-adressen", + "@location": "vsys", + "@vsys": "vsys1", + "dynamic": { + "filter": "'tims-clients'" + } + }, + { + "@name": "GRP_guest-ips", + "@location": "vsys", + "@vsys": "vsys1", + "dynamic": { + "filter": "'guests' " + } + }, + { + "@name": "GRP_kids_ips", + "@location": "vsys", + "@vsys": "vsys1", + "dynamic": { + "filter": "'kids-ips'" + } + } + ] + } +} +``` +## get service objects + +first predefined services: + +```console +curl --request GET \ + --url 'https://10.8.6.3/restapi/v9.1/Objects/Services?location=predefined' \ + --header 'X-PAN-KEY: LUFRPT1JdHF6SnVndXNEU2VxVFIvNnZ1bG1yeFk0S2c9clVWeGhkdnNQNTBRK1BzNXBCeEMvNzdTSks1NWVDdzJLSmZXa1JsUkYzdW9OUnJSb1pDREdseitlVUtNc1VKSw==' \ +``` + +yields: + +```json +{ + "@status": "success", + "@code": "19", + "result": { + "@total-count": "2", + "@count": "2", + "entry": [ + { + "@name": "service-http", + "@location": "predefined", + "protocol": { + "tcp": { + "port": "80,8080" + } + } + }, + { + "@name": "service-https", + "@location": "predefined", + "protocol": { + "tcp": { + "port": "443" + } + } + } + ] + } +} +``` + +Then self-defined: + +```console +curl --insecure --request GET \ + --url 'https://PAN-IP/restapi/v9.1/Objects/Services?location=vsys&vsys=vsys1' \ + --header 'X-PAN-KEY: LUFRPT1JdHF6SnVndXNEU2VxVFIvNnZ1bG1yeFk0S2c9clVWeGhkdnNQNTBRK1BzNXBCeEMvNzdTSks1NWVDdzJLSmZXa1JsUkYzdW9OUnJSb1pDREdseitlVUtNc1VKSw==' +``` + +give us: + +```json +{ + "@status": "success", + "@code": "19", + "result": { + "@total-count": "42", + "@count": "42", + "entry": [ + { + "@name": "tcp_64285", + "@location": "vsys", + "@vsys": "vsys1", + "protocol": { + "tcp": { + "port": "64285", + "override": { + "no": [] + } + } + } + }, + { + "@name": "steam_27000-27100", + "@location": "vsys", + "@vsys": "vsys1", + "protocol": { + "udp": { + "port": "27000-27100", 
+ "override": { + "no": [] + } + } + } + }, + { + "@name": "svc_3000_tcp_hbci", + "@location": "vsys", + "@vsys": "vsys1", + "protocol": { + "tcp": { + "port": "3000", + "override": { + "no": [] + } + } + } + }, + { + "@name": "fritzbox_tcp_14013", + "@location": "vsys", + "@vsys": "vsys1", + "protocol": { + "tcp": { + "port": "14013", + "override": { + "no": [] + } + } + } + } + ] + } +} +``` + + +## get (predefined) applications + +in order to get the application names we need API v9.1! + +with version 9.0: +```console +curl --insecure --request GET \ + --url 'https://10.8.6.3/restapi/9.0/Objects/Applications?location=predefined' \ + --header 'X-PAN-KEY: LUFRPT1zUmdXTlZjUFZPaWxmc0R2eHRPa1FvdmtlV009T3ZLZFhydER6SDZKYk9OQit2cmVTZHNYWDJrdHREWDVyN1VnZG01VXNKWT0=' \ +``` +```json +{ + "@status": "success", + "@code": "19", + "result": { + "@total-count": "3566", + "@count": "3566", + "entry": [ + { + "default": { + "port": { + "member": [ + "tcp\/3468,6346,11300" + ] + } + }, + "category": "general-internet", + "subcategory": "file-sharing", + "technology": "peer-to-peer", + "risk": "5", + "evasive-behavior": "yes", + "consume-big-bandwidth": "yes", + "used-by-malware": "yes", + "able-to-transfer-file": "yes", + "has-known-vulnerability": "yes", + "tunnel-other-application": "no", + "prone-to-misuse": "yes", + "pervasive-use": "yes" + } + ] + } +} +``` +With v9.1: + +```console +curl --request GET \ + --url 'https://10.8.6.3/restapi/v9.1/Objects/Applications?location=predefined' \ + --header 'X-PAN-KEY: LUFRPT1zUmdXTlZjUFZPaWxmc0R2eHRPa1FvdmtlV009T3ZLZFhydER6SDZKYk9OQit2cmVTZHNYWDJrdHREWDVyN1VnZG01VXNKWT0=' +``` + +```json +{ + "@status": "success", + "@code": "19", + "result": { + "@total-count": "3566", + "@count": "3566", + "entry": [ + { + "@name": "100bao", + "@location": "predefined", + "default": { + "port": { + "member": [ + "tcp\/3468,6346,11300" + ] + } + }, + "category": "general-internet", + "subcategory": "file-sharing", + "technology": "peer-to-peer", + "risk": "5", + "evasive-behavior": "yes", + "consume-big-bandwidth": "yes", + "used-by-malware": "yes", + "able-to-transfer-file": "yes", + "has-known-vulnerability": "yes", + "tunnel-other-application": "no", + "prone-to-misuse": "yes", + "pervasive-use": "yes" + }, + { + "@name": "open-vpn", + "@location": "predefined", + "default": { + "port": { + "member": [ + "tcp\/1194", + "tcp\/443", + "udp\/1194" + ] + } + }, + "category": "networking", + "subcategory": "encrypted-tunnel", + "technology": "client-server", + "timeout": "3600", + "risk": "3", + "evasive-behavior": "no", + "consume-big-bandwidth": "no", + "used-by-malware": "no", + "able-to-transfer-file": "yes", + "has-known-vulnerability": "yes", + "tunnel-other-application": "yes", + "tunnel-applications": { + "member": [ + "cyberghost-vpn", + "frozenway", + "hotspot-shield", + "ipvanish", + "spotflux" + ] + }, + "prone-to-misuse": "no", + "pervasive-use": "yes" + } + ] + } +} +``` + +## get rules + +```console +curl --insecure --request GET \ + --url 'https://PAN-IP/restapi/v9.1/Policies/SecurityRules?location=vsys&vsys=vsys1' \ + --header 'X-PAN-KEY: LUFRPT1JdHF6SnVndXNEU2VxVFIvNnZ1bG1yeFk0S2c9clVWeGhkdnNQNTBRK1BzNXBCeEMvNzdTSks1NWVDdzJLSmZXa1JsUkYzdW9OUnJSb1pDREdseitlVUtNc1VKSw==' \ +``` + +gives us: + +```json +{ + "@status": "success", + "@code": "19", + "result": { + "@total-count": "85", + "@count": "85", + "entry": [ + { + "@name": "special access tim-1", + "@uuid": "ca58af60-05c3-4806-b7c6-aea7a1ddc70c", + "@location": "vsys", + "@vsys": "vsys1", + "to": { + 
"member": [ + "untrust" + ] + }, + "from": { + "member": [ + "trust" + ] + }, + "source": { + "member": [ + "GRP_tims-ip-adressen" + ] + }, + "destination": { + "member": [ + "any" + ] + }, + "source-user": { + "member": [ + "any" + ] + }, + "application": { + "member": [ + "open-vpn", + "ssh", + "ssh-tunnel", + "ssl", + "web-browsing" + ] + }, + "service": { + "member": [ + "any" + ] + }, + "hip-profiles": { + "member": [ + "any" + ] + }, + "action": "allow", + "rule-type": "interzone", + "profile-setting": { + "profiles": { + "url-filtering": { + "member": [ + "default" + ] + }, + "file-blocking": { + "member": [ + "strict file blocking" + ] + }, + "virus": { + "member": [ + "test-av-profile" + ] + }, + "spyware": { + "member": [ + "strict" + ] + }, + "vulnerability": { + "member": [ + "default" + ] + }, + "wildfire-analysis": { + "member": [ + "default" + ] + } + } + }, + "tag": { + "member": [ + "tims-clients" + ] + }, + "log-start": "yes", + "category": { + "member": [ + "any" + ] + }, + "disabled": "no", + "log-setting": "forwarder-traffic-log", + "group-tag": "tims-clients" + }, + { + "@name": "DMZ minecraft", + "@uuid": "ac91834b-0ac3-4d9a-abcd-3ad69075bed7", + "@location": "vsys", + "@vsys": "vsys1", + "to": { + "member": [ + "any" + ] + }, + "from": { + "member": [ + "untrust" + ] + }, + "source": { + "member": [ + "any" + ] + }, + "destination": { + "member": [ + "ext-interface-ip-10.9.8.2" + ] + }, + "source-user": { + "member": [ + "any" + ] + }, + "category": { + "member": [ + "any" + ] + }, + "application": { + "member": [ + "any" + ] + }, + "service": { + "member": [ + "tcp_60999" + ] + }, + "hip-profiles": { + "member": [ + "any" + ] + }, + "action": "allow", + "log-start": "yes", + "rule-type": "universal", + "log-setting": "forwarder-traffic-log", + "profile-setting": { + "profiles": { + "vulnerability": { + "member": [ + "strict" + ] + } + } + }, + "disabled": "no" + } + ] + } +} +``` \ No newline at end of file diff --git a/documentation/importer/python-debugging.md b/documentation/importer/python-debugging.md new file mode 100644 index 000000000..db43f8911 --- /dev/null +++ b/documentation/importer/python-debugging.md @@ -0,0 +1,9 @@ +# python debugging + +importer files end up in different directories during installation process (not the same as in the source/installer code). For debugging in order use something like: + +`sudo ln -s /home/tim/dev/tpur-fwo-june/firewall-orchestrator/roles/importer/files/importer /usr/local/fworch/importer` + +or the following in python code + +`sys.path.append(r"/home/tim/dev/tpur-fwo-june/firewall-orchestrator/roles/importer/files/importer")` diff --git a/documentation/installer/install-for-testing.md b/documentation/installer/install-for-testing.md index 2532b1cb1..0d33268cd 100644 --- a/documentation/installer/install-for-testing.md +++ b/documentation/installer/install-for-testing.md @@ -34,7 +34,7 @@ Set debug level for extended debugging info during installation. 
```console
ansible-playbook/ site.yml -e "debug_level='2'" -K
```
-## Running tests after installation
+## Running integration tests after installation/upgrade
To only run tests (for an existing installation) use tags as follows:
@@ -42,6 +42,14 @@
ansible-playbook/ site.yml --tags test -K
```
+## Running unit tests only
+
+To run only the unit tests (for an existing installation; can only be combined with installation_mode=upgrade), use tags as follows:
+
+```console
+ansible-playbook/ site.yml --tags unittest -e "installation_mode=upgrade" -K
+```
+
## Parameter "api_no_metadata" to prevent meta data import
e.g. if your hasura metadata file needs to be re-created from scratch, then use the following switch:
diff --git a/documentation/revision-history.md b/documentation/revision-history.md
index fa99e62cc..8f4a0b0c3 100644
--- a/documentation/revision-history.md
+++ b/documentation/revision-history.md
@@ -139,3 +139,7 @@ adding report template format fk and permissions
### 5.3.4 - 29.07.2021
- moving to API hasura v2.0
+
+### 5.4.1 - xx.09.2021
+- moving towards full API-based importer modules
+- in preparation for coming import changes
diff --git a/inventory/group_vars/all.yml b/inventory/group_vars/all.yml
index 9b9dba15c..13640b121 100644
--- a/inventory/group_vars/all.yml
+++ b/inventory/group_vars/all.yml
@@ -1,5 +1,5 @@
### general settings
-product_version: "5.3.4"
+product_version: "5.4.1"
ansible_python_interpreter: /usr/bin/python3
ansible_ssh_common_args: '-o StrictHostKeyChecking=no'
product_name: fworch
@@ -112,4 +112,5 @@ http_conf_dir: /etc/{{ webserver_package_name }}/sites-available/
################# testing #########################
test_dir: "{{ fworch_home }}/test"
+test_fortigate_name: fortigate_test
csharp_test_start_dir: "{{ fworch_home }}/test/csharp/FWO_Test"
diff --git a/inventory/group_vars/apiserver.yml b/inventory/group_vars/apiserver.yml
index 8d186c067..74a715082 100644
--- a/inventory/group_vars/apiserver.yml
+++ b/inventory/group_vars/apiserver.yml
@@ -6,7 +6,7 @@ api_hasura_admin_test_password: "not4production"
api_user_email: "{{ api_user }}@{{ api_ip_address }}"
api_home: "{{ fworch_home }}/api"
api_hasura_cli_bin: "/usr/local/bin/hasura"
-api_hasura_version: "v2.0.3"
+api_hasura_version: "v2.0.8"
api_project_name: api
api_no_metadata: false
# debug > info > warn > error
diff --git a/inventory/group_vars/frontends.yml b/inventory/group_vars/frontends.yml
index 1de7d37d7..b4f8c8811 100644
--- a/inventory/group_vars/frontends.yml
+++ b/inventory/group_vars/frontends.yml
@@ -7,6 +7,8 @@ ui_app_name: "{{ product_name }}"
ui_dir: "{{ fworch_home }}/ui/files"
ui_start_dir: "{{ ui_dir }}/FWO_UI"
ui_admin_password_file: "{{ fworch_secrets_dir }}/ui_admin_pwd"
+importer_password: not4production
+importer_password_file: "{{ fworch_secrets_dir }}/importer_pwd"
# ui_comm_mode valid values:
# standard (meaning with http-->https rewrite and websockets), no_ws, allow_http, no_ws_and_allow_http
ui_comm_mode: standard
diff --git a/inventory/group_vars/sampleserver.yml b/inventory/group_vars/sampleserver.yml
index 5222ae783..85f7cdd03 100644
--- a/inventory/group_vars/sampleserver.yml
+++ b/inventory/group_vars/sampleserver.yml
@@ -2,9 +2,11 @@
sample_config_user: fworchsample
sample_config_user_home: "/home/{{ sample_config_user }}"
import_sample_server: localhost
-sample_config_hostname: isosrv
-importer_hostname: localhost
sample_data_rate: medium
+sample_config_hostname: isosrv #todo: delete?
not used anywhere +importer_hostname: localhost sample_data_rate: medium +sample_role_purpose: demo +sample_fortigate_name: fortigate_demo # sample openldap server second_ldap_db: no diff --git a/roles/api/files/replace_metadata.json b/roles/api/files/replace_metadata.json index c0d4d5777..53d66626c 100644 --- a/roles/api/files/replace_metadata.json +++ b/roles/api/files/replace_metadata.json @@ -72,6 +72,34 @@ } } } + ], + "select_permissions": [ + { + "role": "importer", + "permission": { + "columns": [ + "log_obj_id", + "new_obj_id", + "old_obj_id", + "import_admin", + "doku_admin", + "control_id", + "abs_change_id", + "change_action", + "changelog_obj_comment", + "documented", + "docu_time", + "mgm_id", + "change_type_id", + "security_relevant", + "change_request_info", + "change_time", + "unique_name" + ], + "filter": {}, + "allow_aggregations": true + } + } ] }, { @@ -171,6 +199,34 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [ + "log_rule_id", + "doku_admin", + "control_id", + "import_admin", + "new_rule_id", + "old_rule_id", + "implicit_change", + "abs_change_id", + "change_action", + "changelog_rule_comment", + "documented", + "docu_time", + "mgm_id", + "dev_id", + "change_type_id", + "security_relevant", + "change_request_info", + "change_time", + "unique_name" + ], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -327,6 +383,34 @@ } } } + ], + "select_permissions": [ + { + "role": "importer", + "permission": { + "columns": [ + "log_svc_id", + "doku_admin", + "control_id", + "import_admin", + "new_svc_id", + "old_svc_id", + "abs_change_id", + "change_action", + "changelog_svc_comment", + "documented", + "docu_time", + "mgm_id", + "change_type_id", + "security_relevant", + "change_request_info", + "change_time", + "unique_name" + ], + "filter": {}, + "allow_aggregations": true + } + } ] }, { @@ -391,6 +475,34 @@ } } } + ], + "select_permissions": [ + { + "role": "importer", + "permission": { + "columns": [ + "log_usr_id", + "new_user_id", + "old_user_id", + "import_admin", + "doku_admin", + "control_id", + "abs_change_id", + "change_action", + "changelog_user_comment", + "documented", + "docu_time", + "mgm_id", + "change_type_id", + "security_relevant", + "change_request_info", + "change_time", + "unique_name" + ], + "filter": {}, + "allow_aggregations": true + } + } ] }, { @@ -721,6 +833,29 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [ + "dev_id", + "mgm_id", + "dev_name", + "dev_rulebase", + "dev_typ_id", + "tenant_id", + "dev_active", + "dev_comment", + "dev_create", + "dev_update", + "do_not_import", + "clearing_import_ran", + "force_initial_import", + "hide_in_gui" + ], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -915,12 +1050,84 @@ } ] }, + { + "table": { + "schema": "public", + "name": "import_config" + }, + "object_relationships": [ + { + "name": "import_control", + "using": { + "foreign_key_constraint_on": "import_id" + } + }, + { + "name": "management", + "using": { + "foreign_key_constraint_on": "mgm_id" + } + } + ], + "insert_permissions": [ + { + "role": "importer", + "permission": { + "check": {}, + "columns": ["config", "import_id", "mgm_id"], + "backend_only": false + } + } + ], + "select_permissions": [ + { + "role": "importer", + "permission": { + "columns": ["import_id", "mgm_id", "config"], + "filter": {}, + "allow_aggregations": true + } + } + ], + "delete_permissions": [ + { + "role": 
"importer", + "permission": { + "filter": {} + } + } + ] + }, { "table": { "schema": "public", "name": "import_control" }, "object_relationships": [ + { + "name": "import_config", + "using": { + "foreign_key_constraint_on": { + "column": "import_id", + "table": { + "schema": "public", + "name": "import_config" + } + } + } + }, + { + "name": "import_full_config", + "using": { + "foreign_key_constraint_on": { + "column": "import_id", + "table": { + "schema": "public", + "name": "import_full_config" + } + } + } + }, { "name": "management", "using": { @@ -1458,6 +1665,30 @@ } } ], + "insert_permissions": [ + { + "role": "importer", + "permission": { + "check": {}, + "columns": [ + "control_id", + "start_time", + "stop_time", + "is_initial_import", + "delimiter_group", + "delimiter_zone", + "delimiter_user", + "delimiter_list", + "mgm_id", + "last_change_in_config", + "successful_import", + "changes_found", + "import_errors" + ], + "backend_only": false + } + } + ], "select_permissions": [ { "role": "auditor", @@ -1481,6 +1712,28 @@ "allow_aggregations": true } }, + { + "role": "importer", + "permission": { + "columns": [ + "control_id", + "changes_found", + "is_initial_import", + "successful_import", + "delimiter_group", + "delimiter_list", + "delimiter_user", + "delimiter_zone", + "import_errors", + "mgm_id", + "last_change_in_config", + "start_time", + "stop_time" + ], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -1555,6 +1808,86 @@ "allow_aggregations": true } } + ], + "update_permissions": [ + { + "role": "importer", + "permission": { + "columns": [ + "control_id", + "changes_found", + "is_initial_import", + "successful_import", + "delimiter_group", + "delimiter_list", + "delimiter_user", + "delimiter_zone", + "import_errors", + "mgm_id", + "last_change_in_config", + "start_time", + "stop_time" + ], + "filter": {}, + "check": {} + } + } + ], + "delete_permissions": [ + { + "role": "importer", + "permission": { + "filter": {} + } + } + ] + }, + { + "table": { + "schema": "public", + "name": "import_full_config" + }, + "object_relationships": [ + { + "name": "import_control", + "using": { + "foreign_key_constraint_on": "import_id" + } + }, + { + "name": "management", + "using": { + "foreign_key_constraint_on": "mgm_id" + } + } + ], + "insert_permissions": [ + { + "role": "importer", + "permission": { + "check": {}, + "columns": ["config", "import_id", "mgm_id"], + "backend_only": false + } + } + ], + "select_permissions": [ + { + "role": "importer", + "permission": { + "columns": ["import_id", "mgm_id", "config"], + "filter": {}, + "allow_aggregations": true + } + } + ], + "delete_permissions": [ + { + "role": "importer", + "permission": { + "filter": {} + } + } ] }, { @@ -1598,6 +1931,64 @@ "backend_only": false } } + ], + "select_permissions": [ + { + "role": "importer", + "permission": { + "columns": [ + "obj_id", + "obj_zone", + "obj_name", + "obj_typ", + "obj_member_names", + "obj_member_refs", + "obj_member_excludes", + "obj_sw", + "obj_ip", + "obj_ip_end", + "obj_color", + "obj_comment", + "obj_location", + "control_id", + "obj_uid", + "last_change_admin", + "last_change_time", + "obj_scope" + ], + "filter": {}, + "allow_aggregations": true + } + } + ], + "update_permissions": [ + { + "role": "importer", + "permission": { + "columns": [ + "control_id", + "obj_id", + "last_change_admin", + "obj_name", + "obj_scope", + "obj_sw", + "obj_ip", + "obj_ip_end", + "obj_color", + "obj_comment", + "obj_location", + "obj_member_excludes", + 
"obj_member_names", + "obj_member_refs", + "obj_typ", + "obj_uid", + "obj_zone", + "last_change_time" + ], + "filter": {}, + "check": null + } + } ] }, { @@ -1976,6 +2367,18 @@ } } }, + { + "name": "import_configs", + "using": { + "foreign_key_constraint_on": { + "column": "mgm_id", + "table": { + "schema": "public", + "name": "import_config" + } + } + } + }, { "name": "import_controls", "using": { @@ -1988,6 +2391,18 @@ } } }, + { + "name": "import_full_configs", + "using": { + "foreign_key_constraint_on": { + "column": "mgm_id", + "table": { + "schema": "public", + "name": "import_full_config" + } + } + } + }, { "name": "objects", "using": { @@ -2118,6 +2533,38 @@ "allow_aggregations": true } }, + { + "role": "importer", + "permission": { + "columns": [ + "mgm_id", + "dev_typ_id", + "mgm_name", + "mgm_comment", + "tenant_id", + "mgm_create", + "mgm_update", + "ssh_public_key", + "ssh_private_key", + "ssh_hostname", + "ssh_port", + "ssh_user", + "last_import_md5_complete_config", + "last_import_md5_rules", + "last_import_md5_objects", + "last_import_md5_users", + "do_not_import", + "clearing_import_ran", + "force_initial_import", + "config_path", + "hide_in_gui", + "importer_hostname", + "debug_level" + ], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -6089,6 +6536,14 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -6258,6 +6713,14 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -6348,6 +6811,26 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [ + "dev_typ_id", + "dev_typ_manufacturer", + "dev_typ_name", + "dev_typ_version", + "dev_typ_comment", + "dev_typ_predef_svc", + "dev_typ_predef_obj", + "dev_typ_is_mgmt", + "dev_typ_config_file_rules", + "dev_typ_config_file_basic_objects", + "dev_typ_config_file_users" + ], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -6439,6 +6922,14 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -6493,6 +6984,16 @@ } } } + ], + "select_permissions": [ + { + "role": "importer", + "permission": { + "columns": [], + "filter": {}, + "allow_aggregations": true + } + } ] }, { @@ -6526,6 +7027,14 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -6593,6 +7102,14 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -6658,6 +7175,14 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { @@ -6709,6 +7234,14 @@ "filter": {} } }, + { + "role": "importer", + "permission": { + "columns": [], + "filter": {}, + "allow_aggregations": true + } + }, { "role": "recertifier", "permission": { diff --git a/roles/common/tasks/main.yml b/roles/common/tasks/main.yml index 11fd903cd..57e6c4d1a 100644 --- a/roles/common/tasks/main.yml +++ b/roles/common/tasks/main.yml @@ -6,7 +6,7 @@ - "{{ 
ansible_version.string is version_compare('2.8', '>=') }}" msg: Ansible 2.8 or above is required - - name: check for existing main config file + - name: check for existing main config file {{ fworch_conf_file }} stat: path: "{{ fworch_conf_file }}" register: already_installed @@ -82,10 +82,13 @@ upgrade: dist when: ansible_facts['distribution'] == "Red Hat" or ansible_facts['distribution'] == "CentOS" - - name: install package rsyslog + - name: install packages rsyslog, rsync package: - name: "rsyslog" + name: "{{ item }}" state: present + loop: + - rsyslog + - rsync - name: change rsyslog config to receive logs blockinfile: diff --git a/roles/database/files/sql/creation/fworch-create-constraints.sql b/roles/database/files/sql/creation/fworch-create-constraints.sql index 591ad4fb2..1271998ed 100755 --- a/roles/database/files/sql/creation/fworch-create-constraints.sql +++ b/roles/database/files/sql/creation/fworch-create-constraints.sql @@ -5,6 +5,7 @@ Alter Table "changelog_rule" add Constraint "alt_key_changelog_rule" UNIQUE ("ab Alter Table "changelog_service" add Constraint "alt_key_changelog_service" UNIQUE ("abs_change_id"); Alter Table "changelog_user" add Constraint "alt_key_changelog_user" UNIQUE ("abs_change_id"); Alter Table "import_changelog" add Constraint "Alter_Key14" UNIQUE ("import_changelog_nr","control_id"); +Alter Table "import_control" add Constraint "control_id_stop_time_unique" UNIQUE ("stop_time","control_id"); Alter Table "object" add Constraint "obj_altkey" UNIQUE ("mgm_id","zone_id","obj_uid","obj_create"); -- Alter Table "rule" add Constraint "rule_altkey" UNIQUE ("mgm_id","rule_uid","rule_create"); Alter Table "rule" add Constraint "rule_altkey" UNIQUE ("dev_id","rule_uid","rule_create"); diff --git a/roles/database/files/sql/creation/fworch-create-foreign-keys.sql b/roles/database/files/sql/creation/fworch-create-foreign-keys.sql index 2ce3a0ef2..6e5a84d08 100755 --- a/roles/database/files/sql/creation/fworch-create-foreign-keys.sql +++ b/roles/database/files/sql/creation/fworch-create-foreign-keys.sql @@ -33,7 +33,11 @@ Alter table "device" add foreign key ("mgm_id") references "management" ("mgm_i Alter table "device" add foreign key ("tenant_id") references "tenant" ("tenant_id") on update restrict on delete cascade; Alter table "error_log" add foreign key ("error_id") references "error" ("error_id") on update restrict on delete cascade; Alter table "import_changelog" add foreign key ("control_id") references "import_control" ("control_id") on update restrict on delete cascade; +Alter table "import_config" add constraint "import_config_import_id_f_key" foreign key ("import_id") references "import_control" ("control_id") on update restrict on delete cascade; +Alter table "import_config" add constraint "import_config_mgm_id_f_key" foreign key ("mgm_id") references "management" ("mgm_id") on update restrict on delete cascade; Alter table "import_control" add foreign key ("mgm_id") references "management" ("mgm_id") on update restrict on delete cascade; +Alter table "import_full_config" add constraint "import_full_config_import_id_f_key" foreign key ("import_id") references "import_control" ("control_id") on update restrict on delete cascade; +Alter table "import_full_config" add constraint "import_full_config_mgm_id_f_key" foreign key ("mgm_id") references "management" ("mgm_id") on update restrict on delete cascade; Alter table "import_object" add foreign key ("control_id") references "import_control" ("control_id") on update restrict on delete cascade; Alter 
table "import_rule" add foreign key ("control_id") references "import_control" ("control_id") on update restrict on delete cascade;
Alter table "import_service" add foreign key ("control_id") references "import_control" ("control_id") on update restrict on delete cascade;
diff --git a/roles/database/files/sql/creation/fworch-create-indices.sql b/roles/database/files/sql/creation/fworch-create-indices.sql
index b675fee9e..5117e0abb 100755
--- a/roles/database/files/sql/creation/fworch-create-indices.sql
+++ b/roles/database/files/sql/creation/fworch-create-indices.sql
@@ -11,9 +11,9 @@ Create unique index "stm_fw_typ_akey" on "stm_dev_typ" using btree ("dev_typ_nam
Create index "stm_nattypes_akey" on "stm_nattyp" using btree ("nattyp_name");
Create unique index "stm_obj_typ_akey" on "stm_obj_typ" using btree ("obj_typ_name");
Create index "import_control_start_time_idx" on "import_control" using btree ("start_time");
--- Create index "rule_oder_idx" on "rule_order" using btree ("control_id","rule_id");
+-- make sure a maximum of one stop_time=null entry exists per mgm_id (only one running import per mgm):
+CREATE UNIQUE INDEX import_control_only_one_null_stop_time_per_mgm_when_null ON import_control (mgm_id) WHERE stop_time IS NULL;
Create index "IX_relationship11" on "object" ("obj_nat_install");
--- Create index "IX_Relationship126" on "rule_order" ("dev_id");
Create index "IX_Relationship128" on "changelog_rule" ("dev_id");
Create index "IX_Relationship186" on "rule" ("dev_id");
Create index "IX_relationship7" on "device" ("tenant_id");
diff --git a/roles/database/files/sql/creation/fworch-create-tables.sql b/roles/database/files/sql/creation/fworch-create-tables.sql
index d1994bfa1..c2cf329bb 100755
--- a/roles/database/files/sql/creation/fworch-create-tables.sql
+++ b/roles/database/files/sql/creation/fworch-create-tables.sql
@@ -615,6 +615,22 @@ Create table "import_control"
 primary key ("control_id")
);
+-- temporary table for storing the fw-relevant config during import
+CREATE TABLE IF NOT EXISTS "import_config" (
+    "import_id" bigint NOT NULL,
+    "mgm_id" integer NOT NULL,
+    "config" jsonb NOT NULL,
+    PRIMARY KEY ("import_id")
+);
+
+-- permanent table for storing the full config as an archive
+CREATE TABLE "import_full_config" (
+    "import_id" bigint NOT NULL,
+    "mgm_id" integer NOT NULL,
+    "config" jsonb NOT NULL,
+    PRIMARY KEY ("import_id")
+);
+
-- temporary import tables -------------------------------------
Create table "import_service"
diff --git a/roles/database/files/sql/creation/fworch-create-triggers.sql b/roles/database/files/sql/creation/fworch-create-triggers.sql
new file mode 100644
index 000000000..f631a997b
--- /dev/null
+++ b/roles/database/files/sql/creation/fworch-create-triggers.sql
@@ -0,0 +1,103 @@
+
+-------------------
+-- the following triggers create the bigserial obj_id as it does not seem to be set automatically
+-- when inserting via the jsonb function and specifying no obj_id
+
+CREATE OR REPLACE FUNCTION import_object_obj_id_seq() RETURNS TRIGGER AS $$
+BEGIN
+    NEW.obj_id = coalesce(NEW.obj_id, nextval('import_object_obj_id_seq'));
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql;
+
+DROP TRIGGER IF EXISTS import_object_obj_id_seq ON import_object CASCADE;
+CREATE TRIGGER import_object_obj_id_seq BEFORE INSERT ON import_object FOR EACH ROW EXECUTE PROCEDURE import_object_obj_id_seq();
+
+CREATE OR REPLACE FUNCTION import_service_svc_id_seq() RETURNS TRIGGER AS $$
+BEGIN
+    NEW.svc_id = coalesce(NEW.svc_id, nextval('import_service_svc_id_seq'));
+    RETURN NEW;
+END;
+$$ LANGUAGE plpgsql; + +DROP TRIGGER IF EXISTS import_service_svc_id_seq ON import_service CASCADE; +CREATE TRIGGER import_service_svc_id_seq BEFORE INSERT ON import_service FOR EACH ROW EXECUTE PROCEDURE import_service_svc_id_seq(); + +CREATE OR REPLACE FUNCTION import_user_user_id_seq() RETURNS TRIGGER AS $$ +BEGIN + NEW.user_id = coalesce(NEW.user_id, nextval('import_user_user_id_seq')); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +DROP TRIGGER IF EXISTS import_user_user_id_seq ON import_user CASCADE; +CREATE TRIGGER import_user_user_id_seq BEFORE INSERT ON import_user FOR EACH ROW EXECUTE PROCEDURE import_user_user_id_seq(); + +CREATE OR REPLACE FUNCTION import_rule_rule_id_seq() RETURNS TRIGGER AS $$ +BEGIN + NEW.rule_id = coalesce(NEW.rule_id, nextval('import_rule_rule_id_seq')); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +DROP TRIGGER IF EXISTS import_rule_rule_id_seq ON import_rule CASCADE; +CREATE TRIGGER import_rule_rule_id_seq BEFORE INSERT ON import_rule FOR EACH ROW EXECUTE PROCEDURE import_rule_rule_id_seq(); + +------------------- + +CREATE OR REPLACE FUNCTION import_config_from_jsonb () + RETURNS TRIGGER + AS $BODY$ +DECLARE + import_id BIGINT; + r_import_result RECORD; +BEGIN + INSERT INTO import_object + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_object, NEW.config -> 'network_objects'); + + INSERT INTO import_service + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_service, NEW.config -> 'service_objects'); + + INSERT INTO import_user + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_user, NEW.config -> 'user_objects'); + + INSERT INTO import_zone + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_zone, NEW.config -> 'zone_objects'); + + INSERT INTO import_rule + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_rule, NEW.config -> 'rules'); + + -- finally start the stored procedure import + PERFORM import_all_main(NEW.import_id); + + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql +VOLATILE +COST 100; + +ALTER FUNCTION public.import_config_from_jsonb () OWNER TO fworch; + +DROP TRIGGER IF EXISTS import_config_insert ON import_config CASCADE; + +CREATE TRIGGER import_config_insert + BEFORE INSERT ON import_config + FOR EACH ROW + EXECUTE PROCEDURE import_config_from_jsonb (); diff --git a/roles/database/files/sql/creation/fworch-fill-stm.sql b/roles/database/files/sql/creation/fworch-fill-stm.sql index 6f9f220e4..47cea05b2 100644 --- a/roles/database/files/sql/creation/fworch-fill-stm.sql +++ b/roles/database/files/sql/creation/fworch-fill-stm.sql @@ -1,7 +1,4 @@ --- SET client_encoding=UTF8 --- \encoding UTF8 - INSERT INTO language ("name", "culture_info") VALUES('German', 'de-DE'); INSERT INTO language ("name", "culture_info") VALUES('English', 'en-US'); @@ -105,162 +102,19 @@ insert into stm_track (track_id,track_name) VALUES (20,'utm'); insert into stm_track (track_id,track_name) VALUES (22,'utm start'); -- check point R8x: insert into stm_track (track_id,track_name) VALUES (21,'network log'); +-- netscreen: +-- insert into stm_track (track_id,track_name) VALUES (13,'count traffic'); -- netscreen: traffic means traffic shaping not logging insert into request_type (request_type_id, request_type_name, request_type_comment) VALUES (1, 'ARS', 'Remedy ARS Ticket'); --- insert into stm_track (track_id,track_name) VALUES (13,'count traffic'); -- netscreen: traffic means traffic shaping not logging - --- insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES 
(1,'Check Point NG','R5x','Check Point',''); insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (2,'Netscreen','5.x-6.x','Netscreen', ''); --- insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (3,'Netscreen','6.x','Netscreen',''); --- insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (4,'Check Point NGX','R6x','Check Point',''); insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (5,'Barracuda Firewall Control Center','Vx','phion',''); insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (6,'phion netfence','3.x','phion',''); insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (7,'Check Point','R5x-R7x','Check Point',''); insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (8,'JUNOS','10-21','Juniper','any;0;0;65535;;junos-predefined-service;simple;'); insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (9,'Check Point','R8x','Check Point',''); insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (10,'Fortinet','5.x-6.x','Fortinet',''); - --- update stm_dev_typ set dev_typ_predef_svc= --- 'ANY;0;0;65535;1;other;simple; --- MS-RPC-ANY;;0;0;1;other;rpc; --- MS-AD-BR;;0;0;1;other;rpc; --- MS-AD-DRSUAPI;;0;0;1;other;rpc; --- MS-AD-DSROLE;;0;0;1;other;rpc; --- MS-AD-DSSETUP;;0;0;1;other;rpc; --- MS-DTC;;0;0;1;other;rpc; --- MS-EXCHANGE-DATABASE;;0;0;1;other;rpc; --- MS-EXCHANGE-DIRECTORY;;0;0;1;other;rpc; --- MS-EXCHANGE-INFO-STORE;;0;0;1;other;rpc; --- MS-EXCHANGE-MTA;;0;0;1;other;rpc; --- MS-EXCHANGE-STORE;;0;0;1;other;rpc; --- MS-EXCHANGE-SYSATD;;0;0;1;other;rpc; --- MS-FRS;;0;0;1;other;rpc; --- MS-IIS-COM;;0;0;1;other;rpc; --- MS-IIS-IMAP4;;0;0;1;other;rpc; --- MS-IIS-INETINFO;;0;0;1;other;rpc; --- MS-IIS-NNTP;;0;0;1;other;rpc; --- MS-IIS-POP3;;0;0;1;other;rpc; --- MS-IIS-SMTP;;0;0;1;other;rpc; --- MS-ISMSERV;;0;0;1;other;rpc; --- MS-MESSENGER;;0;0;30;other;rpc; --- MS-MQQM;;0;0;1;other;rpc; --- MS-NETLOGON;;0;0;1;other;rpc; --- MS-SCHEDULER;;0;0;1;other;rpc; --- MS-WIN-DNS;;0;0;1;other;rpc; --- MS-WINS;;0;0;1;other;rpc; --- SUN-RPC;;0;0;1;other;rpc; --- SUN-RPC-ANY;;0;0;1;other;rpc; --- SUN-RPC-MOUNTD;;0;0;30;other;rpc; --- SUN-RPC-NFS;;0;0;40;other;rpc; --- SUN-RPC-NLOCKMGR;;0;0;1;other;rpc; --- SUN-RPC-RQUOTAD;;0;0;30;other;rpc; --- SUN-RPC-RSTATD;;0;0;30;other;rpc; --- SUN-RPC-RUSERD;;0;0;30;other;rpc; --- SUN-RPC-SADMIND;;0;0;30;other;rpc; --- SUN-RPC-SPRAYD;;0;0;30;other;rpc; --- SUN-RPC-STATUS;;0;0;30;other;rpc; --- SUN-RPC-WALLD;;0;0;30;other;rpc; --- SUN-RPC-YPBIND;;0;0;30;other;rpc; --- ICMP Address Mask;1;0;65535;1;other;simple; --- ICMP-ANY;1;0;65535;1;other;simple; --- ICMP Dest Unreachable;1;0;65535;1;other;simple; --- ICMP Fragment Needed;1;0;65535;1;other;simple; --- ICMP Fragment Reassembly;1;0;65535;1;other;simple; --- ICMP Host Unreachable;1;0;65535;1;other;simple; --- ICMP-INFO;1;0;65535;1;other;simple; --- ICMP Parameter Problem;1;0;65535;1;other;simple; --- ICMP Port Unreachable;1;0;65535;1;other;simple; --- ICMP Protocol Unreach;1;0;65535;1;other;simple; --- ICMP Redirect;1;0;65535;1;other;simple; --- ICMP Redirect 
Host;1;0;65535;1;other;simple; --- ICMP Redirect TOS & Host;1;0;65535;1;other;simple; --- ICMP Redirect TOS & Net;1;0;65535;1;other;simple; --- ICMP Source Quench;1;0;65535;1;other;simple; --- ICMP Source Route Fail;1;0;65535;1;other;simple; --- ICMP Time Exceeded;1;0;65535;1;other;simple; --- ICMP-TIMESTAMP;1;0;65535;1;other;simple; --- PING;1;0;65535;1;other;simple; --- TRACEROUTE;1;0;65535;1;other;simple; --- AOL;6;5190;5194;30;remote;simple; --- BGP;6;179;179;30;other;simple; --- FINGER;6;79;79;30;info seeking;simple; --- FTP;6;21;21;30;remote;simple; --- FTP-Get;6;21;21;30;remote;simple; --- FTP-Put;6;21;21;30;remote;simple; --- GOPHER;6;70;70;30;info seeking;simple; --- H.323;6;1720;1720;30;remote;simple; --- HTTP;6;80;80;5;info seeking;simple; --- HTTPS;6;443;443;30;security;simple; --- IMAP;6;143;143;30;email;simple; --- Internet Locator Service;6;389;389;30;info seeking;simple; --- IRC;6;6660;6669;30;remote;simple; --- LDAP;6;389;389;30;info seeking;simple; --- MAIL;6;25;25;30;email;simple; --- MSN;6;1863;1863;30;remote;simple; --- NetMeeting;6;1720;1720;2160;remote;simple; --- NNTP;6;119;119;30;info seeking;simple; --- NS Global;6;15397;15397;30;remote;simple; --- NS Global PRO;6;15397;15397;30;remote;simple; --- POP3;6;110;110;30;email;simple; --- PPTP;6;1723;1723;30;security;simple; --- Real Media;6;7070;7070;30;info seeking;simple; --- RLOGIN;6;513;513;30;remote;simple; --- RSH;6;514;514;30;remote;simple; --- RTSP;6;554;554;30;info seeking;simple; --- SMB;6;139;139;30;remote;simple; --- SMTP;6;25;25;30;email;simple; --- SQL*Net V1;6;1525;1525;480;other;simple; --- SQL*Net V2;6;1521;1521;480;other;simple; --- SSH;6;22;22;480;security;simple; --- TCP-ANY;6;0;65535;30;other;simple; --- TELNET;6;23;23;480;remote;simple; --- VDO Live;6;7000;7010;30;info seeking;simple; --- WAIS;6;210;210;30;info seeking;simple; --- WINFRAME;6;1494;1494;30;remote;simple; --- X-WINDOWS;6;6000;6063;30;remote;simple; --- YMSG;6;5050;5050;30;remote;simple; --- DHCP-Relay;17;67;67;1;info seeking;simple; --- DNS;17;53;53;1;info seeking;simple; --- GNUTELLA;17;6346;6347;1;remote;simple; --- IKE;17;500;500;1;security;simple; --- L2TP;17;1701;1701;1;remote;simple; --- MS-RPC-EPM;17;135;135;1;remote;simple; --- NBNAME;17;137;137;1;remote;simple; --- NBDS;17;138;138;1;remote;simple; --- NFS;17;111;111;40;remote;simple; --- NSM;17;69;69;1;other;simple; --- NTP;17;123;123;1;other;simple; --- PC-Anywhere;17;5632;5632;1;remote;simple; --- RIP;17;520;520;1;other;simple; --- SIP;17;5060;5060;1;other;simple; --- SNMP;17;161;161;1;other;simple; --- SUN-RPC-PORTMAPPER;17;111;111;40;remote;simple; --- SYSLOG;17;514;514;1;other;simple; --- TALK;17;517;518;1;other;simple; --- TFTP;17;69;69;1;remote;simple; --- UDP-ANY;17;0;65535;1;other;simple; --- UUCP;17;540;540;1;remote;simple; --- OSPF;89;0;65535;1;other;simple; --- MS-SQL;6;1433;1433;30;other;simple; --- LPR;6;515;515;30;other;simple; --- REXEC;6;512;512;30;remote;simple; --- IDENT;6;113;113;30;other;simple; --- SCTP-ANY;132;0;65535;1;other;simple; --- GRE;47;0;65535;60;remote;simple; --- HTTP;6;80;80;5;info seeking;simple; --- MGCP-UA;17;2427;2427;120;other;simple; --- GTP;17;2123;2123;30;remote;simple; --- MGCP-CA;17;2727;2727;120;other;simple; --- WHOIS;6;43;43;30;info seeking;simple; --- DISCARD;17;9;9;1;other;simple; --- RADIUS;17;1812;1813;1;other;simple; --- ECHO;17;7;7;1;other;simple; --- VNC;6;5800;5800;30;other;simple; --- CHARGEN;17;19;19;1;other;simple; --- SQL Monitor;17;1434;1434;1;other;simple; --- IKE-NAT;17;500;500;3;security;simple;' --- where 
dev_typ_id=2; +insert into stm_dev_typ (dev_typ_id,dev_typ_name,dev_typ_version,dev_typ_manufacturer,dev_typ_predef_svc) VALUES (11,'FortiManager','5.x-7.x','Fortinet',''); update stm_dev_typ set dev_typ_predef_svc= 'ANY;0;0;65535;1;other;simple @@ -431,10 +285,9 @@ MS-IIS;;;;;Microsoft IIS Server;group;MS-IIS-COM|MS-IIS-IMAP4|MS-IIS-INETINFO|MS VOIP;;;;;VOIP Service Group;group;H.323|MGCP-CA|MGCP-UA|SCCP|SIP' where dev_typ_id=2; - -SET statement_timeout = 0; -SET client_encoding = 'UTF8'; -SET standard_conforming_strings = on; -SET check_function_bodies = false; -SET client_min_messages = warning; -SET search_path = public, pg_catalog; +-- SET statement_timeout = 0; +-- SET client_encoding = 'UTF8'; +-- SET standard_conforming_strings = on; +-- SET check_function_bodies = false; +-- SET client_min_messages = warning; +-- SET search_path = public, pg_catalog; diff --git a/roles/database/files/sql/idempotent/fworch-import-main.sql b/roles/database/files/sql/idempotent/fworch-import-main.sql index 11375e6a8..ad74190bc 100644 --- a/roles/database/files/sql/idempotent/fworch-import-main.sql +++ b/roles/database/files/sql/idempotent/fworch-import-main.sql @@ -73,7 +73,6 @@ BEGIN IF (import_rules(r_dev.dev_id, i_current_import_id)) THEN -- returns true if rule order needs to be changed -- currently always returns true as each import needs a rule reordering v_err_pos := 'import_rules_set_rule_num_numeric of device ' || r_dev.dev_name || ' (Management: ' || CAST (i_mgm_id AS VARCHAR) || ')'; - -- PERFORM import_rules_save_order(i_current_import_id,r_dev.dev_id); -- todo: to be removed -- in case of any changes - adjust rule_num values in rulebase PERFORM import_rules_set_rule_num_numeric (i_current_import_id,r_dev.dev_id); END IF; @@ -110,6 +109,7 @@ BEGIN SELECT INTO v_err_str import_errors FROM import_control WHERE control_id=i_current_import_id; UPDATE import_control SET import_errors = v_err_str || ';' || v_err_str_refs WHERE control_id=i_current_import_id; END IF; + RAISE NOTICE 'ERROR: import_all_main failed'; RETURN FALSE; END; RETURN TRUE; diff --git a/roles/database/files/sql/idempotent/fworch-import.sql b/roles/database/files/sql/idempotent/fworch-import.sql index b9963d783..ac0e2c7f7 100644 --- a/roles/database/files/sql/idempotent/fworch-import.sql +++ b/roles/database/files/sql/idempotent/fworch-import.sql @@ -384,6 +384,12 @@ BEGIN END; $$ LANGUAGE plpgsql; +CREATE OR REPLACE FUNCTION found_changes_in_import(BIGINT) RETURNS BOOLEAN AS $$ +BEGIN + RETURN (select show_change_summary($1)<>''); +END; +$$ LANGUAGE plpgsql; + CREATE OR REPLACE FUNCTION clean_up_tables (BIGINT) RETURNS VOID AS $$ DECLARE i_current_import_id ALIAS FOR $1; diff --git a/roles/database/files/sql/idempotent/fworch-rule-resolved.sql b/roles/database/files/sql/idempotent/fworch-rule-resolved.sql index 881597fe7..ee433e5e1 100644 --- a/roles/database/files/sql/idempotent/fworch-rule-resolved.sql +++ b/roles/database/files/sql/idempotent/fworch-rule-resolved.sql @@ -317,7 +317,7 @@ BEGIN ELSIF c_action = 'D' THEN RAISE DEBUG 'import_rule_resolved_usr 2 delete - i_mgm_id=%, i_rule_id=%, i_old_obj_id=%, i_new_obj_id=%, i_current_import_id=%, c_action=%, c_changelog_table=%', i_mgm_id, i_rule_id, i_old_obj_id, i_new_obj_id, i_current_import_id, c_action, c_changelog_table; - UPDATE rule_user_resolved SET removed=i_current_import_id WHERE rule_id=i_rule_id AND userc_id=i_old_obj_id AND removed IS NULL; + UPDATE rule_user_resolved SET removed=i_current_import_id WHERE rule_id=i_rule_id AND user_id=i_old_obj_id AND 
removed IS NULL; ELSIF c_action = 'C' THEN RAISE DEBUG 'import_rule_resolved_usr 3 change - i_mgm_id=%, i_rule_id=%, i_old_obj_id=%, i_new_obj_id=%, i_current_import_id=%, c_action=%, c_changelog_table=%', i_mgm_id, i_rule_id, i_old_obj_id, i_new_obj_id, i_current_import_id, c_action, c_changelog_table; diff --git a/roles/database/files/sql/idempotent/fworch-texts.sql b/roles/database/files/sql/idempotent/fworch-texts.sql index 423e6e17d..7a853c5b1 100644 --- a/roles/database/files/sql/idempotent/fworch-texts.sql +++ b/roles/database/files/sql/idempotent/fworch-texts.sql @@ -144,7 +144,7 @@ The following top-level menu items are available (depending on role memberships) INSERT INTO txt VALUES ('getting_support', 'German', 'Unterstützung benötigt? Ihre Kontaktmöglichkeiten'); INSERT INTO txt VALUES ('getting_support', 'English', 'Do you need help? Our Contact options'); INSERT INTO txt VALUES ('support_details', 'German', ' -Mö Sie einen Supportvertrag abschließen, um in den Genuss folgender Vorteile zu kommen?
+Möchten Sie einen Supportvertrag abschließen, um in den Genuss folgender Vorteile zu kommen?
  • garantierte Unterstützung bei Problemen mit Firewall Orchestrator
  • Customizing: haben Sie Anpassungswünsche, die wir für Sie umsetzen sollen?
  • diff --git a/roles/database/files/sql/test/hasura-test.sql b/roles/database/files/sql/test/hasura-test.sql new file mode 100644 index 000000000..bf6599c54 --- /dev/null +++ b/roles/database/files/sql/test/hasura-test.sql @@ -0,0 +1,31 @@ + +-- cannot test hasura before API was installed, so can only run this on upgrade + +BEGIN; + +CREATE EXTENSION IF NOT EXISTS pgtap; + +-- CREATE OR REPLACE FUNCTION hdb_catalog.test_1_hdb_catalog_schema() +-- RETURNS SETOF TEXT LANGUAGE plpgsql AS $$ +-- BEGIN +-- RETURN NEXT has_table( 'hdb_catalog.hdb_action_log' ); +-- RETURN NEXT has_table( 'hdb_catalog.hdb_metadata' ); +-- RETURN NEXT has_table( 'hdb_catalog.hdb_version' ); +-- END; +-- $$; + +CREATE OR REPLACE FUNCTION hdb_catalog.test_2_hdb_catalog_data() +RETURNS SETOF TEXT LANGUAGE plpgsql AS $$ +BEGIN + RETURN NEXT results_eq('SELECT cast((select COUNT(*) FROM hdb_catalog.hdb_metadata) as integer)', 'SELECT cast (1 as integer)', 'there should be exactly one metadata entry'); +END; +$$; + +CREATE OR REPLACE FUNCTION hdb_catalog.shutdown_1() RETURNS VOID LANGUAGE plpgsql AS $$ +BEGIN + drop function if exists hdb_catalog.test_1_hdb_catalog_schema(); + drop function if exists hdb_catalog.test_2_hdb_catalog_data(); +END; +$$; + +SELECT * FROM runtests('hdb_catalog'::name); diff --git a/roles/database/files/sql/test/test-basic-procs.sql b/roles/database/files/sql/test/test-basic-procs.sql deleted file mode 100644 index 58a1381b2..000000000 --- a/roles/database/files/sql/test/test-basic-procs.sql +++ /dev/null @@ -1,51 +0,0 @@ -\set ECHO none -\set QUIET 1 -\set ON_ERROR_ROLLBACK 1 -\set ON_ERROR_STOP true -\set QUIET 1 - -\pset format unaligned -\pset tuples_only true -\pset pager - -CREATE EXTENSION pgtap; - -BEGIN; -SELECT plan(14); - -SELECT is(select * from is_obj_group(select obj_id from object where obj_name='AuxiliaryNet'), false); -SELECT is(select * from is_obj_group(select obj_id from object where obj_name='CactusDA'), true); - -SELECT * FROM finish(); -ROLLBACK; - --- SELECT --- is(sign('{"sub":"1234567890","name":"John Doe","admin":true}', 'secret'), --- 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ'); - --- INSERT into object () values (); - --- SELECT --- throws_ok( --- $$SELECT header::text, payload::text, valid FROM verify( --- 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ', --- 'secret', 'bogus')$$, --- '22023', --- 'Cannot use "": No such hash algorithm', --- 'verify() should raise on bogus algorithm' --- ); - --- SELECT throws_ok( -- bogus header --- $$SELECT header::text, payload::text, valid FROM verify( --- 'eyJhbGciOiJIUzI1NiIBOGUScCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ', --- 'secret', 'HS256')$$ --- ); - --- SELECT --- results_eq( --- $$SELECT header::text, payload::text, valid FROM verify( --- 'eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiIxMjM0NTY3ODkwIiwibmFtZSI6IkpvaG4gRG9lIiwiYWRtaW4iOnRydWV9.TJVA95OrM7E2cBab30RMHrHDcEfxjoYZgeFONFh7HgQ', --- 'secret')$$, --- $$VALUES ('{"alg":"HS256","typ":"JWT"}', '{"sub":"1234567890","name":"John Doe","admin":true}', true)$$, --- 'verify() should return return data marked valid' --- ); diff --git a/roles/database/files/sql/test/unit-test-cleanup.sql b/roles/database/files/sql/test/unit-test-cleanup.sql new file mode 100644 index 
000000000..340756871 --- /dev/null +++ b/roles/database/files/sql/test/unit-test-cleanup.sql @@ -0,0 +1,3 @@ +drop extension if exists pgtap; +-- drop function if exists test_1_schema(); +-- drop function if exists test_2_functions(); \ No newline at end of file diff --git a/roles/database/files/sql/test/unit-tests.sql b/roles/database/files/sql/test/unit-tests.sql new file mode 100644 index 000000000..c293e76e4 --- /dev/null +++ b/roles/database/files/sql/test/unit-tests.sql @@ -0,0 +1,32 @@ + +BEGIN; +CREATE EXTENSION IF NOT EXISTS pgtap; + +CREATE OR REPLACE FUNCTION public.test_1_schema() +RETURNS SETOF TEXT LANGUAGE plpgsql AS $$ +BEGIN + RETURN NEXT has_table( 'object' ); + RETURN NEXT has_table( 'rule' ); + RETURN NEXT has_table( 'service' ); + RETURN NEXT has_table( 'usr' ); + RETURN NEXT hasnt_table( 'rule_order' ); +END; +$$; + +CREATE OR REPLACE FUNCTION public.test_2_functions() +RETURNS SETOF TEXT LANGUAGE plpgsql AS $$ +BEGIN + RETURN NEXT results_eq('SELECT * FROM are_equal(CAST(''1.2.3.4'' AS CIDR),CAST(''1.2.3.4/32'' AS CIDR))', 'SELECT TRUE', 'cidr are_equal should return true'); + RETURN NEXT results_eq('SELECT * FROM are_equal(7*0, 0)', 'SELECT TRUE', 'int are_equal should return true'); + RETURN NEXT results_eq('SELECT * FROM remove_spaces('' abc '')', 'SELECT CAST(''abc'' AS VARCHAR)', 'remove_spaces should return abc'); +END; +$$; + +CREATE OR REPLACE FUNCTION public.shutdown_1() RETURNS VOID LANGUAGE plpgsql AS $$ +BEGIN + drop function if exists test_1_schema(); + drop function if exists test_2_functions(); +END; +$$; + +SELECT * FROM runtests('public'::name); diff --git a/roles/database/files/upgrade/5.4.1.sql b/roles/database/files/upgrade/5.4.1.sql new file mode 100644 index 000000000..8f8931958 --- /dev/null +++ b/roles/database/files/upgrade/5.4.1.sql @@ -0,0 +1,147 @@ + +-- Grant ALL on "import_service" to group "configimporters"; +-- Grant ALL on "import_object" to group "configimporters"; +-- Grant ALL on "import_user" to group "configimporters"; +-- Grant ALL on "import_rule" to group "configimporters"; +-- Grant ALL on "import_control" to group "configimporters"; +-- Grant ALL on "import_zone" to group "configimporters"; +-- Grant ALL on "import_changelog" to group "configimporters"; + +CREATE TABLE IF NOT EXISTS "import_config" ( + "import_id" bigint NOT NULL, + "mgm_id" integer NOT NULL, + "config" jsonb NOT NULL, + PRIMARY KEY ("import_id") +); + +CREATE TABLE "import_full_config" ( + "import_id" bigint NOT NULL, + "mgm_id" integer NOT NULL, + "config" jsonb NOT NULL, + PRIMARY KEY ("import_id") +); + +ALTER TABLE "import_config" + DROP CONSTRAINT IF EXISTS "import_config_import_id_f_key" CASCADE; +ALTER TABLE "import_config" + ADD CONSTRAINT "import_config_import_id_f_key" FOREIGN KEY ("import_id") REFERENCES "import_control" ("control_id") ON UPDATE RESTRICT ON DELETE CASCADE; +ALTER TABLE "import_config" + DROP CONSTRAINT IF EXISTS "import_config_mgm_id_f_key" CASCADE; +ALTER TABLE "import_config" + ADD CONSTRAINT "import_config_mgm_id_f_key" FOREIGN KEY ("mgm_id") REFERENCES "management" ("mgm_id") ON UPDATE RESTRICT ON DELETE CASCADE; + +ALTER TABLE "import_full_config" + DROP CONSTRAINT IF EXISTS "import_full_config_import_id_f_key" CASCADE; +ALTER TABLE "import_full_config" + ADD CONSTRAINT "import_full_config_import_id_f_key" FOREIGN KEY ("import_id") REFERENCES "import_control" ("control_id") ON UPDATE RESTRICT ON DELETE CASCADE; +ALTER TABLE "import_full_config" + DROP CONSTRAINT IF EXISTS "import_full_config_mgm_id_f_key" CASCADE; +ALTER 
TABLE "import_full_config" + ADD CONSTRAINT "import_full_config_mgm_id_f_key" FOREIGN KEY ("mgm_id") REFERENCES "management" ("mgm_id") ON UPDATE RESTRICT ON DELETE CASCADE; + +--- create index to enforce max 1 stop_time=null import per mgm +DROP INDEX IF EXISTS import_control_only_one_null_stop_time_per_mgm_when_null; +CREATE UNIQUE INDEX import_control_only_one_null_stop_time_per_mgm_when_null ON import_control (mgm_id) WHERE stop_time IS NULL; + +------------------- +-- the following triggers creates the bigserial obj_id as it does not seem to be set automatically, +-- when insert via jsonb function and specifying no obj_id + +CREATE OR REPLACE FUNCTION import_object_obj_id_seq() RETURNS TRIGGER AS $$ +BEGIN + NEW.obj_id = coalesce(NEW.obj_id, nextval('import_object_obj_id_seq')); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +DROP TRIGGER IF EXISTS import_object_obj_id_seq ON import_object CASCADE; +CREATE TRIGGER import_object_obj_id_seq BEFORE INSERT ON import_object FOR EACH ROW EXECUTE PROCEDURE import_object_obj_id_seq(); + +CREATE OR REPLACE FUNCTION import_service_svc_id_seq() RETURNS TRIGGER AS $$ +BEGIN + NEW.svc_id = coalesce(NEW.svc_id, nextval('import_service_svc_id_seq')); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +DROP TRIGGER IF EXISTS import_service_svc_id_seq ON import_service CASCADE; +CREATE TRIGGER import_service_svc_id_seq BEFORE INSERT ON import_service FOR EACH ROW EXECUTE PROCEDURE import_service_svc_id_seq(); + +CREATE OR REPLACE FUNCTION import_user_user_id_seq() RETURNS TRIGGER AS $$ +BEGIN + NEW.user_id = coalesce(NEW.user_id, nextval('import_user_user_id_seq')); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +DROP TRIGGER IF EXISTS import_user_user_id_seq ON import_user CASCADE; +CREATE TRIGGER import_user_user_id_seq BEFORE INSERT ON import_user FOR EACH ROW EXECUTE PROCEDURE import_user_user_id_seq(); + +CREATE OR REPLACE FUNCTION import_rule_rule_id_seq() RETURNS TRIGGER AS $$ +BEGIN + NEW.rule_id = coalesce(NEW.rule_id, nextval('import_rule_rule_id_seq')); + RETURN NEW; +END; +$$ LANGUAGE plpgsql; + +DROP TRIGGER IF EXISTS import_rule_rule_id_seq ON import_rule CASCADE; +CREATE TRIGGER import_rule_rule_id_seq BEFORE INSERT ON import_rule FOR EACH ROW EXECUTE PROCEDURE import_rule_rule_id_seq(); + +------------------- + +CREATE OR REPLACE FUNCTION import_config_from_jsonb () + RETURNS TRIGGER + AS $BODY$ +DECLARE + import_id BIGINT; + r_import_result RECORD; +BEGIN + INSERT INTO import_object + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_object, NEW.config -> 'network_objects'); + + INSERT INTO import_service + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_service, NEW.config -> 'service_objects'); + + INSERT INTO import_user + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_user, NEW.config -> 'user_objects'); + + INSERT INTO import_zone + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_zone, NEW.config -> 'zone_objects'); + + INSERT INTO import_rule + SELECT + * + FROM + jsonb_populate_recordset(NULL::import_rule, NEW.config -> 'rules'); + + -- finally start the stored procedure import + PERFORM import_all_main(NEW.import_id); + + + RETURN NEW; +END; +$BODY$ +LANGUAGE plpgsql +VOLATILE +COST 100; + +ALTER FUNCTION public.import_config_from_jsonb () OWNER TO fworch; + +DROP TRIGGER IF EXISTS import_config_insert ON import_config CASCADE; + +CREATE TRIGGER import_config_insert + BEFORE INSERT ON import_config + FOR EACH ROW + EXECUTE PROCEDURE import_config_from_jsonb (); diff --git 
a/roles/database/tasks/install-database.yml b/roles/database/tasks/install-database.yml index 6380ff00d..4f7a3da26 100644 --- a/roles/database/tasks/install-database.yml +++ b/roles/database/tasks/install-database.yml @@ -43,7 +43,7 @@ - name: fail upon new installation with existing database fail: - msg: "Error: You choose new installation on a system with existing database {{ fworch_db_name }}" + msg: "Error: You chose new installation on a system with existing database {{ fworch_db_name }}" when: installation_mode == "new" and db_exists.query_result.0.count != 0 - name: create postgres user "{{ fworch_dbadmin_name }}" @@ -78,30 +78,27 @@ debug: msg: "test_query result: {{ test_query }}" - - name: creating {{ fworch_db_name }}-db-model - postgresql_query: - db: "{{ fworch_db_name }}" - path_to_script: "{{ database_install_dir }}/sql/creation/{{ item }}" - as_single_query: "{{ postgresql_query_as_single_query }}" - loop: - - fworch-create-tables.sql - - fworch-create-constraints.sql - - fworch-create-foreign-keys.sql - - fworch-create-indices.sql - when: installation_mode == "new" and ansible_version.string is version_compare('2.10', '>=') - - - name: creating {{ fworch_db_name }}-db-model - postgresql_query: - db: "{{ fworch_db_name }}" - path_to_script: "{{ database_install_dir }}/sql/creation/{{ item }}" - loop: - - fworch-create-tables.sql - - fworch-create-constraints.sql - - fworch-create-foreign-keys.sql - - fworch-create-indices.sql - when: installation_mode == "new" and ansible_version.string is version_compare('2.10', '<') - - - name: create db users with group memmberships + - name: include table creation with ansible 2.10 and beyond + include_tasks: install-db-base-ansible-2.10.yml + when: ansible_version.string is version_compare('2.10', '>=') + + - name: include table creation pre ansible 2.10 + include_tasks: install-db-base-ansible-pre2.10.yml + when: ansible_version.string is version_compare('2.10', '<') + + # - name: creating {{ fworch_db_name }}-db-model + # community.postgresql.postgresql_query: + # db: "{{ fworch_db_name }}" + # path_to_script: "{{ database_install_dir }}/sql/creation/{{ item }}" + # as_single_query: "{{ postgresql_query_as_single_query }}" + # loop: + # - fworch-create-tables.sql + # - fworch-create-constraints.sql + # - fworch-create-foreign-keys.sql + # - fworch-create-indices.sql + # when: installation_mode == "new" + + - name: create db users with group memberships import_tasks: create-users.yml when: installation_mode == "new" @@ -147,21 +144,11 @@ format: csv when: installation_mode == "new" -# non-ascii sql file cannot be executed under red hat -# revert to command + psql? 
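# a hedged sketch of the "command + psql" fallback the comment above contemplates
# for non-ascii SQL files under Red Hat; it uses only the stock command module
# and variables already defined in this role (task name and placement are
# assumptions, this task is not part of the change):
- name: add base data to database via psql (sketch)
  command: psql -d "{{ fworch_db_name }}" -f "{{ database_install_dir }}/sql/creation/fworch-fill-stm.sql"
  become: yes
  become_user: postgres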
- - - name: add base data to database fworch-fill-stm.sql - postgresql_query: - db: "{{ fworch_db_name }}" - path_to_script: "{{ database_install_dir }}/sql/creation/fworch-fill-stm.sql" - as_single_query: yes - when: ansible_version.string is version_compare('2.10', '>=') - - - name: add base data to database fworch-fill-stm.sql - postgresql_query: - db: "{{ fworch_db_name }}" - path_to_script: "{{ database_install_dir }}/sql/creation/fworch-fill-stm.sql" - when: ansible_version.string is version_compare('2.10', '<') + # - name: add base data to database fworch-fill-stm.sql + # community.postgresql.postgresql_query: + # db: "{{ fworch_db_name }}" + # path_to_script: "{{ database_install_dir }}/sql/creation/fworch-fill-stm.sql" + # as_single_query: yes become: yes become_user: postgres diff --git a/roles/database/tasks/install-db-base-ansible-2.10.yml b/roles/database/tasks/install-db-base-ansible-2.10.yml new file mode 100644 index 000000000..a8e55d364 --- /dev/null +++ b/roles/database/tasks/install-db-base-ansible-2.10.yml @@ -0,0 +1,19 @@ + +- block: + + - name: creating {{ fworch_db_name }}-db-model + community.postgresql.postgresql_query: + db: "{{ fworch_db_name }}" + path_to_script: "{{ database_install_dir }}/sql/creation/{{ item }}" + as_single_query: "{{ postgresql_query_as_single_query }}" + loop: + - fworch-create-tables.sql + - fworch-create-constraints.sql + - fworch-create-foreign-keys.sql + - fworch-create-indices.sql + - fworch-create-triggers.sql + - fworch-fill-stm.sql + when: installation_mode == "new" + + become: yes + become_user: postgres diff --git a/roles/database/tasks/install-db-base-ansible-pre2.10.yml b/roles/database/tasks/install-db-base-ansible-pre2.10.yml new file mode 100644 index 000000000..492062a93 --- /dev/null +++ b/roles/database/tasks/install-db-base-ansible-pre2.10.yml @@ -0,0 +1,18 @@ + +- block: + + - name: creating {{ fworch_db_name }}-db-model + postgresql_query: + db: "{{ fworch_db_name }}" + path_to_script: "{{ database_install_dir }}/sql/creation/{{ item }}" + loop: + - fworch-create-tables.sql + - fworch-create-constraints.sql + - fworch-create-foreign-keys.sql + - fworch-create-indices.sql + - fworch-create-triggers.sql + - fworch-fill-stm.sql + when: installation_mode == "new" + + become: yes + become_user: postgres diff --git a/roles/database/tasks/main.yml b/roles/database/tasks/main.yml index 64bdfcf39..e13b92dcb 100644 --- a/roles/database/tasks/main.yml +++ b/roles/database/tasks/main.yml @@ -164,7 +164,7 @@ copy: src="{{ item }}" dest="{{ database_install_dir }}" owner="{{ fworch_user }}" group="{{ fworch_user }}" loop: - csv - - sql + - sql - name: create tablespace directory file: @@ -194,7 +194,17 @@ when: installation_mode == "upgrade" - name: (re)define functions and views - import_tasks: recreate-functions-and-views.yml + include_tasks: recreate-functions-and-views-ansible-pre2.10.yml + when: ansible_version.string is version_compare('2.10', '<') + +- name: (re)define functions and views + include_tasks: recreate-functions-and-views-ansible-2.10.yml + when: ansible_version.string is version_compare('2.10', '>=') + +- name: run unit tests + import_tasks: run-unit-tests.yml + when: not installation_mode == "uninstall" + tags: [ 'never', 'unittest' ] - name: finalize handler for datarecovery set_fact: diff --git a/roles/database/tasks/recreate-functions-and-views-ansible-2.10.yml b/roles/database/tasks/recreate-functions-and-views-ansible-2.10.yml new file mode 100644 index 000000000..b80c84f2f --- /dev/null +++ 
b/roles/database/tasks/recreate-functions-and-views-ansible-2.10.yml @@ -0,0 +1,29 @@ + +- name: (re)defines functions and views (idempotent) from ansible 2.10 + community.postgresql.postgresql_query: + db: "{{ fworch_db_name }}" + path_to_script: "{{ database_install_dir }}/sql/idempotent/{{ item }}" + as_single_query: "{{ postgresql_query_as_single_query }}" + become: yes + become_user: postgres + loop: + - fworch-basic-procs.sql + - fworch-import.sql + - fworch-import-main.sql + - fworch-obj-import.sql + - fworch-obj-refs.sql + - fworch-svc-import.sql + - fworch-svc-refs.sql + - fworch-usr-import.sql + - fworch-usr-refs.sql + - fworch-rule-import.sql + - fworch-rule-refs.sql + - fworch-rule-resolved.sql + - fworch-zone-import.sql + - fworch-report.sql + - fworch-qa.sql + - fworch-report-basics.sql + - fworch-views.sql + - fworch-api-funcs.sql + - fworch-grants.sql + - fworch-texts.sql diff --git a/roles/database/tasks/recreate-functions-and-views-ansible-pre2.10.yml b/roles/database/tasks/recreate-functions-and-views-ansible-pre2.10.yml new file mode 100644 index 000000000..314eb4a38 --- /dev/null +++ b/roles/database/tasks/recreate-functions-and-views-ansible-pre2.10.yml @@ -0,0 +1,28 @@ + +- name: (re)defines functions and views (idempotent) prior to ansible 2.10 + postgresql_query: + db: "{{ fworch_db_name }}" + path_to_script: "{{ database_install_dir }}/sql/idempotent/{{ item }}" + become: yes + become_user: postgres + loop: + - fworch-basic-procs.sql + - fworch-import.sql + - fworch-import-main.sql + - fworch-obj-import.sql + - fworch-obj-refs.sql + - fworch-svc-import.sql + - fworch-svc-refs.sql + - fworch-usr-import.sql + - fworch-usr-refs.sql + - fworch-rule-import.sql + - fworch-rule-refs.sql + - fworch-rule-resolved.sql + - fworch-zone-import.sql + - fworch-report.sql + - fworch-qa.sql + - fworch-report-basics.sql + - fworch-views.sql + - fworch-api-funcs.sql + - fworch-grants.sql + - fworch-texts.sql diff --git a/roles/database/tasks/recreate-functions-and-views.yml b/roles/database/tasks/recreate-functions-and-views.yml deleted file mode 100644 index 5cfee58d2..000000000 --- a/roles/database/tasks/recreate-functions-and-views.yml +++ /dev/null @@ -1,59 +0,0 @@ - -- name: (re)defines functions and views (idempotent) from ansible 2.10 - postgresql_query: - db: "{{ fworch_db_name }}" - path_to_script: "{{ database_install_dir }}/sql/idempotent/{{ item }}" - as_single_query: "{{ postgresql_query_as_single_query }}" - become: yes - become_user: postgres - when: ansible_version.string is version_compare('2.10', '>=') - loop: - - fworch-basic-procs.sql - - fworch-import.sql - - fworch-import-main.sql - - fworch-obj-import.sql - - fworch-obj-refs.sql - - fworch-svc-import.sql - - fworch-svc-refs.sql - - fworch-usr-import.sql - - fworch-usr-refs.sql - - fworch-rule-import.sql - - fworch-rule-refs.sql - - fworch-rule-resolved.sql - - fworch-zone-import.sql - - fworch-report.sql - - fworch-qa.sql - - fworch-report-basics.sql - - fworch-views.sql - - fworch-api-funcs.sql - - fworch-grants.sql - - fworch-texts.sql - -- name: (re)defines functions and views (idempotent) prior to ansible 2.10 - postgresql_query: - db: "{{ fworch_db_name }}" - path_to_script: "{{ database_install_dir }}/sql/idempotent/{{ item }}" - become: yes - become_user: postgres - when: ansible_version.string is version_compare('2.10', '<') - loop: - - fworch-basic-procs.sql - - fworch-import.sql - - fworch-import-main.sql - - fworch-obj-import.sql - - fworch-obj-refs.sql - - fworch-svc-import.sql - - 
fworch-svc-refs.sql - - fworch-usr-import.sql - - fworch-usr-refs.sql - - fworch-rule-import.sql - - fworch-rule-refs.sql - - fworch-rule-resolved.sql - - fworch-zone-import.sql - - fworch-report.sql - - fworch-qa.sql - - fworch-report-basics.sql - - fworch-views.sql - - fworch-api-funcs.sql - - fworch-grants.sql - - fworch-texts.sql diff --git a/roles/database/tasks/run-unit-tests.yml b/roles/database/tasks/run-unit-tests.yml new file mode 100644 index 000000000..1983218a1 --- /dev/null +++ b/roles/database/tasks/run-unit-tests.yml @@ -0,0 +1,34 @@ + +- name: copy database test files to backend target + copy: src="sql/test" dest="{{ database_install_dir }}/sql" owner="{{ fworch_user }}" group="{{ fworch_user }}" + become: yes + +- set_fact: + unit_test_scripts: + - unit-tests.sql + - hasura-test.sql + - unit-test-cleanup.sql +# when: installation_mode == 'upgrade' + +# # do not run hasura tests during first install (as the tables are not there yet) +# - set_fact: +# unit_test_scripts: +# - unit-tests.sql +# - unit-test-cleanup.sql +# when: installation_mode == 'new' + +- name: run db unit tests + postgresql_query: + db: "{{ fworch_db_name }}" + path_to_script: "{{ database_install_dir }}/sql/test/{{ item }}" + become: yes + become_user: "postgres" + register: testresults + loop: "{{ unit_test_scripts }}" + tags: + - unittest + - test + +- name: Print db test results + debug: + msg: "test results: {{ testresults | to_nice_json }}" diff --git a/roles/database/tasks/upgrade-database.yml b/roles/database/tasks/upgrade-database.yml index f654e93bd..bbbcc9108 100644 --- a/roles/database/tasks/upgrade-database.yml +++ b/roles/database/tasks/upgrade-database.yml @@ -3,8 +3,8 @@ - name: guard - stop when trying anything but an upgrade with existing database fail: - msg: "Error: You choose new installation on a system with existing database {{ fworch_db_name }}" - when: installation_mode != "upgrade" or db_exists.query_result.0.count == 0 + msg: "Error: You chose upgrade on a system without existing database {{ fworch_db_name }}" + when: db_exists.query_result.0.count == 0 - name: create upgrade dir file: diff --git a/roles/importer/files/import.conf b/roles/importer/files/import.conf index b56d5d5a8..fbc456640 100644 --- a/roles/importer/files/import.conf +++ b/roles/importer/files/import.conf @@ -1,23 +1,21 @@ # Config settings of import module -ImportSleepTime 40 # Ztime between import loops in seconds - -ImportDir /usr/local/fworch/importer # Import main directory -PerlInc /usr/local/fworch/importer # Perl Include directory -fworch_workdir /var/fworch/tmp # temp. dir for import data -archive_dir /var/fworch/import_archive # dir for archiving of faulty import runs -simple_bin_dir /bin # dir for tar, date, mkdir, ... +ImportSleepTime 40 # time between import loops in seconds +ImportDir /usr/local/fworch/importer # Import main directory +PerlInc /usr/local/fworch/importer # Perl Include directory +fworch_workdir /var/fworch/tmp # temp. dir for import data +archive_dir /var/fworch/import_archive # dir for archiving of faulty import runs +simple_bin_dir /bin # dir for tar, date, mkdir, ... 
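# format note (an assumption inferred from CACTUS/FWORCH.pm further below): each
# entry here is a plain "key value" pair read on the Perl side via
# CACTUS::read_config::read_config("key"), e.g. read_config("csv_user_delimiter")
# returns ";" as used when splitting user lists in the CSV import files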
save_import_results_to_file 0 # delimiter -usergroup_delimiter | +usergroup_delimiter | csv_delimiter % csv_user_delimiter ; group_delimiter | -fworch_srv_user fworchimporter +fworch_srv_user fworchimporter output_method text - echo_bin /bin/echo scp_bin /usr/bin/scp ssh_bin /usr/bin/ssh @@ -27,10 +25,3 @@ scp_batch_mode_switch -B -q psql_exe /usr/bin/psql # for netscreen predef-services copy from psql_params -t -q -A -h $fworch_srv_host -d $fworch_database -U $fworch_srv_user - -# LDAP stuff -LDAP_enabled 1 -LDAP_c de -LDAP_o cactus -LDAP_server localhost - diff --git a/roles/importer/files/importer/CACTUS/FWORCH.pm b/roles/importer/files/importer/CACTUS/FWORCH.pm index 5074766b2..d086735f2 100644 --- a/roles/importer/files/importer/CACTUS/FWORCH.pm +++ b/roles/importer/files/importer/CACTUS/FWORCH.pm @@ -25,11 +25,10 @@ our %EXPORT_TAGS = ( $output_method $dbdriver $echo_bin $chmod_bin $scp_bin $ssh_bin $scp_batch_mode_switch $ssh_client_screenos $fworch_database $fworch_srv_host $fworch_srv_user $fworch_srv_user $fworch_srv_port $fworch_srv_pw $psql_exe $psql_params - &get_client_id_for_user_via_ldap &get_client_filter &get_device_ids_for_mgm &eval_boolean_sql &exec_pgsql_file &exec_pgsql_cmd &exec_pgsql_cmd_no_result &exec_pgsql_cmd_return_value &exec_pgsql_cmd_return_array_ref &exec_pgsql_cmd_return_table_ref - ©_file_to_db &read_user_client_classification_from_ldap &get_rulebase_names &get_ruleset_name_list &evaluate_parameters &replace_import_id_in_csv + ©_file_to_db &get_rulebase_names &get_ruleset_name_list &evaluate_parameters &replace_import_id_in_csv ) ]); our @EXPORT = (@{$EXPORT_TAGS{'basic'}}); @@ -58,10 +57,6 @@ our $csv_user_delimiter = &CACTUS::read_config::read_config("csv_user_delimiter" our $fworch_srv_user = &CACTUS::read_config::read_config("fworch_srv_user"); our $psql_exe = &CACTUS::read_config::read_config("psql_exe"); our $psql_params = &CACTUS::read_config::read_config("psql_params"); -our $LDAP_enabled = &CACTUS::read_config::read_config("LDAP_enabled"); -our $LDAP_c = &CACTUS::read_config::read_config("LDAP_c"); -our $LDAP_o = &CACTUS::read_config::read_config("LDAP_o"); -our $LDAP_server = &CACTUS::read_config::read_config("LDAP_server"); our $dbdriver = "Pg"; #our $ssh_id_basename = 'id_dsa'; our $ssh_id_basename = 'import_user_secret'; @@ -535,32 +530,6 @@ sub replace_special_chars { $output->close; } -############################################################ -# get_client_id_for_user_via_ldap -# -############################################################ -sub get_client_id_for_user_via_ldap { - if (!$LDAP_enabled) {return undef;} - - require Net::LDAP; - - my $user_id = $_[0]; - my ($ldap, $mesg, $entry, $user_name, $result); - - $user_name = &exec_pgsql_cmd_return_value("SELECT user_name FROM usr WHERE user_id=$user_id"); - $ldap = Net::LDAP->new($LDAP_server) or die "$@"; - $mesg = $ldap->bind; # an anonymous bind - $mesg = $ldap->search( # perform a search - base => "c=$LDAP_c", - filter => "(&(cn=$user_name) (o=$LDAP_o))" - ); - $mesg->code && die $mesg->error; - if (count($mesg->entries) != 1) {return undef;} - foreach $entry ($mesg->entries) {$result = $entry->dump;} - $mesg = $ldap->unbind; # take down session - return $result; -} - ############################################################ # replace_import_id_in_csv @@ -921,26 +890,6 @@ sub get_ruleset_name_list { return $result; } -# falls LDAP vorhanden: Benutzer im LDAP nachschlagen und die Tenant-Zuordnung machen -sub read_user_client_classification_from_ldap { - my $fehler = shift; - my 
$current_import_id = shift; - - if (0 && !$fehler) { - # for all changes on users in current import lookup user in LDAP if found update client in usr - my $user_changes = &exec_pgsql_cmd_return_table_ref - ("SELECT * FROM changelog_user WHERE control_id=$current_import_id AND NOT change_action='D' ", 'new_user_id'); - my ($client_id, $usr_id); - foreach $usr_id (keys %$user_changes) { - # $usr_id = $user_changes->{"$user_change_id"}{"new_user_id"}; - $client_id = &get_client_id_for_user_via_ldap($usr_id); - if (defined($client_id)) { - &exec_pgsql_cmd_no_result("UPDATE usr SET client_id=$client_id WHERE user_id=$usr_id"); - } - } - } -} - sub evaluate_parameters { my $mgm_id = shift; my $mgm_name = shift; diff --git a/roles/importer/files/importer/__init__.py b/roles/importer/files/importer/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/roles/importer/files/importer/checkpointR8x/api-test-call.py b/roles/importer/files/importer/checkpointR8x/api-test-call.py index 38139d529..2b58fadda 100755 --- a/roles/importer/files/importer/checkpointR8x/api-test-call.py +++ b/roles/importer/files/importer/checkpointR8x/api-test-call.py @@ -1,4 +1,6 @@ #!/usr/bin/python3 +import sys +sys.path.append(r"/usr/local/fworch/importer") import logging import logging.config import getter diff --git a/roles/importer/files/importer/checkpointR8x/auto-discover.py b/roles/importer/files/importer/checkpointR8x/auto-discover.py index 12840f971..c29c0dd4e 100755 --- a/roles/importer/files/importer/checkpointR8x/auto-discover.py +++ b/roles/importer/files/importer/checkpointR8x/auto-discover.py @@ -1,4 +1,6 @@ #!/usr/bin/python3 +import sys +sys.path.append(r"/usr/local/fworch/importer") import logging import logging.config import getter diff --git a/roles/importer/files/importer/checkpointR8x/common.py b/roles/importer/files/importer/checkpointR8x/common.py deleted file mode 100644 index 8e4469b7b..000000000 --- a/roles/importer/files/importer/checkpointR8x/common.py +++ /dev/null @@ -1,56 +0,0 @@ -import logging - -csv_delimiter = '%' -list_delimiter = '|' -line_delimiter = "\n" -section_header_uids=[] - -nw_obj_table_names = ['hosts', 'networks', 'address-ranges', 'multicast-address-ranges', 'groups', 'gateways-and-servers', 'simple-gateways'] -# do not consider: CpmiAnyObject, CpmiGatewayPlain, external - -svc_obj_table_names = ['services-tcp', 'services-udp', 'service-groups', 'services-dce-rpc', 'services-rpc', 'services-other', 'services-icmp', 'services-icmp6'] - - -# the following is the static across all installations unique any obj uid -# cannot fetch the Any object via API (<=1.7) at the moment -# therefore we have a workaround adding the object manually (as svc and nw) -any_obj_uid = "97aeb369-9aea-11d5-bd16-0090272ccb30" -# todo: read this from config (vom API 1.6 on it is fetched) - -debug_new_uid = "90f749ec-5331-477d-89e5-a58990f7271d" -# debug_new_uid = "b2cff4df-0034-4b75-bd95-738a85cbe9d3" - -# todo: save the initial value, reset initial value at the end -def set_log_level(log_level, debug_level): - logger = logging.getLogger(__name__) - # todo: use log_level to define non debug logging - # use debug_level to define different debug levels - if debug_level == 1: - logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') - elif debug_level == 2: - logging.basicConfig(filename='/var/tmp/fworch_get_config_cp_r8x_api.debug', filemode='a', level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') - elif debug_level == 3: - 
logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') - logging.basicConfig(filename='/var/tmp/fworch_get_config_cp_r8x_api.debug', filemode='a', level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') - logger.debug ("debug_level: "+ str(debug_level) ) - - -def get_ip_of_obj(obj): - if 'ipv4-address' in obj: - ip_addr = obj['ipv4-address'] - elif 'ipv6-address' in obj: - ip_addr = obj['ipv6-address'] - elif 'subnet4' in obj: - ip_addr = obj['subnet4'] + '/' + str(obj['mask-length4']) - elif 'subnet6' in obj: - ip_addr = obj['subnet6'] + '/' + str(obj['mask-length6']) - elif 'ipv4-address-first' in obj and 'ipv4-address-last' in obj: - ip_addr = obj['ipv4-address-first'] + '-' + str(obj['ipv4-address-last']) - elif 'ipv6-address-first' in obj and 'ipv6-address-last' in obj: - ip_addr = obj['ipv6-address-first'] + '-' + str(obj['ipv6-address-last']) - elif 'obj_typ' in obj and obj['obj_typ'] == 'group': - ip_addr = '' - else: - ip_addr = '0.0.0.0/0' - return ip_addr - diff --git a/roles/importer/files/importer/checkpointR8x/cpcommon.py b/roles/importer/files/importer/checkpointR8x/cpcommon.py new file mode 100644 index 000000000..712cac5ed --- /dev/null +++ b/roles/importer/files/importer/checkpointR8x/cpcommon.py @@ -0,0 +1,83 @@ +import sys +sys.path.append(r"/usr/local/fworch/importer") +import os +import parse_network, parse_rule, parse_service, parse_user +import json +import sys +import logging +import copy + +nw_obj_table_names = ['hosts', 'networks', 'address-ranges', 'multicast-address-ranges', 'groups', 'gateways-and-servers', 'simple-gateways'] +# do not consider: CpmiAnyObject, CpmiGatewayPlain, external + +svc_obj_table_names = ['services-tcp', 'services-udp', 'service-groups', 'services-dce-rpc', 'services-rpc', 'services-other', 'services-icmp', 'services-icmp6'] + +# the following is the static across all installations unique any obj uid +# cannot fetch the Any object via API (<=1.7) at the moment +# therefore we have a workaround adding the object manually (as svc and nw) +any_obj_uid = "97aeb369-9aea-11d5-bd16-0090272ccb30" +# todo: read this from config (vom API 1.6 on it is fetched) + +# this is just a test UID for debugging a single rule +debug_new_uid = "90f749ec-5331-477d-89e5-a58990f7271d" + + +def parse_config_cp_main(config2import, current_import_id, base_dir, mgm_details, secret_filename, rulebase_string, config_filename, debug_level): + logging.info("found Check Point R8x management") + get_config_cmd = "cd " + base_dir + "/importer/checkpointR8x && ./get_config.py -a " + \ + mgm_details['hostname'] + " -u " + mgm_details['user'] + " -w " + \ + secret_filename + " -l \"" + rulebase_string + \ + "\" -o " + config_filename + " -d " + str(debug_level) + get_config_cmd += " && ./enrich_config.py -a " + mgm_details['hostname'] + " -u " + mgm_details['user'] + " -w " + \ + secret_filename + " -l \"" + rulebase_string + \ + "\" -c " + config_filename + " -d " + str(debug_level) + os.system(get_config_cmd) + with open(config_filename, "r") as json_data: + full_config_json = json.load(json_data) + parse_network.parse_network_objects_to_json( + full_config_json, config2import, current_import_id) + parse_service.parse_service_objects_to_json( + full_config_json, config2import, current_import_id) + if 'users' not in full_config_json: + full_config_json.update({'users': {}}) + rb_range = range(len(rulebase_string.split(','))) + target_rulebase = [] + rule_num = 0 + parent_uid="" + section_header_uids=[] + for rb_id in 
rb_range: + parse_user.parse_user_objects_from_rulebase( + full_config_json['rulebases'][rb_id], full_config_json['users'], current_import_id) + # if current_layer_name == args.rulebase: + logging.debug("parsing layer " + full_config_json['rulebases'][rb_id]['layername']) + rule_num = parse_rule.parse_rulebase_json( + full_config_json['rulebases'][rb_id], target_rulebase, full_config_json['rulebases'][rb_id]['layername'], current_import_id, rule_num, section_header_uids, parent_uid) + # copy users from full_config to config2import + # also converting users from dict to array: + config2import.update({'user_objects': []}) + for user_name in full_config_json['users'].keys(): + user = copy.deepcopy(full_config_json['users'][user_name]) + user.update({'user_name': user_name}) + config2import['user_objects'].append(user) + + config2import.update({'rules': target_rulebase}) + + +def get_ip_of_obj(obj): + if 'ipv4-address' in obj: + ip_addr = obj['ipv4-address'] + elif 'ipv6-address' in obj: + ip_addr = obj['ipv6-address'] + elif 'subnet4' in obj: + ip_addr = obj['subnet4'] + '/' + str(obj['mask-length4']) + elif 'subnet6' in obj: + ip_addr = obj['subnet6'] + '/' + str(obj['mask-length6']) + elif 'ipv4-address-first' in obj and 'ipv4-address-last' in obj: + ip_addr = obj['ipv4-address-first'] + '-' + str(obj['ipv4-address-last']) + elif 'ipv6-address-first' in obj and 'ipv6-address-last' in obj: + ip_addr = obj['ipv6-address-first'] + '-' + str(obj['ipv6-address-last']) + elif 'obj_typ' in obj and obj['obj_typ'] == 'group': + ip_addr = '' + else: + ip_addr = '0.0.0.0/0' + return ip_addr diff --git a/roles/importer/files/importer/checkpointR8x/enrich_config.py b/roles/importer/files/importer/checkpointR8x/enrich_config.py index 35f297dcd..eab6f76a4 100755 --- a/roles/importer/files/importer/checkpointR8x/enrich_config.py +++ b/roles/importer/files/importer/checkpointR8x/enrich_config.py @@ -1,6 +1,7 @@ #!/usr/bin/python3 - -import common, getter +import sys +sys.path.append(r"/usr/local/fworch/importer") +import cpcommon, common, getter import json, argparse, pdb, sys, time, logging import requests, requests.packages.urllib3 import os @@ -103,8 +104,8 @@ #nw_uids_from_rulebase.extend(nw_uids_from_rulebase) #svc_uids_from_rulebase.extend(svc_uids_from_rulebase) - # if common.debug_new_uid in nw_uids_from_rulebase: - # logging.debug("found " + common.debug_new_uid + " in enrich_config") + # if cpcommon.debug_new_uid in nw_uids_from_rulebase: + # logging.debug("found " + cpcommon.debug_new_uid + " in enrich_config") # remove duplicates from uid lists @@ -112,12 +113,12 @@ svc_uids_from_rulebase = list(set(svc_uids_from_rulebase)) # logging.debug ("enrich_config - found (unique) nw_objects in rulebase:\n" + str(nw_uids_from_rulebase)) -# if common.debug_new_uid in nw_uids_from_rulebase: -# logging.debug("enrich_config: found " + common.debug_new_uid + " in enrich_config after cleanup") +# if cpcommon.debug_new_uid in nw_uids_from_rulebase: +# logging.debug("enrich_config: found " + cpcommon.debug_new_uid + " in enrich_config after cleanup") # get all uids in objects tables for obj_table in config['object_tables']: - nw_objs_from_obj_tables.extend(getter.get_all_uids_of_a_type(obj_table, common.nw_obj_table_names)) + nw_objs_from_obj_tables.extend(getter.get_all_uids_of_a_type(obj_table, cpcommon.nw_obj_table_names)) svc_objs_from_obj_tables.extend(getter.get_all_uids_of_a_type(obj_table, getter.svc_obj_table_names)) #logging.debug ("enrich_config - already parsed nw_objects in obj tables:\n" + 
str(nw_objs_from_obj_tables)) @@ -154,7 +155,7 @@ json_obj = {"object_type": "hosts", "object_chunks": [ { "objects": [ { 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'host', 'ipv4-address': common.get_ip_of_obj(obj), + 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cpcommon.get_ip_of_obj(obj), } ] } ] } config['object_tables'].append(json_obj) elif obj['type'] == 'multicast-address-range': @@ -162,14 +163,14 @@ json_obj = {"object_type": "hosts", "object_chunks": [ { "objects": [ { 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'host', 'ipv4-address': common.get_ip_of_obj(obj), + 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cpcommon.get_ip_of_obj(obj), } ] } ] } config['object_tables'].append(json_obj) elif (obj['type'] == 'CpmiVsClusterMember' or obj['type'] == 'CpmiVsxClusterMember'): json_obj = {"object_type": "hosts", "object_chunks": [ { "objects": [ { 'uid': obj['uid'], 'name': obj['name'], 'color': obj['color'], - 'comments': obj['comments'], 'type': 'host', 'ipv4-address': common.get_ip_of_obj(obj), + 'comments': obj['comments'], 'type': 'host', 'ipv4-address': cpcommon.get_ip_of_obj(obj), } ] } ] } config['object_tables'].append(json_obj) logging.debug ('missing obj: ' + obj['name'] + obj['type']) diff --git a/roles/importer/files/importer/checkpointR8x/get_config.py b/roles/importer/files/importer/checkpointR8x/get_config.py index 5d8a98fef..9e9ad5828 100755 --- a/roles/importer/files/importer/checkpointR8x/get_config.py +++ b/roles/importer/files/importer/checkpointR8x/get_config.py @@ -18,6 +18,8 @@ # mgmt_cli -r true --domain MDS set api-settings accepted-api-calls-from "All IP addresses" # api restart +import sys +sys.path.append(r"/usr/local/fworch/importer") import common import getter import json, argparse, pdb diff --git a/roles/importer/files/importer/checkpointR8x/getter.py b/roles/importer/files/importer/checkpointR8x/getter.py index 5a908c740..87e5b11d3 100644 --- a/roles/importer/files/importer/checkpointR8x/getter.py +++ b/roles/importer/files/importer/checkpointR8x/getter.py @@ -1,10 +1,11 @@ -#!/usr/bin/python3 # library for API get functions +import sys +sys.path.append(r"/usr/local/fworch/importer") import json, argparse, pdb import time, logging, re, sys, logging import os import requests, requests.packages.urllib3 -import common +import cpcommon requests.packages.urllib3.disable_warnings() # suppress ssl warnings only @@ -131,8 +132,8 @@ def collect_uids_from_rule(rule, nw_uids_found, svc_uids_found): svc_uids_found.append(svc['uid']) #logging.debug ("getter::collect_uids_from_rule nw_uids_found: " + str(nw_uids_found)) #logging.debug ("getter::collect_uids_from_rule svc_uids_found: " + str(svc_uids_found)) - if common.debug_new_uid in nw_uids_found: - logging.debug("found " + common.debug_new_uid + " in getter::collect_uids_from_rule") + if cpcommon.debug_new_uid in nw_uids_found: + logging.debug("found " + cpcommon.debug_new_uid + " in getter::collect_uids_from_rule") return @@ -159,8 +160,8 @@ def collect_uids_from_rulebase(rulebase, nw_uids_found, svc_uids_found, debug_te if (debug_text == 'top_level'): # logging.debug ("getter::collect_uids_from_rulebase final nw_uids_found: " + str(nw_uids_found)) # logging.debug ("getter::collect_uids_from_rulebase final svc_uids_found: " + str(svc_uids_found)) - if common.debug_new_uid in nw_uids_found: - logging.debug("found " + common.debug_new_uid + " in 
getter::collect_uids_from_rulebase") + if cpcommon.debug_new_uid in nw_uids_found: + logging.debug("found " + cpcommon.debug_new_uid + " in getter::collect_uids_from_rulebase") return diff --git a/roles/importer/files/importer/checkpointR8x/parse_config.py b/roles/importer/files/importer/checkpointR8x/parse_config.py index 00716c1c8..a36f7d98a 100755 --- a/roles/importer/files/importer/checkpointR8x/parse_config.py +++ b/roles/importer/files/importer/checkpointR8x/parse_config.py @@ -1,4 +1,6 @@ #!/usr/bin/python3 +import sys +sys.path.append(r"/usr/local/fworch/importer") import common import parse_network, parse_rule, parse_service, parse_user import argparse @@ -17,9 +19,6 @@ parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 1(DEBUG Console) 2(DEBUG File)i 2(DEBUG Console&File); default=0') args = parser.parse_args() -csv_delimiter = '%' -list_delimiter = '|' -line_delimiter = "\n" found_rulebase = False number_of_section_headers_so_far = 0 rule_num = 0 diff --git a/roles/importer/files/importer/checkpointR8x/parse_network.py b/roles/importer/files/importer/checkpointR8x/parse_network.py index f43a1a5ca..6debcd02b 100644 --- a/roles/importer/files/importer/checkpointR8x/parse_network.py +++ b/roles/importer/files/importer/checkpointR8x/parse_network.py @@ -1,53 +1,63 @@ - +base_dir = "/usr/local/fworch" +import sys +sys.path.append(base_dir + '/importer') +sys.path.append(base_dir + '/importer/checkpointR8x') import logging -import json -import common +from requests import NullHandler +import common, cpcommon def csv_dump_nw_obj(nw_obj, import_id): - result_line = '"' + import_id + '"' + common.csv_delimiter # control_id - result_line += '"' + nw_obj['obj_name'] + '"' + common.csv_delimiter # obj_name - result_line += '"' + nw_obj['obj_typ'] + '"' + common.csv_delimiter # ob_typ - result_line += '"' + nw_obj['obj_member_names'] + '"' + common.csv_delimiter # obj_member_names - result_line += '"' + nw_obj['obj_member_refs'] + '"' + common.csv_delimiter # obj_member_refs - result_line += common.csv_delimiter # obj_sw + result_line = common.csv_add_field(import_id) # control_id + result_line += common.csv_add_field(nw_obj['obj_name']) # obj_name + result_line += common.csv_add_field(nw_obj['obj_typ']) # ob_typ + result_line += common.csv_add_field(nw_obj['obj_member_names']) # obj_member_names + result_line += common.csv_add_field(nw_obj['obj_member_refs']) # obj_member_refs + result_line += common.csv_delimiter # obj_sw if nw_obj['obj_typ'] == 'group': - result_line += common.csv_delimiter # obj_ip for groups = null - else: - result_line += '"' + nw_obj['obj_ip'] + '"' + common.csv_delimiter # obj_ip - if 'obj_ip_end' in nw_obj: - result_line += '"' + nw_obj['obj_ip_end'] + '"' + common.csv_delimiter # obj_ip_end + result_line += common.csv_delimiter # obj_ip for groups = null + result_line += common.csv_delimiter # obj_ip_end for groups = null else: - result_line += common.csv_delimiter - result_line += '"' + nw_obj['obj_color'] + '"' + common.csv_delimiter # obj_color - result_line += '"' + nw_obj['obj_comment'] + '"' + common.csv_delimiter # obj_comment - result_line += common.csv_delimiter # result_line += '"' + nw_obj['obj_location'] + '"' + csv_delimiter # obj_location + result_line += common.csv_add_field(nw_obj['obj_ip']) # obj_ip + if 'obj_ip_end' in nw_obj: + result_line += common.csv_add_field(nw_obj['obj_ip_end'])# obj_ip_end + else: + result_line += common.csv_delimiter + result_line += common.csv_add_field(nw_obj['obj_color']) 
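# rough sketch of common.csv_add_field, whose new definition is not part of this
# diff - behaviour inferred from the old local csv_add_field removed from
# parse_rule.py below and from the no_csv_delimiter call site there, so the
# exact signature is an assumption:
#     def csv_add_field(content, no_csv_delimiter=False):
#         delim = '' if no_csv_delimiter else csv_delimiter
#         if content == '':  # empty fields stay unquoted, only the delimiter is emitted
#             return delim
#         return '"' + content + '"' + delim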
# obj_color + result_line += common.csv_add_field(nw_obj['obj_comment']) # obj_comment + result_line += common.csv_delimiter # obj_location if 'obj_zone' in nw_obj: - result_line += '"' + nw_obj['obj_zone'] + '"' + common.csv_delimiter # obj_zone + result_line += common.csv_add_field(nw_obj['obj_zone']) # obj_zone else: result_line += common.csv_delimiter - result_line += '"' + nw_obj['obj_uid'] + '"' + common.csv_delimiter # obj_uid - result_line += common.csv_delimiter # last_change_admin + result_line += common.csv_add_field(nw_obj['obj_uid']) # obj_uid + result_line += common.csv_delimiter # last_change_admin # add last_change_time result_line += common.line_delimiter return result_line +def parse_network_objects_to_json(full_config, config2import, import_id): + nw_objects = [] + + for obj_table in full_config['object_tables']: + collect_nw_objects(obj_table, nw_objects) + for nw_obj in nw_objects: + nw_obj.update({'control_id': import_id}) + for idx in range(0, len(nw_objects)-1): + if nw_objects[idx]['obj_typ'] == 'group': + add_member_names_for_nw_group(idx, nw_objects) + config2import.update({'network_objects': nw_objects}) + + # collect_nw_objects from object tables and write them into global nw_objects dict def collect_nw_objects(object_table, nw_objects): - result = '' # todo: delete this line - # nw_obj_tables = ['hosts', 'networks', 'address-ranges', 'groups', 'gateways-and-servers', 'simple-gateways'] - # nw_obj_type_to_host_list = [ - # 'address-range', 'multicast-address-range', - # 'simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiAnyObject', - # 'CpmiClusterMember', 'CpmiGatewayPlain', 'CpmiHostCkp', 'CpmiGatewayCluster', 'checkpoint-host' - # ] nw_obj_type_to_host_list = [ 'simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiVsxClusterNetobj', 'CpmiVsxClusterMember', 'CpmiAnyObject', 'CpmiClusterMember', 'CpmiGatewayPlain', 'CpmiHostCkp', 'CpmiGatewayCluster', 'checkpoint-host' ] - if object_table['object_type'] in common.nw_obj_table_names: + if object_table['object_type'] in cpcommon.nw_obj_table_names: for chunk in object_table['object_chunks']: for obj in chunk['objects']: members = '' @@ -59,10 +69,13 @@ def collect_nw_objects(object_table, nw_objects): for member in obj['members']: member_refs += member + common.list_delimiter member_refs = member_refs[:-1] - ip_addr = common.get_ip_of_obj(obj) + ip_addr = cpcommon.get_ip_of_obj(obj) first_ip = ip_addr last_ip = ip_addr obj_type = obj['type'] + if obj_type=='group': + first_ip = None + last_ip = None if obj_type == 'address-range' or obj_type == 'multicast-address-range': obj_type = 'ip_range' @@ -75,11 +88,6 @@ def collect_nw_objects(object_table, nw_objects): # logging.debug("parse_network::collect_nw_objects - rewriting non-standard cp-host-type '" + obj['name'] + "' with object type '" + obj_type + "' to host") # logging.debug("obj_dump:" + json.dumps(obj, indent=3)) obj_type = 'host' - # else: - # if not obj['type'] in ['host', 'network', 'group'] or obj['name']=='test-interop-device' or obj['name']=='test-ext-vpn-gw': - # logging.debug("parse_network::collect_nw_objects - for '" + obj['name'] + "' we are using standard object type '" + obj['type'] + "'") - # logging.debug("obj_dump:" + json.dumps(obj, indent=3)) - # adding the object: nw_objects.extend([{'obj_uid': obj['uid'], 'obj_name': obj['name'], 'obj_color': obj['color'], 'obj_comment': obj['comments'], diff --git a/roles/importer/files/importer/checkpointR8x/parse_rule.py 
b/roles/importer/files/importer/checkpointR8x/parse_rule.py index d572ff0c4..ac726aba1 100644 --- a/roles/importer/files/importer/checkpointR8x/parse_rule.py +++ b/roles/importer/files/importer/checkpointR8x/parse_rule.py @@ -1,82 +1,50 @@ +import sys +base_dir = "/usr/local/fworch/" +sys.path.append(base_dir + '/importer') +sys.path.append(base_dir + '/importer/checkpointR8x') import re import logging -import common +import common, cpcommon import json -def create_domain_rule_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): - section_header_uids.append(rule_uid) - header_rule_csv = '"' + import_id + '"' + common.csv_delimiter # control_id - header_rule_csv += '"' + str(rule_num) + '"' + common.csv_delimiter # rule_num - header_rule_csv += '"' + layer_name + '"' + common.csv_delimiter # rulebase_name - header_rule_csv += common.csv_delimiter # rule_ruleid - header_rule_csv += '"' + 'False' + '"' + common.csv_delimiter # rule_disabled - header_rule_csv += '"' + 'False' + '"' + common.csv_delimiter # rule_src_neg - header_rule_csv += '"' + 'Any' + '"' + common.csv_delimiter # src - header_rule_csv += '"' + common.any_obj_uid + '"' + common.csv_delimiter # src_refs - header_rule_csv += '"' + 'False' + '"' + common.csv_delimiter # rule_dst_neg - header_rule_csv += '"' + 'Any' + '"' + common.csv_delimiter # dst - header_rule_csv += '"' + common.any_obj_uid + '"' + common.csv_delimiter # dst_refs - header_rule_csv += '"' + 'False' + '"' + common.csv_delimiter # rule_svc_neg - header_rule_csv += '"' + 'Any' + '"' + common.csv_delimiter # svc - header_rule_csv += '"' + common.any_obj_uid + '"' + common.csv_delimiter # svc_refs - header_rule_csv += '"' + 'Accept' + '"' + common.csv_delimiter # action - header_rule_csv += '"' + 'Log' + '"' + common.csv_delimiter # track - header_rule_csv += '"' + 'Policy Targets' + '"' + common.csv_delimiter # install-on - header_rule_csv += '"' + 'Any' + '"' + common.csv_delimiter # time - header_rule_csv += '""' + common.csv_delimiter # comments - header_rule_csv += common.csv_delimiter # name - header_rule_csv += '"' + rule_uid + '"' + common.csv_delimiter # uid - header_rule_csv += '"' + section_name + '"' + common.csv_delimiter # head_text - header_rule_csv += common.csv_delimiter # from_zone - header_rule_csv += common.csv_delimiter # to_zone - header_rule_csv += common.csv_delimiter # last_change_admin - if parent_uid != "": - header_rule_csv += '"' + parent_uid + '"' # parent_rule_uid - return header_rule_csv + common.line_delimiter - def create_section_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): - section_header_uids.append(rule_uid) - header_rule_csv = '"' + import_id + '"' + common.csv_delimiter # control_id - header_rule_csv += '"' + str(rule_num) + '"' + common.csv_delimiter # rule_num - header_rule_csv += '"' + layer_name + '"' + common.csv_delimiter # rulebase_name - header_rule_csv += common.csv_delimiter # rule_ruleid - header_rule_csv += '"' + 'False' + '"' + common.csv_delimiter # rule_disabled - header_rule_csv += '"' + 'False' + '"' + common.csv_delimiter # rule_src_neg - header_rule_csv += '"' + 'Any' + '"' + common.csv_delimiter # src - header_rule_csv += '"' + common.any_obj_uid + '"' + common.csv_delimiter # src_refs - header_rule_csv += '"' + 'False' + '"' + common.csv_delimiter # rule_dst_neg - header_rule_csv += '"' + 'Any' + '"' + common.csv_delimiter # dst - header_rule_csv += '"' + common.any_obj_uid + '"' + common.csv_delimiter # 
dst_refs - header_rule_csv += '"' + 'False' + '"' + common.csv_delimiter # rule_svc_neg - header_rule_csv += '"' + 'Any' + '"' + common.csv_delimiter # svc - header_rule_csv += '"' + common.any_obj_uid + '"' + common.csv_delimiter # svc_refs - header_rule_csv += '"' + 'Accept' + '"' + common.csv_delimiter # action - header_rule_csv += '"' + 'Log' + '"' + common.csv_delimiter # track - header_rule_csv += '"' + 'Policy Targets' + '"' + common.csv_delimiter # install-on - header_rule_csv += '"' + 'Any' + '"' + common.csv_delimiter # time - header_rule_csv += '""' + common.csv_delimiter # comments - header_rule_csv += common.csv_delimiter # name - header_rule_csv += '"' + rule_uid + '"' + common.csv_delimiter # uid - header_rule_csv += '"' + section_name + '"' + common.csv_delimiter # head_text - header_rule_csv += common.csv_delimiter # from_zone - header_rule_csv += common.csv_delimiter # to_zone - header_rule_csv += common.csv_delimiter # last_change_admin + # only do this once! : section_header_uids.append(rule_uid) + header_rule_csv = common.csv_add_field(import_id) # control_id + header_rule_csv += common.csv_add_field(str(rule_num)) # rule_num + header_rule_csv += common.csv_add_field(layer_name) # rulebase_name + header_rule_csv += common.csv_delimiter # rule_ruleid + header_rule_csv += common.csv_add_field('False') # rule_disabled + header_rule_csv += common.csv_add_field('False') # rule_src_neg + header_rule_csv += common.csv_add_field('Any') # rule_src + header_rule_csv += common.csv_add_field(cpcommon.any_obj_uid) # rule_src_refs + header_rule_csv += common.csv_add_field('False') # rule_dst_neg + header_rule_csv += common.csv_add_field('Any') # rule_dst + header_rule_csv += common.csv_add_field(cpcommon.any_obj_uid) # rule_dst_refs + header_rule_csv += common.csv_add_field('False') # rule_svc_neg + header_rule_csv += common.csv_add_field('Any') # rule_svc + header_rule_csv += common.csv_add_field(cpcommon.any_obj_uid) # rule_svc_refs + header_rule_csv += common.csv_add_field('Accept') # rule_action + header_rule_csv += common.csv_add_field('Log') # rule_track + header_rule_csv += common.csv_add_field('Policy Targets') # rule_installon + header_rule_csv += common.csv_add_field('Any') # rule_time + header_rule_csv += common.csv_add_field('') # rule_comment + header_rule_csv += common.csv_delimiter # rule_name + header_rule_csv += common.csv_add_field(rule_uid) # rule_uid + header_rule_csv += common.csv_add_field(section_name) # rule_head_text + header_rule_csv += common.csv_delimiter # rule_from_zone + header_rule_csv += common.csv_delimiter # rule_to_zone + header_rule_csv += common.csv_delimiter # rule_last_change_admin if parent_uid != "": - header_rule_csv += '"' + parent_uid + '"' # parent_rule_uid + header_rule_csv += common.csv_add_field(parent_uid, no_csv_delimiter=True) # parent_rule_uid return header_rule_csv + common.line_delimiter -def csv_add_field(content, csv_del, apostrophe): - if content == '': # do not add apostrophes for empty fields - field_result = csv_del - else: - field_result = apostrophe + content + apostrophe + csv_del - return field_result +def create_domain_rule_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): + return create_section_header(section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid) def csv_dump_rule(rule, layer_name, import_id, rule_num, parent_uid): - apostrophe = '"' rule_csv = '' # reference to domain rule layer, filling up basic fields @@ -84,12 +52,12 @@ 
def csv_dump_rule(rule, layer_name, import_id, rule_num, parent_uid): # add_missing_info_to_domain_ref_rule(rule) if 'rule-number' in rule: # standard rule, no section header # print ("rule #" + str(rule['rule-number']) + "\n") - rule_csv += csv_add_field(import_id, common.csv_delimiter, apostrophe) # control_id - rule_csv += csv_add_field(str(rule_num), common.csv_delimiter, apostrophe) # rule_num - rule_csv += csv_add_field(layer_name, common.csv_delimiter, apostrophe) # rulebase_name - rule_csv += csv_add_field('', common.csv_delimiter, apostrophe) # rule_ruleid is empty - rule_csv += csv_add_field(str(not rule['enabled']), common.csv_delimiter, apostrophe) # rule_disabled - rule_csv += csv_add_field(str(rule['source-negate']), common.csv_delimiter, apostrophe) # src_neg + rule_csv += common.csv_add_field(import_id) # control_id + rule_csv += common.csv_add_field(str(rule_num)) # rule_num + rule_csv += common.csv_add_field(layer_name) # rulebase_name + rule_csv += common.csv_add_field('') # rule_ruleid is empty + rule_csv += common.csv_add_field(str(not rule['enabled'])) # rule_disabled + rule_csv += common.csv_add_field(str(rule['source-negate'])) # src_neg # SOURCE names rule_src_name = '' @@ -110,7 +78,7 @@ def csv_dump_rule(rule, layer_name, import_id, rule_num, parent_uid): else: # standard network objects as source rule_src_name += src["name"] + common.list_delimiter rule_src_name = rule_src_name[:-1] # removing last list_delimiter - rule_csv += csv_add_field(rule_src_name, common.csv_delimiter, apostrophe) # src_names + rule_csv += common.csv_add_field(rule_src_name) # src_names # SOURCE refs rule_src_ref = '' @@ -120,7 +88,7 @@ def csv_dump_rule(rule, layer_name, import_id, rule_num, parent_uid): elif src['type'] == 'access-role': if isinstance(src['networks'], str): # just a single source if src['networks'] == 'any': - rule_src_ref += src['uid'] + '@' + common.any_obj_uid + common.list_delimiter + rule_src_ref += src['uid'] + '@' + cpcommon.any_obj_uid + common.list_delimiter else: rule_src_ref += src['uid'] + '@' + src['networks'] + common.list_delimiter else: # more than one source @@ -129,82 +97,78 @@ def csv_dump_rule(rule, layer_name, import_id, rule_num, parent_uid): else: # standard network objects as source rule_src_ref += src["uid"] + common.list_delimiter rule_src_ref = rule_src_ref[:-1] # removing last list_delimiter - rule_csv += csv_add_field(rule_src_ref, common.csv_delimiter, apostrophe) # src_refs + rule_csv += common.csv_add_field(rule_src_ref) # src_refs - rule_csv += csv_add_field(str(rule['destination-negate']), common.csv_delimiter, apostrophe) # destination negation + rule_csv += common.csv_add_field(str(rule['destination-negate'])) # destination negation rule_dst_name = '' for dst in rule["destination"]: rule_dst_name += dst["name"] + common.list_delimiter rule_dst_name = rule_dst_name[:-1] - rule_csv += csv_add_field(rule_dst_name, common.csv_delimiter, apostrophe) # rule dest_name + rule_csv += common.csv_add_field(rule_dst_name) # rule dest_name rule_dst_ref = '' for dst in rule["destination"]: rule_dst_ref += dst["uid"] + common.list_delimiter rule_dst_ref = rule_dst_ref[:-1] - rule_csv += csv_add_field(rule_dst_ref, common.csv_delimiter, apostrophe) # rule_dest_refs + rule_csv += common.csv_add_field(rule_dst_ref) # rule_dest_refs + # SERVICE negate + rule_csv += common.csv_add_field(str(rule['service-negate'])) # service negation # SERVICE names rule_svc_name = '' - rule_svc_name += str(rule['service-negate']) + '"' + common.csv_delimiter + '"' for 
svc in rule["service"]: rule_svc_name += svc["name"] + common.list_delimiter rule_svc_name = rule_svc_name[:-1] - rule_csv += csv_add_field(rule_svc_name, common.csv_delimiter, apostrophe) # rule svc name + rule_csv += common.csv_add_field(rule_svc_name) # rule svc name # SERVICE refs rule_svc_ref = '' for svc in rule["service"]: rule_svc_ref += svc["uid"] + common.list_delimiter rule_svc_ref = rule_svc_ref[:-1] - rule_csv += csv_add_field(rule_svc_ref, common.csv_delimiter, apostrophe) # rule svc ref + rule_csv += common.csv_add_field(rule_svc_ref) # rule svc ref rule_action = rule['action'] rule_action_name = rule_action['name'] - rule_csv += csv_add_field(rule_action_name, common.csv_delimiter, apostrophe) # rule action + rule_csv += common.csv_add_field(rule_action_name) # rule action rule_track = rule['track'] rule_track_type = rule_track['type'] - rule_csv += csv_add_field(rule_track_type['name'], common.csv_delimiter, apostrophe) # rule track + rule_csv += common.csv_add_field(rule_track_type['name']) # rule track rule_install_on = rule['install-on'] first_rule_install_target = rule_install_on[0] - rule_csv += csv_add_field(first_rule_install_target['name'], common.csv_delimiter, apostrophe) # install on + rule_csv += common.csv_add_field(first_rule_install_target['name']) # install on rule_time = rule['time'] first_rule_time = rule_time[0] - rule_csv += csv_add_field(first_rule_time['name'], common.csv_delimiter, apostrophe) # time + rule_csv += common.csv_add_field(first_rule_time['name']) # time - rule_csv += csv_add_field(rule['comments'], common.csv_delimiter, apostrophe) # time + rule_csv += common.csv_add_field(rule['comments']) # comments if 'name' in rule: rule_name = rule['name'] else: rule_name = '' - rule_csv += csv_add_field(rule_name, common.csv_delimiter, apostrophe) # rule_name + rule_csv += common.csv_add_field(rule_name) # rule_name - rule_csv += csv_add_field(rule['uid'], common.csv_delimiter, apostrophe) # rule_uid + rule_csv += common.csv_add_field(rule['uid']) # rule_uid rule_head_text = '' - rule_csv += csv_add_field(rule_head_text, common.csv_delimiter, apostrophe) # rule_head_text + rule_csv += common.csv_add_field(rule_head_text) # rule_head_text rule_from_zone = '' - rule_csv += csv_add_field(rule_from_zone, common.csv_delimiter, apostrophe) + rule_csv += common.csv_add_field(rule_from_zone) rule_to_zone = '' - rule_csv += csv_add_field(rule_to_zone, common.csv_delimiter, apostrophe) + rule_csv += common.csv_add_field(rule_to_zone) rule_meta_info = rule['meta-info'] - rule_csv += csv_add_field(rule_meta_info['last-modifier'], common.csv_delimiter, apostrophe) + rule_csv += common.csv_add_field(rule_meta_info['last-modifier']) # new in v5.1.17: if 'parent_rule_uid' in rule: logging.debug('csv_dump_rule: found rule (uid=' + rule['uid'] + ') with parent_rule_uid set: ' + rule['parent_rule_uid']) parent_rule_uid = rule['parent_rule_uid'] else: - # parent_rule_uid = "" parent_rule_uid = parent_uid - #if parent_uid != "": - # logging.debug('csv_dump_rule: no parent_rule_uid set in rule, using parent_uid from function parameter, uid=' + rule['uid'] ) - rule_csv += csv_add_field(parent_rule_uid, common.csv_delimiter, apostrophe) - - rule_csv = rule_csv[:-1] - rule_csv += common.line_delimiter # remove last csv delimiter and add line delimiter + rule_csv += common.csv_add_field(parent_rule_uid,no_csv_delimiter=True) + rule_csv += common.line_delimiter return rule_csv @@ -254,3 +218,199 @@ def csv_dump_rules(rulebase, layer_name, import_id, rule_num, 
section_header_uid rule_num += 1 return rule_num, result +############################################################################################### +############################################################################################### +# the following functions are only used within new python-only importer: + +def add_section_header_rule_in_json (rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): + section_header_uids.append(common.sanitize(rule_uid)) + rule = { + "control_id": int(import_id), + "rule_num": int(rule_num), + "rulebase_name": common.sanitize(layer_name), + # rule_ruleid + "rule_disabled": False, + "rule_src_neg": False, + "rule_src": "Any", + "rule_src_refs": common.sanitize(cpcommon.any_obj_uid), + "rule_dst_neg": False, + "rule_dst": "Any", + "rule_dst_refs": common.sanitize(cpcommon.any_obj_uid), + "rule_svc_neg": False, + "rule_svc": "Any", + "rule_svc_refs": common.sanitize(cpcommon.any_obj_uid), + "rule_action": "Accept", + "rule_track": "Log", + "rule_installon": "Policy Targets", + "rule_time": "Any", + "rule_implied": False, + "rule_comment": "", + # rule_name + "rule_uid": common.sanitize(rule_uid), + "rule_head_text": common.sanitize(section_name), + # rule_from_zone + # rule_to_zone + # rule_last_change_admin + "parent_rule_uid": common.sanitize(parent_uid) + } + rulebase.append(rule) + + +def add_domain_rule_header_rule_in_json(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid): + add_section_header_rule_in_json(rulebase, section_name, layer_name, import_id, rule_uid, rule_num, section_header_uids, parent_uid) + + +def parse_single_rule_to_json (src_rule, rulebase, layer_name, import_id, rule_num, parent_uid): + + # reference to domain rule layer, filling up basic fields + if 'type' in src_rule and src_rule['type'] != 'place-holder': + if 'rule-number' in src_rule: # standard rule, no section header + # SOURCE names + rule_src_name = '' + for src in src_rule["source"]: + if src['type'] == 'LegacyUserAtLocation': + rule_src_name += src['name'] + common.list_delimiter + elif src['type'] == 'access-role': + if isinstance(src['networks'], str): # just a single source + if src['networks'] == 'any': + rule_src_name += src["name"] + '@' + 'Any' + common.list_delimiter + else: + rule_src_name += src["name"] + '@' + src['networks'] + common.list_delimiter + else: # more than one source + for nw in src['networks']: + rule_src_name += src[ + # TODO: this is not correct --> need to reverse resolve name from given UID + "name"] + '@' + nw + common.list_delimiter + else: # standard network objects as source + rule_src_name += src["name"] + common.list_delimiter + rule_src_name = rule_src_name[:-1] # removing last list_delimiter + + # SOURCE refs + rule_src_ref = '' + for src in src_rule["source"]: + if src['type'] == 'LegacyUserAtLocation': + rule_src_ref += src["userGroup"] + '@' + src["location"] + common.list_delimiter + elif src['type'] == 'access-role': + if isinstance(src['networks'], str): # just a single source + if src['networks'] == 'any': + rule_src_ref += src['uid'] + '@' + cpcommon.any_obj_uid + common.list_delimiter + else: + rule_src_ref += src['uid'] + '@' + src['networks'] + common.list_delimiter + else: # more than one source + for nw in src['networks']: + rule_src_ref += src['uid'] + '@' + nw + common.list_delimiter + else: # standard network objects as source + rule_src_ref += 
src["uid"] + common.list_delimiter + rule_src_ref = rule_src_ref[:-1] # removing last list_delimiter + + # rule_dst... + rule_dst_name = '' + for dst in src_rule["destination"]: + rule_dst_name += dst["name"] + common.list_delimiter + rule_dst_name = rule_dst_name[:-1] + + rule_dst_ref = '' + for dst in src_rule["destination"]: + rule_dst_ref += dst["uid"] + common.list_delimiter + rule_dst_ref = rule_dst_ref[:-1] + + # rule_svc... + rule_svc_name = '' + for svc in src_rule["service"]: + rule_svc_name += svc["name"] + common.list_delimiter + rule_svc_name = rule_svc_name[:-1] + + rule_svc_ref = '' + for svc in src_rule["service"]: + rule_svc_ref += svc["uid"] + common.list_delimiter + rule_svc_ref = rule_svc_ref[:-1] + + if 'name' in src_rule: + rule_name = src_rule['name'] + else: + rule_name = '' + + # new in v5.1.17: + if 'parent_rule_uid' in src_rule: + logging.debug('csv_dump_rule: found rule (uid=' + src_rule['uid'] + ') with parent_rule_uid set: ' + src_rule['parent_rule_uid']) + parent_rule_uid = src_rule['parent_rule_uid'] + else: + parent_rule_uid = parent_uid + + rule = { + "control_id": int(import_id), + "rule_num": int(rule_num), + "rulebase_name": common.sanitize(layer_name), + # rule_ruleid + "rule_disabled": not bool(src_rule['enabled']), + "rule_src_neg": bool(src_rule['source-negate']), + "rule_src": common.sanitize(rule_src_name), + "rule_src_refs": common.sanitize(rule_src_ref), + "rule_dst_neg": bool(src_rule['destination-negate']), + "rule_dst": common.sanitize(rule_dst_name), + "rule_dst_refs": common.sanitize(rule_dst_ref), + "rule_svc_neg": bool(src_rule['service-negate']), + "rule_svc": common.sanitize(rule_svc_name), + "rule_svc_refs": common.sanitize(rule_svc_ref), + "rule_action": common.sanitize(src_rule['action']['name']), + "rule_track": common.sanitize(src_rule['track']['type']['name']), + "rule_installon": common.sanitize(src_rule['install-on'][0]['name']), + "rule_time": common.sanitize(src_rule['time'][0]['name']), + "rule_comment": common.sanitize(src_rule['comments']), + "rule_name": common.sanitize(rule_name), + "rule_uid": common.sanitize(src_rule['uid']), + "rule_implied": False, + # "rule_head_text": common.sanitize(section_name), + # rule_from_zone + # rule_to_zone + "rule_last_change_admin": common.sanitize(src_rule['meta-info']['last-modifier']), + "parent_rule_uid": common.sanitize(parent_uid) + } + rulebase.append(rule) + + +def parse_rulebase_json(src_rulebase, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid): + + if 'layerchunks' in src_rulebase: + for chunk in src_rulebase['layerchunks']: + if 'rulebase' in chunk: + for rules_chunk in chunk['rulebase']: + rule_num = parse_rulebase_json(rules_chunk, target_rulebase, layer_name, import_id, rule_num, section_header_uids, parent_uid) + else: + logging.warning("parse_rule: found no rulebase in chunk:\n" + json.dumps(chunk, indent=2)) + else: + if 'rulebase' in src_rulebase: + if src_rulebase['type'] == 'access-section' and not src_rulebase['uid'] in section_header_uids: # add section header, but only if it does not exist yet (can happen by chunking a section) + section_name = "" + if 'name' in src_rulebase: + section_name = src_rulebase['name'] + if 'parent_rule_uid' in src_rulebase: + parent_uid = src_rulebase['parent_rule_uid'] + else: + parent_uid = "" + add_section_header_rule_in_json(target_rulebase, section_name, layer_name, import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid) + rule_num += 1 + parent_uid = src_rulebase['uid'] + for 
rule in src_rulebase['rulebase']: + if rule['type'] == 'place-holder': # add domain rules + section_name = "" + if 'name' in rule: + section_name = rule['name'] + add_domain_rule_header_rule_in_json(target_rulebase, section_name, layer_name, import_id, rule['uid'], rule_num, section_header_uids, parent_uid) + else: # parse standard sections + parse_single_rule_to_json(rule, target_rulebase, layer_name, import_id, rule_num, parent_uid) + rule_num += 1 + + if src_rulebase['type'] == 'place-holder': # add domain rules + logging.debug('parse_rulebase_json: found domain rule ref: ' + src_rulebase['uid']) + section_name = "" + if 'name' in src_rulebase: + section_name = src_rulebase['name'] + add_domain_rule_header_rule_in_json(target_rulebase, section_name, layer_name, import_id, src_rulebase['uid'], rule_num, section_header_uids, parent_uid) + rule_num += 1 + if 'rule-number' in src_rulebase: # rulebase is just a single rule + parse_single_rule_to_json(src_rulebase, target_rulebase, layer_name, import_id, rule_num, parent_uid) + rule_num += 1 + return rule_num diff --git a/roles/importer/files/importer/checkpointR8x/parse_service.py b/roles/importer/files/importer/checkpointR8x/parse_service.py index b71371b26..96c67c57c 100644 --- a/roles/importer/files/importer/checkpointR8x/parse_service.py +++ b/roles/importer/files/importer/checkpointR8x/parse_service.py @@ -1,38 +1,45 @@ +base_dir = "/usr/local/fworch" + +import sys +sys.path.append(base_dir + '/importer') +sys.path.append(base_dir + '/importer/checkpointR8x') import re import logging -import common +import common, cpcommon def csv_dump_svc_obj(svc_obj, import_id): - #print("dumping svc: " + svc_obj['svc_name'] + ", svc_member_refs: " + svc_obj['svc_member_refs']) - result_line = '"' + import_id + '"' + common.csv_delimiter # control_id - result_line += '"' + svc_obj['svc_name'] + '"' + common.csv_delimiter # svc_name - result_line += '"' + svc_obj['svc_typ'] + '"' + common.csv_delimiter # svc_typ - result_line += '"' + svc_obj['svc_typ'] + '"' + common.csv_delimiter # svc_prod_specific - result_line += '"' + svc_obj['svc_member_names'] + '"' + common.csv_delimiter # svc_member_names - result_line += '"' + svc_obj['svc_member_refs'] + '"' + common.csv_delimiter # obj_member_refs - result_line += '"' + svc_obj['svc_color'] + '"' + common.csv_delimiter # svc_color - result_line += '"' + svc_obj['ip_proto'] + '"' + common.csv_delimiter # ip_proto - result_line += str(svc_obj['svc_port']) + common.csv_delimiter # svc_port - result_line += str(svc_obj['svc_port_end']) + common.csv_delimiter # svc_port_end + result_line = common.csv_add_field(import_id) # control_id + result_line += common.csv_add_field(svc_obj['svc_name']) # svc_name + result_line += common.csv_add_field(svc_obj['svc_typ']) # svc_typ + result_line += common.csv_add_field(svc_obj['svc_typ']) # svc_prod_specific + result_line += common.csv_add_field(svc_obj['svc_member_names']) # svc_member_names + result_line += common.csv_add_field(svc_obj['svc_member_refs']) # obj_member_refs + result_line += common.csv_add_field(svc_obj['svc_color']) # svc_color + result_line += common.csv_add_field(svc_obj['ip_proto']) # ip_proto + result_line += str(svc_obj['svc_port']) + common.csv_delimiter # svc_port + result_line += str(svc_obj['svc_port_end']) + common.csv_delimiter # svc_port_end if 'svc_source_port' in svc_obj: - result_line += '"' + svc_obj['svc_source_port'] + '"' + common.csv_delimiter # svc_source_port + result_line += common.csv_add_field(svc_obj['svc_source_port']) # 
svc_source_port else: - result_line += common.csv_delimiter # svc_source_port + result_line += common.csv_delimiter # svc_source_port if 'svc_source_port_end' in svc_obj: - result_line += '"' + svc_obj['svc_source_port_end'] + '"' + common.csv_delimiter # svc_source_port_end + result_line += common.csv_add_field(svc_obj['svc_source_port_end']) # svc_source_port_end else: - result_line += common.csv_delimiter # svc_source_port_end - result_line += '"' + svc_obj['svc_comment'] + '"' + common.csv_delimiter # svc_comment - result_line += '"' + str(svc_obj['rpc_nr']) + '"' + common.csv_delimiter # rpc_nr + result_line += common.csv_delimiter # svc_source_port_end + result_line += common.csv_add_field(svc_obj['svc_comment']) # svc_comment + result_line += common.csv_add_field(str(svc_obj['rpc_nr'])) # rpc_nr if 'svc_timeout_std' in svc_obj: - result_line += '"' + svc_obj['svc_timeout_std'] + '"' + common.csv_delimiter # svc_timeout_std + result_line += common.csv_add_field(svc_obj['svc_timeout_std']) # svc_timeout_std + else: + result_line += common.csv_delimiter # svc_timeout_std + if 'svc_timeout' in svc_obj and svc_obj['svc_timeout']!="" and svc_obj['svc_timeout'] is not None: + result_line += common.csv_add_field(str(svc_obj['svc_timeout'])) # svc_timeout else: - result_line += common.csv_delimiter # svc_timeout_std - result_line += str(svc_obj['svc_timeout']) + common.csv_delimiter # svc_timeout - result_line += '"' + svc_obj['svc_uid'] + '"' + common.csv_delimiter # svc_uid - result_line += common.csv_delimiter # last_change_admin - result_line += common.line_delimiter # last_change_time + result_line += common.csv_delimiter # svc_timeout null + result_line += common.csv_add_field(svc_obj['svc_uid']) # svc_uid + result_line += common.csv_delimiter # last_change_admin + result_line += common.line_delimiter # last_change_time return result_line @@ -40,7 +47,7 @@ def csv_dump_svc_obj(svc_obj, import_id): def collect_svc_objects(object_table, svc_objects): result = '' - if object_table['object_type'] in common.svc_obj_table_names: + if object_table['object_type'] in cpcommon.svc_obj_table_names: proto = '' session_timeout = '' typ = 'undef' @@ -78,6 +85,8 @@ def collect_svc_objects(object_table, svc_objects): member_refs = member_refs[:-1] if 'session-timeout' in obj: session_timeout = str(obj['session-timeout']) + else: + session_timeout = None if 'interface-uuid' in obj: rpc_nr = obj['interface-uuid'] if 'program-number' in obj: @@ -135,3 +144,16 @@ def add_member_names_for_svc_group(idx, svc_objects): member_names += member_name + common.list_delimiter group['svc_member_names'] = member_names[:-1] svc_objects.insert(idx, group) + + +def parse_service_objects_to_json(full_config, config2import, import_id): + svc_objects = [] + for svc_table in full_config['object_tables']: + collect_svc_objects(svc_table, svc_objects) + for obj in svc_objects: + obj.update({'control_id': import_id}) + for idx in range(0, len(svc_objects)): # iterate over all objects, including the last one + if svc_objects[idx]['svc_typ'] == 'group': + add_member_names_for_svc_group(idx, svc_objects) + config2import.update({'service_objects': svc_objects}) + \ No newline at end of file diff --git a/roles/importer/files/importer/checkpointR8x/parse_user.py b/roles/importer/files/importer/checkpointR8x/parse_user.py index 1ec2d9cd2..775922fc2 100644 --- a/roles/importer/files/importer/checkpointR8x/parse_user.py +++ b/roles/importer/files/importer/checkpointR8x/parse_user.py @@ -1,31 +1,33 @@ -import logging +import sys +base_dir = "/usr/local/fworch/" +sys.path.append(base_dir + 
'/importer') +sys.path.append(base_dir + '/importer/checkpointR8x') import common - +import logging def csv_dump_user(user_name, user, import_id): - user_line = '"' + import_id + '"' + common.csv_delimiter - user_line += '"' + user_name + '"' + common.csv_delimiter - user_line += '"' + user['user_type'] + '"' + common.csv_delimiter # user_typ - if 'user_member_names' in user: - user_line += '"' + user['user_member_names'] + '"' + common.csv_delimiter # user_member_names + user_line = common.csv_add_field(import_id) # control_id + user_line += common.csv_add_field(user_name) # user_name + user_line += common.csv_add_field(user['user_typ']) # user_typ + if 'user_member_names' in user: + user_line += common.csv_add_field(user['user_member_names']) # user_member_names + else: + user_line += common.csv_delimiter # no user_member_names + if 'user_member_refs' in user: + user_line += common.csv_add_field(user['user_member_refs']) # user_member_refs + else: + user_line += common.csv_delimiter # no user_member_refs + if 'user_color' in user: + user_line += common.csv_add_field(user['user_color']) # user_color else: - user_line += common.csv_delimiter # user_comment - if 'user_member_refs' in user: - user_line += '"' + user['user_member_refs'] + '"' + common.csv_delimiter # user_member_refs - else: - user_line += common.csv_delimiter # user_comment - if 'user_color' in user: - user_line += '"' + user['user_color'] + '"' + common.csv_delimiter # user_color - else: - user_line += common.csv_delimiter # user_comment + user_line += common.csv_delimiter # no user_color if 'user_comment' in user: - user_line += '"' + user['user_comment'] + '"' + common.csv_delimiter # user_comment + user_line += common.csv_add_field(user['user_comment']) # user_comment else: - user_line += common.csv_delimiter # user_comment - user_line += '"' + user['uid'] + '"' # user_uid - user_line += common.csv_delimiter # user_valid_until - user_line += common.csv_delimiter # last_change_admin - user_line += common.line_delimiter + user_line += common.csv_delimiter # no user_comment + user_line += common.csv_add_field(user['user_uid']) # user_uid + user_line += common.csv_delimiter # user_valid_until + user_line += common.line_delimiter # last_change_admin return user_line @@ -34,22 +36,22 @@ def collect_users_from_rule(rule, users): if 'type' in rule and rule['type'] != 'place-holder': for src in rule["source"]: if src['type'] == 'access-role': - users.update({src['name']: {'uid': src['uid'], 'user_type': 'group', 'comment': src['comments'], 'color': src['color']} }) + users.update({src['name']: {'user_uid': src['uid'], 'user_typ': 'group', 'user_comment': src['comments'], 'user_color': src['color']} }) if 'users' in src: - users.update({src["name"]: {'uid': src["uid"], 'user_type': 'simple'} }) + users.update({src["name"]: {'user_uid': src["uid"], 'user_typ': 'simple', 'user_comment': src['comments'], 'user_color': src['color']} }) elif src['type'] == 'LegacyUserAtLocation': user_str = src["name"] user_ar = user_str.split('@') user_name = user_ar[0] user_uid = src["userGroup"] - users.update({user_name: {'uid': user_uid, 'user_type': 'group'} }) + # users.update({user_name: {'uid': user_uid, 'user_type': 'group'} }) + users.update({user_name: {'user_uid': user_uid, 'user_typ': 'group', 'user_comment': src['comments'], 'user_color': src['color']} }) else: # section collect_users_from_rulebase(rule["rulebase"], users) # collect_users writes user info into global users dict def collect_users_from_rulebase(rulebase, users): - result = 
'' if 'layerchunks' in rulebase: for chunk in rulebase['layerchunks']: if 'rulebase' in chunk: @@ -58,3 +60,9 @@ def collect_users_from_rulebase(rulebase, users): else: for rule in rulebase: collect_users_from_rule(rule, users) + +# the following is only used within new python-only importer: +def parse_user_objects_from_rulebase(rulebase, users, import_id): + collect_users_from_rulebase(rulebase, users) + for user_name in users.keys(): + users[user_name]['control_id'] = import_id diff --git a/roles/importer/files/importer/common.py b/roles/importer/files/importer/common.py new file mode 100644 index 000000000..f23e9438d --- /dev/null +++ b/roles/importer/files/importer/common.py @@ -0,0 +1,43 @@ +import logging + +csv_delimiter = '%' +list_delimiter = '|' +line_delimiter = "\n" +apostrophe = "\"" +section_header_uids=[] + + +def set_log_level(log_level, debug_level): + # todo: save the initial value, reset initial value at the end + logger = logging.getLogger(__name__) + # todo: use log_level to define non debug logging + # use debug_level to define different debug levels + if debug_level == 1: + logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') + elif debug_level == 2: + logging.basicConfig(filename='/var/tmp/fworch_get_config_cp_r8x_api.debug', filemode='a', level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') + elif debug_level == 3: + logging.basicConfig(level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') + logging.basicConfig(filename='/var/tmp/fworch_get_config_cp_r8x_api.debug', filemode='a', level=logging.DEBUG, format='%(asctime)s - %(levelname)s - %(message)s') + logger.debug ("debug_level: "+ str(debug_level) ) + + +def csv_add_field(content, no_csv_delimiter=False): + if content == '' and not no_csv_delimiter: # do not add apostrophes for empty fields + field_result = csv_delimiter + else: + # add apostrophes at beginning and end and remove any occurrence of them within the string + escaped_field = content.replace(apostrophe,"") + field_result = apostrophe + escaped_field + apostrophe + if not no_csv_delimiter: + field_result += csv_delimiter + return field_result + + +def sanitize(content): + result = str(content) + result = result.replace(apostrophe,"") # remove possibly contained apostrophe + #if result != '': # do not add apostrophes for empty fields + # result = apostrophe + escaped_field + apostrophe + return result + \ No newline at end of file diff --git a/roles/importer/files/importer/fortiManager/__init__.py b/roles/importer/files/importer/fortiManager/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/roles/importer/files/importer/fortiManager/api-test-call.py b/roles/importer/files/importer/fortiManager/api-test-call.py new file mode 100755 index 000000000..38139d529 --- /dev/null +++ b/roles/importer/files/importer/fortiManager/api-test-call.py @@ -0,0 +1,116 @@ +#!/usr/bin/python3 +import logging +import logging.config +import getter +import common +import json, argparse, os, sys + +logging.config.fileConfig(fname='discovery_logging.conf', disable_existing_loggers=False) + +logger = logging.getLogger(__name__) + +logger.info("START") +parser = argparse.ArgumentParser(description='Read configuration from Check Point R8x management via API calls') +parser.add_argument('-a', '--hostname', metavar='api_host', required=True, help='Check Point R8x management server') +parser.add_argument('-w', '--password', metavar='api_password', required=True, help='password for 
management server') +parser.add_argument('-m', '--mode', metavar='mode', required=True, help='[domains|packages|layers|generic]') +parser.add_argument('-c', '--command', metavar='command', required=False, help='generic command to send to the api (needs -m generic). ' + + 'Please note that the command must be written as one word (e.g. show-access-layers instead of show access-layers).') +parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch') +parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443') +parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Environment') +parser.add_argument('-x', '--proxy', metavar='proxy_string', default='', help='proxy server string to use, e.g. 1.2.3.4:8080; default=empty') +parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off; default=empty/off') +parser.add_argument('-l', '--level', metavar='level_of_detail', default='standard', help='[standard|full]') +parser.add_argument('-i', '--limit', metavar='api_limit', default='500', help='The maximum number of returned results per HTTPS Connection; default=500') +parser.add_argument('-n', '--nolimit', metavar='nolimit', default='off', help='[on|off] Set to on if (generic) command does not understand limit switch') +parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0') +parser.add_argument('-V', '--version', metavar='api_version', default='off', help='alternate API version [off|]; default=off') + +args = parser.parse_args() +if len(sys.argv)==1: + parser.print_help(sys.stderr) + sys.exit(1) + +domain = args.domain + +if args.mode == 'packages': + api_command='show-packages' + api_details_level="standard" +elif args.mode == 'domains' or args.mode == 'devices': + api_command='show-domains' + api_details_level="standard" + domain = '' +elif args.mode == 'layers': + api_command='show-access-layers' + api_details_level="standard" +elif args.mode == 'generic': + api_command=args.command + api_details_level=args.level +else: + sys.exit("\"" + args.mode +"\" - unknown mode") + +proxy_string = { "http" : args.proxy, "https" : args.proxy } +offset = 0 +use_object_dictionary = 'false' +base_url = 'https://' + args.hostname + ':' + args.port + '/web_api/' +ssl_verification = getter.set_ssl_verification(args.ssl) +logger = logging.getLogger(__name__) + +xsid = getter.login(args.user, args.password, args.hostname, args.port, domain, ssl_verification, proxy_string) +api_versions = getter.api_call(args.hostname, args.port, base_url, 'show-api-versions', {}, xsid, ssl_verification, proxy_string) + +api_version = api_versions["current-version"] +api_supported = api_versions["supported-versions"] +v_url = getter.set_api_url(base_url,args.version,api_supported,args.hostname) +if args.version != 'off': + api_version = args.version +logger.debug ("using current version: "+ api_version ) +logger.debug ("supported versions: "+ ', '.join(api_supported) ) +logger.debug ("limit:"+ args.limit ) +logger.debug ("Domain:"+ args.domain ) +logger.debug ("login:"+ args.user ) +logger.debug ("sid:"+ xsid ) + +payload = { "details-level" : api_details_level } +if args.nolimit == 'off': + payload.update( { "limit" : args.limit, "offset" : 
offset } ) + +if args.mode == 'generic': # need to divide command string into command and payload (i.e. parameters) + cmd_parts = api_command.split(" ") + api_command = cmd_parts[0] + idx = 1 + if len(cmd_parts)>1: + payload.pop('limit') + payload.pop('offset') + while idx < len(cmd_parts): + payload.update({cmd_parts[idx]: cmd_parts[idx+1]}) + idx += 2 + +result = getter.api_call(args.hostname, args.port, v_url, api_command, payload, xsid, ssl_verification, proxy_string) + +if args.debug == "1" or args.debug == "3": + print ("\ndump of result:\n" + json.dumps(result, indent=4)) +if args.mode == 'packages': + print ("\nthe following packages exist on management server:") + for p in result['packages']: + print (" package: " + p['name']) + if "access-layers" in result: + print ("the following layers exist on management server:") + for p in result['packages']: + print (" package: " + p['name']) + for l in p['access-layers']: + print (" layer: " + l['name']) + +if args.mode == 'domains': + print ("\nthe following domains exist on management server:") + for d in result['objects']: + print (" domain: " + d['name'] + ", uid: " + d['uid']) +if args.mode == 'layers': + print ("\nthe following access-layers exist on management server:") + for l in result['access-layers']: + print (" access-layer: " + l['name'] + ", uid: " + l['uid'] ) +if args.mode == 'generic': + print (json.dumps(result, indent=3)) + +logout_result = getter.api_call(args.hostname, args.port, v_url, 'logout', {}, xsid, ssl_verification, proxy_string) diff --git a/roles/importer/files/importer/fortiManager/auto-discover.py b/roles/importer/files/importer/fortiManager/auto-discover.py new file mode 100755 index 000000000..12840f971 --- /dev/null +++ b/roles/importer/files/importer/fortiManager/auto-discover.py @@ -0,0 +1,194 @@ +#!/usr/bin/python3 +import logging +import logging.config +import getter +import common +import json, argparse, os, sys + +logging.config.fileConfig(fname='discovery_logging.conf', disable_existing_loggers=False) + +logger = logging.getLogger(__name__) + +logger.info("START") +parser = argparse.ArgumentParser(description='Discover all devices, policies starting from a single server (MDS or stand-alone) from Check Point R8x management via API calls') +parser.add_argument('-a', '--hostname', metavar='api_host', required=True, help='Check Point R8x management server') +parser.add_argument('-w', '--password', metavar='api_password', required=True, help='password for management server') +parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to Check Point R8x management server, default=fworch') +parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to Check Point R8x management server, default=443') +parser.add_argument('-x', '--proxy', metavar='proxy_string', default='', help='proxy server string to use, e.g. 
1.2.3.4:8080; default=empty') +parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off; default=empty/off') +parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0') +parser.add_argument('-V', '--version', metavar='api_version', default='off', help='alternate API version [off|]; default=off') +parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Environment') +parser.add_argument('-f', '--format', metavar='output_format', default='table', help='[json|table]') + +args = parser.parse_args() +if len(sys.argv)==1: + parser.print_help(sys.stderr) + sys.exit(1) + +proxy_string = { "http" : args.proxy, "https" : args.proxy } +offset = 0 +use_object_dictionary = 'false' +base_url = 'https://' + args.hostname + ':' + args.port + '/web_api/' +ssl_verification = getter.set_ssl_verification(args.ssl) +xsid = getter.login(args.user, args.password, args.hostname, args.port, args.domain, ssl_verification, proxy_string) + +api_versions = getter.api_call(args.hostname, args.port, base_url, 'show-api-versions', {}, xsid, ssl_verification, proxy_string) +api_version = api_versions["current-version"] +api_supported = api_versions["supported-versions"] +v_url = getter.set_api_url(base_url,args.version,api_supported,args.hostname) + +v_url = 'https://' + args.hostname + ':' + args.port + '/web_api/' +if args.version != "off": + v_url += 'v' + args.version + '/' + +logger = logging.getLogger(__name__) + +xsid = getter.login(args.user, args.password, args.hostname, args.port, '', ssl_verification, proxy_string) + +if args.debug == "1" or args.debug == "3": + debug = True +else: + debug = False + +# todo: +# - only show active devices (optionally with a switch) + +domains = getter.api_call(args.hostname, args.port, v_url, "show-domains", {}, xsid, ssl_verification, proxy_string) + +gw_types = ['simple-gateway', 'simple-cluster', 'CpmiVsClusterNetobj', 'CpmiGatewayPlain', 'CpmiGatewayCluster', 'CpmiVsxClusterNetobj'] +parameters = { "details-level" : "full" } + +if domains['total']== 0: + logging.debug ("no domains found, adding dummy domain.") + domains['objects'].append ({ "name": "", "uid": "" }) + + # fetching gateways for non-MDS management: + obj = domains['objects'][0] + obj['gateways'] = getter.api_call(args.hostname, args.port, v_url, "show-gateways-and-servers", parameters, xsid, ssl_verification, proxy_string) + if 'objects' in obj['gateways']: + for gw in obj['gateways']['objects']: + if 'type' in gw and gw['type'] in gw_types and 'policy' in gw: + if 'access-policy-installed' in gw['policy'] and gw['policy']['access-policy-installed'] and "access-policy-name" in gw['policy']: + logging.debug ("standalone mgmt: found gateway " + gw['name'] + " with policy: " + gw['policy']['access-policy-name']) + gw['package'] = getter.api_call(args.hostname, args.port, v_url, + "show-package", + { "name" : gw['policy']['access-policy-name'], "details-level": "full" }, + xsid, ssl_verification, proxy_string) + else: + logging.warning ("Standalone WARNING: did not find any gateways in stand-alone management") + logout_result = getter.api_call(args.hostname, args.port, v_url, 'logout', {}, xsid, ssl_verification, proxy_string) + +else: # visit each domain and fetch layers + for obj in domains['objects']: + domain_name = obj['name'] + logging.debug ("MDS: searching in domain " + domain_name) + xsid = 
getter.login(args.user, args.password, args.hostname, args.port, domain_name, ssl_verification, proxy_string) + obj['gateways'] = getter.api_call(args.hostname, args.port, v_url, "show-gateways-and-servers", parameters, xsid, ssl_verification, proxy_string) + if 'objects' in obj['gateways']: + for gw in obj['gateways']['objects']: + if 'type' in gw and gw['type'] in gw_types and 'policy' in gw: + if 'access-policy-installed' in gw['policy'] and gw['policy']['access-policy-installed'] and "access-policy-name" in gw['policy']: + api_call_str = "show-package name " + gw['policy']['access-policy-name'] + ", logged in to domain " + domain_name + logging.debug ("MDS: found gateway " + gw['name'] + " with policy: " + gw['policy']['access-policy-name']) + logging.debug ("api call: " + api_call_str) + gw['package'] = getter.api_call(args.hostname, args.port, v_url, "show-package", + { "name" : gw['policy']['access-policy-name'], "details-level": "full" }, + xsid, ssl_verification, proxy_string) + else: + logging.warning ("Domain-WARNING: did not find any gateways in domain " + obj['name']) + logout_result = getter.api_call(args.hostname, args.port, v_url, 'logout', {}, xsid, ssl_verification, proxy_string) + +# now collect only relevant data and copy to new dict +domains_essential = [] +for obj in domains['objects']: + domain = { 'name': obj['name'], 'uid': obj['uid'] } + gateways = [] + domain['gateways'] = gateways + if 'objects' in obj['gateways']: + for gw in obj['gateways']['objects']: + if 'policy' in gw and 'access-policy-name' in gw['policy']: + gateway = { "name": gw['name'], "uid": gw['uid'], "access-policy-name": gw['policy']['access-policy-name'] } + layers = [] + if 'package' in gw: + if 'access-layers' in gw['package']: + found_domain_layer = False + for ly in gw['package']['access-layers']: + if 'firewall' in ly and ly['firewall']: + if 'parent-layer' in ly: + found_domain_layer = True + for ly in gw['package']['access-layers']: + if 'firewall' in ly and ly['firewall']: + if 'parent-layer' in ly: + layer = { "name": ly['name'], "uid": ly['uid'], "type": "domain-layer", "parent-layer": ly['parent-layer'] } + elif domains['total']==0: + layer = { "name": ly['name'], "uid": ly['uid'], "type": "local-layer" } + elif found_domain_layer: + layer = { "name": ly['name'], "uid": ly['uid'], "type": "global-layer" } + else: # in domain context, but no global layer exists + layer = { "name": ly['name'], "uid": ly['uid'], "type": "stand-alone-layer" } + layers.append(layer) + gateway['layers'] = layers + gateways.append(gateway) + domain['gateways'] = gateways + domains_essential.append(domain) +devices = {"domains": domains_essential } + + +##### output ######## +if args.format == 'json': + print (json.dumps(devices, indent=3)) + +elif args.format == 'table': + # compact print in FWO UI input format + colsize_number = 35 + colsize = "{:"+str(colsize_number)+"}" + table = "" + heading_list = ["Domain/Management", "Gateway", "Policy String"] + + # add table header: + for heading in heading_list: + table += colsize.format(heading) + table += "\n" + x = 0 + while x < len(heading_list) * colsize_number: + table += '-' + x += 1 + table += "\n" + + # print one gateway/policy per line + for dom in devices['domains']: + if 'gateways' in dom: + for gw in dom['gateways']: + table += colsize.format(dom["name"]) + table += colsize.format(gw['name']) + if 'layers' in gw: + found_domain_layer = False + layer_string = '' + for ly in gw['layers']: + if 'parent-layer' in ly: + found_domain_layer = True + for ly 
in gw['layers']: + if ly['type'] == 'stand-alone-layer' or ly['type'] == 'local-layer': + layer_string = ly["name"] + elif found_domain_layer and ly['type'] == 'domain-layer': + domain_layer = ly['name'] + elif found_domain_layer and ly['type'] == 'global-layer': + global_layer = ly['name'] + else: + logging.warning ("found unknown layer type") + if found_domain_layer: + layer_string = global_layer + '/' + domain_layer + table += colsize.format(layer_string) + table += "\n" + else: + table += colsize.format(dom["name"]) + table += "\n" # empty line between domains for readability + + print (table) + +else: + logging.error("You specified an unsupported output format: " + args.format ) + parser.print_help(sys.stderr) + sys.exit(1) diff --git a/roles/importer/files/importer/fortiManager/autodiscover.sh b/roles/importer/files/importer/fortiManager/autodiscover.sh new file mode 100644 index 000000000..1d421be37 --- /dev/null +++ b/roles/importer/files/importer/fortiManager/autodiscover.sh @@ -0,0 +1,111 @@ +#!/bin/bash + +NAME_="discovery-fgtmgr.sh" +SYNOPSIS_="$NAME_ [-d] [-H ] [-U ] [-K ]" +REQUIRES_="standard GNU commands" +VERSION_="0.1" +DATE_="2021-06-24" +AUTHOR_="Holger Dost " +PURPOSE_="extracts information from the FortiManager" +EXIT_ERROR=3 +EXIT_BUG=3 +EXIT_SUCCESS=0 + + +KEY='/usr/local/fworch/.ssh/id_rsa_forti' +USER='itsecorg' +SERVER='1.1.1.1' +REMCOM='diagnose dvm device list' +DEBUGMODE=0 +GREP="/bin/fgrep" +AWK="/usr/bin/awk" +HEAD="/usr/bin/head" + + +usage () { + echo >&2 "$NAME_ $VERSION_ - $PURPOSE_ +Usage: $SYNOPSIS_ +Requires: $REQUIRES_ +Example: discovery-fgtmgr.sh -d -H 1.1.1.1 -U testuser -K .ssh/id_rsa_testkey +" + exit 1 +} + +shopt -s extglob + +while getopts 'dhH:U:K:' OPTION ; do + case $OPTION in + h) usage $EXIT_SUCCESS + ;; + d) DEBUGMODE=1 + ;; + H) SERVER="$OPTARG" + ;; + U) USER="$OPTARG" + ;; + K) KEY="$OPTARG" + ;; + \?)echo "unknown option \"-$OPTARG\"." >&2 + usage $EXIT_ERROR + ;; + :) echo "option \"-$OPTARG\" argument missing" >&2 + usage $EXIT_ERROR + ;; + *) echo "bug ..." 
>&2 + usage $EXIT_BUG + ;; + esac +done + +# : ${SERVER:='1.1.1.1'} +# : ${USER:='itsecorg'} +# : ${KEY:='/usr/local/fworch/.ssh/id_rsa_forti'} + +DEBUG () { + if [ $DEBUGMODE -gt 0 ]; then + #printf "$1\n" + printf '%s\n' "$1" + fi +} +DEBUGWOLF () { + if [ $DEBUGMODE -gt 0 ]; then + printf '%s' "$1" + fi +} + +REMRES=`ssh -i ${KEY} ${USER}@${SERVER} "${REMCOM}" | egrep "fmg/faz|vdom|^TYPE" | grep -v 'root flags'` +LINECOUNT=0 +FMGLINECOUNT=0 +while read line; do + ((LINECOUNT++)) + #DEBUG "$line" + if [[ "$line" =~ "fmg/faz" ]]; then + ((FMGLINECOUNT++)) + IFS=' '; read -ra FMGLINE <<< $line + FMGVALCOUNT=0 + for FMGVAL in "${FMGLINE[@]}"; do + ((FMGVALCOUNT++)) + FMG[${FMGLINECOUNT},${FMGVALCOUNT}]=$FMGVAL + DEBUGWOLF "${FMG[${FMGLINECOUNT},${FMGVALCOUNT}]}," + done + DEBUG "" + # build an array for the output lines, or print the line directly + fi + if [[ "$line" =~ "vdom" ]]; then + ((VDOMLINECOUNT++)) + IFS=' '; read -ra VDOMLINE <<< $line + VDOMVALCOUNT=0 + for VDOMVAL in "${VDOMLINE[@]}"; do + ((VDOMVALCOUNT++)) + VDOM[${FMGLINECOUNT},${VDOMLINECOUNT},${VDOMVALCOUNT}]=$VDOMVAL + DEBUGWOLF "${VDOM[${FMGLINECOUNT},${VDOMLINECOUNT},${VDOMVALCOUNT}]}," + done + DEBUG "" + # if vdoms exist, extend the line above accordingly, possibly several times + fi +done <<< "$REMRES" +FMGLINECOUNTMAX=$FMGLINECOUNT + +echo "${#FMG[@]}" +echo "${#VDOM[@]}" +#printf "${FMG[${FMGLINECOUNT},${FMGVALCOUNT}]} diff --git a/roles/importer/files/importer/fortiManager/discovery_logging.conf b/roles/importer/files/importer/fortiManager/discovery_logging.conf new file mode 100644 index 000000000..139c55a9c --- /dev/null +++ b/roles/importer/files/importer/fortiManager/discovery_logging.conf @@ -0,0 +1,41 @@ +[loggers] +keys=root,discoveryDebugLogger +#keys=root,__main__ + +[handlers] +keys=consoleHandler,debugFileHandler + +[formatters] +keys=defaultFormatter,debugFileFormatter + +[logger_root] +level=DEBUG +handlers=consoleHandler + +[logger_discoveryDebugLogger] +#[logger___main__] +level=DEBUG +handlers=debugFileHandler +qualname=discoveryDebugLogger +#qualname=__main__ +propagate=0 + +[handler_consoleHandler] +class=StreamHandler +level=DEBUG +formatter=defaultFormatter +args=(sys.stderr,) + +[handler_debugFileHandler] +class=FileHandler +level=DEBUG +formatter=debugFileFormatter +args=('/tmp/fworch_discovery.log',) +# args=('/var/log/fworch/discovery.log',) + +[formatter_defaultFormatter] +format=%(levelname)s:%(name)s:%(message)s + +[formatter_debugFileFormatter] +format=%(asctime)s - %(name)s - %(levelname)s - %(message)s + diff --git a/roles/importer/files/importer/fortiManager/get_config.py b/roles/importer/files/importer/fortiManager/get_config.py new file mode 100755 index 000000000..b92ae023c --- /dev/null +++ b/roles/importer/files/importer/fortiManager/get_config.py @@ -0,0 +1,130 @@ +#!/usr/bin/python3 +# create api user: +# config system admin user +# edit "apiuser" +# set password xxx +# set adom "all_adoms" +# set rpc-permit read-write +import sys +sys.path.append(r"/usr/local/fworch/importer") +import common +import getter +import json, argparse, pdb +import requests, requests.packages.urllib3 +import time, logging, re +import os + +requests.packages.urllib3.disable_warnings() # suppress ssl warnings only + +parser = argparse.ArgumentParser(description='Read configuration from FortiManager via API calls') +parser.add_argument('-a', '--apihost', metavar='api_host', required=True, help='FortiManager server') +parser.add_argument('-w', '--password', 
metavar='api_password_file', default='import_user_secret', help='name of the file to read the password for management server from') +parser.add_argument('-u', '--user', metavar='api_user', default='fworch', help='user for connecting to the FortiManager server, default=fworch') +parser.add_argument('-p', '--port', metavar='api_port', default='443', help='port for connecting to the FortiManager server, default=443') +parser.add_argument('-D', '--domain', metavar='api_domain', default='', help='name of Domain in a Multi-Domain Environment') +parser.add_argument('-l', '--layer', metavar='policy_layer_name(s)', required=True, help='name of policy layer(s) to read (comma separated)') +parser.add_argument('-x', '--proxy', metavar='proxy_string', default='', help='proxy server string to use, e.g. 1.2.3.4:8080; default=empty') +parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='', help='[ca]certfile, if value not set, ssl check is off; default=empty/off') +parser.add_argument('-i', '--limit', metavar='api_limit', default='500', help='The maximum number of returned results per HTTPS Connection; default=500') +parser.add_argument('-d', '--debug', metavar='debug_level', default='0', help='Debug Level: 0(off) 4(DEBUG Console) 41(DEBUG File); default=0') +parser.add_argument('-t', '--testing', metavar='version_testing', default='off', help='Version test, [off|]; default=off') +parser.add_argument('-o', '--out', metavar='output_file', required=True, help='filename to write output in json format to') + +args = parser.parse_args() +if len(sys.argv)==1: + parser.print_help(sys.stderr) + sys.exit(1) + +api_host = args.apihost +api_port = args.port +config_filename = args.out +with open(args.password, "r") as password_file: + api_password = password_file.read().rstrip() +api_domain = args.domain +proxy_string = { "http" : args.proxy, "https" : args.proxy } +offset = 0 +limit = args.limit +details_level = "full" # 'standard' +test_version = args.testing +base_url = 'https://' + api_host + ':' + api_port +json_indent=2 +use_object_dictionary = 'false' +#limit="25" + +# logging config +debug_level = int(args.debug) +common.set_log_level(log_level=debug_level, debug_level=debug_level) +ssl_verification = getter.set_ssl_verification(args.ssl) + +starttime = int(time.time()) +# top level dict start +sid = getter.login(args.user,api_password,api_host,args.port,api_domain,ssl_verification, proxy_string=proxy_string,debug=debug_level) +v_url = getter.get_api_url (sid, api_host, args.port, args.user, base_url, limit, test_version,ssl_verification, proxy_string) + +config_json = {} + +# get global objects +getter.update_config_with_fortinet_api_call(config_json, sid, v_url, "/pm/config/adom/root/obj/firewall/address", "ipv4_objects", debug=debug_level) +# api_url = "/pm/config/adom/global/obj/firewall/address" # --> error +getter.update_config_with_fortinet_api_call(config_json, sid, v_url, "/pm/config/adom/root/obj/firewall/address6", "ipv6_objects", debug=debug_level) + +getter.update_config_with_fortinet_api_call(config_json, sid, v_url, "/pm/config/global/obj/application/list", "app_list_objects", debug=debug_level) +getter.update_config_with_fortinet_api_call(config_json, sid, v_url, "/pm/config/global/obj/application/group", "app_group_objects", debug=debug_level) +getter.update_config_with_fortinet_api_call(config_json, sid, v_url, "/pm/config/global/obj/application/categories", "app_categories", debug=debug_level) + +# user: /pm/config/global/obj/user/local 
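+# note: judging from getter.api_call() further below, each update_config_with_fortinet_api_call()
+# fetch presumably boils down to a JSON-RPC POST against base_url + '/jsonrpc' with a payload like
+#   { "method": "get", "session": sid, "params": [ { "url": "/pm/config/global/obj/user/local" } ] }
+# and stores the JSON result in config_json under the given key (here: "users_local")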
+getter.update_config_with_fortinet_api_call(config_json, sid, v_url, "/pm/config/global/obj/user/local", "users_local", debug=debug_level) + +# get all custom adoms: +q_get_custom_adoms = { "params": [ { "fields": ["name", "oid", "uuid"], "filter": ["create_time", "<>", 0] } ] } +adoms = getter.fortinet_api_call(sid, v_url, '/dvmdb/adom', payload=q_get_custom_adoms, debug=debug_level) + +# get root adom: +q_get_root_adom = { "params": [ { "fields": ["name", "oid", "uuid"], "filter": ["name", "==", "root"] } ] } +adom_root = getter.fortinet_api_call(sid, v_url, '/dvmdb/adom', payload=q_get_root_adom, debug=debug_level).pop() +adoms.append(adom_root) +config_json.update({ "adoms": adoms }) + +# for each adom get devices +for adom in config_json["adoms"]: + q_get_devices_per_adom = { "params": [ { "fields": ["name", "desc", "hostname", "vdom", "ip", "mgmt_id", "mgt_vdom", "os_type", "os_ver", "platform_str", "dev_status"] } ] } + devs = getter.fortinet_api_call(sid, v_url, "/dvmdb/adom/" + adom["name"] + "/device", payload=q_get_devices_per_adom, debug=debug_level) + adom.update({"devices": devs}) + +# for each adom get packages +for adom in config_json["adoms"]: + packages = getter.fortinet_api_call(sid, v_url, "/pm/pkg/adom/" + adom["name"], debug=debug_level) + adom.update({"packages": packages}) + +# todo: find mapping device <--> package +# todo: consolidate nat rules in a single rulebase +# todo: consolidate global and pkg-local rules in a single rulebase + +# get rulebases per pkg per adom +for adom in config_json["adoms"]: + for pkg in adom["packages"]: + rulebase = getter.fortinet_api_call(sid, v_url, "/pm/config/adom/" + adom['name'] + "/pkg/" + pkg['name'] + "/firewall/policy", debug=debug_level) + pkg.update({"rulebase": rulebase}) + +# get global policies: +global_header_policy = getter.fortinet_api_call(sid, v_url, "/pm/config/global/pkg/default/global/header/consolidated/policy", debug=debug_level) +config_json.update({"global_header_policy": global_header_policy}) +global_footer_policy = getter.fortinet_api_call(sid, v_url, "/pm/config/global/pkg/default/global/footer/consolidated/policy", debug=debug_level) +config_json.update({"global_footer_policy": global_footer_policy}) + +# get nat rules per pkg per adom +for adom in config_json["adoms"]: + for pkg in adom["packages"]: + central_snat_rulebase = getter.fortinet_api_call(sid, v_url, "/pm/config/adom/" + adom['name'] + "/pkg/" + pkg['name'] + "/firewall/central-snat-map", debug=debug_level) + central_dnat_rulebase = getter.fortinet_api_call(sid, v_url, "/pm/config/adom/" + adom['name'] + "/pkg/" + pkg['name'] + "/firewall/central/dnat", debug=debug_level) + pkg.update({"central_snat_rulebase": central_snat_rulebase}) + pkg.update({"central_dnat_rulebase": central_dnat_rulebase}) + +# now dumping results to file +with open(config_filename, "w") as configfile_json: + configfile_json.write(json.dumps(config_json)) + +getter.logout(v_url, sid, ssl_verification, proxy_string=proxy_string,debug=debug_level) +duration = int(time.time()) - starttime +logging.debug ( "fortiManager/get_config - duration: " + str(duration) + "s" ) +sys.exit(0) diff --git a/roles/importer/files/importer/fortiManager/getter.py b/roles/importer/files/importer/fortiManager/getter.py new file mode 100644 index 000000000..0001ca576 --- /dev/null +++ b/roles/importer/files/importer/fortiManager/getter.py @@ -0,0 +1,122 @@ +# library for API get functions +import sys +sys.path.append(r"/usr/local/fworch/importer") +import json, argparse, pdb +import time, 
+import os
+import requests, requests.packages.urllib3
+import common
+
+requests.packages.urllib3.disable_warnings()  # suppress ssl warnings only
+
+details_level = "full"  # 'standard'
+use_object_dictionary = 'false'
+
+def api_call(url, command, json_payload, sid, ssl_verification='', proxy_string='', show_progress=False, method='', debug=0):
+    request_headers = {'Content-Type': 'application/json'}
+    if sid != '':
+        json_payload.update({"session": sid})
+    if command != '':
+        for p in json_payload['params']:
+            p.update({"url": command})
+    if method == '':
+        method = 'get'
+    json_payload.update({"method": method})
+
+    try:
+        r = requests.post(url, data=json.dumps(json_payload), headers=request_headers, verify=ssl_verification, proxies=proxy_string)
+        r.raise_for_status()
+    except requests.exceptions.RequestException:
+        logging.exception("\nerror while sending api_call to url '" + str(url) + "' with payload '" + json.dumps(json_payload, indent=2) + "' and headers: '" + json.dumps(request_headers, indent=2) + "'")
+        sys.exit(1)
+    if debug > 0:
+        logging.debug("\napi_call to url '" + str(url) + "' with payload '" + json.dumps(json_payload, indent=2) + "' and headers: '" + json.dumps(request_headers, indent=2) + "'")
+
+    if show_progress:
+        print('.', end='', flush=True)
+    return r.json()
+
+
+def login(user, password, api_host, api_port, domain, ssl_verification, proxy_string, debug=0):
+    payload = {
+        "id": 1,
+        "params": [
+            {
+                "data": [
+                    {
+                        "user": user,
+                        "passwd": password,
+                    }
+                ]
+            }
+        ]
+    }
+    base_url = 'https://' + api_host + ':' + api_port + '/jsonrpc'
+    response = api_call(base_url, 'sys/login/user', payload, '', ssl_verification=ssl_verification, proxy_string=proxy_string, method="exec", debug=debug)
+    if "session" not in response:
+        logging.error("\ngetter ERROR: did not receive a session id during login, " +
+                      "api call: api_host: " + str(api_host) + ", api_port: " + str(api_port) + ", base_url: " + str(base_url) + ", payload: " + str(payload) +
+                      ", ssl_verification: " + str(ssl_verification) + ", proxy_string: " + str(proxy_string))
+        sys.exit(1)
+    return response["session"]
+
+
+def logout(v_url, sid, ssl_verification='', proxy_string='', debug=0, method='exec'):
+    payload = { "params": [ {} ] }
+
+    response = api_call(v_url, 'sys/logout', payload, sid, ssl_verification=ssl_verification, proxy_string=proxy_string, method="exec", debug=debug)
+    if "result" in response and "status" in response["result"][0] and "code" in response["result"][0]["status"] and response["result"][0]["status"]["code"]==0:
+        logging.debug("\nsuccessfully logged out")
+    else:
+        logging.error("\ngetter ERROR: did not get status code 0 when logging out, " +
+                      "api call: url: " + str(v_url) + ", payload: " + str(payload) + ", ssl_verification: " + str(ssl_verification) + ", proxy_string: " + str(proxy_string))
+        sys.exit(1)
+
+
+def set_ssl_verification(ssl_verification_mode):
+    logger = logging.getLogger(__name__)
+    if ssl_verification_mode == '' or ssl_verification_mode == 'off':
+        ssl_verification = False
+        logger.debug("ssl_verification: False")
+    else:
+        ssl_verification = ssl_verification_mode
+        logger.debug("ssl_verification: [ca]certfile=" + ssl_verification)
+    return ssl_verification
+
+
+def get_api_url(sid, api_host, api_port, user, base_url, limit, test_version, ssl_verification, proxy_string):
+    return base_url + '/jsonrpc'
+
+
+def set_api_url(base_url, testmode, api_supported, hostname):
+    logger = logging.getLogger(__name__)
+    url = ''
+    if testmode == 'off':
+        url = base_url
+    else:
+        if re.search(r'^\d+(\.\d+)+$', testmode) or re.search(r'^\d+$', testmode):
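+            # e.g. a testmode of '6.4' (assuming '6.4' is listed in api_supported)
+            # yields url = base_url + 'v6.4/'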
+            if testmode in api_supported:
+                url = base_url + 'v' + testmode + '/'
+            else:
+                logger.debug("api version " + testmode + " is not supported by the manager " + hostname + " - Import is canceled")
+                sys.exit("api version " + testmode + " not supported")
+        else:
+            logger.debug("not a valid version")
+            sys.exit("\"" + testmode + "\" - not a valid version")
+    logger.debug("testmode: " + testmode + " - url: " + url)
+    return url
+
+
+def update_config_with_fortinet_api_call(config_json, sid, api_base_url, api_path, result_name, payload={}, ssl_verification='', proxy_string="", show_progress=False, debug=0):
+    result = fortinet_api_call(sid, api_base_url, api_path, payload=payload, ssl_verification=ssl_verification, proxy_string=proxy_string, show_progress=show_progress, debug=debug)
+    config_json.update({result_name: result})
+
+
+def fortinet_api_call(sid, api_base_url, api_path, payload={}, ssl_verification='', proxy_string="", show_progress=False, debug=0):
+    if payload == {}:
+        payload = { "params": [ {} ] }
+    result = api_call(api_base_url, api_path, payload, sid, ssl_verification, proxy_string, debug=debug)
+    plain_result = result["result"][0]
+    if "data" in plain_result:
+        result = plain_result["data"]
+    else:
+        result = {}
+    return result
diff --git a/roles/importer/files/importer/fwo_api.py b/roles/importer/files/importer/fwo_api.py
new file mode 100644
index 000000000..2aaedf30d
--- /dev/null
+++ b/roles/importer/files/importer/fwo_api.py
@@ -0,0 +1,311 @@
+# library for FWORCH API calls
+import re
+import logging
+import requests.packages
+import requests
+import json
+import sys
+base_dir = "/usr/local/fworch/"
+sys.path.append(base_dir + '/importer')
+
+requests.packages.urllib3.disable_warnings()  # suppress ssl warnings only
+
+details_level = "full"  # 'standard'
+use_object_dictionary = 'false'
+
+# call(fwo_api_base_url, jwt, lock_mutation, query_variables=query_variables);
+
+
+def call(url, jwt, query, query_variables="", role="reporter", ssl_verification='', proxy_string='', show_progress=False, method='', debug=0):
+    request_headers = {
+        'Content-Type': 'application/json',
+        'Authorization': 'Bearer ' + jwt,
+        'x-hasura-role': role
+    }
+    full_query = {"variables": query_variables, "query": query}
+
+    try:
+        r = requests.post(url, data=json.dumps(
+            full_query), headers=request_headers, verify=ssl_verification, proxies=proxy_string)
+        r.raise_for_status()
+    except requests.exceptions.RequestException as e:
+        logging.exception("\nerror while sending api_call to url " + str(url) + " with payload \n" +
+                          json.dumps(full_query, indent=2) + "\n and headers: \n" + json.dumps(request_headers, indent=2))
+        raise SystemExit(e) from None
+
+    if debug > 0:
+        logging.debug("\napi_call to url '" + str(url) + "' with payload '" + json.dumps(full_query, indent=2) + "' and headers: '" +
+                      json.dumps(request_headers, indent=2) + "'")
+    if show_progress:
+        print('.', end='', flush=True)
+    return r.json()
+
+
+def login(user, password, user_management_api_base_url, method, ssl_verification=False, proxy_string='', debug=0):
+    payload = {"Username": user, "Password": password}
+    request_headers = {'Content-Type': 'application/json'}
+
+    try:
+        response = requests.post(user_management_api_base_url + method, data=json.dumps(
+            payload), headers=request_headers, verify=ssl_verification, proxies=proxy_string)
+        response.raise_for_status()
+        #content = response.content
+    except requests.exceptions.RequestException as e:
+        logging.exception("\nfwo_api: error while sending api_call to url " + str(user_management_api_base_url) + " with payload \n" +
to url " + str(user_management_api_base_url) + " with payload \n" + + json.dumps(payload, indent=2) + "\n and headers: \n" + json.dumps(request_headers, indent=2)) + raise SystemExit(e) from None + + jsonResponse = response.json() + if 'jwt' in jsonResponse: + return jsonResponse["jwt"] + logging.exception("\nfwo_api: getter ERROR: did not receive a JWT during login, " + + ", api_url: " + str(user_management_api_base_url) + ", payload: " + str(payload) + + ", ssl_verification: " + str(ssl_verification) + ", proxy_string: " + str(proxy_string)) + sys.exit(1) + + +def set_ssl_verification(ssl_verification_mode): + logger = logging.getLogger(__name__) + if ssl_verification_mode == '' or ssl_verification_mode == 'off': + ssl_verification = False + logger.debug("ssl_verification: False") + else: + ssl_verification = ssl_verification_mode + logger.debug("ssl_verification: [ca]certfile=" + ssl_verification) + return ssl_verification + + +def get_api_url(sid, api_host, api_port, user, base_url, limit, test_version, ssl_verification, proxy_string): + return base_url + '/jsonrpc' + + +def set_api_url(base_url, testmode, api_supported, hostname): + logger = logging.getLogger(__name__) + url = '' + if testmode == 'off': + url = base_url + else: + if re.search(r'^\d+[\.\d+]+$', testmode) or re.search(r'^\d+$', testmode): + if testmode in api_supported: + url = base_url + 'v' + testmode + '/' + else: + logger.debug("api version " + testmode + + " is not supported by the manager " + hostname + " - Import is canceled") + sys.exit("api version " + testmode + " not supported") + else: + logger.debug("not a valid version") + sys.exit("\"" + testmode + "\" - not a valid version") + logger.debug("testmode: " + testmode + " - url: " + url) + return url + + +def update_config_with_fortinet_api_call(config_json, sid, api_base_url, api_path, result_name, payload={}, ssl_verification='', proxy_string="", show_progress=False, debug=0): + result = fortinet_api_call(sid, api_base_url, api_path, payload=payload, ssl_verification=ssl_verification, + proxy_string=proxy_string, show_progress=show_progress, debug=debug) + config_json.update({result_name: result}) + + +def fortinet_api_call(sid, api_base_url, api_path, payload={}, ssl_verification='', proxy_string="", show_progress=False, debug=0): + if payload == {}: + payload = {"params": [{}]} + result = call(api_base_url, api_path, payload, sid, + ssl_verification, proxy_string, debug=debug) + plain_result = result["result"][0] + if "data" in plain_result: + result = plain_result["data"] + else: + result = {} + return result + + +def get_mgm_details(fwo_api_base_url, jwt, query_variables): + mgm_query = """ + query getManagementDetails($mgmId: Int!) 
+    {
+      management(where:{mgm_id:{_eq:$mgmId}, do_not_import:{_eq:false}} order_by: {mgm_name: asc}) {
+        id: mgm_id
+        name: mgm_name
+        hostname: ssh_hostname
+        port: ssh_port
+        secret: ssh_private_key
+        sshPublicKey: ssh_public_key
+        user: ssh_user
+        deviceType: stm_dev_typ {
+          id: dev_typ_id
+          name: dev_typ_name
+          version: dev_typ_version
+        }
+        configPath: config_path
+        importDisabled: do_not_import
+        forceInitialImport: force_initial_import
+        importerHostname: importer_hostname
+        debugLevel: debug_level
+        lastConfigHash: last_import_md5_complete_config
+        devices(where:{do_not_import:{_eq:false}}) {
+          id: dev_id
+          name: dev_name
+          rulebase: dev_rulebase
+        }
+      }
+    }
+    """
+    return call(fwo_api_base_url, jwt, mgm_query, query_variables=query_variables, role='importer')['data']['management'][0]
+
+
+def lock_import(fwo_api_base_url, jwt, query_variables):
+    lock_mutation = "mutation lockImport($mgmId: Int!) { insert_import_control(objects: {mgm_id: $mgmId}) { returning { control_id } } }"
+    try:
+        lock_result = call(fwo_api_base_url, jwt, lock_mutation,
+                           query_variables=query_variables, role='importer')
+        current_import_id = lock_result['data']['insert_import_control']['returning'][0]['control_id']
+    except:
+        logging.exception(
+            "fwo_api: failed to get import lock for management id " + str(query_variables))
+        return -1
+    return current_import_id
+
+
+def count_changes_per_import(fwo_api_base_url, jwt, import_id):
+    change_count_query = """
+    query count_changes($importId: bigint!) {
+        changelog_object_aggregate(where: {control_id: {_eq: $importId}}) { aggregate { count } }
+        changelog_service_aggregate(where: {control_id: {_eq: $importId}}) { aggregate { count } }
+        changelog_user_aggregate(where: {control_id: {_eq: $importId}}) { aggregate { count } }
+        changelog_rule_aggregate(where: {control_id: {_eq: $importId}}) { aggregate { count } }
+    }"""
+    try:
+        count_result = call(fwo_api_base_url, jwt, change_count_query, query_variables={
+                            'importId': import_id}, role='importer')
+        changes_in_import = int(count_result['data']['changelog_object_aggregate']['aggregate']['count']) + \
+            int(count_result['data']['changelog_service_aggregate']['aggregate']['count']) + \
+            int(count_result['data']['changelog_user_aggregate']['aggregate']['count']) + \
+            int(count_result['data']['changelog_rule_aggregate']
+                ['aggregate']['count'])
+    except:
+        logging.exception(
+            "fwo_api: failed to count changes for import id " + str(import_id))
+        changes_in_import = 0
+    return changes_in_import
+
+
+def unlock_import(fwo_api_base_url, jwt, mgm_id, stop_time, current_import_id, error_count, change_count):
+    query_variables = {"stopTime": stop_time, "importId": current_import_id,
+                       "success": error_count == 0, "changesFound": change_count > 0}
+
+    unlock_mutation = """
+    mutation unlockImport($importId: bigint!, $stopTime: timestamp!, $success: Boolean, $changesFound: Boolean!) {
+        update_import_control(where: {control_id: {_eq: $importId}}, _set: {stop_time: $stopTime, successful_import: $success, changes_found: $changesFound}) {
+            affected_rows
+        }
+    }"""
+
+    try:
+        unlock_result = call(fwo_api_base_url, jwt, unlock_mutation,
+                             query_variables=query_variables, role='importer')
+        changes_in_import_control = unlock_result['data']['update_import_control']['affected_rows']
+    except:
+        logging.exception(
+            "fwo_api: failed to unlock import for management id " + str(mgm_id))
+        changes_in_import_control = 0
+    return changes_in_import_control-1
+
+
+# this effectively clears the management!
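+# hypothetical usage sketch (delete_import() is not called anywhere yet; only a
+# commented-out call exists in import_mgm.py) - it returns 0 on success, 1 on error:
+#   if delete_import(fwo_api_base_url, jwt, current_import_id) != 0:
+#       logging.error("fwo_api: could not delete import " + str(current_import_id))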
+def delete_import(fwo_api_base_url, jwt, current_import_id):
+    query_variables = {"importId": current_import_id}
+
+    delete_import_mutation = """
+    mutation deleteImport($importId: bigint!) {
+        delete_import_control(where: {control_id: {_eq: $importId}}) { affected_rows }
+    }"""
+
+    try:
+        result = call(fwo_api_base_url, jwt, delete_import_mutation,
+                      query_variables=query_variables, role='importer')
+        api_changes = result['data']['delete_import_control']['affected_rows']
+    except:
+        logging.exception(
+            "fwo_api: failed to delete import for import id " + str(current_import_id))
+        return 1  # signalling an error
+    if api_changes == 1:
+        return 0  # return code 0 is ok
+    else:
+        return 1
+
+
+def import_json_config(fwo_api_base_url, jwt, mgm_id, query_variables):
+    import_mutation = """
+    mutation import($importId: bigint!, $mgmId: Int!, $config: jsonb!) {
+        insert_import_config(objects: {import_id: $importId, mgm_id: $mgmId, config: $config}) {
+            affected_rows
+        }
+    }
+    """
+
+    try:
+        import_result = call(fwo_api_base_url, jwt, import_mutation,
+                             query_variables=query_variables, role='importer')
+        if 'errors' in import_result:
+            logging.exception("fwo_api:import_json_config - error while writing importable config for mgm id " +
+                              str(mgm_id) + ": " + str(import_result['errors']))
+        changes_in_import_control = import_result['data']['insert_import_config']['affected_rows']
+    except:
+        logging.exception(
+            "fwo_api: failed to write importable config for mgm id " + str(mgm_id))
+        return 2  # indicating an error because we are expecting exactly one change
+    return changes_in_import_control-1
+
+
+def delete_json_config(fwo_api_base_url, jwt, query_variables):
+    delete_mutation = """
+    mutation delete_import_config($importId: bigint!) {
+        delete_import_config(where: {import_id: {_eq: $importId}}) { affected_rows }
+    }
+    """
+
+    try:
+        delete_result = call(fwo_api_base_url, jwt, delete_mutation,
+                             query_variables=query_variables, role='importer')
+        changes_in_delete_config = delete_result['data']['delete_import_config']['affected_rows']
+    except:
+        logging.exception("fwo_api: failed to delete config without changes")
+        return 2  # indicating an error because we are expecting exactly one change
+    return changes_in_delete_config-1
+
+
+def store_full_json_config(fwo_api_base_url, jwt, mgm_id, query_variables):
+    import_mutation = """
+    mutation store_full_config($importId: bigint!, $mgmId: Int!, $config: jsonb!) {
+        insert_import_full_config(objects: {import_id: $importId, mgm_id: $mgmId, config: $config}) {
+            affected_rows
+        }
+    }
+    """
+
+    try:
+        import_result = call(fwo_api_base_url, jwt, import_mutation,
+                             query_variables=query_variables, role='importer')
+        changes_in_import_full_config = import_result['data']['insert_import_full_config']['affected_rows']
+    except:
+        logging.exception(
+            "fwo_api: failed to write full config for mgm id " + str(mgm_id))
+        return 2  # indicating 1 error because we are expecting exactly one change
+    return changes_in_import_full_config-1
+
+
+def delete_full_json_config(fwo_api_base_url, jwt, query_variables):
+    delete_mutation = """
+    mutation delete_import_full_config($importId: bigint!) {
+        delete_import_full_config(where: {import_id: {_eq: $importId}}) {
+            affected_rows
+        }
+    }
+    """
+
+    try:
+        delete_result = call(fwo_api_base_url, jwt, delete_mutation,
+                             query_variables=query_variables, role='importer')
+        changes_in_delete_full_config = delete_result['data']['delete_import_full_config']['affected_rows']
+    except:
+        logging.exception("fwo_api: failed to delete full config")
+        return 2  # indicating an error because we are expecting exactly one change
+    return changes_in_delete_full_config-1
diff --git a/roles/importer/files/importer/fworch-importer-single.pl b/roles/importer/files/importer/fworch-importer-single.pl
index dc205653d..4404a67b4 100755
--- a/roles/importer/files/importer/fworch-importer-single.pl
+++ b/roles/importer/files/importer/fworch-importer-single.pl
@@ -147,7 +147,10 @@ sub empty_config_files { # deletes a csv config file and creates an empty csv fi
 		$current_import_id, "$cfg_dir/$audit_log_file", $prev_imp_time, $fullauditlog, $debug_level);
 	if ($error_count_local) {
 		$error_count_global = &error_handler_add( $current_import_id, $error_level = 3, "parse-$error_count_local", $error_count_local=1, $error_count_global);
-		$error_count_local = &exec_pgsql_cmd_no_result("SELECT remove_import_lock($current_import_id)");
+		if (defined($current_import_id))
+		{
+			$error_count_local = &exec_pgsql_cmd_no_result("SELECT remove_import_lock($current_import_id)");
+		}
 		$error_count_global = &error_handler_add ($current_import_id, $error_level = 3, "remove-import-lock-failed: $error_count_local", $error_count_local, $error_count_global);
 	}
@@ -212,7 +215,6 @@ sub empty_config_files { # deletes a csv config file and creates an empty csv fi
 		print("found no errors during import\n");
 	}
 	$error_count_global = &error_handler_add ($current_import_id, $error_level = 3, "", $error_count_local, $error_count_global);
-#	&read_user_client_classification_from_ldap ($error_count_local=1, $current_import_id);
 	$changes = &exec_pgsql_cmd_return_value("SELECT show_change_summary($current_import_id)");
 	# updating md5sum
 	if (!$error_count_global) { &exec_pgsql_cmd_no_result("UPDATE management SET last_import_md5_complete_config='$new_md5sum' WHERE mgm_id=$mgm_id"); }
@@ -236,7 +238,10 @@ sub empty_config_files { # deletes a csv config file and creates an empty csv fi
 		}
 	}
 	# Cleanup and statistics
-	&exec_pgsql_cmd_no_result("SELECT remove_import_lock($current_import_id)"); # this sets import_control.stop_time to now()
+	if (defined($current_import_id))
+	{
+		&exec_pgsql_cmd_no_result("SELECT remove_import_lock($current_import_id)"); # this sets import_control.stop_time to now()
+	}
 	&clean_up_fworch_db($current_import_id);
 	if (defined($save_import_results_to_file) && $save_import_results_to_file && ($error_count_global || $changes ne '')) { # if changes or errors occured: move config & csv to archive
 		system ("${bin_path}mkdir -p $archive_dir; cd $fworch_workdir; ${bin_path}tar cfz $archive_dir/${current_import_id}_`${bin_path}date +%F_%T`_mgm_id_$mgm_id.tgz .");
@@ -244,6 +249,9 @@ sub empty_config_files { # deletes a csv config file and creates an empty csv fi
 		#`cp -f $fworch_workdir/cfg/*.cfg /var/itsecorg/fw-config/`; # special backup for several configs - dos-box
 		if (!$no_cleanup) { rmtree $fworch_workdir; }
 	} else {
-		&exec_pgsql_cmd_no_result("SELECT remove_import_lock($current_import_id)"); # this sets import_control.stop_time to now()
+		if (defined($current_import_id))
+		{
+			&exec_pgsql_cmd_no_result("SELECT remove_import_lock($current_import_id)"); # this sets import_control.stop_time to now()
+		}
 	}
 	exit ($error_count_global);
diff --git a/roles/importer/files/importer/import_mgm.py b/roles/importer/files/importer/import_mgm.py
new file mode 100644
index 000000000..6a5366138
--- /dev/null
+++ b/roles/importer/files/importer/import_mgm.py
@@ -0,0 +1,171 @@
+#!/usr/bin/python3
+# master plan import target design
+
+# add main importer loop in python (also able to run distributed)
+# run import loop every x seconds (adjust sleep time per management depending on the change frequency)
+# import a single management (if no import for it is running)
+# lock mgmt for import via FWORCH API call, generating new import_id y
+# check if we need to import (no md5, api call if anything has changed since last import)
+# get complete config (get, enrich, parse)
+# write into json dict, write json dict to new table (single entry for complete config)
+# trigger import from json into csv and from there into destination tables
+# release mgmt for import via FWORCH API call (also removing import_id y data from import_tables?)
+# no changes: remove import_control?
+
+import logging
+import argparse
+import time
+import common
+import checkpointR8x.parse_rule
+import checkpointR8x.parse_user
+import checkpointR8x.parse_service
+import checkpointR8x.parse_network
+import checkpointR8x.cpcommon
+import requests.packages
+import requests
+import os
+import json
+import datetime
+import fwo_api
+from pathlib import Path
+import sys
+import socket
+base_dir = "/usr/local/fworch"
+sys.path.append(base_dir + '/importer')
+sys.path.append(base_dir + '/importer/checkpointR8x')
+
+# use CACTUS::read_config;
+
+parser = argparse.ArgumentParser(
+    description='Read configuration from FW management via API calls')
+parser.add_argument('-m', '--mgm_id', metavar='management_id',
+                    required=True, help='FWORCH DB ID of the management server to import')
+parser.add_argument('-c', '--clear', metavar='clear_management', default=False,
+                    help='If set the import will delete all data for the given management instead of importing')
+parser.add_argument('-f', '--force', metavar='force_import', default=False,
+                    help='If set the import will be attempted even if there seem to be no changes.')
+parser.add_argument('-d', '--debug', metavar='debug_level', default='0',
+                    help='Debug Level: 0=off, 1=send debug to console, 2=send debug to file, 3=keep temporary config files; default=0')
+parser.add_argument('-x', '--proxy', metavar='proxy_string', default='',
+                    help='proxy server string to use, e.g. 1.2.3.4:8080; default=empty')
+parser.add_argument('-s', '--ssl', metavar='ssl_verification_mode', default='',
+                    help='[ca]certfile; if value not set, ssl check is off; default=empty/off')
+parser.add_argument('-i', '--limit', metavar='api_limit', default='500',
+                    help='maximum number of results returned per HTTPS connection; default=500')
+parser.add_argument('-t', '--testing', metavar='version_testing',
+                    default='off', help='version test, [off|<version number>]; default=off')
+parser.add_argument('-o', '--out', metavar='output_file',
+                    default=False, help='filename to write output in json format to, "False" if not writing to file')
+
+args = parser.parse_args()
+if len(sys.argv) == 1:
+    parser.print_help(sys.stderr)
+    sys.exit(1)
+
+error_count = 0
+importer_user_name = 'importer'
+start_time = int(time.time())
+
+requests.packages.urllib3.disable_warnings()  # suppress ssl warnings only
+
+debug_level = int(args.debug)
+common.set_log_level(log_level=debug_level, debug_level=debug_level)
+
+user_management_api_base_url = 'https://localhost:8888/'
+method = 'AuthenticateUser'
+ssl_mode = args.ssl
+proxy_setting = args.proxy
+
+# authenticate to get JWT
+importer_pwd_file = base_dir + '/etc/secrets/importer_pwd'
+with open(importer_pwd_file, 'r') as file:
+    importer_pwd = file.read().replace('\n', '')
+jwt = fwo_api.login(importer_user_name, importer_pwd, user_management_api_base_url,
+                    method, ssl_verification=ssl_mode, proxy_string=proxy_setting)
+
+fwo_api_base_url = 'https://localhost:9443/api/v1/graphql'  # todo: read url from config
+
+# get mgm_details (fw-type, port, ip, user credentials):
+mgm_details = fwo_api.get_mgm_details(fwo_api_base_url, jwt, {"mgmId": int(args.mgm_id)})
+
+# only run if this host is responsible for the given management
+if mgm_details['importerHostname'] != socket.gethostname():
+    logging.info("we are not responsible for importing this management - skipping")
+    sys.exit(0)
+
+# set import lock
+current_import_id = fwo_api.lock_import(fwo_api_base_url, jwt, {"mgmId": int(args.mgm_id)})
+if current_import_id == -1:
+    logging.warning("error while setting import lock for management id " +
+                    str(args.mgm_id) + ", import already running?")
+    sys.exit(1)
+
+logging.info("start import of management " + str(args.mgm_id) +
+             ", import_id=" + str(current_import_id))
+
+full_config_json = {}
+config2import = {}
+import_tmp_path = base_dir + '/tmp/import'
+Path(import_tmp_path).mkdir(parents=True, exist_ok=True)
+
+config_filename = import_tmp_path + '/mgm_id_' + \
+    str(args.mgm_id) + '_config.json'
+
+with open(config_filename, "w") as json_data:  # create empty config file
+    json_data.write(json.dumps(full_config_json))
+secret_filename = base_dir + '/tmp/import/mgm_id_' + \
+    str(args.mgm_id) + '_secret.txt'
+with open(secret_filename, "w") as secret:  # write pwd to disk to avoid passing it as parameter
+    secret.write(mgm_details['secret'])
+
+rulebase_string = ''
+for device in mgm_details['devices']:
+    rulebase_string += device['rulebase'] + ','
+rulebase_string = rulebase_string[:-1]  # remove final comma
+
+# get config from FW API and write to json file
+
+if mgm_details['deviceType']['name'] == 'Fortinet' and mgm_details['deviceType']['version'] == '5.x-6.x':
+    logging.info("ignoring legacy fortinet devices for now")
+if mgm_details['deviceType']['name'] == 'FortiManager':
+    logging.info("found fortiManager")
+    os.system("fortiManager.parse_config -f " + config_filename)
+if mgm_details['deviceType']['name'] == 'Check Point' and mgm_details['deviceType']['version'] == 'R8x':
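+    # the Check Point R8x module does get + enrich + parse in one step: it fills
+    # config2import in place and - judging by the read-back further down - writes
+    # the raw config to config_filename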
+    checkpointR8x.cpcommon.parse_config_cp_main(config2import, current_import_id, base_dir, mgm_details, secret_filename, rulebase_string, config_filename, debug_level)
+
+#### now we import the config via API:
+error_count += fwo_api.import_json_config(fwo_api_base_url, jwt, args.mgm_id, {
+    "importId": current_import_id, "mgmId": args.mgm_id, "config": config2import})
+
+# todo: if error_count>0:
+# get error from import_control table? and show it
+
+change_count = fwo_api.count_changes_per_import(
+    fwo_api_base_url, jwt, current_import_id)
+
+if change_count > 0 or error_count > 0:  # store full config in case of change or error
+    with open(config_filename, "r") as json_data:
+        full_config_json = json.load(json_data)
+
+    error_count += fwo_api.store_full_json_config(fwo_api_base_url, jwt, args.mgm_id, {
+        "importId": current_import_id, "mgmId": args.mgm_id, "config": full_config_json})
+
+stop_time = int(time.time())
+stop_time_string = datetime.datetime.now().isoformat()
+
+# delete configs of imports without changes (if no error occurred)
+if change_count == 0 and error_count == 0:
+    error_count += fwo_api.delete_json_config(
+        fwo_api_base_url, jwt, {"importId": current_import_id})
+    # error_count += fwo_api.delete_import(fwo_api_base_url, jwt, current_import_id)
+# finalize import by unlocking it
+error_count += fwo_api.unlock_import(fwo_api_base_url, jwt, int(
+    args.mgm_id), stop_time_string, current_import_id, error_count, change_count)
+
+
+print("import_mgm.py: import no. " + str(current_import_id) + " for management " + str(args.mgm_id) + " ran " +
+      str("with" if error_count else "without") + " errors, change_count: " + str(change_count) + ", duration: " +
+      str(int(time.time()) - start_time) + "s")
+
+sys.exit(0)
diff --git a/roles/lib/files/FWO_API_Client/APIcalls/report/addGeneratedReport.graphql b/roles/lib/files/FWO_API_Client/APIcalls/report/addGeneratedReport.graphql
index 89b5c95c2..fa668bd64 100644
--- a/roles/lib/files/FWO_API_Client/APIcalls/report/addGeneratedReport.graphql
+++ b/roles/lib/files/FWO_API_Client/APIcalls/report/addGeneratedReport.graphql
@@ -26,4 +26,4 @@
     report_id
   }
 }
-}
\ No newline at end of file
+}
diff --git a/roles/middleware/tasks/main.yml b/roles/middleware/tasks/main.yml
index 5a4d5ec51..cd52d876d 100644
--- a/roles/middleware/tasks/main.yml
+++ b/roles/middleware/tasks/main.yml
@@ -110,9 +110,9 @@
 - name: set UI admin password randomly
   set_fact:
     admin_password: "{{ random_generated_pw }}"
-  when: testkeys is not defined or not testkeys|bool
+  when: (testkeys is not defined or not testkeys|bool) and installation_mode == 'new'
 
-- name: write admin_password password to secrets directory
+- name: write ui admin_password to secrets directory
   copy:
     content: "{{ admin_password }}"
     dest: "{{ ui_admin_password_file }}"
@@ -120,6 +120,7 @@
     owner: "{{ fworch_user }}"
     group: "{{ fworch_group }}"
   become: yes
+  when: installation_mode == 'new'
 
 - name: Set admin password in ldap
   ldap_passwd:
@@ -128,6 +129,31 @@
     server_uri: "{{ openldap_url }}"
     bind_dn: "{{ openldap_superuser_dn }}"
     bind_pw: "{{ ldap_manager_pwd }}"
+  when: installation_mode == 'new'
+
+- name: set importer user password randomly
+  set_fact:
+    importer_password: "{{ random_generated_pw }}"
+  when: (testkeys is not defined or not testkeys|bool) and installation_mode == 'new'
+
+- name: write importer_password to secrets directory
+  copy:
+    content: "{{ importer_password }}"
+    dest: "{{ importer_password_file }}"
+    mode: '0600'
+    owner: "{{ fworch_user }}"
+    group: "{{ fworch_group }}"
+  become: yes
+  when: installation_mode == 'new'
+
+- name: Set importer password in ldap
+  ldap_passwd:
+    dn: uid=importer,ou=tenant0,ou=operator,ou=user,{{ openldap_path }}
+    passwd: "{{ importer_password }}"
+    server_uri: "{{ openldap_url }}"
+    bind_dn: "{{ openldap_superuser_dn }}"
+    bind_pw: "{{ ldap_manager_pwd }}"
+  when: installation_mode == 'new'
 
 - name: Set {{ openldap_readonly_user_name }} password in ldap
   ldap_passwd:
diff --git a/roles/middleware/tasks/upgrade/5.1.17.yml b/roles/middleware/tasks/upgrade/5.1.17.yml
deleted file mode 100644
index 49b28277f..000000000
--- a/roles/middleware/tasks/upgrade/5.1.17.yml
+++ /dev/null
@@ -1,3 +0,0 @@
-- debug:
-    msg:
-      - "running dummy common upgrade script"
diff --git a/roles/middleware/tasks/upgrade/5.4.1.yml b/roles/middleware/tasks/upgrade/5.4.1.yml
new file mode 100644
index 000000000..e8e3cbe2b
--- /dev/null
+++ b/roles/middleware/tasks/upgrade/5.4.1.yml
@@ -0,0 +1,23 @@
+
+
+- name: set importer user password randomly
+  set_fact:
+    importer_password: "{{ random_generated_pw }}"
+  when: (testkeys is not defined or not testkeys|bool)
+
+- name: write importer_password to secrets directory
+  copy:
+    content: "{{ importer_password }}"
+    dest: "{{ importer_password_file }}"
+    mode: '0600'
+    owner: "{{ fworch_user }}"
+    group: "{{ fworch_group }}"
+  become: yes
+
+- name: Set importer password in ldap
+  ldap_passwd:
+    dn: uid=importer,ou=tenant0,ou=operator,ou=user,{{ openldap_path }}
+    passwd: "{{ importer_password }}"
+    server_uri: "{{ openldap_url }}"
+    bind_dn: "{{ openldap_superuser_dn }}"
+    bind_pw: "{{ ldap_manager_pwd }}"
diff --git a/roles/middleware/templates/ldif_files/tree_operators.ldif.j2 b/roles/middleware/templates/ldif_files/tree_operators.ldif.j2
index bee8bae0e..b26ca142f 100644
--- a/roles/middleware/templates/ldif_files/tree_operators.ldif.j2
+++ b/roles/middleware/templates/ldif_files/tree_operators.ldif.j2
@@ -6,6 +6,14 @@
 cn: admin
 sn: user
 
+dn: uid=importer,ou=tenant0,ou=operator,ou=user,{{ openldap_path }}
+changetype: {{ ldif_changetype }}
+objectClass: top
+objectclass: inetorgperson
+cn: importer
+sn: user
+
+
 {% if audit_user is defined -%}
 dn: uid={{ audit_user }},ou=tenant0,ou=operator,ou=user,{{ openldap_path }}
 changetype: {{ ldif_changetype }}
@@ -22,6 +30,12 @@
 add: uniquemember
 uniquemember: uid=admin,ou=tenant0,ou=operator,ou=user,dc=fworch,dc=internal
 
+dn: cn=importer,ou=role,{{ openldap_path }}
+changetype: modify
+add: uniquemember
+uniquemember: uid=importer,ou=tenant0,ou=operator,ou=user,dc=fworch,dc=internal
+
+
 {% if audit_user is defined -%}
 dn: cn=auditor,ou=role,{{ openldap_path }}
 changetype: modify
diff --git a/roles/middleware/templates/upgrade/5.4.1.ldif.j2 b/roles/middleware/templates/upgrade/5.4.1.ldif.j2
new file mode 100644
index 000000000..7a55abd0f
--- /dev/null
+++ b/roles/middleware/templates/upgrade/5.4.1.ldif.j2
@@ -0,0 +1,13 @@
+
+dn: uid=importer,ou=tenant0,ou=operator,ou=user,{{ openldap_path }}
+changetype: add
+objectClass: top
+objectclass: inetorgperson
+cn: importer
+sn: user
+
+
+dn: cn=importer,ou=role,{{ openldap_path }}
+changetype: modify
+add: uniquemember
+uniquemember: uid=importer,ou=tenant0,ou=operator,ou=user,dc=fworch,dc=internal
diff --git a/roles/sample-data/tasks/create-devices.yml b/roles/sample-data/tasks/create-devices.yml
index ee951b24e..d4b060900 100644
--- a/roles/sample-data/tasks/create-devices.yml
+++ b/roles/sample-data/tasks/create-devices.yml
@@ -21,10 +21,10 @@
     db: "{{ fworch_db_name }}"
     query: >
       DO $do$ BEGIN
-      IF NOT EXISTS (SELECT * FROM management WHERE mgm_name='fortigate_demo') THEN
+      IF NOT EXISTS (SELECT * FROM management WHERE mgm_name='{{ sample_fortigate_name }}') THEN
       insert into management (dev_typ_id,mgm_name,ssh_private_key,ssh_hostname,ssh_user,do_not_import,config_path,importer_hostname)
-      VALUES (10,'fortigate_demo','{{ sample_ssh_priv_key }}','{{ import_sample_server }}','{{ sample_config_user }}',false,'sample-configs/fortinet_demo/','{{ importer_hostname }}');
+      VALUES (10,'{{ sample_fortigate_name }}','{{ sample_ssh_priv_key }}','{{ import_sample_server }}','{{ sample_config_user }}',false,'sample-configs/fortinet_demo/','{{ importer_hostname }}');
       END IF; END $do$
 
 - name: insert test fortinet gateway
@@ -32,9 +32,9 @@
     db: "{{ fworch_db_name }}"
     query: >
       DO $do$ BEGIN
-      IF NOT EXISTS (SELECT * FROM device WHERE dev_name='fortigate_demo') THEN
+      IF NOT EXISTS (SELECT * FROM device WHERE dev_name='{{ sample_fortigate_name }}') THEN
       insert into device (mgm_id,dev_name,dev_rulebase,dev_typ_id)
-      VALUES ((select mgm_id from management where mgm_name='fortigate_demo'),'fortigate_demo','fortigate_demo',10);
+      VALUES ((select mgm_id from management where mgm_name='{{ sample_fortigate_name }}'),'{{ sample_fortigate_name }}','{{ sample_fortigate_name }}',10);
       END IF; END $do$
 
 - name: insert test check point R7x management
@@ -46,6 +46,7 @@
       insert into management (dev_typ_id,mgm_name,ssh_private_key,ssh_hostname,ssh_user,do_not_import,config_path,importer_hostname)
       VALUES (7,'checkpoint_demo','{{ sample_ssh_priv_key }}','{{ import_sample_server }}','{{ sample_config_user }}',false, 'sample-configs/checkpoint_demo/','{{ importer_hostname }}');
       END IF; END $do$
+  when: sample_role_purpose is not match('test')
 
 - name: insert test CPR7x device
   postgresql_query:
@@ -56,6 +57,7 @@
       insert into device (mgm_id,dev_name,dev_rulebase,dev_typ_id)
       VALUES ((select mgm_id from management where mgm_name='checkpoint_demo'),'checkpoint_demo','IsoAAAA',7);
       END IF; END $do$
+  when: sample_role_purpose is not match('test')
   become: yes
   become_user: postgres
diff --git a/roles/sample-data/tasks/main.yml b/roles/sample-data/tasks/main.yml
index 078a3148b..ffed5be15 100644
--- a/roles/sample-data/tasks/main.yml
+++ b/roles/sample-data/tasks/main.yml
@@ -42,7 +42,8 @@
 
 - name: establish cron job to simulate hourly changes to configs
   import_tasks: setup-config-changes.yml
+  when: sample_role_purpose is not match('test')
 
 - name: add second ldap database
   import_tasks: add_second_ldap_db.yml
-  when: second_ldap_db | bool
+  when: (second_ldap_db | bool) and (sample_role_purpose is not match('test'))
diff --git a/roles/sample-data/tasks/setup-sample-import.yml b/roles/sample-data/tasks/setup-sample-import.yml
index 9517c851d..bcd953453 100644
--- a/roles/sample-data/tasks/setup-sample-import.yml
+++ b/roles/sample-data/tasks/setup-sample-import.yml
@@ -3,7 +3,6 @@
   user:
     name: "{{ sample_config_user }}"
     comment: fworch import user for sample configs
-    uid: 60322
    home: "{{ sample_config_user_home }}"
     shell: /bin/bash
     group: "{{ fworch_group }}"
diff --git a/roles/test/tasks/main.yml b/roles/test/tasks/main.yml
index f1215d97e..506774ad7 100644
--- a/roles/test/tasks/main.yml
+++ b/roles/test/tasks/main.yml
@@ -24,6 +24,10 @@
 - name: csharp testing
   import_tasks: test-csharp.yml
 
+- name: importer testing
+  import_tasks: test-importer.yml
+  when: "'sampleserver' in group_names"
+
 # - name: remove all test data
 #   import_tasks: test-data-cleanup.yml
 #   when: test == 'with_cleanup'
diff --git a/roles/test/tasks/test-importer.yml b/roles/test/tasks/test-importer.yml
new file mode 100644
index 000000000..5213a1638
--- /dev/null
+++ b/roles/test/tasks/test-importer.yml
@@ -0,0 +1,44 @@
+---
+- name: include test data
+  include_role:
+    name: sample-data
+  vars:
+    sample_role_purpose: test
+    sample_fortigate_name: "{{ test_fortigate_name }}"
+    sample_config_user: fworchtest
+    sample_config_user_home: "/home/{{ sample_config_user }}"
+    import_sample_server: localhost
+
+- name: run test import
+  command: "./fworch-importer-single.pl mgm_name={{ test_fortigate_name }}"
+  args:
+    chdir: "{{ fworch_home }}/importer"
+  become_user: "{{ fworch_user }}"
+  become: yes
+
+- name: delete test user and dir
+  user:
+    name: fworchtest
+    state: absent
+    remove: yes
+  become: yes
+
+- name: delete test fortinet gateway
+  postgresql_query:
+    db: "{{ fworch_db_name }}"
+    query: >
+      DO $do$ BEGIN
+      DELETE FROM device WHERE dev_name='{{ test_fortigate_name }}';
+      END $do$
+  become: yes
+  become_user: postgres
+
+- name: delete test fortinet management
+  postgresql_query:
+    db: "{{ fworch_db_name }}"
+    query: >
+      DO $do$ BEGIN
+      DELETE FROM management WHERE mgm_name='{{ test_fortigate_name }}';
+      END $do$
+  become: yes
+  become_user: postgres
diff --git a/roles/ui/tasks/ui_install_dot_net.yml b/roles/ui/tasks/ui_install_dot_net.yml
index 2984dd2a9..65b060119 100644
--- a/roles/ui/tasks/ui_install_dot_net.yml
+++ b/roles/ui/tasks/ui_install_dot_net.yml
@@ -65,3 +65,10 @@
     state: restarted
     daemon_reload: yes
   become: yes
+
+
+#### debian 11 workaround as long as ms does not provide packages:
+# sudo apt install snapd
+# sudo snap install core
+# sudo snap install dotnet-runtime-50
+# sudo snap install dotnet-sdk --classic
diff --git a/site.yml b/site.yml
index 943695c20..99cdd9670 100644
--- a/site.yml
+++ b/site.yml
@@ -75,13 +75,6 @@
     - frontend
     - csharp
 
-- hosts: importers
-  roles:
-    - { role: importer, when: "not installation_mode == 'uninstall'" }
-  tags:
-    - frontend
-    - importer
-
 - hosts: sampleserver
   roles:
     - { role: sample-data, when: "not without_sample_data|bool and not installation_mode == 'uninstall'" }
@@ -99,13 +92,8 @@
 # todo: add a meta dependency on role sample-data
 
 - hosts: importers
-  tasks:
-    - name: restart importer service to make sure it works correctly with sample data
-      ansible.builtin.systemd:
-        name: "{{ product_name }}-importer"
-        state: restarted
-      become: yes
-      when: "not installation_mode == 'uninstall'"
+  roles:
+    - { role: importer, when: "not installation_mode == 'uninstall'" }
   tags:
     - frontend
     - importer
@@ -134,13 +122,25 @@
     - { role: webhook, when: install_webhook|bool and not installation_mode == 'uninstall' }
   tags:
     - test
-
+
 - hosts: all
   roles:
     - { role: test, when: "not installation_mode == 'uninstall'" }
   tags:
     - test
 
+- hosts: importers
+  tasks:
+    - name: restart importer service to make sure it works correctly with sample data
+      ansible.builtin.systemd:
+        name: "{{ product_name }}-importer"
+        state: restarted
+      become: yes
+      when: "not installation_mode == 'uninstall'"
+  tags:
+    - frontend
+    - importer
+
 - hosts: all
   tasks:
     - name: show listener status
@@ -158,4 +158,4 @@
         - "Your initial UI admin password is '{{ admin_password }}'"
         - "Your api hasura admin secret is '{{ api_hasura_admin_secret }}'"
       when: admin_password is defined and api_hasura_admin_secret is defined
-
+
\ No newline at end of file