From d77eca9f90f4431be8d65afbe298be3bbc3bc828 Mon Sep 17 00:00:00 2001
From: Federico Stagni
Date: Wed, 20 Mar 2024 17:43:38 +0100
Subject: [PATCH 1/5] docs: added a comment indicating source of workaround

---
 tests/CI/docker-compose.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/tests/CI/docker-compose.yml b/tests/CI/docker-compose.yml
index 529b7342882..bb5a49f53a8 100644
--- a/tests/CI/docker-compose.yml
+++ b/tests/CI/docker-compose.yml
@@ -101,7 +101,7 @@ services:
       - DIRACX_CONFIG_BACKEND_URL=git+file:///cs_store/initialRepo
       - DIRACX_SERVICE_AUTH_TOKEN_KEY=file:///signing-key/rs256.key
     pull_policy: always
-    command: ["sleep", "infinity"]
+    command: ["sleep", "infinity"] # Hack/workaround for the issue described in https://github.com/moby/moby/issues/42275.

   dirac-client:
@@ -114,7 +114,7 @@ services:
     ulimits:
       nofile: 8192
     pull_policy: always
-    command: ["sleep", "infinity"]
+    command: ["sleep", "infinity"] # Hack/workaround for the issue described in https://github.com/moby/moby/issues/42275.

   diracx-init-key:
     image: ghcr.io/diracgrid/diracx/secret-generation:latest

From 822d6000ea7a40226a95e9d9fa655366a0252f3a Mon Sep 17 00:00:00 2001
From: Federico Stagni
Date: Wed, 20 Mar 2024 17:49:31 +0100
Subject: [PATCH 2/5] fix: removed old performance tests

---
 tests/DISET_HTTPS_migration/README            |   2 -
 ...tReturnedValuesAndCredentialsExtraction.py | 118 ------------
 .../TestClientSelectionAndInterface.py        | 181 ------------------
 .../TestServerIntegration.py                  |  90 ---------
 .../multi-mechanize/distributed-test.py       |  46 -----
 .../multi-mechanize/ping/config.cfg           |  13 --
 .../ping/test_scripts/v_perf.py               |  18 --
 .../multi-mechanize/plot-distributedTest.py   | 174 -----------------
 .../multi-mechanize/plot.py                   |  53 -----
 .../multi-mechanize/service/config.cfg        |  13 --
 .../service/test_scripts/v_perf.py            |  25 ---
 11 files changed, 733 deletions(-)
 delete mode 100644 tests/DISET_HTTPS_migration/README
 delete mode 100644 tests/DISET_HTTPS_migration/TestClientReturnedValuesAndCredentialsExtraction.py
 delete mode 100644 tests/DISET_HTTPS_migration/TestClientSelectionAndInterface.py
 delete mode 100644 tests/DISET_HTTPS_migration/TestServerIntegration.py
 delete mode 100644 tests/DISET_HTTPS_migration/multi-mechanize/distributed-test.py
 delete mode 100644 tests/DISET_HTTPS_migration/multi-mechanize/ping/config.cfg
 delete mode 100644 tests/DISET_HTTPS_migration/multi-mechanize/ping/test_scripts/v_perf.py
 delete mode 100644 tests/DISET_HTTPS_migration/multi-mechanize/plot-distributedTest.py
 delete mode 100755 tests/DISET_HTTPS_migration/multi-mechanize/plot.py
 delete mode 100644 tests/DISET_HTTPS_migration/multi-mechanize/service/config.cfg
 delete mode 100644 tests/DISET_HTTPS_migration/multi-mechanize/service/test_scripts/v_perf.py

diff --git a/tests/DISET_HTTPS_migration/README b/tests/DISET_HTTPS_migration/README
deleted file mode 100644
index 2e8dea36dbb..00000000000
--- a/tests/DISET_HTTPS_migration/README
+++ /dev/null
@@ -1,2 +0,0 @@
-These tests were written in order to compare the existing DISET based services with the new Tornado/HTTPs based services.
-They were ran by hand, and are kept here just in case for the time being.
diff --git a/tests/DISET_HTTPS_migration/TestClientReturnedValuesAndCredentialsExtraction.py b/tests/DISET_HTTPS_migration/TestClientReturnedValuesAndCredentialsExtraction.py deleted file mode 100644 index 18354976884..00000000000 --- a/tests/DISET_HTTPS_migration/TestClientReturnedValuesAndCredentialsExtraction.py +++ /dev/null @@ -1,118 +0,0 @@ -""" - In this test we want to check if Tornado generate the same credentials dictionnary as DIRAC. - It also test if the correct certificates are sended by client. - - To run this test you must have the handlers who returns credentials dictionnary. - Handlers are diracCredDictHandler and tornadoCredDictHandler just returns these dictionnary - and are stored in DIRAC/FrameworkSystem/Service - - Then you have to start tornado using script tornado-start-all.py in DIRAC/TornadoServices/scripts - and diset with ``dirac-service Framework/diracCredDict`` before running test - - In configuration it have to be set as normal services, it will look like: - - ``` - # In Systems/Service//Framework - Services - { - tornadoCredDict - { - protocol = https - } - diracCredDict - { - Port = 3444 - DisableMonitoring = yes - } - } - ``` - - ``` - URLs - { - tornadoCredDict = https://localhost:443/Framework/tornadoCredDict - diracCredDict = dips://MrBoincHost:3444/Framework/diracCredDict - } - ``` - -""" -import DIRAC - -DIRAC.initialize() # Initialize configuration - -from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData - -from DIRAC.Core.Tornado.Client.TornadoClient import TornadoClient -from DIRAC.Core.DISET.RPCClient import RPCClient - -from pytest import mark - -parametrize = mark.parametrize - - -def get_RPC_returnedValue(serviceName, Client): - """ - Get credentials extracted tornado server or Dirac server - """ - service = Client(serviceName) - return service.credDict() - - -def get_all_returnedValues(): - """ - Just code factorisation to who call server and get credential dictionnary - """ - serviceNameTornado = "Framework/tornadoCredDict" - serviceNameDirac = "Framework/diracCredDict" - repTornado = TornadoClient(serviceNameTornado).whoami() - repDirac = RPCClient(serviceNameDirac).whoami() - return (repTornado, repDirac) - - -@parametrize("UseServerCertificate", ("true", "false")) -def test_return_credential_are_equals(UseServerCertificate): - """ - Check if certificates sended AND extraction have same comportement is DISET and HTTPS - """ - gConfigurationData.setOptionInCFG("/DIRAC/Security/UseServerCertificate", UseServerCertificate) - - (repTornado, repDirac) = get_all_returnedValues() - - # Service returns credentials - assert repDirac["Value"] == repTornado["Value"] - - -@parametrize("UseServerCertificate", ("True", "False")) -def test_rpcStubs_are_equals(UseServerCertificate): - """ - Test if Clients returns the same rpcStubs - - Navigating through array is a bit complicated in this test... 
- repDirac and repTornado may have the same structure: - - repDirac dict{ - OK: True - rpcStub: tuple{ - ServiceName: str - kwargs: dict{ - *** kwargs used to instanciate client *** - } - methodName: str - arguments: list - } - Value: dict { # NOT USED IN THIS TEST - *** Credentials dictionnary extracted by server *** - } - } - """ - - gConfigurationData.setOptionInCFG("/DIRAC/Security/UseServerCertificate", UseServerCertificate) - (repTornado, repDirac) = get_all_returnedValues() - - # Explicitly removed in Tornado - del repDirac["rpcStub"][0][1]["keepAliveLapse"] - - # rep['rpcStub'] is at form (rpcStub, method, args) where rpcStub is tuple with (serviceName, kwargs) - assert repTornado["rpcStub"][0][0] != repDirac["rpcStub"][0][0] # Services name are different - assert repTornado["rpcStub"][0][1] == repDirac["rpcStub"][0][1] # Check kwargs returned by rpcStub - assert repTornado["rpcStub"][1:] != repDirac["rpcStub"][1:] # Check method/args diff --git a/tests/DISET_HTTPS_migration/TestClientSelectionAndInterface.py b/tests/DISET_HTTPS_migration/TestClientSelectionAndInterface.py deleted file mode 100644 index 5c9080ccad9..00000000000 --- a/tests/DISET_HTTPS_migration/TestClientSelectionAndInterface.py +++ /dev/null @@ -1,181 +0,0 @@ -""" - Unit test on client selection: - - By default: RPCClient should be used - - If we use Tornado service TornadoClient is used - - Should work with - - 'Component/Service' - - URL - - List of URL - - Mock Config: - - Service using HTTPS with Tornado - - Service using Diset - - You don't need to setup anything, just run ``pytest TestClientSelection.py`` ! -""" -import os -import re - - -from pytest import mark, fixture - -from DIRAC.Core.Tornado.Client.ClientSelector import RPCClientSelector -from DIRAC.Core.Tornado.Client.TornadoClient import TornadoClient -from DIRAC.Core.DISET.RPCClient import RPCClient -from DIRAC.ConfigurationSystem.private.ConfigurationClient import ConfigurationClient -from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData -from diraccfg import CFG -from DIRAC.Core.Base.Client import Client -from DIRAC.Core.DISET.private.InnerRPCClient import InnerRPCClient - -parametrize = mark.parametrize - - -testCfgFileName = "test.cfg" - - -@fixture(scope="function") -def config(request): - """ - fixture is the pytest way to declare initalization function. - Scope = module significate that this function will be called only time for this file. - If no scope precised it call config for each test. - - This function can have a return value, it will be the value of 'config' argument for the tests - """ - - cfgContent = """ - DIRAC - { - Setup=TestSetup - Setups - { - TestSetup - { - WorkloadManagement=MyWM - } - } - } - Systems - { - WorkloadManagement - { - MyWM - { - URLs - { - ServiceDips = dips://$MAINSERVERS$:1234/WorkloadManagement/ServiceDips - ServiceHttps = https://$MAINSERVERS$:1234/WorkloadManagement/ServiceHttps - } - } - } - } - Operations{ - Defaults - { - MainServers = server1, server2 - } - } - """ - with open(testCfgFileName, "w") as f: - f.write(cfgContent) - gConfig = ConfigurationClient(fileToLoadList=[testCfgFileName]) # we replace the configuration by our own one. - - # def tearDown(): - # Wait for teardown - yield config - """ - This function is called at the end of the test. 
- """ - try: - os.remove(testCfgFileName) - except OSError: - pass - # SUPER UGLY: one must recreate the CFG objects of gConfigurationData - # not to conflict with other tests that might be using a local dirac.cfg - gConfigurationData.localCFG = CFG() - gConfigurationData.remoteCFG = CFG() - gConfigurationData.mergedCFG = CFG() - gConfigurationData.generateNewVersion() - print("TearDown") - # request is given by @fixture decorator, addfinalizer set the function who need to be called after the tests - # request.addfinalizer(tearDown) - - -# Tuple with (expectedClient, serviceName) -client_imp = ( - (TornadoClient, "WorkloadManagement/ServiceHttps"), - (TornadoClient, "https://server1:1234/WorkloadManagement/ServiceHttps"), - ( - TornadoClient, - "https://server1:1234/WorkloadManagement/ServiceHttps,https://server2:1234/WorkloadManagement/ServiceHttps", - ), - (RPCClient, "WorkloadManagement/ServiceDips"), - (RPCClient, "dips://server1:1234/WorkloadManagement/ServiceDips"), - ( - RPCClient, - "dips://server1:1234/WorkloadManagement/ServiceDips,dips://server2:1234/WorkloadManagement/ServiceDips", - ), -) - - -@parametrize("client", client_imp) -def test_selection_when_using_RPCClientSelector(client, config): - """ - One way to call service is to use RPCClient or TornadoClient - If service is HTTPS, it must return client who work with tornado (TornadoClient) - else it must return the RPCClient - """ - clientWanted = client[0] - component_service = client[1] - clientSelected = RPCClientSelector(component_service) - assert isinstance(clientSelected, clientWanted) - - -error_component = ( - "Too/Many/Sections", - "JustAName", - "InexistantComponent/InexistantService", - "dummyProtocol://dummy/url", -) - - -@parametrize("component_service", error_component) -def test_error(component_service, config): - """ - In any other cases (including error cases) it must return RPCClient by default - This test is NOT testing if RPCClient handle the errors - It just test that we get RPCClient and not Tornadoclient - """ - clientSelected = RPCClientSelector(component_service) - assert isinstance(clientSelected, RPCClient) - - -def test_interface(): - """ - Interface of TornadoClient MUST contain at least interface of RPCClient. - BUT a __getattr__ method extends this interface with interface of InnerRPCClient. 
- """ - interfaceTornadoClient = dir(TornadoClient) - interfaceRPCClient = dir(RPCClient) + dir(InnerRPCClient) - for element in interfaceRPCClient: - # We don't need to test private methods / attribute - # Private methods/attribute starts with __ - # dir also return private methods named with something like _ClassName__PrivateMethodName - if not element.startswith("_"): - assert element in interfaceTornadoClient - - -client_imp = ((2, "WorkloadManagement/ServiceHttps"), (1, "https://server1:1234/WorkloadManagement/ServiceHttps")) - - -@parametrize("client", client_imp) -def test_urls_used_by_TornadoClient(config, client): - # We can't directly get url because they are randomized but we can check if we have right number of URL - - nbOfUrl = client[0] - component_service = client[1] - clientSelected = RPCClientSelector(component_service) - # Little hack to get the private attribute - assert nbOfUrl == clientSelected._TornadoBaseClient__nbOfUrls diff --git a/tests/DISET_HTTPS_migration/TestServerIntegration.py b/tests/DISET_HTTPS_migration/TestServerIntegration.py deleted file mode 100644 index 92df66eeaf3..00000000000 --- a/tests/DISET_HTTPS_migration/TestServerIntegration.py +++ /dev/null @@ -1,90 +0,0 @@ -""" - Test if same service work on DIRAC and TORNADO - Testing if basic operation works on a dummy example - - These handlers provide a method who's access is always forbidden to test authorization system - - It's just normal services, entry in dirac.cfg are the same as usual. - To start tornado use DIRAC/TornadoServices/scripts/tornado-start-all.py - ``` - Services - { - User - { - Protocol = https - } - UserDirac - { - Port = 3424 - } - } - ``` - - ``` - URLs - { - User = https://MrBoincHost:443/Framework/User - UserDirac = dips://localhost:3424/Framework/UserDirac - } - ``` - -""" -import DIRAC - -DIRAC.initialize() # Initialize configuration - -from string import printable -from hypothesis import given, settings -from hypothesis.strategies import text - -from DIRAC.Core.DISET.RPCClient import RPCClient as RPCClientDIRAC -from DIRAC.Core.Tornado.Client.TornadoClient import TornadoClient as RPCClientTornado -from DIRAC.Core.Utilities.DErrno import ENOAUTH -from DIRAC import S_ERROR - -from pytest import mark - -parametrize = mark.parametrize - -rpc_imp = ((RPCClientTornado, "Framework/User"), (RPCClientDIRAC, "Framework/UserDirac")) - - -@parametrize("rpc", rpc_imp) -def test_authorization(rpc): - service = rpc[0](rpc[1]) - - authorisation = service.unauthorized() - assert authorisation["OK"] is False - assert authorisation["Message"] == S_ERROR(ENOAUTH, "Unauthorized query")["Message"] - - -@parametrize("rpc", rpc_imp) -def test_unknown_method(rpc): - service = rpc[0](rpc[1]) - - unknownmethod = service.ThisMethodMayNotExist() - assert unknownmethod["OK"] is False - assert unknownmethod["Message"] == "Unknown method ThisMethodMayNotExist" - - -@parametrize("rpc", rpc_imp) -def test_ping(rpc): - service = rpc[0](rpc[1]) - - assert service.ping()["OK"] - - -@parametrize("rpc", rpc_imp) -@settings(deadline=None, max_examples=42) -@given(data=text(printable, max_size=64)) -def test_echo(rpc, data): - service = rpc[0](rpc[1]) - - assert service.echo(data)["Value"] == data - - -def test_whoami(): # Only in tornado - credDict = RPCClientTornado("Framework/User").whoami()["Value"] - assert "DN" in credDict - assert "CN" in credDict - assert "isProxy" in credDict diff --git a/tests/DISET_HTTPS_migration/multi-mechanize/distributed-test.py 
b/tests/DISET_HTTPS_migration/multi-mechanize/distributed-test.py deleted file mode 100644 index 29930a3d262..00000000000 --- a/tests/DISET_HTTPS_migration/multi-mechanize/distributed-test.py +++ /dev/null @@ -1,46 +0,0 @@ -import xmlrpclib -import time -import sys - -# == BEGIN OF CONFIGURATION == - -# Add all machine who have multimechanize client -serversList = ["137.138.150.194", "server2"] -# Each multimechanize client must listen the same ports, add the ports here -portList = ["9000", "9001"] - -# END OF CONFIG - -servers = [] - -print("Starting test servers....") -# We send signal to all servers -for port in portList: - for server in serversList: - servers.append(xmlrpclib.ServerProxy(f"http://{server}:{port}")) - servers[-1].run_test() - # If there is multiple ports opened on same machine, we wait a little to avoid confusion in multimechanize - time.sleep(2) - - -print("Waiting for results...") -while servers[-1].get_results() == "Results Not Available": - time.sleep(1) - -# We get all results and write them into files -# There is one file/multimechanize servers -try: - output = sys.argv[1] -except KeyError: - output = str(time.time()) -fileCount = 0 -for server in servers: - fileCount += 1 - fileName = f"{output}.{fileCount}.txt" - print(f"Writing output file {fileName}") - file = open(fileName, "w") - file.write(server.get_results()) - file.close() - -# We print the command you can copy paste to have the results in a plot -print("python plot-distributedTest.py %s %d" % (output, fileCount)) diff --git a/tests/DISET_HTTPS_migration/multi-mechanize/ping/config.cfg b/tests/DISET_HTTPS_migration/multi-mechanize/ping/config.cfg deleted file mode 100644 index b9ba7f702d8..00000000000 --- a/tests/DISET_HTTPS_migration/multi-mechanize/ping/config.cfg +++ /dev/null @@ -1,13 +0,0 @@ - -[global] -run_time = 60 -rampup = 60 -results_ts_interval = 1 -progress_bar = on -console_logging = off -xml_report = off - - -[user_group-1] -threads = 60 -script = v_perf.py diff --git a/tests/DISET_HTTPS_migration/multi-mechanize/ping/test_scripts/v_perf.py b/tests/DISET_HTTPS_migration/multi-mechanize/ping/test_scripts/v_perf.py deleted file mode 100644 index 44bf8716292..00000000000 --- a/tests/DISET_HTTPS_migration/multi-mechanize/ping/test_scripts/v_perf.py +++ /dev/null @@ -1,18 +0,0 @@ -from DIRAC.Core.Tornado.Client.TornadoClient import TornadoClient -from DIRAC.Core.DISET.RPCClient import RPCClient -from time import time -from random import randint -import sys - - -class Transaction: - def __init__(self): - # If we want we can force to use dirac - if len(sys.argv) > 2 and sys.argv[2].lower() == "dirac": - self.client = RPCClient("Framework/UserDirac") - else: - self.client = TornadoClient("Framework/User") - return - - def run(self): - self.client.ping() diff --git a/tests/DISET_HTTPS_migration/multi-mechanize/plot-distributedTest.py b/tests/DISET_HTTPS_migration/multi-mechanize/plot-distributedTest.py deleted file mode 100644 index 959b5beeab8..00000000000 --- a/tests/DISET_HTTPS_migration/multi-mechanize/plot-distributedTest.py +++ /dev/null @@ -1,174 +0,0 @@ -import csv -import matplotlib.pyplot as plt -import sys - - -# == BEGIN OF CONFIGURATION == - -# FOR DISPLAY - Enter settings relative to test -system = "Test ping - timeout=30 - Diset Server" -multimech_thread = 60 -multimech_time = 300 -multimech_rampup = 200 -multimech_clients = 8 -server_maxThreads = 20 - -# For this line, use vmstat and copy the free memory -memoryOffset = 0 - -plt.suptitle( - "%s with %d threads\n %d threads/client - 
%d clients (total %d threads) \n \ - duration: %dsec - rampup %dsec \n latency between client starts: 2s" - % ( - system, - server_maxThreads, - multimech_thread, - multimech_clients, - multimech_clients * multimech_thread, - multimech_time, - multimech_rampup, - ) -) - - -# END OF CONFIG - - -def get_results(): - if len(sys.argv) < 3: - print("Usage: python plot-distributedTest NAME NUMBEROFFILE") - print("Example: python plot-distributedTest 1532506328.38 2") - sys.exit(1) - - file = sys.argv[1] - count = int(sys.argv[2]) - results = [] - for i in range(1, count + 1): - fileName = f"{file}.{i}.txt" - with open(fileName) as content: - print(f"reading {fileName}") - lines = content.read().split("\n")[1:-1] - result = [line.split(",") for line in lines] - results.append(result) - content.close() - return results - - -def get_server_stats(): - print("Please specify location to file with server stats:") - serverStatFile = "/tmp/results.txt" # raw_input() - print(f"Loading {serverStatFile}") - - serverStats = dict() - with open(serverStatFile) as content_file: - lines = content_file.read().split("\n")[1:] - for line in lines: - line = line.split(";") - serverStats[line[0]] = line[1:] - return serverStats - - -def get_test_begin_end(results): - """ - First result file contain the test started in first - Last result file contain the test started in last - Every test have same duration - So we read first and last line to have begin hour and end hour - (the '2' is because time is registered in the third row) - """ - return (int(results[0][0][2]), int(results[-1][-1][2])) - - -def process_data(results, serverStats): - # Begin and end are timestamps - (begin, end) = get_test_begin_end(results) - - # Initializing all data list - (time, requestTime, CPU, RAM, reqPerSec, errorRate, loadAvg) = ([], [], [], [], [], [], []) - global memoryOffset - initialRAM = memoryOffset - - for t in range(begin, end): # We determine datas time with timestamp - # Offset to set starttime = 0 - time.append(t - begin) - - # Getting requesttime (mean), number of request/error at a givent time - (reqTime, reqCount, errorCount) = getRequestTimeAndCount(results, t) - requestTime.append(reqTime) - reqPerSec.append(reqCount) - errorRate.append(errorCount) - - # Getting infos from Server, sometimes no info are getted during more than one second - try: - # Get CPU usage - CPU.append(100 - int(serverStats[str(t)][14])) - - # Get Memory used (delta with memory at the beginning) - usedRam = int(serverStats[str(t)][5]) - initialRAM - RAM.append(usedRam) - - # Get the load - loadAvg.append(100 * float(serverStats[str(t)][17])) # 18 - except KeyError: - # If fail in getting value, take previous values - CPU.append(CPU[-1]) - RAM.append(RAM[-1]) - loadAvg.append(loadAvg[-1]) - - print(f"ERROR - Some values missing for CPU and Memory usage [try to load for time={t}]") - - return (time, requestTime, CPU, RAM, reqPerSec, errorRate, loadAvg) - - -def getRequestTimeAndCount(data, time): - reqCount = 0 - errorCount = 0 - totalRequest = 0 - - for result in results: - i = 0 - try: - # Ignore past - while int(result[i][2]) < time: - i += 1 - - # Get infos for present - while int(result[i][2]) == time: - reqCount += 1 - totalRequest += result[i][4] - if result[i][5] != "": - errorCount += 1 - i += 1 - except IndexError: - pass - return (int(totalRequest / reqCount) if reqCount > 0 else 0, reqCount, errorCount) - - -def displayGraph(results, serverStats): - """ - Display all the graph on the same figure - """ - print("Processing data and plot, it 
may take some time for huge tests") - (time, requestTime, CPU, RAM, reqPerSec, errorCount, loadAvg) = process_data(results, serverStats) - - plt.subplot(221) - plt.plot(time, requestTime, "-", label="Request time (s)") - plt.legend() - plt.subplot(222) - plt.plot(time, CPU, "*", label="CPU usage (%)") - plt.plot(time, loadAvg, "*", label="Load average * 100") - plt.legend() - plt.subplot(223) - plt.plot(time, RAM, "*", label="Used Memory (bytes)") - plt.legend() - plt.subplot(224) - plt.plot(time, reqPerSec, "*", label="Requests/sec") - plt.plot(time, errorCount, "*", label="Errors/sec") - plt.legend() - - -results = get_results() -# process_data(results) -serverStats = get_server_stats() -displayGraph(results, serverStats) -plt.show() diff --git a/tests/DISET_HTTPS_migration/multi-mechanize/plot.py b/tests/DISET_HTTPS_migration/multi-mechanize/plot.py deleted file mode 100755 index 64022871047..00000000000 --- a/tests/DISET_HTTPS_migration/multi-mechanize/plot.py +++ /dev/null @@ -1,53 +0,0 @@ -""" - A little script to analyze multi-mechanize tests by ploting every test in a single figure - - Because it can't be automatized you have to define path of the folder who contains results - - testTornado and testDirac must have the same length -""" -import csv -import matplotlib.pyplot as plt - -testTornado = ["ping/results/results_2018.06.22_14.50.53", "service/results/results_2018.06.22_15.24.18"] -testDirac = ["ping/results/results_2018.06.22_14.52.09", "service/results/results_2018.06.22_14.55.03"] - - -def read_data(test, groupSize): - with open(test + "/results.csv", "rb") as csvfile: - reader = csv.reader(csvfile, delimiter=",") - - sumgrouptime = 0 - sumgrouprequestTime = 0 - count = 0 - - time = [] - requestTime = [] - - for row in reader: - count += 1 - sumgrouptime += float(row[1]) - sumgrouprequestTime += float(row[4]) - # We group some points to make graph readable - if count == groupSize: - time.append(sumgrouptime / groupSize) - requestTime.append(sumgrouprequestTime / groupSize) - sumgrouptime = 0 - sumgrouprequestTime = 0 - count = 0 - return (time, requestTime) - - -def displayGraph(testTornado, testDirac, subplot, groupSize): - plt.subplot(subplot) - plt.ylabel("red = dirac") - - (timeTornado, requestTimeTornado) = read_data(testTornado, groupSize) - (timeDirac, requestTimeDirac) = read_data(testDirac, groupSize) - - plt.plot(timeTornado, requestTimeTornado, "b-", timeDirac, requestTimeDirac, "r-") - - -# The "100*len(testTornado)+11+i" can look strange but it define a sublot dynamically -for i in range(len(testTornado)): - displayGraph(testTornado[i], testDirac[i], 100 * len(testTornado) + 11 + i, 42) -plt.show() diff --git a/tests/DISET_HTTPS_migration/multi-mechanize/service/config.cfg b/tests/DISET_HTTPS_migration/multi-mechanize/service/config.cfg deleted file mode 100644 index b9ba7f702d8..00000000000 --- a/tests/DISET_HTTPS_migration/multi-mechanize/service/config.cfg +++ /dev/null @@ -1,13 +0,0 @@ - -[global] -run_time = 60 -rampup = 60 -results_ts_interval = 1 -progress_bar = on -console_logging = off -xml_report = off - - -[user_group-1] -threads = 60 -script = v_perf.py diff --git a/tests/DISET_HTTPS_migration/multi-mechanize/service/test_scripts/v_perf.py b/tests/DISET_HTTPS_migration/multi-mechanize/service/test_scripts/v_perf.py deleted file mode 100644 index 158661e3062..00000000000 --- a/tests/DISET_HTTPS_migration/multi-mechanize/service/test_scripts/v_perf.py +++ /dev/null @@ -1,25 +0,0 @@ -from DIRAC.Core.Tornado.Client.TornadoClient import TornadoClient 
-from DIRAC.Core.DISET.RPCClient import RPCClient -from time import time -from random import randint -import sys - - -class Transaction: - def __init__(self): - # If we want we can force to use dirac - if len(sys.argv) > 2 and sys.argv[2].lower() == "dirac": - self.client = RPCClient("Framework/UserDirac") - else: - self.client = TornadoClient("Framework/User") - return - - def run(self): - s = "Chaine 1" - s2 = "Chaine %d" % randint(0, 42) - - newUser = self.client.addUser(s) - userID = int(newUser["Value"]) - User = self.client.getUserName(userID) - self.client.editUser(userID, s2) - User = self.client.getUserName(userID) From fc4bf221ec1c030f321012ec1e6bc2edeabd814a Mon Sep 17 00:00:00 2001 From: Federico Stagni Date: Mon, 18 Mar 2024 17:12:33 +0100 Subject: [PATCH 3/5] fix: do not use hyphens --- src/DIRAC/WorkloadManagementSystem/DB/SandboxMetadataDB.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/DIRAC/WorkloadManagementSystem/DB/SandboxMetadataDB.py b/src/DIRAC/WorkloadManagementSystem/DB/SandboxMetadataDB.py index 3edfb82a26c..639beb8afeb 100644 --- a/src/DIRAC/WorkloadManagementSystem/DB/SandboxMetadataDB.py +++ b/src/DIRAC/WorkloadManagementSystem/DB/SandboxMetadataDB.py @@ -206,7 +206,7 @@ def assignSandboxesToEntities(self, enDict, requesterName, requesterGroup, owner if not insertValues: return S_ERROR( - f"Sandbox does not exist or you're not authorized to assign it being {requesterName}@{requesterGroup}" + f"Sandbox does not exist or you are not authorized to assign it being {requesterName}@{requesterGroup}" ) sqlCmd = f"INSERT INTO `sb_EntityMapping` ( entityId, Type, SBId ) VALUES {', '.join(insertValues)}" result = self._update(sqlCmd) From 7d419a7ace977a0b641b0e629030e0232a166c89 Mon Sep 17 00:00:00 2001 From: Federico Stagni Date: Wed, 20 Mar 2024 17:54:32 +0100 Subject: [PATCH 4/5] docs: updated SECURITY.md --- SECURITY.md | 8 +++----- 1 file changed, 3 insertions(+), 5 deletions(-) diff --git a/SECURITY.md b/SECURITY.md index 184b7301e37..f93d5c276f6 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -4,11 +4,9 @@ | Version | Supported | | ------- | ------------------ | -| >= 7.1 | :white_check_mark: | -| < 7.1 | :x: | +| >= 8.0 | :white_check_mark: | +| < 8.0 | :x: | ## Reporting a Vulnerability -Please report any suspected vulnerabilities by emailing fstagni-at-cern.ch and atsareg-at-in2p3.fr. -We will respond to you within 2 working days. -If for some reason you do not recieve a response, please follow up via email to ensure we received your original message. 
+You can privately report vulnerability at https://github.com/DIRACGrid/DIRAC/security From 937c3b73c1f90b30179a2d0df8a1ee912c762e11 Mon Sep 17 00:00:00 2001 From: Federico Stagni Date: Thu, 21 Mar 2024 10:10:02 +0100 Subject: [PATCH 5/5] fix: restored JobWrapper initialValues --- .../JobWrapper/Watchdog.py | 2 ++ .../JobWrapper/test/Test_JobWrapper.py | 26 +++++++++---------- 2 files changed, 14 insertions(+), 14 deletions(-) diff --git a/src/DIRAC/WorkloadManagementSystem/JobWrapper/Watchdog.py b/src/DIRAC/WorkloadManagementSystem/JobWrapper/Watchdog.py index 442206cd3db..ed514288158 100755 --- a/src/DIRAC/WorkloadManagementSystem/JobWrapper/Watchdog.py +++ b/src/DIRAC/WorkloadManagementSystem/JobWrapper/Watchdog.py @@ -718,6 +718,8 @@ def calibrate(self): self.initialValues["RSS"] = result["Value"] self.log.verbose("RSS(MB)", f"{result['Value']:.1f}") self.parameters["RSS"] = [] + self.initialValues["MemoryUsed"] = result["Value"] + self.parameters["MemoryUsed"] = [] # We exclude fuse so that mountpoints can be cleaned up by automount after a period unused # (specific request from CERN batch service). diff --git a/src/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py b/src/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py index 01b7311620f..4f292797a4f 100644 --- a/src/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py +++ b/src/DIRAC/WorkloadManagementSystem/JobWrapper/test/Test_JobWrapper.py @@ -4,20 +4,18 @@ import shutil import tempfile import time -import pytest from unittest.mock import MagicMock -from DIRAC import gLogger +import pytest + import DIRAC +from DIRAC import gLogger from DIRAC.Core.Utilities import DErrno -from DIRAC.Core.Utilities.ReturnValues import S_ERROR, S_OK - +from DIRAC.Core.Utilities.ReturnValues import S_ERROR from DIRAC.DataManagementSystem.Client.test.mock_DM import dm_mock from DIRAC.Resources.Catalog.test.mock_FC import fc_mock - +from DIRAC.WorkloadManagementSystem.Client import JobMinorStatus, JobStatus from DIRAC.WorkloadManagementSystem.JobWrapper.JobWrapper import JobWrapper -from DIRAC.WorkloadManagementSystem.JobWrapper.Watchdog import Watchdog -from DIRAC.WorkloadManagementSystem.Client import JobStatus, JobMinorStatus getSystemSectionMock = MagicMock() getSystemSectionMock.return_value = "aValue" @@ -196,7 +194,7 @@ def test_processSuccessfulCommand(mocker): assert result["Value"]["cpuTimeConsumed"][0] > 0 assert not result["Value"]["watchdogError"] assert "LastUpdateCPU(s)" in result["Value"]["watchdogStats"] - assert "MemoryUsed(kb)" in result["Value"]["watchdogStats"] + assert "MemoryUsed(MB)" in result["Value"]["watchdogStats"] @pytest.mark.slow @@ -254,7 +252,7 @@ def test_processFailedCommand(mocker): assert result["Value"]["cpuTimeConsumed"][0] > 0 assert not result["Value"]["watchdogError"] assert "LastUpdateCPU(s)" in result["Value"]["watchdogStats"] - assert "MemoryUsed(kb)" in result["Value"]["watchdogStats"] + assert "MemoryUsed(MB)" in result["Value"]["watchdogStats"] @pytest.mark.slow @@ -357,7 +355,7 @@ def set_param_side_effect(*args, **kwargs): "payloadExecutorError": None, "cpuTimeConsumed": [100, 200, 300, 400, 500], "watchdogError": None, - "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(kb)": "100"}, + "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(MB)": "100"}, } jw.executionResults["CPU"] = payloadResult["cpuTimeConsumed"] @@ -378,7 +376,7 @@ def set_param_side_effect(*args, **kwargs): "payloadExecutorError": None, "cpuTimeConsumed": [100, 200, 300, 400, 
500], "watchdogError": None, - "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(kb)": "100"}, + "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(MB)": "100"}, } jw.executionResults["CPU"] = payloadResult["cpuTimeConsumed"] @@ -399,7 +397,7 @@ def set_param_side_effect(*args, **kwargs): "payloadExecutorError": None, "cpuTimeConsumed": [100, 200, 300, 400, 500], "watchdogError": None, - "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(kb)": "100"}, + "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(MB)": "100"}, } jw.executionResults["CPU"] = payloadResult["cpuTimeConsumed"] @@ -422,7 +420,7 @@ def set_param_side_effect(*args, **kwargs): "payloadExecutorError": None, "cpuTimeConsumed": [100, 200, 300, 400, 500], "watchdogError": None, - "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(kb)": "100"}, + "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(MB)": "100"}, } jw.executionResults["CPU"] = payloadResult["cpuTimeConsumed"] @@ -443,7 +441,7 @@ def set_param_side_effect(*args, **kwargs): "payloadExecutorError": None, "cpuTimeConsumed": [100, 200, 300, 400, 500], "watchdogError": "Watchdog error", - "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(kb)": "100"}, + "watchdogStats": {"LastUpdateCPU(s)": "100", "MemoryUsed(MB)": "100"}, } jw.executionResults["CPU"] = payloadResult["cpuTimeConsumed"]