From 1130a1ba6fc3c08f375dd99f98e93e4ee36331fb Mon Sep 17 00:00:00 2001
From: Kaituo Li
Date: Thu, 27 Jul 2023 10:39:39 -0700
Subject: [PATCH] Refactoring NodeStateManager etc. to support forecasting
 functionality (#965)

* Refactoring NodeStateManager etc. to support forecasting functionality

This commit extends the codebase to support both Anomaly Detection (AD) and
forecasting. It contains a mixture of refactoring, renaming, removal of unused
code, and package moving tasks. Here are the details:

Refactoring:
- `NodeStateManager.getAnomalyDetector` is now `getConfig`, with added
  functionality to fetch a Forecaster. The method comments are updated for
  clarity.
- Existing methods in `SearchFeatureDao` (`getFeatureSamplesForPeriods`,
  `getColdStartSamplesForPeriods`, `createPreviewSearchRequest`,
  `getMinDataTime`) have been extended to handle forecasting logic.
- Adjusted `SecurityClientUtil` and `ParseUtils` to handle forecasting logic.
- Cleaned up `NodeState` to differentiate state for AD and forecasting.

Renaming:
- `AnomalyDetectorJob` is renamed to `Job` to facilitate reuse for forecasting.
- `NodeStateManager.getAnomalyDetectorJob` is renamed to `getJob`.
- Certain settings in `AnomalyDetectorSettings` are renamed to reflect that
  they are AD-specific. They have been marked as deprecated, and new settings
  in `TimeSeriesSettings` are used instead.
- `IndexAnomalyDetectorJobActionHandler.getAnomalyDetectorJobForWrite` is
  renamed to `getJobForWrite`.
- `ADSafeSecurityInjector` is renamed to `TimeSeriesSafeSecurityInjector`.

Removing unused code:
- Synchronous code in `ClientUtil`, `IndexUtils`, and `CheckpointDao` is
  removed.
- The unused class `Throttler` is deleted.
- Mapping file names are changed, and the code referencing these files is
  adjusted.

Package moving:
- Several classes (`ClientUtil`, `MultiResponsesDelegateActionListener`,
  `SafeSecurityInjector`, `SecurityUtil`, `ExceptionUtil`, `SearchFeatureDao`,
  `CleanState`, `ExpiringState`, `MaintenanceState`, `NodeState`,
  `SingleStreamModelIdMapper`, `BackPressureRouting`) are moved to the
  respective `org.opensearch.timeseries` packages.

Miscellaneous:
- Fixed compiler failures caused by changes in
  https://github.com/opensearch-project/OpenSearch/pull/8730 by replacing
  `DoubleArrayList` with `java.util.ArrayList`.
- Updated the Backwards Compatibility (bwc) version to align with the core's
  incremented bwc version, as per
  [OpenSearch PR #8670](https://github.com/opensearch-project/OpenSearch/pull/8670).
  This change prevents the issue described in
  [OpenSearch Issue #5076](https://github.com/opensearch-project/OpenSearch/issues/5076).

Testing:
- Executed a `gradle build`.
- Added new tests for `ClientUtil` and `NodeStateManager`.
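To illustrate the `getConfig` refactor described above, here is a minimal sketch of how a call site looks after this change: the caller passes an `AnalysisType` so the same `NodeStateManager` can serve either a detector or a forecaster, and casts the returned config back to `AnomalyDetector` where AD-specific fields are needed. The class and method names in the sketch are hypothetical, and the listener's generic type is simplified; the authoritative call sites are in the diff below.

```java
import org.opensearch.action.ActionListener;
import org.opensearch.ad.model.AnomalyDetector;
import org.opensearch.timeseries.AnalysisType;
import org.opensearch.timeseries.NodeStateManager;

// Hypothetical example class; mirrors the pattern used by the refactored call sites.
public class GetConfigExample {

    // Before this PR: nodeStateManager.getAnomalyDetector(detectorId, listener), AD only.
    // After this PR: getConfig(detectorId, analysisType, listener), shared with forecasting.
    void fetchDetector(NodeStateManager nodeStateManager, String detectorId) {
        nodeStateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(configOptional -> {
            if (!configOptional.isPresent()) {
                // Config (detector or forecaster) not found; real call sites log and return.
                return;
            }
            // The shared API returns the common Config type; AD callers cast it back.
            AnomalyDetector detector = (AnomalyDetector) configOptional.get();
            // ... use the detector, e.g. to run the realtime AD job ...
        }, exception -> {
            // Lookup failures (index not found, parse errors) are handled here.
        }));
    }
}
```

The companion rename follows the same shape: `nodeStateManager.getAnomalyDetectorJob(id, listener)` becomes `nodeStateManager.getJob(id, listener)`, returning the relocated `org.opensearch.timeseries.model.Job`.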
Signed-off-by: Kaituo Li * improve comment Signed-off-by: Kaituo Li * fix compiler error and comments Signed-off-by: Kaituo Li --------- Signed-off-by: Kaituo Li --- build.gradle | 4 +- .../ad/AnomalyDetectorJobRunner.java | 38 +- .../ad/AnomalyDetectorProfileRunner.java | 19 +- .../opensearch/ad/AnomalyDetectorRunner.java | 2 +- .../opensearch/ad/EntityProfileRunner.java | 12 +- .../ad/ExecuteADResultResponseRecorder.java | 10 +- .../opensearch/ad/caching/CacheBuffer.java | 2 +- .../org/opensearch/ad/caching/DoorKeeper.java | 4 +- .../opensearch/ad/caching/EntityCache.java | 4 +- .../opensearch/ad/cluster/ADDataMigrator.java | 23 +- .../cluster/ClusterManagerEventListener.java | 2 +- .../org/opensearch/ad/cluster/DailyCron.java | 2 +- .../org/opensearch/ad/cluster/HashRing.java | 10 +- .../ad/cluster/diskcleanup/IndexCleanup.java | 2 +- .../ad/constant/ADCommonMessages.java | 2 - .../ad/feature/CompositeRetriever.java | 4 +- .../opensearch/ad/feature/FeatureManager.java | 19 +- .../org/opensearch/ad/ml/CheckpointDao.java | 25 +- .../opensearch/ad/ml/EntityColdStarter.java | 35 +- .../org/opensearch/ad/ml/ModelManager.java | 1 + .../java/org/opensearch/ad/ml/ModelState.java | 2 +- .../opensearch/ad/ratelimit/BatchWorker.java | 2 +- .../ratelimit/CheckpointMaintainWorker.java | 2 +- .../ad/ratelimit/CheckpointReadWorker.java | 13 +- .../ad/ratelimit/CheckpointWriteWorker.java | 18 +- .../ad/ratelimit/ColdEntityWorker.java | 2 +- .../ad/ratelimit/ConcurrentWorker.java | 2 +- .../ad/ratelimit/EntityColdStartWorker.java | 9 +- .../ratelimit/RateLimitedRequestWorker.java | 10 +- .../ad/ratelimit/ResultWriteWorker.java | 16 +- .../ad/ratelimit/ScheduledWorker.java | 2 +- .../ad/ratelimit/SingleRequestWorker.java | 2 +- .../rest/AbstractAnomalyDetectorAction.java | 6 +- .../ad/rest/RestAnomalyDetectorJobAction.java | 6 +- .../RestExecuteAnomalyDetectorAction.java | 10 +- .../ad/rest/RestGetAnomalyDetectorAction.java | 5 +- .../RestPreviewAnomalyDetectorAction.java | 4 +- .../AbstractAnomalyDetectorActionHandler.java | 34 +- .../handler/AnomalyDetectorActionHandler.java | 4 +- .../IndexAnomalyDetectorActionHandler.java | 4 +- .../IndexAnomalyDetectorJobActionHandler.java | 31 +- .../handler/ModelValidationActionHandler.java | 17 +- .../ValidateAnomalyDetectorActionHandler.java | 4 +- .../ad/settings/AnomalyDetectorSettings.java | 12 +- .../opensearch/ad/task/ADBatchTaskRunner.java | 11 +- .../org/opensearch/ad/task/ADTaskManager.java | 14 +- .../AnomalyDetectorJobTransportAction.java | 8 +- .../AnomalyResultTransportAction.java | 18 +- .../ad/transport/CronTransportAction.java | 2 +- .../DeleteAnomalyDetectorTransportAction.java | 8 +- .../transport/DeleteModelTransportAction.java | 2 +- .../EntityProfileTransportAction.java | 2 +- .../EntityResultTransportAction.java | 12 +- .../ForwardADTaskTransportAction.java | 2 +- .../transport/GetAnomalyDetectorResponse.java | 10 +- .../GetAnomalyDetectorTransportAction.java | 11 +- .../IndexAnomalyDetectorTransportAction.java | 18 +- ...PreviewAnomalyDetectorTransportAction.java | 3 +- .../transport/RCFPollingTransportAction.java | 4 +- .../StatsAnomalyDetectorTransportAction.java | 2 +- ...alidateAnomalyDetectorTransportAction.java | 4 +- .../handler/AnomalyIndexHandler.java | 2 +- .../AnomalyResultBulkIndexHandler.java | 2 +- .../handler/MultiEntityResultHandler.java | 2 +- .../java/org/opensearch/ad/util/BulkUtil.java | 1 + .../org/opensearch/ad/util/ClientUtil.java | 332 --------------- .../org/opensearch/ad/util/IndexUtils.java | 23 +- 
.../org/opensearch/ad/util/Throttler.java | 73 ---- .../opensearch/timeseries/AnalysisType.java | 11 + .../{ad => timeseries}/CleanState.java | 2 +- .../timeseries/ExceptionRecorder.java | 20 + .../{ad => timeseries}/ExpiringState.java | 2 +- .../{ad => timeseries}/MaintenanceState.java | 2 +- .../{ad => timeseries}/NodeState.java | 180 ++++---- .../{ad => timeseries}/NodeStateManager.java | 388 ++++++++++-------- .../timeseries/TimeSeriesAnalyticsPlugin.java | 42 +- .../timeseries/constant/CommonMessages.java | 6 + .../feature/SearchFeatureDao.java | 66 +-- .../function/BiCheckedFunction.java | 11 + .../ml/SingleStreamModelIdMapper.java | 2 +- .../model/Job.java} | 20 +- .../model/MergeableList.java | 4 +- .../settings/TimeSeriesSettings.java | 15 +- .../transport/BackPressureRouting.java | 2 +- .../timeseries/util/ClientUtil.java | 71 ++++ .../util/ExceptionUtil.java | 2 +- .../MultiResponsesDelegateActionListener.java | 2 +- .../timeseries/util/ParseUtils.java | 136 +++--- .../util/SafeSecurityInjector.java | 2 +- .../util/SecurityClientUtil.java | 40 +- .../{ad => timeseries}/util/SecurityUtil.java | 14 +- .../util/TimeSeriesSafeSecurityInjector.java} | 43 +- .../{anomaly-detectors.json => config.json} | 0 .../{anomaly-detector-jobs.json => job.json} | 0 ...ndexAnomalyDetectorActionHandlerTests.java | 6 +- ...dateAnomalyDetectorActionHandlerTests.java | 6 +- .../ad/AbstractProfileRunnerTests.java | 2 +- .../ad/AnomalyDetectorJobRunnerTests.java | 70 ++-- .../ad/AnomalyDetectorProfileRunnerTests.java | 12 +- .../ad/AnomalyDetectorRestTestCase.java | 33 +- .../ad/EntityProfileRunnerTests.java | 12 +- .../ad/HistoricalAnalysisIntegTestCase.java | 8 +- .../ad/HistoricalAnalysisRestTestCase.java | 2 +- .../ad/MultiEntityProfileRunnerTests.java | 6 +- .../org/opensearch/ad/ODFERestTestCase.java | 4 +- .../ad/bwc/ADBackwardsCompatibilityIT.java | 17 +- .../ClusterManagerEventListenerTests.java | 2 +- .../opensearch/ad/cluster/DailyCronTests.java | 2 +- .../opensearch/ad/cluster/HashRingTests.java | 6 +- .../diskcleanup/IndexCleanupTests.java | 2 +- .../ad/e2e/AbstractSyntheticDataTest.java | 4 +- .../ad/feature/FeatureManagerTests.java | 79 ++-- .../ad/ml/AbstractCosineDataTest.java | 17 +- .../opensearch/ad/ml/CheckpointDaoTests.java | 2 +- .../ad/ml/CheckpointDeleteTests.java | 2 +- .../ad/ml/EntityColdStarterTests.java | 80 ++-- .../opensearch/ad/ml/HCADModelPerfTests.java | 12 +- .../opensearch/ad/ml/ModelManagerTests.java | 5 +- .../ad/ml/SingleStreamModelIdMapperTests.java | 1 + ...alyDetectorJobTransportActionWithUser.java | 8 +- .../ad/model/AnomalyDetectorJobTests.java | 9 +- .../ad/model/MergeableListTests.java | 1 + .../ratelimit/AbstractRateLimitingTest.java | 8 +- .../ratelimit/CheckpointReadWorkerTests.java | 9 +- .../ratelimit/CheckpointWriteWorkerTests.java | 13 +- .../opensearch/ad/rest/ADRestTestUtils.java | 12 +- .../ad/rest/AnomalyDetectorRestApiIT.java | 22 +- .../ad/rest/HistoricalAnalysisRestApiIT.java | 6 +- .../opensearch/ad/rest/SecureADRestIT.java | 10 +- ...xAnomalyDetectorJobActionHandlerTests.java | 2 +- .../AnomalyDetectorSettingsTests.java | 47 ++- .../org/opensearch/ad/stats/ADStatsTests.java | 1 - .../ad/task/ADTaskManagerTests.java | 14 +- ...atchAnomalyResultTransportActionTests.java | 2 +- .../ADStatsNodesTransportActionTests.java | 11 +- ...nomalyDetectorJobTransportActionTests.java | 6 +- .../ad/transport/AnomalyResultTests.java | 20 +- .../AnomalyResultTransportActionTests.java | 2 +- .../transport/CronTransportActionTests.java | 2 +- 
.../transport/DeleteAnomalyDetectorTests.java | 8 +- .../DeleteModelTransportActionTests.java | 2 +- .../EntityResultTransportActionTests.java | 14 +- .../ForwardADTaskTransportActionTests.java | 2 +- .../GetAnomalyDetectorActionTests.java | 4 +- .../ad/transport/GetAnomalyDetectorTests.java | 9 +- ...etAnomalyDetectorTransportActionTests.java | 35 +- ...exAnomalyDetectorTransportActionTests.java | 6 +- .../ad/transport/MultiEntityResultTests.java | 35 +- .../ad/transport/RCFPollingTests.java | 2 +- .../handler/AbstractIndexHandlerTest.java | 8 +- .../AnomalyResultBulkIndexHandlerTests.java | 6 +- .../handler/AnomalyResultHandlerTests.java | 2 +- .../ad/util/ExceptionUtilsTests.java | 1 + .../opensearch/ad/util/IndexUtilsTests.java | 32 +- .../opensearch/ad/util/ThrottlerTests.java | 67 --- .../metrics/CardinalityProfileTests.java | 16 +- .../timeseries/AbstractTimeSeriesTest.java | 4 +- .../NodeStateManagerTests.java | 203 ++++++--- .../{ad => timeseries}/NodeStateTests.java | 15 +- .../opensearch/timeseries/TestHelpers.java | 10 +- .../NoPowermockSearchFeatureDaoTests.java | 20 +- .../feature/SearchFeatureDaoParamTests.java | 11 +- .../feature/SearchFeatureDaoTests.java | 13 +- .../timeseries/util/ClientUtilTests.java | 153 +++++++ ...iResponsesDelegateActionListenerTests.java | 2 +- 165 files changed, 1708 insertions(+), 1655 deletions(-) delete mode 100644 src/main/java/org/opensearch/ad/util/ClientUtil.java delete mode 100644 src/main/java/org/opensearch/ad/util/Throttler.java create mode 100644 src/main/java/org/opensearch/timeseries/AnalysisType.java rename src/main/java/org/opensearch/{ad => timeseries}/CleanState.java (94%) create mode 100644 src/main/java/org/opensearch/timeseries/ExceptionRecorder.java rename src/main/java/org/opensearch/{ad => timeseries}/ExpiringState.java (94%) rename src/main/java/org/opensearch/{ad => timeseries}/MaintenanceState.java (96%) rename src/main/java/org/opensearch/{ad => timeseries}/NodeState.java (57%) rename src/main/java/org/opensearch/{ad => timeseries}/NodeStateManager.java (59%) rename src/main/java/org/opensearch/{ad => timeseries}/feature/SearchFeatureDao.java (95%) create mode 100644 src/main/java/org/opensearch/timeseries/function/BiCheckedFunction.java rename src/main/java/org/opensearch/{ad => timeseries}/ml/SingleStreamModelIdMapper.java (98%) rename src/main/java/org/opensearch/{ad/model/AnomalyDetectorJob.java => timeseries/model/Job.java} (93%) rename src/main/java/org/opensearch/{ad => timeseries}/model/MergeableList.java (91%) rename src/main/java/org/opensearch/{ad => timeseries}/transport/BackPressureRouting.java (98%) create mode 100644 src/main/java/org/opensearch/timeseries/util/ClientUtil.java rename src/main/java/org/opensearch/{ad => timeseries}/util/ExceptionUtil.java (99%) rename src/main/java/org/opensearch/{ad => timeseries}/util/MultiResponsesDelegateActionListener.java (98%) rename src/main/java/org/opensearch/{ad => timeseries}/util/SafeSecurityInjector.java (98%) rename src/main/java/org/opensearch/{ad => timeseries}/util/SecurityClientUtil.java (82%) rename src/main/java/org/opensearch/{ad => timeseries}/util/SecurityUtil.java (86%) rename src/main/java/org/opensearch/{ad/util/ADSafeSecurityInjector.java => timeseries/util/TimeSeriesSafeSecurityInjector.java} (54%) rename src/main/resources/mappings/{anomaly-detectors.json => config.json} (100%) rename src/main/resources/mappings/{anomaly-detector-jobs.json => job.json} (100%) delete mode 100644 src/test/java/org/opensearch/ad/util/ThrottlerTests.java rename 
src/test/java/org/opensearch/{ad => timeseries}/NodeStateManagerTests.java (68%) rename src/test/java/org/opensearch/{ad => timeseries}/NodeStateTests.java (86%) rename src/test/java/org/opensearch/{ad => timeseries}/feature/NoPowermockSearchFeatureDaoTests.java (97%) rename src/test/java/org/opensearch/{ad => timeseries}/feature/SearchFeatureDaoParamTests.java (98%) rename src/test/java/org/opensearch/{ad => timeseries}/feature/SearchFeatureDaoTests.java (97%) create mode 100644 src/test/java/org/opensearch/timeseries/util/ClientUtilTests.java rename src/test/java/org/opensearch/{ad => timeseries}/util/MultiResponsesDelegateActionListenerTests.java (98%) diff --git a/build.gradle b/build.gradle index ab170a9f4..53cd202ad 100644 --- a/build.gradle +++ b/build.gradle @@ -34,7 +34,7 @@ buildscript { js_resource_folder = "src/test/resources/job-scheduler" common_utils_version = System.getProperty("common_utils.version", opensearch_build) job_scheduler_version = System.getProperty("job_scheduler.version", opensearch_build) - bwcVersionShort = "2.9.0" + bwcVersionShort = "2.10.0" bwcVersion = bwcVersionShort + ".0" bwcOpenSearchADDownload = 'https://ci.opensearch.org/ci/dbc/distribution-build-opensearch/' + bwcVersionShort + '/latest/linux/x64/tar/builds/' + 'opensearch/plugins/opensearch-anomaly-detection-' + bwcVersion + '.zip' @@ -672,8 +672,6 @@ List jacocoExclusions = [ 'org.opensearch.timeseries.settings.TimeSeriesSettings', 'org.opensearch.forecast.settings.ForecastSettings', - 'org.opensearch.ad.util.ClientUtil', - 'org.opensearch.ad.transport.CronRequest', 'org.opensearch.ad.AnomalyDetectorRunner', diff --git a/src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java b/src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java index 7c2427847..395359435 100644 --- a/src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java +++ b/src/main/java/org/opensearch/ad/AnomalyDetectorJobRunner.java @@ -34,14 +34,12 @@ import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.ad.transport.AnomalyResultAction; import org.opensearch.ad.transport.AnomalyResultRequest; import org.opensearch.ad.transport.AnomalyResultResponse; import org.opensearch.ad.transport.AnomalyResultTransportAction; -import org.opensearch.ad.util.SecurityUtil; import org.opensearch.client.Client; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; @@ -58,11 +56,15 @@ import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; import org.opensearch.jobscheduler.spi.utils.LockService; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.InternalFailure; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.function.ExecutorFunction; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.util.SecurityUtil; import com.google.common.base.Throwables; @@ -134,12 +136,12 @@ public void runJob(ScheduledJobParameter scheduledJobParameter, JobExecutionCont String 
detectorId = scheduledJobParameter.getName(); log.info("Start to run AD job {}", detectorId); adTaskManager.refreshRealtimeJobRunTime(detectorId); - if (!(scheduledJobParameter instanceof AnomalyDetectorJob)) { + if (!(scheduledJobParameter instanceof Job)) { throw new IllegalArgumentException( - "Job parameter is not instance of AnomalyDetectorJob, type: " + scheduledJobParameter.getClass().getCanonicalName() + "Job parameter is not instance of Job, type: " + scheduledJobParameter.getClass().getCanonicalName() ); } - AnomalyDetectorJob jobParameter = (AnomalyDetectorJob) scheduledJobParameter; + Job jobParameter = (Job) scheduledJobParameter; Instant executionStartTime = Instant.now(); IntervalSchedule schedule = (IntervalSchedule) jobParameter.getSchedule(); Instant detectionStartTime = executionStartTime.minus(schedule.getInterval(), schedule.getUnit()); @@ -148,12 +150,12 @@ public void runJob(ScheduledJobParameter scheduledJobParameter, JobExecutionCont Runnable runnable = () -> { try { - nodeStateManager.getAnomalyDetector(detectorId, ActionListener.wrap(detectorOptional -> { + nodeStateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(detectorOptional -> { if (!detectorOptional.isPresent()) { log.error(new ParameterizedMessage("fail to get detector [{}]", detectorId)); return; } - AnomalyDetector detector = detectorOptional.get(); + AnomalyDetector detector = (AnomalyDetector) detectorOptional.get(); if (jobParameter.getLockDurationSeconds() != null) { lockService @@ -216,7 +218,7 @@ public void runJob(ScheduledJobParameter scheduledJobParameter, JobExecutionCont * @param detector associated detector accessor */ protected void runAdJob( - AnomalyDetectorJob jobParameter, + Job jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, @@ -284,7 +286,7 @@ protected void runAdJob( } private void runAnomalyDetectionJob( - AnomalyDetectorJob jobParameter, + Job jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, @@ -393,7 +395,7 @@ private void runAnomalyDetectionJob( * @param detector associated detector accessor */ protected void handleAdException( - AnomalyDetectorJob jobParameter, + Job jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, @@ -482,7 +484,7 @@ protected void handleAdException( } private void stopAdJobForEndRunException( - AnomalyDetectorJob jobParameter, + Job jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, @@ -524,9 +526,9 @@ private void stopAdJob(String detectorId, ExecutorFunction function) { .createParser(NamedXContentRegistry.EMPTY, LoggingDeprecationHandler.INSTANCE, response.getSourceAsString()) ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); + Job job = Job.parse(parser); if (job.isEnabled()) { - AnomalyDetectorJob newJob = new AnomalyDetectorJob( + Job newJob = new Job( job.getName(), job.getSchedule(), job.getWindowDelay(), @@ -566,7 +568,7 @@ private void stopAdJob(String detectorId, ExecutorFunction function) { } private void indexAnomalyResult( - AnomalyDetectorJob jobParameter, + Job jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, @@ -590,7 +592,7 @@ private void indexAnomalyResult( } private void indexAnomalyResultException( - AnomalyDetectorJob jobParameter, + Job jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, @@ -621,7 +623,7 @@ private void 
indexAnomalyResultException( } private void indexAnomalyResultException( - AnomalyDetectorJob jobParameter, + Job jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, @@ -646,7 +648,7 @@ private void indexAnomalyResultException( } private void indexAnomalyResultException( - AnomalyDetectorJob jobParameter, + Job jobParameter, LockService lockService, LockModel lock, Instant detectionStartTime, @@ -666,7 +668,7 @@ private void indexAnomalyResultException( } } - private void releaseLock(AnomalyDetectorJob jobParameter, LockService lockService, LockModel lock) { + private void releaseLock(Job jobParameter, LockService lockService, LockModel lock) { lockService .release( lock, diff --git a/src/main/java/org/opensearch/ad/AnomalyDetectorProfileRunner.java b/src/main/java/org/opensearch/ad/AnomalyDetectorProfileRunner.java index d9d5e3f7b..6f1dca2a5 100644 --- a/src/main/java/org/opensearch/ad/AnomalyDetectorProfileRunner.java +++ b/src/main/java/org/opensearch/ad/AnomalyDetectorProfileRunner.java @@ -35,7 +35,6 @@ import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.DetectorProfile; import org.opensearch.ad.model.DetectorProfileName; import org.opensearch.ad.model.DetectorState; @@ -49,9 +48,6 @@ import org.opensearch.ad.transport.RCFPollingAction; import org.opensearch.ad.transport.RCFPollingRequest; import org.opensearch.ad.transport.RCFPollingResponse; -import org.opensearch.ad.util.ExceptionUtil; -import org.opensearch.ad.util.MultiResponsesDelegateActionListener; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.xcontent.LoggingDeprecationHandler; @@ -68,11 +64,16 @@ import org.opensearch.search.aggregations.metrics.CardinalityAggregationBuilder; import org.opensearch.search.aggregations.metrics.InternalCardinality; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.common.exception.NotSerializedExceptionName; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; +import org.opensearch.timeseries.util.ExceptionUtil; +import org.opensearch.timeseries.util.MultiResponsesDelegateActionListener; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; public class AnomalyDetectorProfileRunner extends AbstractProfileRunner { @@ -159,7 +160,7 @@ private void prepareProfile( .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString()) ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); + Job job = Job.parse(parser); long enabledTimeMs = job.getEnabledTime().toEpochMilli(); boolean isMultiEntityDetector = detector.isHighCardinality(); @@ -315,6 +316,7 @@ private void profileEntityStats(MultiResponsesDelegateActionListener profiles, - AnomalyDetectorJob job, + Job job, boolean forMultiEntityDetector, MultiResponsesDelegateActionListener listener ) { @@ -430,7 +433,7 @@ 
private void profileModels( private ActionListener onModelResponse( AnomalyDetector detector, Set profilesToCollect, - AnomalyDetectorJob job, + Job job, MultiResponsesDelegateActionListener listener ) { boolean isMultientityDetector = detector.isHighCardinality(); @@ -464,7 +467,7 @@ private ActionListener onModelResponse( } private void profileMultiEntityDetectorStateRelated( - AnomalyDetectorJob job, + Job job, Set profilesToCollect, ProfileResponse profileResponse, DetectorProfile.Builder profileBuilder, diff --git a/src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java b/src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java index 90b7d350f..f94f40306 100644 --- a/src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java +++ b/src/main/java/org/opensearch/ad/AnomalyDetectorRunner.java @@ -33,11 +33,11 @@ import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.model.EntityAnomalyResult; -import org.opensearch.ad.util.MultiResponsesDelegateActionListener; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.model.FeatureData; +import org.opensearch.timeseries.util.MultiResponsesDelegateActionListener; /** * Runner to trigger an anomaly detector. diff --git a/src/main/java/org/opensearch/ad/EntityProfileRunner.java b/src/main/java/org/opensearch/ad/EntityProfileRunner.java index 491e8088f..479260e21 100644 --- a/src/main/java/org/opensearch/ad/EntityProfileRunner.java +++ b/src/main/java/org/opensearch/ad/EntityProfileRunner.java @@ -28,7 +28,6 @@ import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.model.EntityProfile; import org.opensearch.ad.model.EntityProfileName; @@ -38,8 +37,6 @@ import org.opensearch.ad.transport.EntityProfileAction; import org.opensearch.ad.transport.EntityProfileRequest; import org.opensearch.ad.transport.EntityProfileResponse; -import org.opensearch.ad.util.MultiResponsesDelegateActionListener; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.routing.Preference; import org.opensearch.common.xcontent.LoggingDeprecationHandler; @@ -53,11 +50,15 @@ import org.opensearch.index.query.TermQueryBuilder; import org.opensearch.search.aggregations.AggregationBuilders; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.util.MultiResponsesDelegateActionListener; import org.opensearch.timeseries.util.ParseUtils; +import org.opensearch.timeseries.util.SecurityClientUtil; public class EntityProfileRunner extends AbstractProfileRunner { private final Logger logger = LogManager.getLogger(EntityProfileRunner.class); @@ -188,6 +189,7 @@ private void validateEntity( client::search, detector.getId(), client, + AnalysisType.AD, searchResponseListener ); @@ -228,7 +230,7 @@ private void getJob( .createParser(xContentRegistry, 
LoggingDeprecationHandler.INSTANCE, getResponse.getSourceAsString()) ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); + Job job = Job.parse(parser); int totalResponsesToWait = 0; if (profilesToCollect.contains(EntityProfileName.INIT_PROGRESS) @@ -331,7 +333,7 @@ private void profileStateRelated( Entity entityValue, Set profilesToCollect, AnomalyDetector detector, - AnomalyDetectorJob job, + Job job, MultiResponsesDelegateActionListener delegateListener ) { if (totalUpdates == 0) { diff --git a/src/main/java/org/opensearch/ad/ExecuteADResultResponseRecorder.java b/src/main/java/org/opensearch/ad/ExecuteADResultResponseRecorder.java index 4b05295ae..028ea5550 100644 --- a/src/main/java/org/opensearch/ad/ExecuteADResultResponseRecorder.java +++ b/src/main/java/org/opensearch/ad/ExecuteADResultResponseRecorder.java @@ -38,13 +38,14 @@ import org.opensearch.ad.transport.RCFPollingAction; import org.opensearch.ad.transport.RCFPollingRequest; import org.opensearch.ad.transport.handler.AnomalyIndexHandler; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.client.Client; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.unit.TimeValue; import org.opensearch.commons.authuser.User; import org.opensearch.search.SearchHits; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; @@ -52,6 +53,7 @@ import org.opensearch.timeseries.model.FeatureData; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; +import org.opensearch.timeseries.util.ExceptionUtil; public class ExecuteADResultResponseRecorder { private static final Logger log = LogManager.getLogger(ExecuteADResultResponseRecorder.class); @@ -337,12 +339,12 @@ private void confirmTotalRCFUpdatesFound( String error, ActionListener listener ) { - nodeStateManager.getAnomalyDetector(detectorId, ActionListener.wrap(detectorOptional -> { + nodeStateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(detectorOptional -> { if (!detectorOptional.isPresent()) { listener.onFailure(new TimeSeriesException(detectorId, "fail to get detector")); return; } - nodeStateManager.getAnomalyDetectorJob(detectorId, ActionListener.wrap(jobOptional -> { + nodeStateManager.getJob(detectorId, ActionListener.wrap(jobOptional -> { if (!jobOptional.isPresent()) { listener.onFailure(new TimeSeriesException(detectorId, "fail to get job")); return; @@ -350,7 +352,7 @@ private void confirmTotalRCFUpdatesFound( ProfileUtil .confirmDetectorRealtimeInitStatus( - detectorOptional.get(), + (AnomalyDetector) detectorOptional.get(), jobOptional.get().getEnabledTime().toEpochMilli(), client, ActionListener.wrap(searchResponse -> { diff --git a/src/main/java/org/opensearch/ad/caching/CacheBuffer.java b/src/main/java/org/opensearch/ad/caching/CacheBuffer.java index d9ec0143d..234a72932 100644 --- a/src/main/java/org/opensearch/ad/caching/CacheBuffer.java +++ b/src/main/java/org/opensearch/ad/caching/CacheBuffer.java @@ -25,7 +25,6 @@ import org.apache.commons.lang.builder.HashCodeBuilder; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; 
-import org.opensearch.ad.ExpiringState; import org.opensearch.ad.MemoryTracker; import org.opensearch.ad.MemoryTracker.Origin; import org.opensearch.ad.ml.EntityModel; @@ -36,6 +35,7 @@ import org.opensearch.ad.ratelimit.CheckpointWriteWorker; import org.opensearch.ad.ratelimit.RequestPriority; import org.opensearch.ad.util.DateUtils; +import org.opensearch.timeseries.ExpiringState; /** * We use a layered cache to manage active entities’ states. We have a two-level diff --git a/src/main/java/org/opensearch/ad/caching/DoorKeeper.java b/src/main/java/org/opensearch/ad/caching/DoorKeeper.java index 96a18d8f6..5bb5e3cd5 100644 --- a/src/main/java/org/opensearch/ad/caching/DoorKeeper.java +++ b/src/main/java/org/opensearch/ad/caching/DoorKeeper.java @@ -17,8 +17,8 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.ad.ExpiringState; -import org.opensearch.ad.MaintenanceState; +import org.opensearch.timeseries.ExpiringState; +import org.opensearch.timeseries.MaintenanceState; import com.google.common.base.Charsets; import com.google.common.hash.BloomFilter; diff --git a/src/main/java/org/opensearch/ad/caching/EntityCache.java b/src/main/java/org/opensearch/ad/caching/EntityCache.java index 0a6a303d6..287994efd 100644 --- a/src/main/java/org/opensearch/ad/caching/EntityCache.java +++ b/src/main/java/org/opensearch/ad/caching/EntityCache.java @@ -16,13 +16,13 @@ import java.util.Optional; import org.apache.commons.lang3.tuple.Pair; -import org.opensearch.ad.CleanState; import org.opensearch.ad.DetectorModelSize; -import org.opensearch.ad.MaintenanceState; import org.opensearch.ad.ml.EntityModel; import org.opensearch.ad.ml.ModelState; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.ModelProfile; +import org.opensearch.timeseries.CleanState; +import org.opensearch.timeseries.MaintenanceState; import org.opensearch.timeseries.model.Entity; public interface EntityCache extends MaintenanceState, CleanState, DetectorModelSize { diff --git a/src/main/java/org/opensearch/ad/cluster/ADDataMigrator.java b/src/main/java/org/opensearch/ad/cluster/ADDataMigrator.java index 62702d15c..fbe9787e9 100644 --- a/src/main/java/org/opensearch/ad/cluster/ADDataMigrator.java +++ b/src/main/java/org/opensearch/ad/cluster/ADDataMigrator.java @@ -42,9 +42,7 @@ import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.DetectorInternalState; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.xcontent.XContentFactory; @@ -60,6 +58,8 @@ import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.function.ExecutorFunction; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.util.ExceptionUtil; /** * Migrate AD data to support backward compatibility. 
@@ -137,13 +137,13 @@ public void migrateDetectorInternalStateToRealtimeTask() { logger.info("No anomaly detector job found, no need to migrate"); return; } - ConcurrentLinkedQueue detectorJobs = new ConcurrentLinkedQueue<>(); + ConcurrentLinkedQueue detectorJobs = new ConcurrentLinkedQueue<>(); Iterator iterator = r.getHits().iterator(); while (iterator.hasNext()) { SearchHit searchHit = iterator.next(); try (XContentParser parser = createXContentParserFromRegistry(xContentRegistry, searchHit.getSourceRef())) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); + Job job = Job.parse(parser); detectorJobs.add(job); } catch (IOException e) { logger.error("Fail to parse AD job " + searchHit.getId(), e); @@ -168,8 +168,8 @@ public void migrateDetectorInternalStateToRealtimeTask() { * @param detectorJobs realtime AD jobs * @param backfillAllJob backfill task for all realtime job or not */ - public void backfillRealtimeTask(ConcurrentLinkedQueue detectorJobs, boolean backfillAllJob) { - AnomalyDetectorJob job = detectorJobs.poll(); + public void backfillRealtimeTask(ConcurrentLinkedQueue detectorJobs, boolean backfillAllJob) { + Job job = detectorJobs.poll(); if (job == null) { logger.info("AD data migration done."); if (backfillAllJob) { @@ -203,9 +203,9 @@ public void backfillRealtimeTask(ConcurrentLinkedQueue detec } private void checkIfRealtimeTaskExistsAndBackfill( - AnomalyDetectorJob job, + Job job, ExecutorFunction createRealtimeTaskFunction, - ConcurrentLinkedQueue detectorJobs, + ConcurrentLinkedQueue detectorJobs, boolean migrateAll ) { String jobId = job.getName(); @@ -233,12 +233,7 @@ private void checkIfRealtimeTaskExistsAndBackfill( })); } - private void createRealtimeADTask( - AnomalyDetectorJob job, - String error, - ConcurrentLinkedQueue detectorJobs, - boolean migrateAll - ) { + private void createRealtimeADTask(Job job, String error, ConcurrentLinkedQueue detectorJobs, boolean migrateAll) { client.get(new GetRequest(CommonName.CONFIG_INDEX, job.getName()), ActionListener.wrap(r -> { if (r != null && r.isExists()) { try (XContentParser parser = createXContentParserFromRegistry(xContentRegistry, r.getSourceAsBytesRef())) { diff --git a/src/main/java/org/opensearch/ad/cluster/ClusterManagerEventListener.java b/src/main/java/org/opensearch/ad/cluster/ClusterManagerEventListener.java index 9cf1dd905..42b2f3fa8 100644 --- a/src/main/java/org/opensearch/ad/cluster/ClusterManagerEventListener.java +++ b/src/main/java/org/opensearch/ad/cluster/ClusterManagerEventListener.java @@ -16,7 +16,6 @@ import org.opensearch.ad.cluster.diskcleanup.IndexCleanup; import org.opensearch.ad.cluster.diskcleanup.ModelCheckpointIndexRetention; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.ad.util.DateUtils; import org.opensearch.client.Client; import org.opensearch.cluster.LocalNodeClusterManagerListener; @@ -27,6 +26,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.threadpool.Scheduler.Cancellable; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.util.ClientUtil; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; import com.google.common.annotations.VisibleForTesting; diff --git a/src/main/java/org/opensearch/ad/cluster/DailyCron.java b/src/main/java/org/opensearch/ad/cluster/DailyCron.java index e2b2b8808..3c1f618ff 100644 --- a/src/main/java/org/opensearch/ad/cluster/DailyCron.java +++ 
b/src/main/java/org/opensearch/ad/cluster/DailyCron.java @@ -19,12 +19,12 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.support.IndicesOptions; import org.opensearch.ad.constant.ADCommonName; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.QueryBuilders; import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.index.reindex.DeleteByQueryRequest; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.util.ClientUtil; @Deprecated public class DailyCron implements Runnable { diff --git a/src/main/java/org/opensearch/ad/cluster/HashRing.java b/src/main/java/org/opensearch/ad/cluster/HashRing.java index 3e6ba0b37..eb9e5b2fb 100644 --- a/src/main/java/org/opensearch/ad/cluster/HashRing.java +++ b/src/main/java/org/opensearch/ad/cluster/HashRing.java @@ -11,7 +11,7 @@ package org.opensearch.ad.cluster; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.COOLDOWN_MINUTES; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_COOLDOWN_MINUTES; import java.time.Clock; import java.util.ArrayList; @@ -37,7 +37,6 @@ import org.opensearch.action.admin.cluster.node.info.NodesInfoRequest; import org.opensearch.action.admin.cluster.node.info.PluginsAndModules; import org.opensearch.ad.ml.ModelManager; -import org.opensearch.ad.ml.SingleStreamModelIdMapper; import org.opensearch.client.AdminClient; import org.opensearch.client.Client; import org.opensearch.client.ClusterAdminClient; @@ -52,6 +51,7 @@ import org.opensearch.plugins.PluginInfo; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; import com.google.common.collect.Sets; @@ -109,8 +109,8 @@ public HashRing( this.nodeFilter = nodeFilter; this.buildHashRingSemaphore = new Semaphore(1); this.clock = clock; - this.coolDownPeriodForRealtimeAD = COOLDOWN_MINUTES.get(settings); - clusterService.getClusterSettings().addSettingsUpdateConsumer(COOLDOWN_MINUTES, it -> coolDownPeriodForRealtimeAD = it); + this.coolDownPeriodForRealtimeAD = AD_COOLDOWN_MINUTES.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(AD_COOLDOWN_MINUTES, it -> coolDownPeriodForRealtimeAD = it); this.lastUpdateForRealtimeAD = 0; this.client = client; @@ -387,7 +387,7 @@ private void rebuildCirclesForRealtimeAD() { * 1. There is node change event not consumed, and * 2. Have passed cool down period from last hash ring update time. * - * Check {@link org.opensearch.ad.settings.AnomalyDetectorSettings#COOLDOWN_MINUTES} about + * Check {@link org.opensearch.ad.settings.AnomalyDetectorSettings#AD_COOLDOWN_MINUTES} about * cool down settings. * * Why we need to wait for some cooldown period before rebuilding hash ring? 
diff --git a/src/main/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanup.java b/src/main/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanup.java index 325361aec..3f8fe461e 100644 --- a/src/main/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanup.java +++ b/src/main/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanup.java @@ -22,7 +22,6 @@ import org.opensearch.action.admin.indices.stats.IndicesStatsResponse; import org.opensearch.action.admin.indices.stats.ShardStats; import org.opensearch.action.support.IndicesOptions; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.util.concurrent.ThreadContext; @@ -30,6 +29,7 @@ import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.index.reindex.DeleteByQueryRequest; import org.opensearch.index.store.StoreStats; +import org.opensearch.timeseries.util.ClientUtil; /** * Clean up the old docs for indices. diff --git a/src/main/java/org/opensearch/ad/constant/ADCommonMessages.java b/src/main/java/org/opensearch/ad/constant/ADCommonMessages.java index e20dc8fd1..31adb1dac 100644 --- a/src/main/java/org/opensearch/ad/constant/ADCommonMessages.java +++ b/src/main/java/org/opensearch/ad/constant/ADCommonMessages.java @@ -24,8 +24,6 @@ public class ADCommonMessages { public static String FAIL_TO_PARSE_DETECTOR_MSG = "Fail to parse detector with id: "; public static String FAIL_TO_GET_PROFILE_MSG = "Fail to get profile for detector "; public static String FAIL_TO_GET_TOTAL_ENTITIES = "Failed to get total entities for detector "; - public static String FAIL_TO_GET_USER_INFO = "Unable to get user information from detector "; - public static String NO_PERMISSION_TO_ACCESS_DETECTOR = "User does not have permissions to access detector: "; public static String CATEGORICAL_FIELD_NUMBER_SURPASSED = "We don't support categorical fields more than "; public static String EMPTY_PROFILES_COLLECT = "profiles to collect are missing or invalid"; public static String FAIL_FETCH_ERR_MSG = "Fail to fetch profile for "; diff --git a/src/main/java/org/opensearch/ad/feature/CompositeRetriever.java b/src/main/java/org/opensearch/ad/feature/CompositeRetriever.java index d41bdf76e..c06f8b3ec 100644 --- a/src/main/java/org/opensearch/ad/feature/CompositeRetriever.java +++ b/src/main/java/org/opensearch/ad/feature/CompositeRetriever.java @@ -29,7 +29,6 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.IndicesOptions; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; @@ -46,9 +45,11 @@ import org.opensearch.search.aggregations.bucket.composite.CompositeAggregationBuilder; import org.opensearch.search.aggregations.bucket.composite.TermsValuesSourceBuilder; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.util.ParseUtils; +import org.opensearch.timeseries.util.SecurityClientUtil; /** * @@ -220,6 +221,7 @@ public void onFailure(Exception e) { client::search, anomalyDetector.getId(), client, + AnalysisType.AD, searchResponseListener ); } diff --git a/src/main/java/org/opensearch/ad/feature/FeatureManager.java 
b/src/main/java/org/opensearch/ad/feature/FeatureManager.java index f6fd8ded0..9028e03e0 100644 --- a/src/main/java/org/opensearch/ad/feature/FeatureManager.java +++ b/src/main/java/org/opensearch/ad/feature/FeatureManager.java @@ -40,12 +40,14 @@ import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ThreadedActionListener; -import org.opensearch.ad.CleanState; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.CleanState; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.dataprocessor.Imputer; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.model.Entity; /** @@ -156,7 +158,7 @@ public void getCurrentFeatures(AnomalyDetector detector, long startTime, long en if (missingRanges.size() > 0) { try { - searchFeatureDao.getFeatureSamplesForPeriods(detector, missingRanges, ActionListener.wrap(points -> { + searchFeatureDao.getFeatureSamplesForPeriods(detector, missingRanges, AnalysisType.AD, ActionListener.wrap(points -> { for (int i = 0; i < points.size(); i++) { Optional point = points.get(i); long rangeEndTime = missingRanges.get(i).getValue(); @@ -306,6 +308,7 @@ private void getColdStartSamples(Optional latest, AnomalyDetector detector .getFeatureSamplesForPeriods( detector, sampleRanges, + AnalysisType.AD, new ThreadedActionListener<>(logger, threadPool, adThreadPoolName, getFeaturesListener, false) ); } catch (IOException e) { @@ -545,7 +548,14 @@ void getPreviewSamplesInRangesForEntity( ActionListener>, double[][]>> listener ) throws IOException { searchFeatureDao - .getColdStartSamplesForPeriods(detector, sampleRanges, entity, true, getSamplesRangesListener(sampleRanges, listener)); + .getColdStartSamplesForPeriods( + detector, + sampleRanges, + Optional.ofNullable(entity), + true, + AnalysisType.AD, + getSamplesRangesListener(sampleRanges, listener) + ); } private ActionListener>> getSamplesRangesListener( @@ -577,7 +587,8 @@ void getSamplesForRanges( List> sampleRanges, ActionListener>, double[][]>> listener ) throws IOException { - searchFeatureDao.getFeatureSamplesForPeriods(detector, sampleRanges, getSamplesRangesListener(sampleRanges, listener)); + searchFeatureDao + .getFeatureSamplesForPeriods(detector, sampleRanges, AnalysisType.AD, getSamplesRangesListener(sampleRanges, listener)); } /** diff --git a/src/main/java/org/opensearch/ad/ml/CheckpointDao.java b/src/main/java/org/opensearch/ad/ml/CheckpointDao.java index 738acd197..48e071543 100644 --- a/src/main/java/org/opensearch/ad/ml/CheckpointDao.java +++ b/src/main/java/org/opensearch/ad/ml/CheckpointDao.java @@ -49,15 +49,12 @@ import org.opensearch.action.get.MultiGetAction; import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.get.MultiGetResponse; -import org.opensearch.action.index.IndexRequest; -import org.opensearch.action.index.IndexResponse; import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.update.UpdateRequest; import org.opensearch.action.update.UpdateResponse; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.ADIndex; import org.opensearch.ad.indices.ADIndexManagement; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import 
org.opensearch.index.IndexNotFoundException; import org.opensearch.index.query.MatchQueryBuilder; @@ -68,7 +65,9 @@ import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; import org.opensearch.timeseries.model.Entity; +import org.opensearch.timeseries.util.ClientUtil; import com.amazon.randomcutforest.RandomCutForest; import com.amazon.randomcutforest.config.Precision; @@ -188,15 +187,11 @@ public CheckpointDao( this.anomalyRate = anomalyRate; } - private void saveModelCheckpointSync(Map source, String modelId) { - clientUtil.timedRequest(new IndexRequest(indexName).id(modelId).source(source), logger, client::index); - } - private void putModelCheckpoint(String modelId, Map source, ActionListener listener) { if (indexUtil.doesCheckpointIndexExist()) { saveModelCheckpointAsync(source, modelId, listener); } else { - onCheckpointNotExist(source, modelId, true, listener); + onCheckpointNotExist(source, modelId, listener); } } @@ -234,25 +229,17 @@ public void putThresholdCheckpoint(String modelId, ThresholdingModel threshold, putModelCheckpoint(modelId, source, listener); } - private void onCheckpointNotExist(Map source, String modelId, boolean isAsync, ActionListener listener) { + private void onCheckpointNotExist(Map source, String modelId, ActionListener listener) { indexUtil.initCheckpointIndex(ActionListener.wrap(initResponse -> { if (initResponse.isAcknowledged()) { - if (isAsync) { - saveModelCheckpointAsync(source, modelId, listener); - } else { - saveModelCheckpointSync(source, modelId); - } + saveModelCheckpointAsync(source, modelId, listener); } else { throw new RuntimeException("Creating checkpoint with mappings call not acknowledged."); } }, exception -> { if (ExceptionsHelper.unwrapCause(exception) instanceof ResourceAlreadyExistsException) { // It is possible the index has been created while we sending the create request - if (isAsync) { - saveModelCheckpointAsync(source, modelId, listener); - } else { - saveModelCheckpointSync(source, modelId); - } + saveModelCheckpointAsync(source, modelId, listener); } else { logger.error(String.format(Locale.ROOT, "Unexpected error creating index %s", indexName), exception); } diff --git a/src/main/java/org/opensearch/ad/ml/EntityColdStarter.java b/src/main/java/org/opensearch/ad/ml/EntityColdStarter.java index 3f198285f..22f7ac32c 100644 --- a/src/main/java/org/opensearch/ad/ml/EntityColdStarter.java +++ b/src/main/java/org/opensearch/ad/ml/EntityColdStarter.java @@ -11,7 +11,7 @@ package org.opensearch.ad.ml; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.COOLDOWN_MINUTES; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_COOLDOWN_MINUTES; import java.time.Clock; import java.time.Duration; @@ -37,26 +37,28 @@ import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ThreadedActionListener; -import org.opensearch.ad.CleanState; -import org.opensearch.ad.MaintenanceState; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.caching.DoorKeeper; import org.opensearch.ad.feature.FeatureManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.ratelimit.CheckpointWriteWorker; import org.opensearch.ad.ratelimit.RequestPriority; 
import org.opensearch.ad.settings.ADEnabledSetting; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.CleanState; +import org.opensearch.timeseries.MaintenanceState; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.dataprocessor.Imputer; +import org.opensearch.timeseries.feature.SearchFeatureDao; +import org.opensearch.timeseries.model.Config; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.settings.TimeSeriesSettings; +import org.opensearch.timeseries.util.ExceptionUtil; import com.amazon.randomcutforest.config.Precision; import com.amazon.randomcutforest.parkservices.ThresholdedRandomCutForest; @@ -154,7 +156,7 @@ public EntityColdStarter( this.searchFeatureDao = searchFeatureDao; this.thresholdMinPvalue = thresholdMinPvalue; this.featureManager = featureManager; - this.coolDownMinutes = (int) (COOLDOWN_MINUTES.get(settings).getMinutes()); + this.coolDownMinutes = (int) (AD_COOLDOWN_MINUTES.get(settings).getMinutes()); this.doorKeepers = new ConcurrentHashMap<>(); this.modelTtl = modelTtl; this.checkpointWriteQueue = checkpointWriteQueue; @@ -410,13 +412,13 @@ private void trainModelFromDataSegments( * @param listener listener to return training data */ private void getEntityColdStartData(String detectorId, Entity entity, ActionListener>> listener) { - ActionListener> getDetectorListener = ActionListener.wrap(detectorOp -> { + ActionListener> getDetectorListener = ActionListener.wrap(detectorOp -> { if (!detectorOp.isPresent()) { listener.onFailure(new EndRunException(detectorId, "AnomalyDetector is not available.", false)); return; } List coldStartData = new ArrayList<>(); - AnomalyDetector detector = detectorOp.get(); + AnomalyDetector detector = (AnomalyDetector) detectorOp.get(); ActionListener> minTimeListener = ActionListener.wrap(earliest -> { if (earliest.isPresent()) { @@ -441,17 +443,19 @@ private void getEntityColdStartData(String detectorId, Entity entity, ActionList }, listener::onFailure); searchFeatureDao - .getEntityMinDataTime( + .getMinDataTime( detector, - entity, + Optional.ofNullable(entity), + AnalysisType.AD, new ThreadedActionListener<>(logger, threadPool, TimeSeriesAnalyticsPlugin.AD_THREAD_POOL_NAME, minTimeListener, false) ); }, listener::onFailure); nodeStateManager - .getAnomalyDetector( + .getConfig( detectorId, + AnalysisType.AD, new ThreadedActionListener<>(logger, threadPool, TimeSeriesAnalyticsPlugin.AD_THREAD_POOL_NAME, getDetectorListener, false) ); } @@ -552,13 +556,14 @@ private void getFeatures( .getColdStartSamplesForPeriods( detector, sampleRanges, - entity, + Optional.ofNullable(entity), // Accept empty bucket. // 0, as returned by the engine should constitute a valid answer, “null” is a missing answer — it may be that 0 // is meaningless in some case, but 0 is also meaningful in some cases. It may be that the query defining the // metric is ill-formed, but that cannot be solved by cold-start strategy of the AD plugin — if we attempt to do // that, we will have issues with legitimate interpretations of 0. 
true, + AnalysisType.AD, new ThreadedActionListener<>( logger, threadPool, @@ -660,14 +665,14 @@ private List> getTrainSampleRanges( * cold start queue to pull another request (if any) to execute. */ public void trainModel(Entity entity, String detectorId, ModelState modelState, ActionListener listener) { - nodeStateManager.getAnomalyDetector(detectorId, ActionListener.wrap(detectorOptional -> { + nodeStateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(detectorOptional -> { if (false == detectorOptional.isPresent()) { logger.warn(new ParameterizedMessage("AnomalyDetector [{}] is not available.", detectorId)); listener.onFailure(new TimeSeriesException(detectorId, "fail to find detector")); return; } - AnomalyDetector detector = detectorOptional.get(); + AnomalyDetector detector = (AnomalyDetector) detectorOptional.get(); Queue samples = modelState.getModel().getSamples(); String modelId = modelState.getModelId(); diff --git a/src/main/java/org/opensearch/ad/ml/ModelManager.java b/src/main/java/org/opensearch/ad/ml/ModelManager.java index 464297193..c882de0ec 100644 --- a/src/main/java/org/opensearch/ad/ml/ModelManager.java +++ b/src/main/java/org/opensearch/ad/ml/ModelManager.java @@ -45,6 +45,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.settings.TimeSeriesSettings; diff --git a/src/main/java/org/opensearch/ad/ml/ModelState.java b/src/main/java/org/opensearch/ad/ml/ModelState.java index 9e909bc58..bb9050ecb 100644 --- a/src/main/java/org/opensearch/ad/ml/ModelState.java +++ b/src/main/java/org/opensearch/ad/ml/ModelState.java @@ -17,8 +17,8 @@ import java.util.HashMap; import java.util.Map; -import org.opensearch.ad.ExpiringState; import org.opensearch.ad.constant.ADCommonName; +import org.opensearch.timeseries.ExpiringState; import org.opensearch.timeseries.constant.CommonName; /** diff --git a/src/main/java/org/opensearch/ad/ratelimit/BatchWorker.java b/src/main/java/org/opensearch/ad/ratelimit/BatchWorker.java index 50b051f6d..f59afd6bf 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/BatchWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/BatchWorker.java @@ -20,12 +20,12 @@ import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ThreadedActionListener; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; /** diff --git a/src/main/java/org/opensearch/ad/ratelimit/CheckpointMaintainWorker.java b/src/main/java/org/opensearch/ad/ratelimit/CheckpointMaintainWorker.java index 049b2d587..8f9a543f7 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/CheckpointMaintainWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/CheckpointMaintainWorker.java @@ -23,13 +23,13 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import 
org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; public class CheckpointMaintainWorker extends ScheduledWorker { private static final Logger LOG = LogManager.getLogger(CheckpointMaintainWorker.class); diff --git a/src/main/java/org/opensearch/ad/ratelimit/CheckpointReadWorker.java b/src/main/java/org/opensearch/ad/ratelimit/CheckpointReadWorker.java index e06d3e08e..806a96345 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/CheckpointReadWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/CheckpointReadWorker.java @@ -33,7 +33,6 @@ import org.opensearch.action.get.MultiGetItemResponse; import org.opensearch.action.get.MultiGetRequest; import org.opensearch.action.get.MultiGetResponse; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.constant.ADCommonName; @@ -47,16 +46,19 @@ import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.stats.ADStats; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.index.IndexNotFoundException; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.constant.CommonMessages; +import org.opensearch.timeseries.model.Config; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.stats.StatNames; +import org.opensearch.timeseries.util.ExceptionUtil; import org.opensearch.timeseries.util.ParseUtils; /** @@ -310,8 +312,9 @@ private void processCheckpointIteration( } nodeStateManager - .getAnomalyDetector( + .getConfig( detectorId, + AnalysisType.AD, onGetDetector( origRequest, i, @@ -336,7 +339,7 @@ private void processCheckpointIteration( } } - private ActionListener> onGetDetector( + private ActionListener> onGetDetector( EntityFeatureRequest origRequest, int index, String detectorId, @@ -354,7 +357,7 @@ private ActionListener> onGetDetector( return; } - AnomalyDetector detector = detectorOptional.get(); + AnomalyDetector detector = (AnomalyDetector) detectorOptional.get(); ModelState modelState = modelManager .processEntityCheckpoint(checkpoint, entity, modelId, detectorId, detector.getShingleSize()); diff --git a/src/main/java/org/opensearch/ad/ratelimit/CheckpointWriteWorker.java b/src/main/java/org/opensearch/ad/ratelimit/CheckpointWriteWorker.java index dd32e21c4..642a155e3 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/CheckpointWriteWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/CheckpointWriteWorker.java @@ -31,18 +31,20 @@ import org.opensearch.action.bulk.BulkRequest; import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.update.UpdateRequest; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.ml.CheckpointDao; import org.opensearch.ad.ml.EntityModel; import org.opensearch.ad.ml.ModelState; import 
org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.core.common.Strings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; +import org.opensearch.timeseries.model.Config; +import org.opensearch.timeseries.util.ExceptionUtil; public class CheckpointWriteWorker extends BatchWorker { private static final Logger LOG = LogManager.getLogger(CheckpointWriteWorker.class); @@ -163,11 +165,11 @@ public void write(ModelState modelState, boolean forceWrite, Reques return; } - nodeStateManager.getAnomalyDetector(detectorId, onGetDetector(detectorId, modelId, modelState, priority)); + nodeStateManager.getConfig(detectorId, AnalysisType.AD, onGetDetector(detectorId, modelId, modelState, priority)); } } - private ActionListener> onGetDetector( + private ActionListener> onGetDetector( String detectorId, String modelId, ModelState modelState, @@ -179,7 +181,7 @@ private ActionListener> onGetDetector( return; } - AnomalyDetector detector = detectorOptional.get(); + AnomalyDetector detector = (AnomalyDetector) detectorOptional.get(); try { Map source = checkpoint.toIndexSource(modelState); @@ -216,13 +218,13 @@ private ActionListener> onGetDetector( } public void writeAll(List> modelStates, String detectorId, boolean forceWrite, RequestPriority priority) { - ActionListener> onGetForAll = ActionListener.wrap(detectorOptional -> { + ActionListener> onGetForAll = ActionListener.wrap(detectorOptional -> { if (false == detectorOptional.isPresent()) { LOG.warn(new ParameterizedMessage("AnomalyDetector [{}] is not available.", detectorId)); return; } - AnomalyDetector detector = detectorOptional.get(); + AnomalyDetector detector = (AnomalyDetector) detectorOptional.get(); try { List allRequests = new ArrayList<>(); for (ModelState state : modelStates) { @@ -269,6 +271,6 @@ public void writeAll(List> modelStates, String detectorI }, exception -> { LOG.error(new ParameterizedMessage("fail to get detector [{}]", detectorId), exception); }); - nodeStateManager.getAnomalyDetector(detectorId, onGetForAll); + nodeStateManager.getConfig(detectorId, AnalysisType.AD, onGetForAll); } } diff --git a/src/main/java/org/opensearch/ad/ratelimit/ColdEntityWorker.java b/src/main/java/org/opensearch/ad/ratelimit/ColdEntityWorker.java index fb834e089..1c114217f 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/ColdEntityWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/ColdEntityWorker.java @@ -20,13 +20,13 @@ import java.util.Random; import java.util.stream.Collectors; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; /** * A queue slowly releasing low-priority requests to CheckpointReadQueue diff --git a/src/main/java/org/opensearch/ad/ratelimit/ConcurrentWorker.java b/src/main/java/org/opensearch/ad/ratelimit/ConcurrentWorker.java index 62bd0a2bd..f34944391 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/ConcurrentWorker.java +++ 
b/src/main/java/org/opensearch/ad/ratelimit/ConcurrentWorker.java @@ -19,12 +19,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; /** diff --git a/src/main/java/org/opensearch/ad/ratelimit/EntityColdStartWorker.java b/src/main/java/org/opensearch/ad/ratelimit/EntityColdStartWorker.java index 53d05ff11..9a96aa0e3 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/EntityColdStartWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/EntityColdStartWorker.java @@ -24,7 +24,6 @@ import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.opensearch.action.ActionListener; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.ml.EntityColdStarter; @@ -32,11 +31,13 @@ import org.opensearch.ad.ml.ModelManager.ModelType; import org.opensearch.ad.ml.ModelState; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; +import org.opensearch.timeseries.util.ExceptionUtil; /** * A queue for HCAD model training (a.k.a. cold start). 
As model training is a @@ -121,7 +122,7 @@ protected void executeRequest(EntityRequest coldStartRequest, ActionListener coldStartListener = ActionListener.wrap(r -> { - nodeStateManager.getAnomalyDetector(detectorId, ActionListener.wrap(detectorOptional -> { + nodeStateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(detectorOptional -> { try { if (!detectorOptional.isPresent()) { LOG @@ -133,7 +134,7 @@ protected void executeRequest(EntityRequest coldStartRequest, ActionListener(); this.cooldownStart = Instant.MIN; - this.coolDownMinutes = (int) (COOLDOWN_MINUTES.get(settings).getMinutes()); + this.coolDownMinutes = (int) (AD_COOLDOWN_MINUTES.get(settings).getMinutes()); this.maintenanceFreqConstant = maintenanceFreqConstant; this.stateTtl = stateTtl; this.nodeStateManager = nodeStateManager; diff --git a/src/main/java/org/opensearch/ad/ratelimit/ResultWriteWorker.java b/src/main/java/org/opensearch/ad/ratelimit/ResultWriteWorker.java index 2381e5db9..c0ccbcfec 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/ResultWriteWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/ResultWriteWorker.java @@ -26,24 +26,26 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.DocWriteRequest; import org.opensearch.action.index.IndexRequest; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.transport.ADResultBulkRequest; import org.opensearch.ad.transport.ADResultBulkResponse; import org.opensearch.ad.transport.handler.MultiEntityResultHandler; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentHelper; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.core.common.bytes.BytesReference; +import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; +import org.opensearch.timeseries.model.Config; +import org.opensearch.timeseries.util.ExceptionUtil; public class ResultWriteWorker extends BatchWorker { private static final Logger LOG = LogManager.getLogger(ResultWriteWorker.class); @@ -155,10 +157,10 @@ private void enqueueRetryRequestIteration(List requestToRetry, int } AnomalyResult result = resultToRetry.get(); String detectorId = result.getConfigId(); - nodeStateManager.getAnomalyDetector(detectorId, onGetDetector(requestToRetry, index, detectorId, result)); + nodeStateManager.getConfig(detectorId, AnalysisType.AD, onGetDetector(requestToRetry, index, detectorId, result)); } - private ActionListener> onGetDetector( + private ActionListener> onGetDetector( List requestToRetry, int index, String detectorId, @@ -171,7 +173,7 @@ private ActionListener> onGetDetector( return; } - AnomalyDetector detector = detectorOptional.get(); + AnomalyDetector detector = (AnomalyDetector) detectorOptional.get(); super.put( new ResultWriteRequest( // expire based on execute start time @@ -200,7 +202,7 @@ private Optional getAnomalyResult(DocWriteRequest request) { // we send IndexRequest 
previously IndexRequest indexRequest = (IndexRequest) request; BytesReference indexSource = indexRequest.source(); - XContentType indexContentType = indexRequest.getContentType(); + MediaType indexContentType = indexRequest.getContentType(); try ( XContentParser xContentParser = XContentHelper .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, indexSource, indexContentType) diff --git a/src/main/java/org/opensearch/ad/ratelimit/ScheduledWorker.java b/src/main/java/org/opensearch/ad/ratelimit/ScheduledWorker.java index 9d4891b7c..62edc017f 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/ScheduledWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/ScheduledWorker.java @@ -18,13 +18,13 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; public abstract class ScheduledWorker extends diff --git a/src/main/java/org/opensearch/ad/ratelimit/SingleRequestWorker.java b/src/main/java/org/opensearch/ad/ratelimit/SingleRequestWorker.java index 028a0643f..c6133cda9 100644 --- a/src/main/java/org/opensearch/ad/ratelimit/SingleRequestWorker.java +++ b/src/main/java/org/opensearch/ad/ratelimit/SingleRequestWorker.java @@ -20,12 +20,12 @@ import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionListener; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; public abstract class SingleRequestWorker extends ConcurrentWorker { private static final Logger LOG = LogManager.getLogger(SingleRequestWorker.class); diff --git a/src/main/java/org/opensearch/ad/rest/AbstractAnomalyDetectorAction.java b/src/main/java/org/opensearch/ad/rest/AbstractAnomalyDetectorAction.java index 331c3151f..4d06e5ab2 100644 --- a/src/main/java/org/opensearch/ad/rest/AbstractAnomalyDetectorAction.java +++ b/src/main/java/org/opensearch/ad/rest/AbstractAnomalyDetectorAction.java @@ -11,12 +11,12 @@ package org.opensearch.ad.rest; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_REQUEST_TIMEOUT; import static org.opensearch.ad.settings.AnomalyDetectorSettings.DETECTION_INTERVAL; import static org.opensearch.ad.settings.AnomalyDetectorSettings.DETECTION_WINDOW_DELAY; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_ANOMALY_FEATURES; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_MULTI_ENTITY_ANOMALY_DETECTORS; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_SINGLE_ENTITY_ANOMALY_DETECTORS; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -33,7 +33,7 @@ public abstract class AbstractAnomalyDetectorAction extends BaseRestHandler { protected volatile Integer 
maxAnomalyFeatures; public AbstractAnomalyDetectorAction(Settings settings, ClusterService clusterService) { - this.requestTimeout = REQUEST_TIMEOUT.get(settings); + this.requestTimeout = AD_REQUEST_TIMEOUT.get(settings); this.detectionInterval = DETECTION_INTERVAL.get(settings); this.detectionWindowDelay = DETECTION_WINDOW_DELAY.get(settings); this.maxSingleEntityDetectors = MAX_SINGLE_ENTITY_ANOMALY_DETECTORS.get(settings); @@ -41,7 +41,7 @@ public AbstractAnomalyDetectorAction(Settings settings, ClusterService clusterSe this.maxAnomalyFeatures = MAX_ANOMALY_FEATURES.get(settings); // TODO: will add more cluster setting consumer later // TODO: inject ClusterSettings only if clusterService is only used to get ClusterSettings - clusterService.getClusterSettings().addSettingsUpdateConsumer(REQUEST_TIMEOUT, it -> requestTimeout = it); + clusterService.getClusterSettings().addSettingsUpdateConsumer(AD_REQUEST_TIMEOUT, it -> requestTimeout = it); clusterService.getClusterSettings().addSettingsUpdateConsumer(DETECTION_INTERVAL, it -> detectionInterval = it); clusterService.getClusterSettings().addSettingsUpdateConsumer(DETECTION_WINDOW_DELAY, it -> detectionWindowDelay = it); clusterService diff --git a/src/main/java/org/opensearch/ad/rest/RestAnomalyDetectorJobAction.java b/src/main/java/org/opensearch/ad/rest/RestAnomalyDetectorJobAction.java index a5052c84d..175ac02e7 100644 --- a/src/main/java/org/opensearch/ad/rest/RestAnomalyDetectorJobAction.java +++ b/src/main/java/org/opensearch/ad/rest/RestAnomalyDetectorJobAction.java @@ -11,7 +11,7 @@ package org.opensearch.ad.rest; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_REQUEST_TIMEOUT; import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; import static org.opensearch.timeseries.util.RestHandlerUtils.DETECTOR_ID; import static org.opensearch.timeseries.util.RestHandlerUtils.IF_PRIMARY_TERM; @@ -50,8 +50,8 @@ public class RestAnomalyDetectorJobAction extends BaseRestHandler { private volatile TimeValue requestTimeout; public RestAnomalyDetectorJobAction(Settings settings, ClusterService clusterService) { - this.requestTimeout = REQUEST_TIMEOUT.get(settings); - clusterService.getClusterSettings().addSettingsUpdateConsumer(REQUEST_TIMEOUT, it -> requestTimeout = it); + this.requestTimeout = AD_REQUEST_TIMEOUT.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(AD_REQUEST_TIMEOUT, it -> requestTimeout = it); } @Override diff --git a/src/main/java/org/opensearch/ad/rest/RestExecuteAnomalyDetectorAction.java b/src/main/java/org/opensearch/ad/rest/RestExecuteAnomalyDetectorAction.java index fe0d10ec9..d52749a02 100644 --- a/src/main/java/org/opensearch/ad/rest/RestExecuteAnomalyDetectorAction.java +++ b/src/main/java/org/opensearch/ad/rest/RestExecuteAnomalyDetectorAction.java @@ -11,7 +11,7 @@ package org.opensearch.ad.rest; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_REQUEST_TIMEOUT; import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; import static org.opensearch.timeseries.util.RestHandlerUtils.DETECTOR_ID; import static org.opensearch.timeseries.util.RestHandlerUtils.RUN; @@ -54,8 +54,8 @@ public class RestExecuteAnomalyDetectorAction extends BaseRestHandler { private final Logger logger = 
LogManager.getLogger(RestExecuteAnomalyDetectorAction.class); public RestExecuteAnomalyDetectorAction(Settings settings, ClusterService clusterService) { - this.requestTimeout = REQUEST_TIMEOUT.get(settings); - clusterService.getClusterSettings().addSettingsUpdateConsumer(REQUEST_TIMEOUT, it -> requestTimeout = it); + this.requestTimeout = AD_REQUEST_TIMEOUT.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(AD_REQUEST_TIMEOUT, it -> requestTimeout = it); } @Override @@ -68,7 +68,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli if (!ADEnabledSetting.isADEnabled()) { throw new IllegalStateException(ADCommonMessages.DISABLED_ERR_MSG); } - AnomalyDetectorExecutionInput input = getAnomalyDetectorExecutionInput(request); + AnomalyDetectorExecutionInput input = getConfigExecutionInput(request); return channel -> { String error = validateAdExecutionInput(input); if (StringUtils.isNotBlank(error)) { @@ -85,7 +85,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli }; } - private AnomalyDetectorExecutionInput getAnomalyDetectorExecutionInput(RestRequest request) throws IOException { + private AnomalyDetectorExecutionInput getConfigExecutionInput(RestRequest request) throws IOException { String detectorId = null; if (request.hasParam(DETECTOR_ID)) { detectorId = request.param(DETECTOR_ID); diff --git a/src/main/java/org/opensearch/ad/rest/RestGetAnomalyDetectorAction.java b/src/main/java/org/opensearch/ad/rest/RestGetAnomalyDetectorAction.java index d14ff85ce..315ba0410 100644 --- a/src/main/java/org/opensearch/ad/rest/RestGetAnomalyDetectorAction.java +++ b/src/main/java/org/opensearch/ad/rest/RestGetAnomalyDetectorAction.java @@ -66,7 +66,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli boolean returnJob = request.paramAsBoolean("job", false); boolean returnTask = request.paramAsBoolean("task", false); boolean all = request.paramAsBoolean("_all", false); - GetAnomalyDetectorRequest getAnomalyDetectorRequest = new GetAnomalyDetectorRequest( + GetAnomalyDetectorRequest getConfigRequest = new GetAnomalyDetectorRequest( detectorId, RestActions.parseVersion(request), returnJob, @@ -77,8 +77,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, NodeClient cli buildEntity(request, detectorId) ); - return channel -> client - .execute(GetAnomalyDetectorAction.INSTANCE, getAnomalyDetectorRequest, new RestToXContentListener<>(channel)); + return channel -> client.execute(GetAnomalyDetectorAction.INSTANCE, getConfigRequest, new RestToXContentListener<>(channel)); } @Override diff --git a/src/main/java/org/opensearch/ad/rest/RestPreviewAnomalyDetectorAction.java b/src/main/java/org/opensearch/ad/rest/RestPreviewAnomalyDetectorAction.java index 9c11cc1cc..cd495517c 100644 --- a/src/main/java/org/opensearch/ad/rest/RestPreviewAnomalyDetectorAction.java +++ b/src/main/java/org/opensearch/ad/rest/RestPreviewAnomalyDetectorAction.java @@ -58,7 +58,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, org.opensearch throw new IllegalStateException(ADCommonMessages.DISABLED_ERR_MSG); } - AnomalyDetectorExecutionInput input = getAnomalyDetectorExecutionInput(request); + AnomalyDetectorExecutionInput input = getConfigExecutionInput(request); return channel -> { String rawPath = request.rawPath(); @@ -77,7 +77,7 @@ protected RestChannelConsumer prepareRequest(RestRequest request, org.opensearch }; } - private AnomalyDetectorExecutionInput 
getAnomalyDetectorExecutionInput(RestRequest request) throws IOException { + private AnomalyDetectorExecutionInput getConfigExecutionInput(RestRequest request) throws IOException { String detectorId = null; if (request.hasParam(RestHandlerUtils.DETECTOR_ID)) { detectorId = request.param(RestHandlerUtils.DETECTOR_ID); diff --git a/src/main/java/org/opensearch/ad/rest/handler/AbstractAnomalyDetectorActionHandler.java b/src/main/java/org/opensearch/ad/rest/handler/AbstractAnomalyDetectorActionHandler.java index 82f07b497..5a1e5e975 100644 --- a/src/main/java/org/opensearch/ad/rest/handler/AbstractAnomalyDetectorActionHandler.java +++ b/src/main/java/org/opensearch/ad/rest/handler/AbstractAnomalyDetectorActionHandler.java @@ -52,17 +52,13 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.WriteRequest; import org.opensearch.action.support.replication.ReplicationResponse; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.MergeableList; import org.opensearch.ad.rest.RestValidateAnomalyDetectorAction; import org.opensearch.ad.settings.ADNumericSetting; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.ad.transport.IndexAnomalyDetectorResponse; import org.opensearch.ad.transport.ValidateAnomalyDetectorResponse; -import org.opensearch.ad.util.MultiResponsesDelegateActionListener; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -79,13 +75,18 @@ import org.opensearch.rest.RestRequest; import org.opensearch.search.aggregations.AggregatorFactories; import org.opensearch.search.builder.SearchSourceBuilder; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.common.exception.ValidationException; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.model.Feature; +import org.opensearch.timeseries.model.MergeableList; import org.opensearch.timeseries.model.ValidationAspect; import org.opensearch.timeseries.model.ValidationIssueType; +import org.opensearch.timeseries.util.MultiResponsesDelegateActionListener; import org.opensearch.timeseries.util.RestHandlerUtils; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; import com.google.common.collect.Sets; @@ -377,7 +378,15 @@ protected void validateTimeField(boolean indexingDryRun) { logger.error(message, error); listener.onFailure(new IllegalArgumentException(message)); }); - clientUtil.executeWithInjectedSecurity(GetFieldMappingsAction.INSTANCE, getMappingsRequest, user, client, mappingsListener); + clientUtil + .executeWithInjectedSecurity( + GetFieldMappingsAction.INSTANCE, + getMappingsRequest, + user, + client, + AnalysisType.AD, + mappingsListener + ); } /** @@ -650,7 +659,15 @@ protected void validateCategoricalField(String detectorId, boolean indexingDryRu listener.onFailure(new IllegalArgumentException(message)); }); - clientUtil.executeWithInjectedSecurity(GetFieldMappingsAction.INSTANCE, getMappingsRequest, user, client, mappingsListener); + clientUtil + .executeWithInjectedSecurity( + GetFieldMappingsAction.INSTANCE, + getMappingsRequest, + user, + client, + AnalysisType.AD, + 
mappingsListener + ); } protected void searchAdInputIndices(String detectorId, boolean indexingDryRun) { @@ -667,7 +684,7 @@ protected void searchAdInputIndices(String detectorId, boolean indexingDryRun) { exception -> listener.onFailure(exception) ); - clientUtil.asyncRequestWithInjectedSecurity(searchRequest, client::search, user, client, searchResponseListener); + clientUtil.asyncRequestWithInjectedSecurity(searchRequest, client::search, user, client, AnalysisType.AD, searchResponseListener); } protected void onSearchAdInputIndicesResponse(SearchResponse response, String detectorId, boolean indexingDryRun) throws IOException { @@ -940,7 +957,8 @@ protected void validateAnomalyDetectorFeatures(String detectorId, boolean indexi logger.error(errorMessage, e); multiFeatureQueriesResponseListener.onFailure(new OpenSearchStatusException(errorMessage, RestStatus.BAD_REQUEST, e)); }); - clientUtil.asyncRequestWithInjectedSecurity(searchRequest, client::search, user, client, searchResponseListener); + clientUtil + .asyncRequestWithInjectedSecurity(searchRequest, client::search, user, client, AnalysisType.AD, searchResponseListener); } } } diff --git a/src/main/java/org/opensearch/ad/rest/handler/AnomalyDetectorActionHandler.java b/src/main/java/org/opensearch/ad/rest/handler/AnomalyDetectorActionHandler.java index f279f8b63..cbe1fde53 100644 --- a/src/main/java/org/opensearch/ad/rest/handler/AnomalyDetectorActionHandler.java +++ b/src/main/java/org/opensearch/ad/rest/handler/AnomalyDetectorActionHandler.java @@ -21,7 +21,6 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.core.rest.RestStatus; @@ -29,6 +28,7 @@ import org.opensearch.core.xcontent.XContentParser; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.function.ExecutorFunction; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.RestHandlerUtils; /** @@ -88,7 +88,7 @@ private void onGetAdJobResponseForWrite( .createXContentParserFromRegistry(xContentRegistry, response.getSourceAsBytesRef()) ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob adJob = AnomalyDetectorJob.parse(parser); + Job adJob = Job.parse(parser); if (adJob.isEnabled()) { listener.onFailure(new OpenSearchStatusException("Detector job is running: " + adJobId, RestStatus.BAD_REQUEST)); return; diff --git a/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorActionHandler.java b/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorActionHandler.java index b401ce007..ab3611f7c 100644 --- a/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorActionHandler.java +++ b/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorActionHandler.java @@ -13,12 +13,10 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.ad.transport.IndexAnomalyDetectorResponse; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; 
import org.opensearch.common.settings.Settings; @@ -26,6 +24,8 @@ import org.opensearch.commons.authuser.User; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.rest.RestRequest; +import org.opensearch.timeseries.feature.SearchFeatureDao; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; /** diff --git a/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java b/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java index 824c6fc21..fac26992a 100644 --- a/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java +++ b/src/main/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandler.java @@ -13,8 +13,8 @@ import static org.opensearch.action.DocWriteResponse.Result.CREATED; import static org.opensearch.action.DocWriteResponse.Result.UPDATED; -import static org.opensearch.ad.util.ExceptionUtil.getShardsFailure; import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; +import static org.opensearch.timeseries.util.ExceptionUtil.getShardsFailure; import static org.opensearch.timeseries.util.RestHandlerUtils.createXContentParserFromRegistry; import java.io.IOException; @@ -34,7 +34,6 @@ import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.ad.transport.AnomalyDetectorJobResponse; import org.opensearch.ad.transport.AnomalyResultAction; @@ -53,6 +52,7 @@ import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.function.ExecutorFunction; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.RestHandlerUtils; import org.opensearch.transport.TransportService; @@ -188,7 +188,7 @@ private void createJob(AnomalyDetector detector, ActionListener listener - ) { + private void getJobForWrite(AnomalyDetector detector, Job job, ActionListener listener) { GetRequest getRequest = new GetRequest(CommonName.JOB_INDEX).id(detectorId); client @@ -230,19 +226,19 @@ private void getAnomalyDetectorJobForWrite( private void onGetAnomalyDetectorJobForWrite( GetResponse response, AnomalyDetector detector, - AnomalyDetectorJob job, + Job job, ActionListener listener ) throws IOException { if (response.isExists()) { try (XContentParser parser = createXContentParserFromRegistry(xContentRegistry, response.getSourceAsBytesRef())) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob currentAdJob = AnomalyDetectorJob.parse(parser); + Job currentAdJob = Job.parse(parser); if (currentAdJob.isEnabled()) { listener .onFailure(new OpenSearchStatusException("Anomaly detector job is already running: " + detectorId, RestStatus.OK)); return; } else { - AnomalyDetectorJob newJob = new AnomalyDetectorJob( + Job newJob = new Job( job.getName(), job.getSchedule(), job.getWindowDelay(), @@ -289,11 +285,8 @@ private void onGetAnomalyDetectorJobForWrite( } } - private void indexAnomalyDetectorJob( - AnomalyDetectorJob job, - ExecutorFunction function, - ActionListener listener - ) throws IOException { + private void indexAnomalyDetectorJob(Job job, ExecutorFunction function, ActionListener listener) + throws IOException { IndexRequest 
indexRequest = new IndexRequest(CommonName.JOB_INDEX) .setRefreshPolicy(WriteRequest.RefreshPolicy.IMMEDIATE) .source(job.toXContent(XContentFactory.jsonBuilder(), RestHandlerUtils.XCONTENT_WITH_TYPE)) @@ -351,11 +344,11 @@ public void stopAnomalyDetectorJob(String detectorId, ActionListenerThis class executes all validation checks that are not blocking on the 'model' level. @@ -253,6 +254,7 @@ private void getTopEntity(ActionListener> topEntityListener) client::search, user, client, + AnalysisType.AD, searchResponseListener ); } @@ -344,6 +346,7 @@ private void getBucketAggregates( client::search, user, client, + AnalysisType.AD, searchResponseListener ); } @@ -461,6 +464,7 @@ public void onResponse(SearchResponse response) { client::search, user, client, + AnalysisType.AD, this ); // In this case decreasingInterval has to be true already, so we will stop @@ -495,6 +499,7 @@ private void searchWithDifferentInterval(long newIntervalMinuteValue) { client::search, user, client, + AnalysisType.AD, this ); } @@ -571,6 +576,7 @@ private void checkRawDataSparsity(long latestTime) { client::search, user, client, + AnalysisType.AD, searchResponseListener ); } @@ -631,6 +637,7 @@ private void checkDataFilterSparsity(long latestTime) { client::search, user, client, + AnalysisType.AD, searchResponseListener ); } @@ -693,6 +700,7 @@ private void checkCategoryFieldSparsity(Map topEntity, long late client::search, user, client, + AnalysisType.AD, searchResponseListener ); } @@ -783,6 +791,7 @@ private void checkFeatureQueryDelegate(long latestTime) throws IOException { client::search, user, client, + AnalysisType.AD, searchResponseListener ); } diff --git a/src/main/java/org/opensearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java b/src/main/java/org/opensearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java index 163d1df63..545da1ff2 100644 --- a/src/main/java/org/opensearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java +++ b/src/main/java/org/opensearch/ad/rest/handler/ValidateAnomalyDetectorActionHandler.java @@ -14,11 +14,9 @@ import java.time.Clock; import org.opensearch.action.ActionListener; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.transport.ValidateAnomalyDetectorResponse; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -26,6 +24,8 @@ import org.opensearch.commons.authuser.User; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.rest.RestRequest; +import org.opensearch.timeseries.feature.SearchFeatureDao; +import org.opensearch.timeseries.util.SecurityClientUtil; /** * Anomaly detector REST action handler to process POST request. 
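
The handler hunks above all converge on one caller-side shape, so a minimal sketch of it is spelled out once here. It assumes only the types and signatures visible in this patch (NodeStateManager.getConfig taking an AnalysisType argument, the shared Config base type, and the explicit downcast to AnomalyDetector); the wrapper method name and its parameters are illustrative and not part of the change.

    // Minimal sketch, not part of the patch: how AD call sites fetch a detector after the refactor.
    private void fetchDetector(NodeStateManager nodeStateManager, String detectorId, ActionListener<AnomalyDetector> listener) {
        ActionListener<Optional<? extends Config>> onGetConfig = ActionListener.wrap(configOptional -> {
            if (!configOptional.isPresent()) {
                listener.onFailure(new EndRunException(detectorId, "AnomalyDetector is not available.", false));
                return;
            }
            // getConfig returns the shared Config base type; AD-specific code downcasts explicitly.
            listener.onResponse((AnomalyDetector) configOptional.get());
        }, listener::onFailure);
        // Previously nodeStateManager.getAnomalyDetector(detectorId, onGetConfig); the added
        // AnalysisType tells the shared state manager whether an AD detector or a forecaster is wanted.
        nodeStateManager.getConfig(detectorId, AnalysisType.AD, onGetConfig);
    }
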
diff --git a/src/main/java/org/opensearch/ad/settings/AnomalyDetectorSettings.java b/src/main/java/org/opensearch/ad/settings/AnomalyDetectorSettings.java index 22e72eba0..4b98f0b6e 100644 --- a/src/main/java/org/opensearch/ad/settings/AnomalyDetectorSettings.java +++ b/src/main/java/org/opensearch/ad/settings/AnomalyDetectorSettings.java @@ -55,7 +55,7 @@ private AnomalyDetectorSettings() {} Setting.Property.Dynamic ); - public static final Setting REQUEST_TIMEOUT = Setting + public static final Setting AD_REQUEST_TIMEOUT = Setting .positiveTimeSetting( "plugins.anomaly_detection.request_timeout", LegacyOpenDistroAnomalyDetectorSettings.REQUEST_TIMEOUT, @@ -114,7 +114,9 @@ private AnomalyDetectorSettings() {} Setting.Property.Dynamic ); - public static final Setting MAX_RETRY_FOR_UNRESPONSIVE_NODE = Setting + // Use TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE + @Deprecated + public static final Setting AD_MAX_RETRY_FOR_UNRESPONSIVE_NODE = Setting .intSetting( "plugins.anomaly_detection.max_retry_for_unresponsive_node", LegacyOpenDistroAnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, @@ -123,7 +125,7 @@ private AnomalyDetectorSettings() {} Setting.Property.Dynamic ); - public static final Setting COOLDOWN_MINUTES = Setting + public static final Setting AD_COOLDOWN_MINUTES = Setting .positiveTimeSetting( "plugins.anomaly_detection.cooldown_minutes", LegacyOpenDistroAnomalyDetectorSettings.COOLDOWN_MINUTES, @@ -131,7 +133,9 @@ private AnomalyDetectorSettings() {} Setting.Property.Dynamic ); - public static final Setting BACKOFF_MINUTES = Setting + // Use TimeSeriesSettings.BACKOFF_MINUTES + @Deprecated + public static final Setting AD_BACKOFF_MINUTES = Setting .positiveTimeSetting( "plugins.anomaly_detection.backoff_minutes", LegacyOpenDistroAnomalyDetectorSettings.BACKOFF_MINUTES, diff --git a/src/main/java/org/opensearch/ad/task/ADBatchTaskRunner.java b/src/main/java/org/opensearch/ad/task/ADBatchTaskRunner.java index 2140ecf10..c556857ad 100644 --- a/src/main/java/org/opensearch/ad/task/ADBatchTaskRunner.java +++ b/src/main/java/org/opensearch/ad/task/ADBatchTaskRunner.java @@ -55,7 +55,6 @@ import org.opensearch.ad.cluster.HashRing; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.feature.FeatureManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.feature.SinglePointFeatures; import org.opensearch.ad.indices.ADIndex; import org.opensearch.ad.indices.ADIndexManagement; @@ -75,8 +74,6 @@ import org.opensearch.ad.transport.ADStatsNodesAction; import org.opensearch.ad.transport.ADStatsRequest; import org.opensearch.ad.transport.handler.AnomalyResultBulkIndexHandler; -import org.opensearch.ad.util.ExceptionUtil; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; @@ -95,19 +92,23 @@ import org.opensearch.search.aggregations.metrics.InternalMin; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.LimitExceededException; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.common.exception.TaskCancelledException; import org.opensearch.timeseries.common.exception.TimeSeriesException; import 
org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.function.ExecutorFunction; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.FeatureData; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.stats.StatNames; +import org.opensearch.timeseries.util.ExceptionUtil; import org.opensearch.timeseries.util.ParseUtils; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportRequestOptions; import org.opensearch.transport.TransportService; @@ -180,7 +181,7 @@ public ADBatchTaskRunner( this.option = TransportRequestOptions .builder() .withType(TransportRequestOptions.Type.REG) - .withTimeout(AnomalyDetectorSettings.REQUEST_TIMEOUT.get(settings)) + .withTimeout(AnomalyDetectorSettings.AD_REQUEST_TIMEOUT.get(settings)) .build(); this.adTaskCacheManager = adTaskCacheManager; @@ -487,6 +488,7 @@ private void searchTopEntitiesForSingleCategoryHC( // user is the one who started historical detector. Read AnomalyDetectorJobTransportAction.doExecute. adTask.getUser(), client, + AnalysisType.AD, searchResponseListener ); } @@ -1010,6 +1012,7 @@ private void getDateRangeOfSourceData(ADTask adTask, BiConsumer cons // user is the one who started historical detector. Read AnomalyDetectorJobTransportAction.doExecute. adTask.getUser(), client, + AnalysisType.AD, searchResponseListener ); } diff --git a/src/main/java/org/opensearch/ad/task/ADTaskManager.java b/src/main/java/org/opensearch/ad/task/ADTaskManager.java index c482b0ba8..9cd21b6f1 100644 --- a/src/main/java/org/opensearch/ad/task/ADTaskManager.java +++ b/src/main/java/org/opensearch/ad/task/ADTaskManager.java @@ -38,6 +38,7 @@ import static org.opensearch.ad.model.ADTaskType.HISTORICAL_DETECTOR_TASK_TYPES; import static org.opensearch.ad.model.ADTaskType.REALTIME_TASK_TYPES; import static org.opensearch.ad.model.ADTaskType.taskTypeToString; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_REQUEST_TIMEOUT; import static org.opensearch.ad.settings.AnomalyDetectorSettings.BATCH_TASK_PIECE_INTERVAL_SECONDS; import static org.opensearch.ad.settings.AnomalyDetectorSettings.DELETE_AD_RESULT_WHEN_DELETE_DETECTOR; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE; @@ -45,16 +46,15 @@ import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_OLD_AD_TASK_DOCS_PER_DETECTOR; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RUNNING_ENTITIES_PER_DETECTOR_FOR_HISTORICAL_ANALYSIS; import static org.opensearch.ad.settings.AnomalyDetectorSettings.NUM_MIN_SAMPLES; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; import static org.opensearch.ad.stats.InternalStatNames.AD_DETECTOR_ASSIGNED_BATCH_TASK_SLOT_COUNT; import static org.opensearch.ad.stats.InternalStatNames.AD_USED_BATCH_TASK_SLOT_COUNT; -import static org.opensearch.ad.util.ExceptionUtil.getErrorMessage; -import static org.opensearch.ad.util.ExceptionUtil.getShardsFailure; import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; import static org.opensearch.timeseries.TimeSeriesAnalyticsPlugin.AD_BATCH_TASK_THREAD_POOL_NAME; import static org.opensearch.timeseries.constant.CommonMessages.CREATE_INDEX_NOT_ACKNOWLEDGED; import static org.opensearch.timeseries.constant.CommonMessages.FAIL_TO_FIND_CONFIG_MSG; 
import static org.opensearch.timeseries.constant.CommonName.TASK_ID_FIELD; +import static org.opensearch.timeseries.util.ExceptionUtil.getErrorMessage; +import static org.opensearch.timeseries.util.ExceptionUtil.getShardsFailure; import static org.opensearch.timeseries.util.ParseUtils.isNullOrEmpty; import static org.opensearch.timeseries.util.RestHandlerUtils.XCONTENT_WITH_TYPE; import static org.opensearch.timeseries.util.RestHandlerUtils.createXContentParserFromRegistry; @@ -109,7 +109,6 @@ import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.DetectorProfile; import org.opensearch.ad.rest.handler.IndexAnomalyDetectorJobActionHandler; import org.opensearch.ad.transport.ADBatchAnomalyResultAction; @@ -166,6 +165,7 @@ import org.opensearch.timeseries.function.ExecutorFunction; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Entity; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; import org.opensearch.timeseries.util.RestHandlerUtils; import org.opensearch.transport.TransportRequestOptions; @@ -251,12 +251,12 @@ public ADTaskManager( transportRequestOptions = TransportRequestOptions .builder() .withType(TransportRequestOptions.Type.REG) - .withTimeout(REQUEST_TIMEOUT.get(settings)) + .withTimeout(AD_REQUEST_TIMEOUT.get(settings)) .build(); clusterService .getClusterSettings() .addSettingsUpdateConsumer( - REQUEST_TIMEOUT, + AD_REQUEST_TIMEOUT, it -> { transportRequestOptions = TransportRequestOptions .builder() @@ -1092,7 +1092,7 @@ private void resetRealtimeDetectorTaskState( if (r.isExists()) { try (XContentParser parser = createXContentParserFromRegistry(xContentRegistry, r.getSourceAsBytesRef())) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); + Job job = Job.parse(parser); if (!job.isEnabled()) { logger.debug("AD job is disabled, reset realtime task as stopped for detector {}", detectorId); resetTaskStateAsStopped(adTask, function, transportService, listener); diff --git a/src/main/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportAction.java b/src/main/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportAction.java index 1f86cefbb..dde952812 100644 --- a/src/main/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportAction.java @@ -13,8 +13,8 @@ import static org.opensearch.ad.constant.ADCommonMessages.FAIL_TO_START_DETECTOR; import static org.opensearch.ad.constant.ADCommonMessages.FAIL_TO_STOP_DETECTOR; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_REQUEST_TIMEOUT; import static org.opensearch.ad.settings.AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; import static org.opensearch.timeseries.util.ParseUtils.getUserContext; import static org.opensearch.timeseries.util.ParseUtils.resolveUserAndExecute; import static org.opensearch.timeseries.util.RestHandlerUtils.wrapRestActionListener; @@ -26,6 +26,7 @@ import org.opensearch.action.support.HandledTransportAction; import org.opensearch.ad.ExecuteADResultResponseRecorder; import org.opensearch.ad.indices.ADIndexManagement; +import 
org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.rest.handler.IndexAnomalyDetectorJobActionHandler; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.client.Client; @@ -87,7 +88,7 @@ protected void doExecute(Task task, AnomalyDetectorJobRequest request, ActionLis long seqNo = request.getSeqNo(); long primaryTerm = request.getPrimaryTerm(); String rawPath = request.getRawPath(); - TimeValue requestTimeout = REQUEST_TIMEOUT.get(settings); + TimeValue requestTimeout = AD_REQUEST_TIMEOUT.get(settings); String errorMessage = rawPath.endsWith(RestHandlerUtils.START_JOB) ? FAIL_TO_START_DETECTOR : FAIL_TO_STOP_DETECTOR; ActionListener listener = wrapRestActionListener(actionListener, errorMessage); @@ -113,7 +114,8 @@ protected void doExecute(Task task, AnomalyDetectorJobRequest request, ActionLis ), client, clusterService, - xContentRegistry + xContentRegistry, + AnomalyDetector.class ); } catch (Exception e) { logger.error(e); diff --git a/src/main/java/org/opensearch/ad/transport/AnomalyResultTransportAction.java b/src/main/java/org/opensearch/ad/transport/AnomalyResultTransportAction.java index d7454bcda..81377cf3d 100644 --- a/src/main/java/org/opensearch/ad/transport/AnomalyResultTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/AnomalyResultTransportAction.java @@ -43,7 +43,6 @@ import org.opensearch.action.support.IndicesOptions; import org.opensearch.action.support.ThreadedActionListener; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.cluster.HashRing; import org.opensearch.ad.constant.ADCommonMessages; @@ -53,14 +52,11 @@ import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.feature.SinglePointFeatures; import org.opensearch.ad.ml.ModelManager; -import org.opensearch.ad.ml.SingleStreamModelIdMapper; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.settings.ADEnabledSetting; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.stats.ADStats; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.ExceptionUtil; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.block.ClusterBlockLevel; @@ -80,6 +76,8 @@ import org.opensearch.node.NodeClosedException; import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.common.exception.ClientException; import org.opensearch.timeseries.common.exception.EndRunException; @@ -89,11 +87,15 @@ import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonMessages; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; +import org.opensearch.timeseries.model.Config; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.FeatureData; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.stats.StatNames; +import org.opensearch.timeseries.util.ExceptionUtil; import org.opensearch.timeseries.util.ParseUtils; +import 
org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.ActionNotFoundTransportException; import org.opensearch.transport.ConnectTransportException; import org.opensearch.transport.NodeNotConnectedException; @@ -173,7 +175,7 @@ public AnomalyResultTransportAction( this.option = TransportRequestOptions .builder() .withType(TransportRequestOptions.Type.REG) - .withTimeout(AnomalyDetectorSettings.REQUEST_TIMEOUT.get(settings)) + .withTimeout(AnomalyDetectorSettings.AD_REQUEST_TIMEOUT.get(settings)) .build(); this.clusterService = clusterService; this.indexNameExpressionResolver = indexNameExpressionResolver; @@ -275,7 +277,7 @@ protected void doExecute(Task task, ActionRequest actionRequest, ActionListener< return; } try { - stateManager.getAnomalyDetector(adID, onGetDetector(listener, adID, request)); + stateManager.getConfig(adID, AnalysisType.AD, onGetDetector(listener, adID, request)); } catch (Exception ex) { handleExecuteException(ex, listener, adID); } @@ -390,7 +392,7 @@ private void handleException(Exception e) { } } - private ActionListener> onGetDetector( + private ActionListener> onGetDetector( ActionListener listener, String adID, AnomalyResultRequest request @@ -401,7 +403,7 @@ private ActionListener> onGetDetector( return; } - AnomalyDetector anomalyDetector = detectorOptional.get(); + AnomalyDetector anomalyDetector = (AnomalyDetector) detectorOptional.get(); if (anomalyDetector.isHighCardinality()) { hcDetectors.add(adID); adStats.getStat(StatNames.AD_HC_EXECUTE_REQUEST_COUNT.getName()).increment(); diff --git a/src/main/java/org/opensearch/ad/transport/CronTransportAction.java b/src/main/java/org/opensearch/ad/transport/CronTransportAction.java index edc21cd6f..fc803460c 100644 --- a/src/main/java/org/opensearch/ad/transport/CronTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/CronTransportAction.java @@ -20,7 +20,6 @@ import org.opensearch.action.FailedNodeException; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.nodes.TransportNodesAction; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.ml.EntityColdStarter; @@ -30,6 +29,7 @@ import org.opensearch.common.inject.Inject; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.transport.TransportService; public class CronTransportAction extends TransportNodesAction { diff --git a/src/main/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTransportAction.java b/src/main/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTransportAction.java index ebc7577f0..c1befe8bd 100644 --- a/src/main/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTransportAction.java @@ -34,7 +34,7 @@ import org.opensearch.action.support.HandledTransportAction; import org.opensearch.action.support.WriteRequest; import org.opensearch.ad.constant.ADCommonName; -import org.opensearch.ad.model.AnomalyDetectorJob; +import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.client.Client; @@ -50,6 +50,7 @@ import org.opensearch.tasks.Task; import org.opensearch.timeseries.constant.CommonName; import 
org.opensearch.timeseries.function.ExecutorFunction; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.RestHandlerUtils; import org.opensearch.transport.TransportService; @@ -119,7 +120,8 @@ protected void doExecute(Task task, DeleteAnomalyDetectorRequest request, Action }, listener), client, clusterService, - xContentRegistry + xContentRegistry, + AnomalyDetector.class ); } catch (Exception e) { LOG.error(e); @@ -214,7 +216,7 @@ private void onGetAdJobResponseForWrite(GetResponse response, ActionListener> onGetDetector( + private ActionListener> onGetDetector( ActionListener listener, String detectorId, EntityResultRequest request, @@ -172,7 +174,7 @@ private ActionListener> onGetDetector( return; } - AnomalyDetector detector = detectorOptional.get(); + AnomalyDetector detector = (AnomalyDetector) detectorOptional.get(); if (request.getEntities() == null) { listener.onFailure(new EndRunException(detectorId, "Fail to get any entities from request.", false)); diff --git a/src/main/java/org/opensearch/ad/transport/ForwardADTaskTransportAction.java b/src/main/java/org/opensearch/ad/transport/ForwardADTaskTransportAction.java index adc8e36a8..337e71e6e 100644 --- a/src/main/java/org/opensearch/ad/transport/ForwardADTaskTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/ForwardADTaskTransportAction.java @@ -24,7 +24,6 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskAction; @@ -36,6 +35,7 @@ import org.opensearch.commons.authuser.User; import org.opensearch.core.rest.RestStatus; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.model.DateRange; import org.opensearch.transport.TransportService; diff --git a/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorResponse.java b/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorResponse.java index e1532b816..652076531 100644 --- a/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorResponse.java +++ b/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorResponse.java @@ -16,7 +16,6 @@ import org.opensearch.action.ActionResponse; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.DetectorProfile; import org.opensearch.ad.model.EntityProfile; import org.opensearch.core.common.io.stream.StreamInput; @@ -24,6 +23,7 @@ import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.core.xcontent.XContentBuilder; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.RestHandlerUtils; public class GetAnomalyDetectorResponse extends ActionResponse implements ToXContentObject { @@ -34,7 +34,7 @@ public class GetAnomalyDetectorResponse extends ActionResponse implements ToXCon private long primaryTerm; private long seqNo; private AnomalyDetector detector; - private AnomalyDetectorJob adJob; + private Job adJob; private ADTask realtimeAdTask; private ADTask historicalAdTask; private RestStatus restStatus; @@ -65,7 +65,7 @@ public GetAnomalyDetectorResponse(StreamInput in) throws IOException { detector = new AnomalyDetector(in); 
returnJob = in.readBoolean(); if (returnJob) { - adJob = new AnomalyDetectorJob(in); + adJob = new Job(in); } else { adJob = null; } @@ -89,7 +89,7 @@ public GetAnomalyDetectorResponse( long primaryTerm, long seqNo, AnomalyDetector detector, - AnomalyDetectorJob adJob, + Job adJob, boolean returnJob, ADTask realtimeAdTask, ADTask historicalAdTask, @@ -197,7 +197,7 @@ public DetectorProfile getDetectorProfile() { return detectorProfile; } - public AnomalyDetectorJob getAdJob() { + public Job getAdJob() { return adJob; } diff --git a/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportAction.java b/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportAction.java index 473f247dd..949971037 100644 --- a/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportAction.java @@ -46,13 +46,11 @@ import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.DetectorProfile; import org.opensearch.ad.model.DetectorProfileName; import org.opensearch.ad.model.EntityProfileName; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.CheckedConsumer; @@ -68,8 +66,10 @@ import org.opensearch.timeseries.Name; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.Entity; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; import org.opensearch.timeseries.util.RestHandlerUtils; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; import com.google.common.collect.Sets; @@ -143,7 +143,8 @@ protected void doExecute(Task task, GetAnomalyDetectorRequest request, ActionLis (anomalyDetector) -> getExecute(request, listener), client, clusterService, - xContentRegistry + xContentRegistry, + AnomalyDetector.class ); } catch (Exception e) { LOG.error(e); @@ -296,7 +297,7 @@ private ActionListener onMultiGetResponse( public void onResponse(MultiGetResponse multiGetResponse) { MultiGetItemResponse[] responses = multiGetResponse.getResponses(); AnomalyDetector detector = null; - AnomalyDetectorJob adJob = null; + Job adJob = null; String id = null; long version = 0; long seqNo = 0; @@ -336,7 +337,7 @@ public void onResponse(MultiGetResponse multiGetResponse) { .createXContentParserFromRegistry(xContentRegistry, response.getResponse().getSourceAsBytesRef()) ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - adJob = AnomalyDetectorJob.parse(parser); + adJob = Job.parse(parser); } catch (Exception e) { String message = "Failed to parse detector job " + detectorId; listener.onFailure(buildInternalServerErrorResponse(e, message)); diff --git a/src/main/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportAction.java b/src/main/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportAction.java index 06018ae6c..598419774 100644 --- a/src/main/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportAction.java @@ -15,7 +15,7 @@ import static 
org.opensearch.ad.constant.ADCommonMessages.FAIL_TO_UPDATE_DETECTOR; import static org.opensearch.ad.settings.AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES; import static org.opensearch.timeseries.util.ParseUtils.checkFilterByBackendRoles; -import static org.opensearch.timeseries.util.ParseUtils.getDetector; +import static org.opensearch.timeseries.util.ParseUtils.getConfig; import static org.opensearch.timeseries.util.ParseUtils.getUserContext; import static org.opensearch.timeseries.util.RestHandlerUtils.wrapRestActionListener; @@ -29,13 +29,11 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.rest.handler.IndexAnomalyDetectorActionHandler; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; @@ -48,7 +46,9 @@ import org.opensearch.rest.RestRequest; import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.function.ExecutorFunction; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; public class IndexAnomalyDetectorTransportAction extends HandledTransportAction { @@ -126,7 +126,17 @@ private void resolveUserAndExecute( boolean filterByBackendRole = requestedUser == null ? false : filterByEnabled; // Update detector request, check if user has permissions to update the detector // Get detector and verify backend roles - getDetector(requestedUser, detectorId, listener, function, client, clusterService, xContentRegistry, filterByBackendRole); + getConfig( + requestedUser, + detectorId, + listener, + function, + client, + clusterService, + xContentRegistry, + filterByBackendRole, + AnomalyDetector.class + ); } else { // Create Detector. No need to get current detector. 
function.accept(null); diff --git a/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportAction.java b/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportAction.java index 5d6bdd193..a67e6d83f 100644 --- a/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/PreviewAnomalyDetectorTransportAction.java @@ -114,7 +114,8 @@ protected void doExecute( (anomalyDetector) -> previewExecute(request, context, listener), client, clusterService, - xContentRegistry + xContentRegistry, + AnomalyDetector.class ); } catch (Exception e) { logger.error(e); diff --git a/src/main/java/org/opensearch/ad/transport/RCFPollingTransportAction.java b/src/main/java/org/opensearch/ad/transport/RCFPollingTransportAction.java index 49e6f0153..28dee9352 100644 --- a/src/main/java/org/opensearch/ad/transport/RCFPollingTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/RCFPollingTransportAction.java @@ -22,7 +22,6 @@ import org.opensearch.action.support.HandledTransportAction; import org.opensearch.ad.cluster.HashRing; import org.opensearch.ad.ml.ModelManager; -import org.opensearch.ad.ml.SingleStreamModelIdMapper; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; @@ -32,6 +31,7 @@ import org.opensearch.tasks.Task; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.common.exception.TimeSeriesException; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportRequestOptions; import org.opensearch.transport.TransportResponseHandler; @@ -69,7 +69,7 @@ public RCFPollingTransportAction( this.option = TransportRequestOptions .builder() .withType(TransportRequestOptions.Type.REG) - .withTimeout(AnomalyDetectorSettings.REQUEST_TIMEOUT.get(settings)) + .withTimeout(AnomalyDetectorSettings.AD_REQUEST_TIMEOUT.get(settings)) .build(); this.clusterService = clusterService; } diff --git a/src/main/java/org/opensearch/ad/transport/StatsAnomalyDetectorTransportAction.java b/src/main/java/org/opensearch/ad/transport/StatsAnomalyDetectorTransportAction.java index caf4bd42a..adfba42e0 100644 --- a/src/main/java/org/opensearch/ad/transport/StatsAnomalyDetectorTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/StatsAnomalyDetectorTransportAction.java @@ -30,7 +30,6 @@ import org.opensearch.ad.model.AnomalyDetectorType; import org.opensearch.ad.stats.ADStats; import org.opensearch.ad.stats.ADStatsResponse; -import org.opensearch.ad.util.MultiResponsesDelegateActionListener; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; @@ -43,6 +42,7 @@ import org.opensearch.tasks.Task; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.stats.StatNames; +import org.opensearch.timeseries.util.MultiResponsesDelegateActionListener; import org.opensearch.transport.TransportService; public class StatsAnomalyDetectorTransportAction extends HandledTransportAction { diff --git a/src/main/java/org/opensearch/ad/transport/ValidateAnomalyDetectorTransportAction.java b/src/main/java/org/opensearch/ad/transport/ValidateAnomalyDetectorTransportAction.java index ecd0ca07c..af7e42282 100644 --- 
a/src/main/java/org/opensearch/ad/transport/ValidateAnomalyDetectorTransportAction.java +++ b/src/main/java/org/opensearch/ad/transport/ValidateAnomalyDetectorTransportAction.java @@ -28,13 +28,11 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.HandledTransportAction; import org.opensearch.ad.constant.ADCommonMessages; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.DetectorValidationIssue; import org.opensearch.ad.rest.handler.ValidateAnomalyDetectorActionHandler; import org.opensearch.ad.settings.AnomalyDetectorSettings; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; @@ -48,10 +46,12 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.tasks.Task; import org.opensearch.timeseries.common.exception.ValidationException; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.function.ExecutorFunction; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.model.ValidationAspect; import org.opensearch.timeseries.model.ValidationIssueType; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; public class ValidateAnomalyDetectorTransportAction extends diff --git a/src/main/java/org/opensearch/ad/transport/handler/AnomalyIndexHandler.java b/src/main/java/org/opensearch/ad/transport/handler/AnomalyIndexHandler.java index 371640ad2..f9e1470ab 100644 --- a/src/main/java/org/opensearch/ad/transport/handler/AnomalyIndexHandler.java +++ b/src/main/java/org/opensearch/ad/transport/handler/AnomalyIndexHandler.java @@ -29,7 +29,6 @@ import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.util.BulkUtil; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.ad.util.IndexUtils; import org.opensearch.client.Client; import org.opensearch.cluster.block.ClusterBlockLevel; @@ -42,6 +41,7 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.TimeSeriesException; +import org.opensearch.timeseries.util.ClientUtil; import org.opensearch.timeseries.util.RestHandlerUtils; public class AnomalyIndexHandler { diff --git a/src/main/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandler.java b/src/main/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandler.java index c021ead73..219aebf71 100644 --- a/src/main/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandler.java +++ b/src/main/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandler.java @@ -27,7 +27,6 @@ import org.opensearch.action.index.IndexRequest; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyResult; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.ad.util.IndexUtils; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; @@ -36,6 +35,7 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.TimeSeriesException; +import 
org.opensearch.timeseries.util.ClientUtil; import org.opensearch.timeseries.util.RestHandlerUtils; public class AnomalyResultBulkIndexHandler extends AnomalyIndexHandler { diff --git a/src/main/java/org/opensearch/ad/transport/handler/MultiEntityResultHandler.java b/src/main/java/org/opensearch/ad/transport/handler/MultiEntityResultHandler.java index d9d98b74a..328cee25c 100644 --- a/src/main/java/org/opensearch/ad/transport/handler/MultiEntityResultHandler.java +++ b/src/main/java/org/opensearch/ad/transport/handler/MultiEntityResultHandler.java @@ -22,7 +22,6 @@ import org.opensearch.ad.transport.ADResultBulkAction; import org.opensearch.ad.transport.ADResultBulkRequest; import org.opensearch.ad.transport.ADResultBulkResponse; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.ad.util.IndexUtils; import org.opensearch.client.Client; import org.opensearch.cluster.block.ClusterBlockLevel; @@ -31,6 +30,7 @@ import org.opensearch.common.settings.Settings; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.common.exception.TimeSeriesException; +import org.opensearch.timeseries.util.ClientUtil; /** * EntityResultTransportAction depends on this class. I cannot use diff --git a/src/main/java/org/opensearch/ad/util/BulkUtil.java b/src/main/java/org/opensearch/ad/util/BulkUtil.java index d7fe9c6f6..b754b1951 100644 --- a/src/main/java/org/opensearch/ad/util/BulkUtil.java +++ b/src/main/java/org/opensearch/ad/util/BulkUtil.java @@ -23,6 +23,7 @@ import org.opensearch.action.bulk.BulkRequest; import org.opensearch.action.bulk.BulkResponse; import org.opensearch.action.index.IndexRequest; +import org.opensearch.timeseries.util.ExceptionUtil; public class BulkUtil { private static final Logger logger = LogManager.getLogger(BulkUtil.class); diff --git a/src/main/java/org/opensearch/ad/util/ClientUtil.java b/src/main/java/org/opensearch/ad/util/ClientUtil.java deleted file mode 100644 index d85d4fdf7..000000000 --- a/src/main/java/org/opensearch/ad/util/ClientUtil.java +++ /dev/null @@ -1,332 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
- */ - -package org.opensearch.ad.util; - -import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; - -import java.util.List; -import java.util.Optional; -import java.util.concurrent.CountDownLatch; -import java.util.concurrent.TimeUnit; -import java.util.concurrent.atomic.AtomicReference; -import java.util.function.BiConsumer; -import java.util.function.Function; - -import org.apache.logging.log4j.Logger; -import org.opensearch.OpenSearchException; -import org.opensearch.OpenSearchTimeoutException; -import org.opensearch.action.ActionFuture; -import org.opensearch.action.ActionListener; -import org.opensearch.action.ActionRequest; -import org.opensearch.action.ActionResponse; -import org.opensearch.action.ActionType; -import org.opensearch.action.LatchedActionListener; -import org.opensearch.action.TaskOperationFailure; -import org.opensearch.action.admin.cluster.node.tasks.cancel.CancelTasksAction; -import org.opensearch.action.admin.cluster.node.tasks.cancel.CancelTasksRequest; -import org.opensearch.action.admin.cluster.node.tasks.cancel.CancelTasksResponse; -import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksAction; -import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksRequest; -import org.opensearch.action.admin.cluster.node.tasks.list.ListTasksResponse; -import org.opensearch.ad.constant.ADCommonName; -import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.client.Client; -import org.opensearch.common.inject.Inject; -import org.opensearch.common.settings.Settings; -import org.opensearch.common.unit.TimeValue; -import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.tasks.Task; -import org.opensearch.tasks.TaskId; -import org.opensearch.tasks.TaskInfo; -import org.opensearch.threadpool.ThreadPool; -import org.opensearch.timeseries.common.exception.InternalFailure; -import org.opensearch.timeseries.constant.CommonMessages; - -public class ClientUtil { - private volatile TimeValue requestTimeout; - private Client client; - private final Throttler throttler; - private ThreadPool threadPool; - - @Inject - public ClientUtil(Settings setting, Client client, Throttler throttler, ThreadPool threadPool) { - this.requestTimeout = REQUEST_TIMEOUT.get(setting); - this.client = client; - this.throttler = throttler; - this.threadPool = threadPool; - } - - /** - * Send a nonblocking request with a timeout and return response. Blocking is not allowed in a - * transport call context. See BaseFuture.blockingAllowed - * @param request request like index/search/get - * @param LOG log - * @param consumer functional interface to operate as a client request like client::get - * @param ActionRequest - * @param ActionResponse - * @return the response - * @throws OpenSearchTimeoutException when we cannot get response within time. 
- * @throws IllegalStateException when the waiting thread is interrupted - */ - public Optional timedRequest( - Request request, - Logger LOG, - BiConsumer> consumer - ) { - try { - AtomicReference respReference = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - - consumer - .accept( - request, - new LatchedActionListener( - ActionListener - .wrap( - response -> { respReference.set(response); }, - exception -> { LOG.error("Cannot get response for request {}, error: {}", request, exception); } - ), - latch - ) - ); - - if (!latch.await(requestTimeout.getSeconds(), TimeUnit.SECONDS)) { - throw new OpenSearchTimeoutException("Cannot get response within time limit: " + request.toString()); - } - return Optional.ofNullable(respReference.get()); - } catch (InterruptedException e1) { - LOG.error(CommonMessages.WAIT_ERR_MSG); - throw new IllegalStateException(e1); - } - } - - /** - * Send an asynchronous request and handle response with the provided listener. - * @param ActionRequest - * @param ActionResponse - * @param request request body - * @param consumer request method, functional interface to operate as a client request like client::get - * @param listener needed to handle response - */ - public void asyncRequest( - Request request, - BiConsumer> consumer, - ActionListener listener - ) { - consumer - .accept( - request, - ActionListener.wrap(response -> { listener.onResponse(response); }, exception -> { listener.onFailure(exception); }) - ); - } - - /** - * Execute a transport action and handle response with the provided listener. - * @param ActionRequest - * @param ActionResponse - * @param action transport action - * @param request request body - * @param listener needed to handle response - */ - public void execute( - ActionType action, - Request request, - ActionListener listener - ) { - client - .execute( - action, - request, - ActionListener.wrap(response -> { listener.onResponse(response); }, exception -> { listener.onFailure(exception); }) - ); - } - - /** - * Send an synchronous request and handle response with the provided listener. - * - * @deprecated use asyncRequest with listener instead. - * - * @param ActionRequest - * @param ActionResponse - * @param request request body - * @param function request method, functional interface to operate as a client request like client::get - * @return the response - */ - @Deprecated - public Response syncRequest( - Request request, - Function> function - ) { - return function.apply(request).actionGet(requestTimeout); - } - - /** - * Send a nonblocking request with a timeout and return response. - * If there is already a query running on given detector, it will try to - * cancel the query. Otherwise it will add this query to the negative cache - * and then attach the AnomalyDetection specific header to the request. - * Once the request complete, it will be removed from the negative cache. - * @param ActionRequest - * @param ActionResponse - * @param request request like index/search/get - * @param LOG log - * @param consumer functional interface to operate as a client request like client::get - * @param detector Anomaly Detector - * @return the response - * @throws InternalFailure when there is already a query running - * @throws OpenSearchTimeoutException when we cannot get response within time. 
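The blocking helpers being deleted here wait on a CountDownLatch or ActionFuture.actionGet, which the method comment above already flags as disallowed in a transport call context. The listener-based asyncRequest style, which the relocated org.opensearch.timeseries.util.ClientUtil continues to expose and which the NodeStateManager hunks later in this patch use, is the replacement pattern. A minimal sketch of that usage, assuming an illustrative GetRequest, configId, and logger:

    // Sketch: non-blocking request handling, as used by NodeStateManager in this patch.
    GetRequest request = new GetRequest(CommonName.CONFIG_INDEX, configId);
    clientUtil.asyncRequest(request, client::get, ActionListener.wrap(response -> {
        // handle the GetResponse here; no latch, no blocking of the transport thread
    }, exception -> LOG.error("Failed to get config " + configId, exception)));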
- * @throws IllegalStateException when the waiting thread is interrupted - */ - public Optional throttledTimedRequest( - Request request, - Logger LOG, - BiConsumer> consumer, - AnomalyDetector detector - ) { - - try { - String detectorId = detector.getId(); - if (!throttler.insertFilteredQuery(detectorId, request)) { - LOG.info("There is one query running for detectorId: {}. Trying to cancel the long running query", detectorId); - cancelRunningQuery(client, detectorId, LOG); - throw new InternalFailure(detector.getId(), "There is already a query running on AnomalyDetector"); - } - AtomicReference respReference = new AtomicReference<>(); - final CountDownLatch latch = new CountDownLatch(1); - - try (ThreadContext.StoredContext context = threadPool.getThreadContext().stashContext()) { - assert context != null; - threadPool.getThreadContext().putHeader(Task.X_OPAQUE_ID, ADCommonName.ANOMALY_DETECTOR + ":" + detectorId); - consumer.accept(request, new LatchedActionListener(ActionListener.wrap(response -> { - // clear negative cache - throttler.clearFilteredQuery(detectorId); - respReference.set(response); - }, exception -> { - // clear negative cache - throttler.clearFilteredQuery(detectorId); - LOG.error("Cannot get response for request {}, error: {}", request, exception); - }), latch)); - } catch (Exception e) { - LOG.error("Failed to process the request for detectorId: {}.", detectorId); - throttler.clearFilteredQuery(detectorId); - throw e; - } - - if (!latch.await(requestTimeout.getSeconds(), TimeUnit.SECONDS)) { - throw new OpenSearchTimeoutException("Cannot get response within time limit: " + request.toString()); - } - return Optional.ofNullable(respReference.get()); - } catch (InterruptedException e1) { - LOG.error(CommonMessages.WAIT_ERR_MSG); - throw new IllegalStateException(e1); - } - } - - /** - * Check if there is running query on given detector - * @param detector Anomaly Detector - * @return true if given detector has a running query else false - */ - public boolean hasRunningQuery(AnomalyDetector detector) { - return throttler.getFilteredQuery(detector.getId()).isPresent(); - } - - /** - * Cancel long running query for given detectorId - * @param client OpenSearch client - * @param detectorId Anomaly Detector Id - * @param LOG Logger - */ - private void cancelRunningQuery(Client client, String detectorId, Logger LOG) { - ListTasksRequest listTasksRequest = new ListTasksRequest(); - listTasksRequest.setActions("*search*"); - client - .execute( - ListTasksAction.INSTANCE, - listTasksRequest, - ActionListener.wrap(response -> { onListTaskResponse(response, detectorId, LOG); }, exception -> { - LOG.error("List Tasks failed.", exception); - throw new InternalFailure(detectorId, "Failed to list current tasks", exception); - }) - ); - } - - /** - * Helper function to handle ListTasksResponse - * @param listTasksResponse ListTasksResponse - * @param detectorId Anomaly Detector Id - * @param LOG Logger - */ - private void onListTaskResponse(ListTasksResponse listTasksResponse, String detectorId, Logger LOG) { - List tasks = listTasksResponse.getTasks(); - TaskId matchedParentTaskId = null; - TaskId matchedSingleTaskId = null; - for (TaskInfo task : tasks) { - if (!task.getHeaders().isEmpty() - && task.getHeaders().get(Task.X_OPAQUE_ID).equals(ADCommonName.ANOMALY_DETECTOR + ":" + detectorId)) { - if (!task.getParentTaskId().equals(TaskId.EMPTY_TASK_ID)) { - // we found the parent task, don't need to check more - matchedParentTaskId = task.getParentTaskId(); - break; - } else { - // we 
found one task, keep checking other tasks - matchedSingleTaskId = task.getTaskId(); - } - } - } - // case 1: given detectorId is not in current task list - if (matchedParentTaskId == null && matchedSingleTaskId == null) { - // log and then clear negative cache - LOG.info("Couldn't find task for detectorId: {}. Clean this entry from Throttler", detectorId); - throttler.clearFilteredQuery(detectorId); - return; - } - // case 2: we can find the task for given detectorId - CancelTasksRequest cancelTaskRequest = new CancelTasksRequest(); - if (matchedParentTaskId != null) { - cancelTaskRequest.setParentTaskId(matchedParentTaskId); - LOG.info("Start to cancel task for parentTaskId: {}", matchedParentTaskId.toString()); - } else { - cancelTaskRequest.setTaskId(matchedSingleTaskId); - LOG.info("Start to cancel task for taskId: {}", matchedSingleTaskId.toString()); - } - - client - .execute( - CancelTasksAction.INSTANCE, - cancelTaskRequest, - ActionListener.wrap(response -> { onCancelTaskResponse(response, detectorId, LOG); }, exception -> { - LOG.error("Failed to cancel task for detectorId: " + detectorId, exception); - throw new InternalFailure(detectorId, "Failed to cancel current tasks", exception); - }) - ); - } - - /** - * Helper function to handle CancelTasksResponse - * @param cancelTasksResponse CancelTasksResponse - * @param detectorId Anomaly Detector Id - * @param LOG Logger - */ - private void onCancelTaskResponse(CancelTasksResponse cancelTasksResponse, String detectorId, Logger LOG) { - // todo: adding retry mechanism - List nodeFailures = cancelTasksResponse.getNodeFailures(); - List taskFailures = cancelTasksResponse.getTaskFailures(); - if (nodeFailures.isEmpty() && taskFailures.isEmpty()) { - LOG.info("Cancelling query for detectorId: {} succeeds. Clear entry from Throttler", detectorId); - throttler.clearFilteredQuery(detectorId); - return; - } - LOG.error("Failed to cancel task for detectorId: " + detectorId); - throw new InternalFailure(detectorId, "Failed to cancel current tasks due to node or task failures"); - } -} diff --git a/src/main/java/org/opensearch/ad/util/IndexUtils.java b/src/main/java/org/opensearch/ad/util/IndexUtils.java index b69c0924a..c93511849 100644 --- a/src/main/java/org/opensearch/ad/util/IndexUtils.java +++ b/src/main/java/org/opensearch/ad/util/IndexUtils.java @@ -13,12 +13,9 @@ import java.util.List; import java.util.Locale; -import java.util.Optional; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; -import org.opensearch.action.admin.indices.stats.IndicesStatsRequest; -import org.opensearch.action.admin.indices.stats.IndicesStatsResponse; import org.opensearch.action.support.IndicesOptions; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterState; @@ -28,6 +25,7 @@ import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.inject.Inject; +import org.opensearch.timeseries.util.ClientUtil; public class IndexUtils { /** @@ -110,25 +108,6 @@ public String getIndexHealthStatus(String indexOrAliasName) throws IllegalArgume return indexHealth.getStatus().name().toLowerCase(Locale.ROOT); } - /** - * Gets the number of documents in an index. - * - * @deprecated - * - * @param indexName Name of the index - * @return The number of documents in an index. 0 is returned if the index does not exist. -1 is returned if the - * request fails. 
- */ - @Deprecated - public Long getNumberOfDocumentsInIndex(String indexName) { - if (!clusterService.state().getRoutingTable().hasIndex(indexName)) { - return 0L; - } - IndicesStatsRequest indicesStatsRequest = new IndicesStatsRequest(); - Optional response = clientUtil.timedRequest(indicesStatsRequest, logger, client.admin().indices()::stats); - return response.map(r -> r.getIndex(indexName).getPrimaries().docs.getCount()).orElse(-1L); - } - /** * Similar to checkGlobalBlock, we check block on the indices level. * diff --git a/src/main/java/org/opensearch/ad/util/Throttler.java b/src/main/java/org/opensearch/ad/util/Throttler.java deleted file mode 100644 index 177b612a2..000000000 --- a/src/main/java/org/opensearch/ad/util/Throttler.java +++ /dev/null @@ -1,73 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. - */ - -package org.opensearch.ad.util; - -import java.time.Clock; -import java.time.Instant; -import java.util.AbstractMap; -import java.util.Map; -import java.util.Optional; -import java.util.concurrent.ConcurrentHashMap; - -import org.opensearch.action.ActionRequest; - -/** - * Utility functions for throttling query. - */ -public class Throttler { - // negativeCache is used to reject search query if given detector already has one query running - // key is detectorId, value is an entry. Key is ActionRequest and value is the timestamp - private final ConcurrentHashMap> negativeCache; - private final Clock clock; - - public Throttler(Clock clock) { - this.negativeCache = new ConcurrentHashMap<>(); - this.clock = clock; - } - - /** - * This will be used when dependency injection directly/indirectly injects a Throttler object. Without this object, - * node start might fail due to not being able to find a Clock object. We removed Clock object association in - * https://github.com/opendistro-for-elasticsearch/anomaly-detection/pull/305 - */ - public Throttler() { - this(Clock.systemUTC()); - } - - /** - * Get negative cache value(ActionRequest, Instant) for given detector - * @param detectorId AnomalyDetector ID - * @return negative cache value(ActionRequest, Instant) - */ - public Optional> getFilteredQuery(String detectorId) { - return Optional.ofNullable(negativeCache.get(detectorId)); - } - - /** - * Insert the negative cache entry for given detector - * If key already exists, return false. Otherwise true. - * @param detectorId AnomalyDetector ID - * @param request ActionRequest - * @return true if key doesn't exist otherwise false. - */ - public synchronized boolean insertFilteredQuery(String detectorId, ActionRequest request) { - return negativeCache.putIfAbsent(detectorId, new AbstractMap.SimpleEntry<>(request, clock.instant())) == null; - } - - /** - * Clear the negative cache for given detector. 
-     * @param detectorId AnomalyDetector ID
-     */
-    public void clearFilteredQuery(String detectorId) {
-        negativeCache.remove(detectorId);
-    }
-}
diff --git a/src/main/java/org/opensearch/timeseries/AnalysisType.java b/src/main/java/org/opensearch/timeseries/AnalysisType.java
new file mode 100644
index 000000000..7d7cc805e
--- /dev/null
+++ b/src/main/java/org/opensearch/timeseries/AnalysisType.java
@@ -0,0 +1,11 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.timeseries;
+
+public enum AnalysisType {
+    AD,
+    FORECAST
+}
diff --git a/src/main/java/org/opensearch/ad/CleanState.java b/src/main/java/org/opensearch/timeseries/CleanState.java
similarity index 94%
rename from src/main/java/org/opensearch/ad/CleanState.java
rename to src/main/java/org/opensearch/timeseries/CleanState.java
index ae8085e88..fac03b453 100644
--- a/src/main/java/org/opensearch/ad/CleanState.java
+++ b/src/main/java/org/opensearch/timeseries/CleanState.java
@@ -9,7 +9,7 @@
  * GitHub history for details.
  */
 
-package org.opensearch.ad;
+package org.opensearch.timeseries;
 
 /**
  * Represent a state organized via detectorId. When deleting a detector's state,
diff --git a/src/main/java/org/opensearch/timeseries/ExceptionRecorder.java b/src/main/java/org/opensearch/timeseries/ExceptionRecorder.java
new file mode 100644
index 000000000..5b692e96f
--- /dev/null
+++ b/src/main/java/org/opensearch/timeseries/ExceptionRecorder.java
@@ -0,0 +1,20 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package org.opensearch.timeseries;
+
+import java.util.Optional;
+
+public interface ExceptionRecorder {
+    public void setException(String id, Exception e);
+
+    public Optional<Exception> fetchExceptionAndClear(String id);
+}
diff --git a/src/main/java/org/opensearch/ad/ExpiringState.java b/src/main/java/org/opensearch/timeseries/ExpiringState.java
similarity index 94%
rename from src/main/java/org/opensearch/ad/ExpiringState.java
rename to src/main/java/org/opensearch/timeseries/ExpiringState.java
index 0df0e1f51..f5e6d3669 100644
--- a/src/main/java/org/opensearch/ad/ExpiringState.java
+++ b/src/main/java/org/opensearch/timeseries/ExpiringState.java
@@ -9,7 +9,7 @@
  * GitHub history for details.
  */
 
-package org.opensearch.ad;
+package org.opensearch.timeseries;
 
 import java.time.Duration;
 import java.time.Instant;
diff --git a/src/main/java/org/opensearch/ad/MaintenanceState.java b/src/main/java/org/opensearch/timeseries/MaintenanceState.java
similarity index 96%
rename from src/main/java/org/opensearch/ad/MaintenanceState.java
rename to src/main/java/org/opensearch/timeseries/MaintenanceState.java
index 646715f7a..07bbb9546 100644
--- a/src/main/java/org/opensearch/ad/MaintenanceState.java
+++ b/src/main/java/org/opensearch/timeseries/MaintenanceState.java
@@ -9,7 +9,7 @@
  * GitHub history for details.
*/ -package org.opensearch.ad; +package org.opensearch.timeseries; import java.time.Duration; import java.util.Map; diff --git a/src/main/java/org/opensearch/ad/NodeState.java b/src/main/java/org/opensearch/timeseries/NodeState.java similarity index 57% rename from src/main/java/org/opensearch/ad/NodeState.java rename to src/main/java/org/opensearch/timeseries/NodeState.java index 9c4693cbd..8537d0b64 100644 --- a/src/main/java/org/opensearch/ad/NodeState.java +++ b/src/main/java/org/opensearch/timeseries/NodeState.java @@ -9,198 +9,180 @@ * GitHub history for details. */ -package org.opensearch.ad; +package org.opensearch.timeseries; import java.time.Clock; import java.time.Duration; import java.time.Instant; import java.util.Optional; -import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; +import org.opensearch.timeseries.model.Config; +import org.opensearch.timeseries.model.Job; /** * Storing intermediate state during the execution of transport action * */ public class NodeState implements ExpiringState { - private String detectorId; - // detector definition - private AnomalyDetector detectorDef; - // number of partitions - private int partitonNumber; + private String configId; + // config definition + private Config configDef; // last access time private Instant lastAccessTime; - // last detection error recorded in result index. Used by DetectorStateHandler - // to check if the error for a detector has changed or not. If changed, trigger indexing. - private Optional lastDetectionError; // last error. private Optional exception; - // flag indicating whether checkpoint for the detector exists - private boolean checkPointExists; // clock to get current time private final Clock clock; + // config job + private Job configJob; + + // AD only states + // number of partitions + private int partitonNumber; + + // flag indicating whether checkpoint for the detector exists + private boolean checkPointExists; + // cold start running flag to prevent concurrent cold start private boolean coldStartRunning; - // detector job - private AnomalyDetectorJob detectorJob; - public NodeState(String detectorId, Clock clock) { - this.detectorId = detectorId; - this.detectorDef = null; - this.partitonNumber = -1; + public NodeState(String configId, Clock clock) { + this.configId = configId; + this.configDef = null; this.lastAccessTime = clock.instant(); - this.lastDetectionError = Optional.empty(); this.exception = Optional.empty(); - this.checkPointExists = false; this.clock = clock; + this.partitonNumber = -1; + this.checkPointExists = false; this.coldStartRunning = false; - this.detectorJob = null; + this.configJob = null; } - public String getId() { - return detectorId; + public String getConfigId() { + return configId; } /** * * @return Detector configuration object */ - public AnomalyDetector getDetectorDef() { + public Config getConfigDef() { refreshLastUpdateTime(); - return detectorDef; + return configDef; } /** * - * @param detectorDef Detector configuration object + * @param configDef Analysis configuration object */ - public void setDetectorDef(AnomalyDetector detectorDef) { - this.detectorDef = detectorDef; + public void setConfigDef(Config configDef) { + this.configDef = configDef; refreshLastUpdateTime(); } /** * - * @return RCF partition number of the detector + * @return last exception if any */ - public int getPartitonNumber() { + public Optional getException() { refreshLastUpdateTime(); - return partitonNumber; + return exception; } /** * - * 
@param partitonNumber RCF partition number + * @param exception exception to record */ - public void setPartitonNumber(int partitonNumber) { - this.partitonNumber = partitonNumber; + public void setException(Exception exception) { + this.exception = Optional.ofNullable(exception); refreshLastUpdateTime(); } /** - * Used to indicate whether cold start succeeds or not - * @return whether checkpoint of models exists or not. + * refresh last access time. */ - public boolean doesCheckpointExists() { - refreshLastUpdateTime(); - return checkPointExists; + protected void refreshLastUpdateTime() { + lastAccessTime = clock.instant(); } /** - * - * @param checkpointExists mark whether checkpoint of models exists or not. + * @param stateTtl time to leave for the state + * @return whether the transport state is expired */ - public void setCheckpointExists(boolean checkpointExists) { - refreshLastUpdateTime(); - this.checkPointExists = checkpointExists; - }; + @Override + public boolean expired(Duration stateTtl) { + return expired(lastAccessTime, stateTtl, clock.instant()); + } /** - * - * @return last model inference error - */ - public Optional getLastDetectionError() { + * + * @return RCF partition number of the detector + */ + public int getPartitonNumber() { refreshLastUpdateTime(); - return lastDetectionError; + return partitonNumber; } /** - * - * @param lastError last model inference error - */ - public void setLastDetectionError(String lastError) { - this.lastDetectionError = Optional.ofNullable(lastError); + * + * @param partitonNumber RCF partition number + */ + public void setPartitonNumber(int partitonNumber) { + this.partitonNumber = partitonNumber; refreshLastUpdateTime(); } /** - * - * @return last exception if any - */ - public Optional getException() { + * Used to indicate whether cold start succeeds or not + * @return whether checkpoint of models exists or not. + */ + public boolean doesCheckpointExists() { refreshLastUpdateTime(); - return exception; + return checkPointExists; } /** - * - * @param exception exception to record - */ - public void setException(Exception exception) { - this.exception = Optional.ofNullable(exception); + * + * @param checkpointExists mark whether checkpoint of models exists or not. + */ + public void setCheckpointExists(boolean checkpointExists) { refreshLastUpdateTime(); - } + this.checkPointExists = checkpointExists; + }; /** - * Used to prevent concurrent cold start - * @return whether cold start is running or not - */ + * Used to prevent concurrent cold start + * @return whether cold start is running or not + */ public boolean isColdStartRunning() { refreshLastUpdateTime(); return coldStartRunning; } /** - * - * @param coldStartRunning whether cold start is running or not - */ + * + * @param coldStartRunning whether cold start is running or not + */ public void setColdStartRunning(boolean coldStartRunning) { this.coldStartRunning = coldStartRunning; refreshLastUpdateTime(); } /** - * - * @return Detector configuration object - */ - public AnomalyDetectorJob getDetectorJob() { + * + * @return Job configuration object + */ + public Job getJob() { refreshLastUpdateTime(); - return detectorJob; + return configJob; } /** - * - * @param detectorJob Detector job - */ - public void setDetectorJob(AnomalyDetectorJob detectorJob) { - this.detectorJob = detectorJob; + * + * @param job analysis job + */ + public void setJob(Job job) { + this.configJob = job; refreshLastUpdateTime(); } - - /** - * refresh last access time. 
- */ - private void refreshLastUpdateTime() { - lastAccessTime = clock.instant(); - } - - /** - * @param stateTtl time to leave for the state - * @return whether the transport state is expired - */ - @Override - public boolean expired(Duration stateTtl) { - return expired(lastAccessTime, stateTtl, clock.instant()); - } } diff --git a/src/main/java/org/opensearch/ad/NodeStateManager.java b/src/main/java/org/opensearch/timeseries/NodeStateManager.java similarity index 59% rename from src/main/java/org/opensearch/ad/NodeStateManager.java rename to src/main/java/org/opensearch/timeseries/NodeStateManager.java index 7e3d708c2..bda0039ae 100644 --- a/src/main/java/org/opensearch/ad/NodeStateManager.java +++ b/src/main/java/org/opensearch/timeseries/NodeStateManager.java @@ -1,20 +1,13 @@ /* + * Copyright OpenSearch Contributors * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. */ -package org.opensearch.ad; +package org.opensearch.timeseries; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE; import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; +import java.io.IOException; import java.time.Clock; import java.time.Duration; import java.util.HashMap; @@ -22,50 +15,56 @@ import java.util.Map; import java.util.Optional; import java.util.concurrent.ConcurrentHashMap; +import java.util.function.Consumer; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.apache.logging.log4j.message.ParameterizedMessage; import org.apache.logging.log4j.util.Strings; +import org.opensearch.OpenSearchStatusException; import org.opensearch.action.ActionListener; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; import org.opensearch.ad.constant.ADCommonName; -import org.opensearch.ad.ml.SingleStreamModelIdMapper; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; -import org.opensearch.ad.transport.BackPressureRouting; -import org.opensearch.ad.util.ClientUtil; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.lease.Releasable; +import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.xcontent.LoggingDeprecationHandler; import org.opensearch.common.xcontent.XContentType; +import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; +import org.opensearch.core.xcontent.XContentParserUtils; +import org.opensearch.forecast.model.Forecaster; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; - -/** - * NodeStateManager is used to manage states shared by transport and ml components - * like AnomalyDetector object - * - */ -public class NodeStateManager implements MaintenanceState, CleanState { +import org.opensearch.timeseries.function.BiCheckedFunction; 
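Taken together, the NodeState changes above replace the AD-specific fields (detectorId, AnomalyDetector, AnomalyDetectorJob) with a generic config id plus the shared Config and Job types, while keeping the AD-only bits (partition number, checkpoint flag, cold-start flag) in the same class. A small sketch of the refactored accessors, with a hypothetical config id and placeholder config/job values:

    // Sketch: the same per-node state object now serves detectors and forecasters alike.
    NodeState state = new NodeState("my-config-id", Clock.systemUTC());
    state.setConfigDef(config);   // any Config subtype: AnomalyDetector or Forecaster
    state.setJob(job);            // org.opensearch.timeseries.model.Job
    Optional<Exception> lastError = state.getException();
    boolean stale = state.expired(Duration.ofMinutes(60));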
+import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; +import org.opensearch.timeseries.model.Config; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.transport.BackPressureRouting; +import org.opensearch.timeseries.util.ClientUtil; +import org.opensearch.timeseries.util.ExceptionUtil; +import org.opensearch.timeseries.util.RestHandlerUtils; + +public class NodeStateManager implements MaintenanceState, CleanState, ExceptionRecorder { private static final Logger LOG = LogManager.getLogger(NodeStateManager.class); + public static final String NO_ERROR = "no_error"; - private ConcurrentHashMap states; - private Client client; - private NamedXContentRegistry xContentRegistry; - private ClientUtil clientUtil; + + protected ConcurrentHashMap states; + protected Client client; + protected NamedXContentRegistry xContentRegistry; + protected ClientUtil clientUtil; + protected final Clock clock; + protected final Duration stateTtl; // map from detector id to the map of ES node id to the node's backpressureMuter private Map> backpressureMuter; - private final Clock clock; - private final Duration stateTtl; private int maxRetryForUnresponsiveNode; private TimeValue mutePeriod; @@ -87,17 +86,20 @@ public NodeStateManager( ClientUtil clientUtil, Clock clock, Duration stateTtl, - ClusterService clusterService + ClusterService clusterService, + Setting maxRetryForUnresponsiveNodeSetting, + Setting backoffMinutesSetting ) { this.states = new ConcurrentHashMap<>(); this.client = client; this.xContentRegistry = xContentRegistry; this.clientUtil = clientUtil; - this.backpressureMuter = new ConcurrentHashMap<>(); this.clock = clock; this.stateTtl = stateTtl; - this.maxRetryForUnresponsiveNode = MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(settings); - clusterService.getClusterSettings().addSettingsUpdateConsumer(MAX_RETRY_FOR_UNRESPONSIVE_NODE, it -> { + this.backpressureMuter = new ConcurrentHashMap<>(); + + this.maxRetryForUnresponsiveNode = maxRetryForUnresponsiveNodeSetting.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(maxRetryForUnresponsiveNodeSetting, it -> { this.maxRetryForUnresponsiveNode = it; Iterator> iter = backpressureMuter.values().iterator(); while (iter.hasNext()) { @@ -105,8 +107,8 @@ public NodeStateManager( entry.values().forEach(v -> v.setMaxRetryForUnresponsiveNode(it)); } }); - this.mutePeriod = BACKOFF_MINUTES.get(settings); - clusterService.getClusterSettings().addSettingsUpdateConsumer(BACKOFF_MINUTES, it -> { + this.mutePeriod = backoffMinutesSetting.get(settings); + clusterService.getClusterSettings().addSettingsUpdateConsumer(backoffMinutesSetting, it -> { this.mutePeriod = it; Iterator> iter = backpressureMuter.values().iterator(); while (iter.hasNext()) { @@ -114,117 +116,37 @@ public NodeStateManager( entry.values().forEach(v -> v.setMutePeriod(it)); } }); - } - - /** - * Get Detector config object if present - * @param adID detector Id - * @return the Detecor config object or empty Optional - */ - public Optional getAnomalyDetectorIfPresent(String adID) { - NodeState state = states.get(adID); - return Optional.ofNullable(state).map(NodeState::getDetectorDef); - } - - public void getAnomalyDetector(String adID, ActionListener> listener) { - NodeState state = states.get(adID); - if (state != null && state.getDetectorDef() != null) { - listener.onResponse(Optional.of(state.getDetectorDef())); - } else { - GetRequest request = new GetRequest(CommonName.CONFIG_INDEX, adID); - clientUtil.asyncRequest(request, 
client::get, onGetDetectorResponse(adID, listener)); - } - } - - private ActionListener onGetDetectorResponse(String adID, ActionListener> listener) { - return ActionListener.wrap(response -> { - if (response == null || !response.isExists()) { - listener.onResponse(Optional.empty()); - return; - } - - String xc = response.getSourceAsString(); - LOG.debug("Fetched anomaly detector: {}", xc); - - try ( - XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, xc) - ) { - ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetector detector = AnomalyDetector.parse(parser, response.getId()); - // end execution if all features are disabled - if (detector.getEnabledFeatureIds().isEmpty()) { - listener.onFailure(new EndRunException(adID, CommonMessages.ALL_FEATURES_DISABLED_ERR_MSG, true).countedInStats(false)); - return; - } - NodeState state = states.computeIfAbsent(adID, id -> new NodeState(id, clock)); - state.setDetectorDef(detector); - listener.onResponse(Optional.of(detector)); - } catch (Exception t) { - LOG.error("Fail to parse detector {}", adID); - LOG.error("Stack trace:", t); - listener.onResponse(Optional.empty()); - } - }, listener::onFailure); } /** - * Get a detector's checkpoint and save a flag if we find any so that next time we don't need to do it again - * @param adID the detector's ID - * @param listener listener to handle get request + * Clean states if it is older than our stateTtl. transportState has to be a + * ConcurrentHashMap otherwise we will have + * java.util.ConcurrentModificationException. + * */ - public void getDetectorCheckpoint(String adID, ActionListener listener) { - NodeState state = states.get(adID); - if (state != null && state.doesCheckpointExists()) { - listener.onResponse(Boolean.TRUE); - return; - } - - GetRequest request = new GetRequest(ADCommonName.CHECKPOINT_INDEX_NAME, SingleStreamModelIdMapper.getRcfModelId(adID, 0)); - - clientUtil.asyncRequest(request, client::get, onGetCheckpointResponse(adID, listener)); - } - - private ActionListener onGetCheckpointResponse(String adID, ActionListener listener) { - return ActionListener.wrap(response -> { - if (response == null || !response.isExists()) { - listener.onResponse(Boolean.FALSE); - } else { - NodeState state = states.computeIfAbsent(adID, id -> new NodeState(id, clock)); - state.setCheckpointExists(true); - listener.onResponse(Boolean.TRUE); - } - }, listener::onFailure); + @Override + public void maintenance() { + maintenance(states, stateTtl); } /** * Used in delete workflow * - * @param detectorId detector ID + * @param configId config ID */ @Override - public void clear(String detectorId) { - Map routingMap = backpressureMuter.get(detectorId); + public void clear(String configId) { + Map routingMap = backpressureMuter.get(configId); if (routingMap != null) { routingMap.clear(); - backpressureMuter.remove(detectorId); + backpressureMuter.remove(configId); } - states.remove(detectorId); + states.remove(configId); } - /** - * Clean states if it is older than our stateTtl. transportState has to be a - * ConcurrentHashMap otherwise we will have - * java.util.ConcurrentModificationException. 
- * - */ - @Override - public void maintenance() { - maintenance(states, stateTtl); - } - - public boolean isMuted(String nodeId, String detectorId) { - Map routingMap = backpressureMuter.get(detectorId); + public boolean isMuted(String nodeId, String configId) { + Map routingMap = backpressureMuter.get(configId); if (routingMap == null || routingMap.isEmpty()) { return false; } @@ -235,68 +157,140 @@ public boolean isMuted(String nodeId, String detectorId) { /** * When we have a unsuccessful call with a node, increment the backpressure counter. * @param nodeId an ES node's ID - * @param detectorId Detector ID + * @param configId config ID */ - public void addPressure(String nodeId, String detectorId) { + public void addPressure(String nodeId, String configId) { Map routingMap = backpressureMuter - .computeIfAbsent(detectorId, k -> new HashMap()); + .computeIfAbsent(configId, k -> new HashMap()); routingMap.computeIfAbsent(nodeId, k -> new BackPressureRouting(k, clock, maxRetryForUnresponsiveNode, mutePeriod)).addPressure(); } /** * When we have a successful call with a node, clear the backpressure counter. * @param nodeId an ES node's ID - * @param detectorId Detector ID + * @param configId config ID */ - public void resetBackpressureCounter(String nodeId, String detectorId) { - Map routingMap = backpressureMuter.get(detectorId); + public void resetBackpressureCounter(String nodeId, String configId) { + Map routingMap = backpressureMuter.get(configId); if (routingMap == null || routingMap.isEmpty()) { - backpressureMuter.remove(detectorId); + backpressureMuter.remove(configId); return; } routingMap.remove(nodeId); } /** - * Check if there is running query on given detector - * @param detector Anomaly Detector - * @return true if given detector has a running query else false + * Get config and execute consumer function. + * [Important!] Make sure listener returns in function + * + * @param configId config id + * @param analysisType analysis type + * @param function consumer function. + * @param listener action listener. Only meant to return failure. 
+ * @param action listener response type */ - public boolean hasRunningQuery(AnomalyDetector detector) { - return clientUtil.hasRunningQuery(detector); + public void getConfig( + String configId, + AnalysisType analysisType, + Consumer> function, + ActionListener listener + ) { + GetRequest getRequest = new GetRequest(CommonName.CONFIG_INDEX, configId); + client.get(getRequest, ActionListener.wrap(response -> { + if (!response.isExists()) { + function.accept(Optional.empty()); + return; + } + try ( + XContentParser parser = RestHandlerUtils.createXContentParserFromRegistry(xContentRegistry, response.getSourceAsBytesRef()) + ) { + XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + Config config = null; + if (analysisType == AnalysisType.AD) { + config = AnomalyDetector.parse(parser, response.getId(), response.getVersion()); + } else if (analysisType == AnalysisType.FORECAST) { + config = Forecaster.parse(parser, response.getId(), response.getVersion()); + } else { + throw new UnsupportedOperationException("This method is not supported"); + } + + function.accept(Optional.of(config)); + } catch (Exception e) { + String message = "Failed to parse config " + configId; + LOG.error(message, e); + listener.onFailure(new OpenSearchStatusException(message, RestStatus.INTERNAL_SERVER_ERROR)); + } + }, exception -> { + LOG.error("Failed to get config " + configId, exception); + listener.onFailure(exception); + })); } - /** - * Get last error of a detector - * @param adID detector id - * @return last error for the detector - */ - public String getLastDetectionError(String adID) { - return Optional.ofNullable(states.get(adID)).flatMap(state -> state.getLastDetectionError()).orElse(NO_ERROR); + public void getConfig(String configID, AnalysisType context, ActionListener> listener) { + NodeState state = states.get(configID); + if (state != null && state.getConfigDef() != null) { + listener.onResponse(Optional.of(state.getConfigDef())); + } else { + GetRequest request = new GetRequest(CommonName.CONFIG_INDEX, configID); + BiCheckedFunction configParser = context == AnalysisType.AD + ? 
AnomalyDetector::parse + : Forecaster::parse; + clientUtil.asyncRequest(request, client::get, onGetConfigResponse(configID, configParser, listener)); + } } - /** - * Set last detection error of a detector - * @param adID detector id - * @param error error, can be null - */ - public void setLastDetectionError(String adID, String error) { - NodeState state = states.computeIfAbsent(adID, id -> new NodeState(id, clock)); - state.setLastDetectionError(error); + private ActionListener onGetConfigResponse( + String configID, + BiCheckedFunction configParser, + ActionListener> listener + ) { + return ActionListener.wrap(response -> { + if (response == null || !response.isExists()) { + listener.onResponse(Optional.empty()); + return; + } + + String xc = response.getSourceAsString(); + LOG.debug("Fetched config: {}", xc); + + try ( + XContentParser parser = XContentType.JSON.xContent().createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, xc) + ) { + ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); + Config config = configParser.apply(parser, response.getId()); + + // end execution if all features are disabled + if (config.getEnabledFeatureIds().isEmpty()) { + listener + .onFailure(new EndRunException(configID, CommonMessages.ALL_FEATURES_DISABLED_ERR_MSG, true).countedInStats(false)); + return; + } + + NodeState state = states.computeIfAbsent(configID, configId -> new NodeState(configId, clock)); + state.setConfigDef(config); + + listener.onResponse(Optional.of(config)); + } catch (Exception t) { + LOG.error("Fail to parse config {}", configID); + LOG.error("Stack trace:", t); + listener.onResponse(Optional.empty()); + } + }, listener::onFailure); } /** - * Get a detector's exception. The method has side effect. + * Get the exception of an analysis. The method has side effect. * We reset error after calling the method because - * 1) We record a detector's exception in each interval. There is no need - * to record it twice. + * 1) We record the exception of an analysis in each interval. + * There is no need to record it twice. * 2) EndRunExceptions can stop job running. We only want to send the same * signal once for each exception. - * @param adID detector id - * @return the detector's exception + * @param configID config id + * @return the config's exception */ - public Optional fetchExceptionAndClear(String adID) { - NodeState state = states.get(adID); + @Override + public Optional fetchExceptionAndClear(String configID) { + NodeState state = states.get(configID); if (state == null) { return Optional.empty(); } @@ -307,26 +301,27 @@ public Optional fetchExceptionAndClear(String adID) { } /** - * For single-stream detector, we have one exception per interval. When + * For single-stream analysis, we have one exception per interval. When * an interval starts, it fetches and clears the exception. - * For HCAD, there can be one exception per entity. To not bloat memory + * For HC analysis, there can be one exception per entity. To not bloat memory * with exceptions, we will keep only one exception. An exception has 3 purposes: - * 1) stop detector if nothing else works; + * 1) stop analysis if nothing else works; * 2) increment error stats to ticket about high-error domain * 3) debugging. * - * For HCAD, we record all entities' exceptions in anomaly results. So 3) + * For HC analysis, we record all entities' exceptions in result index. So 3) * is covered. As long as we keep one exception among all exceptions, 2) * is covered. 
So the only thing we have to pay attention is to keep EndRunException. * When overriding an exception, EndRunException has priority. - * @param detectorId Detector Id + * @param configId Detector Id * @param e Exception to set */ - public void setException(String detectorId, Exception e) { - if (e == null || Strings.isEmpty(detectorId)) { + @Override + public void setException(String configId, Exception e) { + if (e == null || Strings.isEmpty(configId)) { return; } - NodeState state = states.computeIfAbsent(detectorId, d -> new NodeState(detectorId, clock)); + NodeState state = states.computeIfAbsent(configId, d -> new NodeState(configId, clock)); Optional exception = state.getException(); if (exception.isPresent()) { Exception higherPriorityException = ExceptionUtil.selectHigherPriorityException(e, exception.get()); @@ -338,6 +333,35 @@ public void setException(String detectorId, Exception e) { state.setException(e); } + /** + * Get a detector's checkpoint and save a flag if we find any so that next time we don't need to do it again + * @param adID the detector's ID + * @param listener listener to handle get request + */ + public void getDetectorCheckpoint(String adID, ActionListener listener) { + NodeState state = states.get(adID); + if (state != null && state.doesCheckpointExists()) { + listener.onResponse(Boolean.TRUE); + return; + } + + GetRequest request = new GetRequest(ADCommonName.CHECKPOINT_INDEX_NAME, SingleStreamModelIdMapper.getRcfModelId(adID, 0)); + + clientUtil.asyncRequest(request, client::get, onGetCheckpointResponse(adID, listener)); + } + + private ActionListener onGetCheckpointResponse(String adID, ActionListener listener) { + return ActionListener.wrap(response -> { + if (response == null || !response.isExists()) { + listener.onResponse(Boolean.FALSE); + } else { + NodeState state = states.computeIfAbsent(adID, id -> new NodeState(id, clock)); + state.setCheckpointExists(true); + listener.onResponse(Boolean.TRUE); + } + }, listener::onFailure); + } + /** * Whether last cold start for the detector is running * @param adID detector ID @@ -368,17 +392,17 @@ public Releasable markColdStartRunning(String adID) { }; } - public void getAnomalyDetectorJob(String adID, ActionListener> listener) { - NodeState state = states.get(adID); - if (state != null && state.getDetectorJob() != null) { - listener.onResponse(Optional.of(state.getDetectorJob())); + public void getJob(String configID, ActionListener> listener) { + NodeState state = states.get(configID); + if (state != null && state.getJob() != null) { + listener.onResponse(Optional.of(state.getJob())); } else { - GetRequest request = new GetRequest(CommonName.JOB_INDEX, adID); - clientUtil.asyncRequest(request, client::get, onGetDetectorJobResponse(adID, listener)); + GetRequest request = new GetRequest(CommonName.JOB_INDEX, configID); + clientUtil.asyncRequest(request, client::get, onGetJobResponse(configID, listener)); } } - private ActionListener onGetDetectorJobResponse(String adID, ActionListener> listener) { + private ActionListener onGetJobResponse(String configID, ActionListener> listener) { return ActionListener.wrap(response -> { if (response == null || !response.isExists()) { listener.onResponse(Optional.empty()); @@ -386,7 +410,7 @@ private ActionListener onGetDetectorJobResponse(String adID, Action } String xc = response.getSourceAsString(); - LOG.debug("Fetched anomaly detector: {}", xc); + LOG.debug("Fetched config: {}", xc); try ( XContentParser parser = XContentType.JSON @@ -394,13 +418,13 @@ private 
ActionListener onGetDetectorJobResponse(String adID, Action .createParser(xContentRegistry, LoggingDeprecationHandler.INSTANCE, response.getSourceAsString()) ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetectorJob job = AnomalyDetectorJob.parse(parser); - NodeState state = states.computeIfAbsent(adID, id -> new NodeState(id, clock)); - state.setDetectorJob(job); + Job job = Job.parse(parser); + NodeState state = states.computeIfAbsent(configID, id -> new NodeState(id, clock)); + state.setJob(job); listener.onResponse(Optional.of(job)); } catch (Exception t) { - LOG.error(new ParameterizedMessage("Fail to parse job {}", adID), t); + LOG.error(new ParameterizedMessage("Fail to parse job {}", configID), t); listener.onResponse(Optional.empty()); } }, listener::onFailure); diff --git a/src/main/java/org/opensearch/timeseries/TimeSeriesAnalyticsPlugin.java b/src/main/java/org/opensearch/timeseries/TimeSeriesAnalyticsPlugin.java index 9d3e827eb..b22d12ade 100644 --- a/src/main/java/org/opensearch/timeseries/TimeSeriesAnalyticsPlugin.java +++ b/src/main/java/org/opensearch/timeseries/TimeSeriesAnalyticsPlugin.java @@ -38,7 +38,6 @@ import org.opensearch.ad.AnomalyDetectorRunner; import org.opensearch.ad.ExecuteADResultResponseRecorder; import org.opensearch.ad.MemoryTracker; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; @@ -49,14 +48,12 @@ import org.opensearch.ad.cluster.HashRing; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.feature.FeatureManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.ml.CheckpointDao; import org.opensearch.ad.ml.EntityColdStarter; import org.opensearch.ad.ml.HybridThresholdingModel; import org.opensearch.ad.ml.ModelManager; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.model.DetectorInternalState; import org.opensearch.ad.ratelimit.CheckPointMaintainRequestAdapter; @@ -158,10 +155,7 @@ import org.opensearch.ad.transport.handler.AnomalyIndexHandler; import org.opensearch.ad.transport.handler.AnomalyResultBulkIndexHandler; import org.opensearch.ad.transport.handler.MultiEntityResultHandler; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.ad.util.IndexUtils; -import org.opensearch.ad.util.SecurityClientUtil; -import org.opensearch.ad.util.Throttler; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.node.DiscoveryNodes; @@ -199,10 +193,14 @@ import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.dataprocessor.Imputer; import org.opensearch.timeseries.dataprocessor.LinearUniformImputer; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.function.ThrowingSupplierWrapper; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.settings.TimeSeriesSettings; import org.opensearch.timeseries.stats.StatNames; +import org.opensearch.timeseries.util.ClientUtil; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.watcher.ResourceWatcherService; import 
com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper; @@ -347,8 +345,7 @@ public Collection createComponents( this.client = client; this.threadPool = threadPool; Settings settings = environment.settings(); - Throttler throttler = new Throttler(getClock()); - this.clientUtil = new ClientUtil(settings, client, throttler, threadPool); + this.clientUtil = new ClientUtil(client); this.indexUtils = new IndexUtils(client, clientUtil, clusterService, indexNameExpressionResolver); this.nodeFilter = new DiscoveryNodeFilterer(clusterService); // convert from checked IOException to unchecked RuntimeException @@ -373,8 +370,10 @@ public Collection createComponents( settings, clientUtil, getClock(), - AnomalyDetectorSettings.HOURLY_MAINTENANCE, - clusterService + TimeSeriesSettings.HOURLY_MAINTENANCE, + clusterService, + TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, + TimeSeriesSettings.BACKOFF_MINUTES ); securityClientUtil = new SecurityClientUtil(stateManager, settings); SearchFeatureDao searchFeatureDao = new SearchFeatureDao( @@ -896,10 +895,10 @@ public List> getSettings() { LegacyOpenDistroAnomalyDetectorSettings.BACKOFF_MINUTES, LegacyOpenDistroAnomalyDetectorSettings.BACKOFF_INITIAL_DELAY, LegacyOpenDistroAnomalyDetectorSettings.MAX_RETRY_FOR_BACKOFF, - AnomalyDetectorSettings.REQUEST_TIMEOUT, - AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, - AnomalyDetectorSettings.COOLDOWN_MINUTES, - AnomalyDetectorSettings.BACKOFF_MINUTES, + AnomalyDetectorSettings.AD_REQUEST_TIMEOUT, + AnomalyDetectorSettings.AD_MAX_RETRY_FOR_UNRESPONSIVE_NODE, + AnomalyDetectorSettings.AD_COOLDOWN_MINUTES, + AnomalyDetectorSettings.AD_BACKOFF_MINUTES, AnomalyDetectorSettings.AD_BACKOFF_INITIAL_DELAY, AnomalyDetectorSettings.AD_MAX_RETRY_FOR_BACKOFF, // result index rollover @@ -979,7 +978,16 @@ public List> getSettings() { // ForecastSettings.FORECAST_MAX_HC_FORECASTERS, ForecastSettings.FORECAST_INDEX_PRESSURE_SOFT_LIMIT, ForecastSettings.FORECAST_INDEX_PRESSURE_HARD_LIMIT, - ForecastSettings.FORECAST_MAX_PRIMARY_SHARDS + ForecastSettings.FORECAST_MAX_PRIMARY_SHARDS, + // ====================================== + // Common settings + // ====================================== + // Fault tolerance + TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, + TimeSeriesSettings.BACKOFF_MINUTES, + TimeSeriesSettings.COOLDOWN_MINUTES, + // tasks + TimeSeriesSettings.MAX_CACHED_DELETED_TASKS ); return unmodifiableList( Stream @@ -997,7 +1005,7 @@ public List getNamedXContent() { AnomalyDetector.XCONTENT_REGISTRY, AnomalyResult.XCONTENT_REGISTRY, DetectorInternalState.XCONTENT_REGISTRY, - AnomalyDetectorJob.XCONTENT_REGISTRY, + Job.XCONTENT_REGISTRY, Forecaster.XCONTENT_REGISTRY ); } @@ -1061,7 +1069,7 @@ public ScheduledJobRunner getJobRunner() { public ScheduledJobParser getJobParser() { return (parser, id, jobDocVersion) -> { XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - return AnomalyDetectorJob.parse(parser); + return Job.parse(parser); }; } diff --git a/src/main/java/org/opensearch/timeseries/constant/CommonMessages.java b/src/main/java/org/opensearch/timeseries/constant/CommonMessages.java index 393248237..ae2064add 100644 --- a/src/main/java/org/opensearch/timeseries/constant/CommonMessages.java +++ b/src/main/java/org/opensearch/timeseries/constant/CommonMessages.java @@ -85,4 +85,10 @@ public static String getTooManyCategoricalFieldErr(int limit) { public static final String BUG_RESPONSE = "We might have bugs."; public static 
final String MEMORY_LIMIT_EXCEEDED_ERR_MSG = "Models memory usage exceeds our limit."; + // ====================================== + // security + // ====================================== + public static String NO_PERMISSION_TO_ACCESS_CONFIG = "User does not have permissions to access config: "; + public static String FAIL_TO_GET_USER_INFO = "Unable to get user information from config "; + } diff --git a/src/main/java/org/opensearch/ad/feature/SearchFeatureDao.java b/src/main/java/org/opensearch/timeseries/feature/SearchFeatureDao.java similarity index 95% rename from src/main/java/org/opensearch/ad/feature/SearchFeatureDao.java rename to src/main/java/org/opensearch/timeseries/feature/SearchFeatureDao.java index 557e98fd7..b429bef26 100644 --- a/src/main/java/org/opensearch/ad/feature/SearchFeatureDao.java +++ b/src/main/java/org/opensearch/timeseries/feature/SearchFeatureDao.java @@ -9,7 +9,7 @@ * GitHub history for details. */ -package org.opensearch.ad.feature; +package org.opensearch.timeseries.feature; import static org.apache.commons.math3.linear.MatrixUtils.createRealMatrix; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_ENTITIES_FOR_PREVIEW; @@ -39,8 +39,8 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; +import org.opensearch.ad.feature.AbstractRetriever; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -65,12 +65,15 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.search.sort.FieldSortBuilder; import org.opensearch.search.sort.SortOrder; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.dataprocessor.Imputer; +import org.opensearch.timeseries.model.Config; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.util.ParseUtils; +import org.opensearch.timeseries.util.SecurityClientUtil; /** * DAO for features from search. 
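The two constants added to CommonMessages above centralize the security error text that previously lived in the AD-specific message class, so AD and forecast code paths report access failures the same way. A hedged usage sketch; the listener and configId variables are illustrative:

    import org.opensearch.action.ActionListener;
    import org.opensearch.timeseries.common.exception.TimeSeriesException;
    import org.opensearch.timeseries.constant.CommonMessages;

    // On a failed backend-role check, surface the shared, config-agnostic message.
    static void rejectAccess(String configId, ActionListener<?> listener) {
        listener.onFailure(new TimeSeriesException(CommonMessages.NO_PERMISSION_TO_ACCESS_CONFIG + configId));
    }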
@@ -181,6 +184,7 @@ public void getLatestDataTime(AnomalyDetector detector, ActionListener> listener) { + public void getMinDataTime(Config config, Optional entity, AnalysisType context, ActionListener> listener) { BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery(); - for (TermQueryBuilder term : entity.getTermQueryBuilders()) { - internalFilterQuery.filter(term); + if (entity.isPresent()) { + for (TermQueryBuilder term : entity.get().getTermQueryBuilders()) { + internalFilterQuery.filter(term); + } } SearchSourceBuilder searchSourceBuilder = new SearchSourceBuilder() .query(internalFilterQuery) - .aggregation(AggregationBuilders.min(AGG_NAME_MIN).field(detector.getTimeField())) + .aggregation(AggregationBuilders.min(AGG_NAME_MIN).field(config.getTimeField())) .trackTotalHits(false) .size(0); - SearchRequest searchRequest = new SearchRequest().indices(detector.getIndices().toArray(new String[0])).source(searchSourceBuilder); + SearchRequest searchRequest = new SearchRequest().indices(config.getIndices().toArray(new String[0])).source(searchSourceBuilder); final ActionListener searchResponseListener = ActionListener .wrap(response -> { listener.onResponse(parseMinDataTime(response)); }, listener::onFailure); // inject user role while searching. @@ -494,8 +502,9 @@ public void getEntityMinDataTime(AnomalyDetector detector, Entity entity, Action .asyncRequestWithInjectedSecurity( searchRequest, client::search, - detector.getId(), + config.getId(), client, + context, searchResponseListener ); } @@ -529,6 +538,7 @@ public void getFeaturesForPeriod(AnomalyDetector detector, long startTime, long client::search, detector.getId(), client, + AnalysisType.AD, searchResponseListener ); } @@ -556,6 +566,7 @@ public void getFeaturesForPeriodByBatch( client::search, detector.getId(), client, + AnalysisType.AD, searchResponseListener ); } @@ -583,24 +594,24 @@ public Optional parseResponse(SearchResponse response, List fe * * Sampled features are not true features. They are intended to be approximate results produced at low costs. 
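The getMinDataTime change above folds the old single-stream and per-entity variants into one method: an empty Optional skips the entity term filters, while a present Entity narrows the min aggregation to that entity. A hedged caller sketch; the listener's generic type is assumed to be Optional<Long>, matching the methods it replaces:

    import java.util.Optional;

    import org.opensearch.action.ActionListener;
    import org.opensearch.timeseries.AnalysisType;
    import org.opensearch.timeseries.feature.SearchFeatureDao;
    import org.opensearch.timeseries.model.Config;
    import org.opensearch.timeseries.model.Entity;

    void earliestDataTime(SearchFeatureDao dao, Config config, Optional<Entity> entity,
            ActionListener<Optional<Long>> listener) {
        // Single-stream callers pass Optional.empty(); HC cold start passes the entity being trained.
        dao.getMinDataTime(config, entity, AnalysisType.AD, listener);
    }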
* - * @param detector info about the indices, documents, feature query + * @param config info about the indices, documents, feature query * @param ranges list of time ranges * @param listener handle approximate features for the time ranges * @throws IOException if a user gives wrong query input when defining a detector */ public void getFeatureSamplesForPeriods( - AnomalyDetector detector, + Config config, List> ranges, + AnalysisType context, ActionListener>> listener ) throws IOException { - SearchRequest request = createPreviewSearchRequest(detector, ranges); + SearchRequest request = createPreviewSearchRequest(config, ranges); final ActionListener searchResponseListener = ActionListener.wrap(response -> { Aggregations aggs = response.getAggregations(); if (aggs == null) { listener.onResponse(Collections.emptyList()); return; } - listener .onResponse( aggs @@ -608,7 +619,7 @@ public void getFeatureSamplesForPeriods( .stream() .filter(InternalDateRange.class::isInstance) .flatMap(agg -> ((InternalDateRange) agg).getBuckets().stream()) - .map(bucket -> parseBucket(bucket, detector.getEnabledFeatureIds())) + .map(bucket -> parseBucket(bucket, config.getEnabledFeatureIds())) .collect(Collectors.toList()) ); }, listener::onFailure); @@ -617,8 +628,9 @@ public void getFeatureSamplesForPeriods( .asyncRequestWithInjectedSecurity( request, client::search, - detector.getId(), + config.getId(), client, + context, searchResponseListener ); } @@ -842,24 +854,25 @@ private SearchRequest createFeatureSearchRequest(AnomalyDetector detector, long } } - private SearchRequest createPreviewSearchRequest(AnomalyDetector detector, List> ranges) throws IOException { + private SearchRequest createPreviewSearchRequest(Config config, List> ranges) throws IOException { try { - SearchSourceBuilder searchSourceBuilder = ParseUtils.generatePreviewQuery(detector, ranges, xContent); - return new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder); + SearchSourceBuilder searchSourceBuilder = ParseUtils.generatePreviewQuery(config, ranges, xContent); + return new SearchRequest(config.getIndices().toArray(new String[0]), searchSourceBuilder); } catch (IOException e) { - logger.warn("Failed to create feature search request for " + detector.getId() + " for preview", e); + logger.warn("Failed to create feature search request for " + config.getId() + " for preview", e); throw e; } } public void getColdStartSamplesForPeriods( - AnomalyDetector detector, + Config config, List> ranges, - Entity entity, + Optional entity, boolean includesEmptyBucket, + AnalysisType context, ActionListener>> listener ) { - SearchRequest request = createColdStartFeatureSearchRequest(detector, ranges, entity); + SearchRequest request = createColdStartFeatureSearchRequest(config, ranges, entity); final ActionListener searchResponseListener = ActionListener.wrap(response -> { Aggregations aggs = response.getAggregations(); if (aggs == null) { @@ -889,7 +902,7 @@ public void getColdStartSamplesForPeriods( .filter(bucket -> bucket.getFrom() != null && bucket.getFrom() instanceof ZonedDateTime) .filter(bucket -> bucket.getDocCount() > docCountThreshold) .sorted(Comparator.comparing((Bucket bucket) -> (ZonedDateTime) bucket.getFrom())) - .map(bucket -> parseBucket(bucket, detector.getEnabledFeatureIds())) + .map(bucket -> parseBucket(bucket, config.getEnabledFeatureIds())) .collect(Collectors.toList()) ); }, listener::onFailure); @@ -899,15 +912,16 @@ public void getColdStartSamplesForPeriods( 
.asyncRequestWithInjectedSecurity( request, client::search, - detector.getId(), + config.getId(), client, + context, searchResponseListener ); } - private SearchRequest createColdStartFeatureSearchRequest(AnomalyDetector detector, List> ranges, Entity entity) { + private SearchRequest createColdStartFeatureSearchRequest(Config detector, List> ranges, Optional entity) { try { - SearchSourceBuilder searchSourceBuilder = ParseUtils.generateEntityColdStartQuery(detector, ranges, entity, xContent); + SearchSourceBuilder searchSourceBuilder = ParseUtils.generateColdStartQuery(detector, ranges, entity, xContent); return new SearchRequest(detector.getIndices().toArray(new String[0]), searchSourceBuilder); } catch (IOException e) { logger diff --git a/src/main/java/org/opensearch/timeseries/function/BiCheckedFunction.java b/src/main/java/org/opensearch/timeseries/function/BiCheckedFunction.java new file mode 100644 index 000000000..d96b14adf --- /dev/null +++ b/src/main/java/org/opensearch/timeseries/function/BiCheckedFunction.java @@ -0,0 +1,11 @@ +/* + * Copyright OpenSearch Contributors + * SPDX-License-Identifier: Apache-2.0 + */ + +package org.opensearch.timeseries.function; + +@FunctionalInterface +public interface BiCheckedFunction { + R apply(T t, F f) throws E; +} diff --git a/src/main/java/org/opensearch/ad/ml/SingleStreamModelIdMapper.java b/src/main/java/org/opensearch/timeseries/ml/SingleStreamModelIdMapper.java similarity index 98% rename from src/main/java/org/opensearch/ad/ml/SingleStreamModelIdMapper.java rename to src/main/java/org/opensearch/timeseries/ml/SingleStreamModelIdMapper.java index ac3ce899d..c33c4818f 100644 --- a/src/main/java/org/opensearch/ad/ml/SingleStreamModelIdMapper.java +++ b/src/main/java/org/opensearch/timeseries/ml/SingleStreamModelIdMapper.java @@ -9,7 +9,7 @@ * GitHub history for details. */ -package org.opensearch.ad.ml; +package org.opensearch.timeseries.ml; import java.util.Locale; import java.util.regex.Matcher; diff --git a/src/main/java/org/opensearch/ad/model/AnomalyDetectorJob.java b/src/main/java/org/opensearch/timeseries/model/Job.java similarity index 93% rename from src/main/java/org/opensearch/ad/model/AnomalyDetectorJob.java rename to src/main/java/org/opensearch/timeseries/model/Job.java index 7ef5ae528..17371b362 100644 --- a/src/main/java/org/opensearch/ad/model/AnomalyDetectorJob.java +++ b/src/main/java/org/opensearch/timeseries/model/Job.java @@ -9,7 +9,7 @@ * GitHub history for details. */ -package org.opensearch.ad.model; +package org.opensearch.timeseries.model; import static org.opensearch.ad.settings.AnomalyDetectorSettings.DEFAULT_AD_JOB_LOC_DURATION_SECONDS; import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; @@ -31,8 +31,6 @@ import org.opensearch.jobscheduler.spi.schedule.IntervalSchedule; import org.opensearch.jobscheduler.spi.schedule.Schedule; import org.opensearch.jobscheduler.spi.schedule.ScheduleParser; -import org.opensearch.timeseries.model.IntervalTimeConfiguration; -import org.opensearch.timeseries.model.TimeConfiguration; import org.opensearch.timeseries.util.ParseUtils; import com.google.common.base.Objects; @@ -40,7 +38,7 @@ /** * Anomaly detector job. 
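The new BiCheckedFunction interface introduced above is what lets NodeStateManager pick between AnomalyDetector::parse and Forecaster::parse as method references even though both declare a checked exception. A self-contained sketch of the same pattern with standalone types; the real code binds XContentParser, String, and Config:

    import java.io.IOException;

    @FunctionalInterface
    interface BiCheckedFunction<T, F, R, E extends Exception> {
        R apply(T t, F f) throws E;
    }

    public final class ParserSelectionDemo {
        // Two parsers that both declare a checked exception, like AnomalyDetector.parse and Forecaster.parse.
        static String parseDetector(String raw, String id) throws IOException { return "detector:" + id + ":" + raw; }
        static String parseForecaster(String raw, String id) throws IOException { return "forecaster:" + id + ":" + raw; }

        public static void main(String[] args) throws IOException {
            boolean isAd = false;
            // Select the parser once, then apply it uniformly; the checked exception stays on the signature.
            BiCheckedFunction<String, String, String, IOException> parser = isAd
                ? ParserSelectionDemo::parseDetector
                : ParserSelectionDemo::parseForecaster;
            System.out.println(parser.apply("{\"name\":\"demo\"}", "config-1"));
        }
    }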
*/ -public class AnomalyDetectorJob implements Writeable, ToXContentObject, ScheduledJobParameter { +public class Job implements Writeable, ToXContentObject, ScheduledJobParameter { enum ScheduleType { CRON, INTERVAL @@ -48,7 +46,7 @@ enum ScheduleType { public static final String PARSE_FIELD_NAME = "AnomalyDetectorJob"; public static final NamedXContentRegistry.Entry XCONTENT_REGISTRY = new NamedXContentRegistry.Entry( - AnomalyDetectorJob.class, + Job.class, new ParseField(PARSE_FIELD_NAME), it -> parse(it) ); @@ -76,7 +74,7 @@ enum ScheduleType { private final User user; private String resultIndex; - public AnomalyDetectorJob( + public Job( String name, Schedule schedule, TimeConfiguration windowDelay, @@ -100,9 +98,9 @@ public AnomalyDetectorJob( this.resultIndex = resultIndex; } - public AnomalyDetectorJob(StreamInput input) throws IOException { + public Job(StreamInput input) throws IOException { name = input.readString(); - if (input.readEnum(AnomalyDetectorJob.ScheduleType.class) == ScheduleType.CRON) { + if (input.readEnum(Job.ScheduleType.class) == ScheduleType.CRON) { schedule = new CronSchedule(input); } else { schedule = new IntervalSchedule(input); @@ -168,7 +166,7 @@ public void writeTo(StreamOutput output) throws IOException { output.writeOptionalString(resultIndex); } - public static AnomalyDetectorJob parse(XContentParser parser) throws IOException { + public static Job parse(XContentParser parser) throws IOException { String name = null; Schedule schedule = null; TimeConfiguration windowDelay = null; @@ -222,7 +220,7 @@ public static AnomalyDetectorJob parse(XContentParser parser) throws IOException break; } } - return new AnomalyDetectorJob( + return new Job( name, schedule, windowDelay, @@ -242,7 +240,7 @@ public boolean equals(Object o) { return true; if (o == null || getClass() != o.getClass()) return false; - AnomalyDetectorJob that = (AnomalyDetectorJob) o; + Job that = (Job) o; return Objects.equal(getName(), that.getName()) && Objects.equal(getSchedule(), that.getSchedule()) && Objects.equal(isEnabled(), that.isEnabled()) diff --git a/src/main/java/org/opensearch/ad/model/MergeableList.java b/src/main/java/org/opensearch/timeseries/model/MergeableList.java similarity index 91% rename from src/main/java/org/opensearch/ad/model/MergeableList.java rename to src/main/java/org/opensearch/timeseries/model/MergeableList.java index 4bb0d7842..fd9f26e84 100644 --- a/src/main/java/org/opensearch/ad/model/MergeableList.java +++ b/src/main/java/org/opensearch/timeseries/model/MergeableList.java @@ -9,10 +9,12 @@ * GitHub history for details. 
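One detail worth noting in the rename above: PARSE_FIELD_NAME stays "AnomalyDetectorJob", so job documents and named-XContent entries written before the rename presumably keep parsing unchanged. A hedged sketch of the parse path the job scheduler uses, mirroring getJobParser in the plugin diff earlier:

    import java.io.IOException;

    import org.opensearch.core.xcontent.XContentParser;
    import org.opensearch.core.xcontent.XContentParserUtils;
    import org.opensearch.timeseries.model.Job;

    // The scheduler hands over a parser positioned before the job document's root object.
    static Job readJob(XContentParser parser) throws IOException {
        XContentParserUtils.ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser);
        return Job.parse(parser);
    }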
*/ -package org.opensearch.ad.model; +package org.opensearch.timeseries.model; import java.util.List; +import org.opensearch.ad.model.Mergeable; + public class MergeableList implements Mergeable { private final List elements; diff --git a/src/main/java/org/opensearch/timeseries/settings/TimeSeriesSettings.java b/src/main/java/org/opensearch/timeseries/settings/TimeSeriesSettings.java index a9aebff53..8ce4cbf9b 100644 --- a/src/main/java/org/opensearch/timeseries/settings/TimeSeriesSettings.java +++ b/src/main/java/org/opensearch/timeseries/settings/TimeSeriesSettings.java @@ -21,9 +21,9 @@ public class TimeSeriesSettings { // the larger shingle size, the harder to fill in a complete shingle public static final int MAX_SHINGLE_SIZE = 60; - public static final String CONFIG_INDEX_MAPPING_FILE = "mappings/anomaly-detectors.json"; + public static final String CONFIG_INDEX_MAPPING_FILE = "mappings/config.json"; - public static final String JOBS_INDEX_MAPPING_FILE = "mappings/anomaly-detector-jobs.json"; + public static final String JOBS_INDEX_MAPPING_FILE = "mappings/job.json"; // 100,000 insertions costs roughly 1KB. public static final int DOOR_KEEPER_FOR_COLD_STARTER_MAX_INSERTION = 100_000; @@ -52,6 +52,10 @@ public class TimeSeriesSettings { public static final Duration HOURLY_MAINTENANCE = Duration.ofHours(1); + // Maximum number of deleted tasks can keep in cache. + public static final Setting MAX_CACHED_DELETED_TASKS = Setting + .intSetting("plugins.timeseries.max_cached_deleted_tasks", 1000, 1, 10_000, Setting.Property.NodeScope, Setting.Property.Dynamic); + // ====================================== // Checkpoint setting // ====================================== @@ -185,7 +189,12 @@ public class TimeSeriesSettings { ); // ====================================== - // AD Index setting + // Index setting // ====================================== public static int MAX_UPDATE_RETRY_TIMES = 10_000; + + // ====================================== + // JOB + // ====================================== + public static final long DEFAULT_JOB_LOC_DURATION_SECONDS = 60; } diff --git a/src/main/java/org/opensearch/ad/transport/BackPressureRouting.java b/src/main/java/org/opensearch/timeseries/transport/BackPressureRouting.java similarity index 98% rename from src/main/java/org/opensearch/ad/transport/BackPressureRouting.java rename to src/main/java/org/opensearch/timeseries/transport/BackPressureRouting.java index e5f4ba9b8..bfec0fe95 100644 --- a/src/main/java/org/opensearch/ad/transport/BackPressureRouting.java +++ b/src/main/java/org/opensearch/timeseries/transport/BackPressureRouting.java @@ -9,7 +9,7 @@ * GitHub history for details. */ -package org.opensearch.ad.transport; +package org.opensearch.timeseries.transport; import java.time.Clock; import java.util.concurrent.atomic.AtomicInteger; diff --git a/src/main/java/org/opensearch/timeseries/util/ClientUtil.java b/src/main/java/org/opensearch/timeseries/util/ClientUtil.java new file mode 100644 index 000000000..011819a07 --- /dev/null +++ b/src/main/java/org/opensearch/timeseries/util/ClientUtil.java @@ -0,0 +1,71 @@ +/* + * SPDX-License-Identifier: Apache-2.0 + * + * The OpenSearch Contributors require contributions made to + * this file be licensed under the Apache-2.0 license or a + * compatible open source license. + * + * Modifications Copyright OpenSearch Contributors. See + * GitHub history for details. 
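MAX_CACHED_DELETED_TASKS above is node-scoped and dynamic, so consumers are expected to track cluster-setting updates rather than read it once. A speculative sketch of that wiring; the class, field, and constructor here are hypothetical, and the actual consumer lives in the task-management code:

    import org.opensearch.cluster.service.ClusterService;
    import org.opensearch.common.settings.Settings;
    import org.opensearch.timeseries.settings.TimeSeriesSettings;

    public class DeletedTaskCache {
        // Volatile because the update consumer may run on a cluster-state applier thread.
        private volatile int maxCachedDeletedTasks;

        public DeletedTaskCache(Settings settings, ClusterService clusterService) {
            this.maxCachedDeletedTasks = TimeSeriesSettings.MAX_CACHED_DELETED_TASKS.get(settings);
            clusterService
                .getClusterSettings()
                .addSettingsUpdateConsumer(TimeSeriesSettings.MAX_CACHED_DELETED_TASKS, it -> this.maxCachedDeletedTasks = it);
        }
    }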
+ */ + +package org.opensearch.timeseries.util; + +import java.util.function.BiConsumer; + +import org.opensearch.action.ActionListener; +import org.opensearch.action.ActionRequest; +import org.opensearch.action.ActionResponse; +import org.opensearch.action.ActionType; +import org.opensearch.client.Client; +import org.opensearch.common.inject.Inject; + +public class ClientUtil { + private Client client; + + @Inject + public ClientUtil(Client client) { + this.client = client; + } + + /** + * Send an asynchronous request and handle response with the provided listener. + * @param ActionRequest + * @param ActionResponse + * @param request request body + * @param consumer request method, functional interface to operate as a client request like client::get + * @param listener needed to handle response + */ + public void asyncRequest( + Request request, + BiConsumer> consumer, + ActionListener listener + ) { + consumer + .accept( + request, + ActionListener.wrap(response -> { listener.onResponse(response); }, exception -> { listener.onFailure(exception); }) + ); + } + + /** + * Execute a transport action and handle response with the provided listener. + * @param ActionRequest + * @param ActionResponse + * @param action transport action + * @param request request body + * @param listener needed to handle response + */ + public void execute( + ActionType action, + Request request, + ActionListener listener + ) { + client + .execute( + action, + request, + ActionListener.wrap(response -> { listener.onResponse(response); }, exception -> { listener.onFailure(exception); }) + ); + } +} diff --git a/src/main/java/org/opensearch/ad/util/ExceptionUtil.java b/src/main/java/org/opensearch/timeseries/util/ExceptionUtil.java similarity index 99% rename from src/main/java/org/opensearch/ad/util/ExceptionUtil.java rename to src/main/java/org/opensearch/timeseries/util/ExceptionUtil.java index b48cf49e4..d71204c82 100644 --- a/src/main/java/org/opensearch/ad/util/ExceptionUtil.java +++ b/src/main/java/org/opensearch/timeseries/util/ExceptionUtil.java @@ -9,7 +9,7 @@ * GitHub history for details. */ -package org.opensearch.ad.util; +package org.opensearch.timeseries.util; import java.util.EnumSet; import java.util.concurrent.RejectedExecutionException; diff --git a/src/main/java/org/opensearch/ad/util/MultiResponsesDelegateActionListener.java b/src/main/java/org/opensearch/timeseries/util/MultiResponsesDelegateActionListener.java similarity index 98% rename from src/main/java/org/opensearch/ad/util/MultiResponsesDelegateActionListener.java rename to src/main/java/org/opensearch/timeseries/util/MultiResponsesDelegateActionListener.java index 8b18bf9c3..fe9ea1cc2 100644 --- a/src/main/java/org/opensearch/ad/util/MultiResponsesDelegateActionListener.java +++ b/src/main/java/org/opensearch/timeseries/util/MultiResponsesDelegateActionListener.java @@ -9,7 +9,7 @@ * GitHub history for details. 
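With the synchronous paths removed, ClientUtil is reduced to the two pass-throughs shown above; callers hand it a client method reference such as client::get or client::search plus a listener. A hedged usage sketch mirroring the NodeStateManager call sites earlier in this patch; the method name is illustrative:

    import org.opensearch.action.ActionListener;
    import org.opensearch.action.get.GetRequest;
    import org.opensearch.action.get.GetResponse;
    import org.opensearch.client.Client;
    import org.opensearch.timeseries.constant.CommonName;
    import org.opensearch.timeseries.util.ClientUtil;

    void fetchJobDoc(ClientUtil clientUtil, Client client, String configId, ActionListener<GetResponse> listener) {
        GetRequest request = new GetRequest(CommonName.JOB_INDEX, configId);
        // asyncRequest forwards the request to client::get and relays the response or failure to the listener.
        clientUtil.asyncRequest(request, client::get, listener);
    }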
*/ -package org.opensearch.ad.util; +package org.opensearch.timeseries.util; import java.util.ArrayList; import java.util.Collections; diff --git a/src/main/java/org/opensearch/timeseries/util/ParseUtils.java b/src/main/java/org/opensearch/timeseries/util/ParseUtils.java index ee73be777..b2d512a0b 100644 --- a/src/main/java/org/opensearch/timeseries/util/ParseUtils.java +++ b/src/main/java/org/opensearch/timeseries/util/ParseUtils.java @@ -11,8 +11,6 @@ package org.opensearch.timeseries.util; -import static org.opensearch.ad.constant.ADCommonMessages.FAIL_TO_GET_USER_INFO; -import static org.opensearch.ad.constant.ADCommonMessages.NO_PERMISSION_TO_ACCESS_DETECTOR; import static org.opensearch.ad.constant.ADCommonName.EPOCH_MILLIS_FORMAT; import static org.opensearch.core.xcontent.XContentParserUtils.ensureExpectedToken; import static org.opensearch.search.aggregations.AggregationBuilders.dateRange; @@ -73,13 +71,14 @@ import org.opensearch.search.builder.SearchSourceBuilder; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.common.exception.TimeSeriesException; +import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.model.Config; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.model.FeatureData; import org.opensearch.timeseries.model.IntervalTimeConfiguration; -import com.carrotsearch.hppc.DoubleArrayList; import com.google.common.collect.ImmutableList; /** @@ -333,18 +332,18 @@ public static SearchSourceBuilder generateInternalFeatureQuery( } public static SearchSourceBuilder generatePreviewQuery( - AnomalyDetector detector, + Config config, List> ranges, NamedXContentRegistry xContentRegistry ) throws IOException { - DateRangeAggregationBuilder dateRangeBuilder = dateRange("date_range").field(detector.getTimeField()).format("epoch_millis"); + DateRangeAggregationBuilder dateRangeBuilder = dateRange("date_range").field(config.getTimeField()).format("epoch_millis"); for (Entry range : ranges) { dateRangeBuilder.addRange(range.getKey(), range.getValue()); } - if (detector.getFeatureAttributes() != null) { - for (Feature feature : detector.getFeatureAttributes()) { + if (config.getFeatureAttributes() != null) { + for (Feature feature : config.getFeatureAttributes()) { AggregatorFactories.Builder internalAgg = parseAggregators( feature.getAggregation().toString(), xContentRegistry, @@ -354,29 +353,31 @@ public static SearchSourceBuilder generatePreviewQuery( } } - return new SearchSourceBuilder().query(detector.getFilterQuery()).size(0).aggregation(dateRangeBuilder); + return new SearchSourceBuilder().query(config.getFilterQuery()).size(0).aggregation(dateRangeBuilder); } - public static SearchSourceBuilder generateEntityColdStartQuery( - AnomalyDetector detector, + public static SearchSourceBuilder generateColdStartQuery( + Config config, List> ranges, - Entity entity, + Optional entity, NamedXContentRegistry xContentRegistry ) throws IOException { - BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().filter(detector.getFilterQuery()); + BoolQueryBuilder internalFilterQuery = QueryBuilders.boolQuery().filter(config.getFilterQuery()); - for (TermQueryBuilder term : entity.getTermQueryBuilders()) { - internalFilterQuery.filter(term); + if (entity.isPresent()) { + for (TermQueryBuilder term : entity.get().getTermQueryBuilders()) { + 
internalFilterQuery.filter(term); + } } - DateRangeAggregationBuilder dateRangeBuilder = dateRange("date_range").field(detector.getTimeField()).format("epoch_millis"); + DateRangeAggregationBuilder dateRangeBuilder = dateRange("date_range").field(config.getTimeField()).format("epoch_millis"); for (Entry range : ranges) { dateRangeBuilder.addRange(range.getKey(), range.getValue()); } - if (detector.getFeatureAttributes() != null) { - for (Feature feature : detector.getFeatureAttributes()) { + if (config.getFeatureAttributes() != null) { + for (Feature feature : config.getFeatureAttributes()) { AggregatorFactories.Builder internalAgg = parseAggregators( feature.getAggregation().toString(), xContentRegistry, @@ -443,23 +444,34 @@ public static User getUserContext(Client client) { return User.parse(userStr); } - public static void resolveUserAndExecute( + public static void resolveUserAndExecute( User requestedUser, - String detectorId, + String configId, boolean filterByEnabled, ActionListener listener, - Consumer function, + Consumer function, Client client, ClusterService clusterService, - NamedXContentRegistry xContentRegistry + NamedXContentRegistry xContentRegistry, + Class configTypeClass ) { try { - if (requestedUser == null || detectorId == null) { + if (requestedUser == null || configId == null) { // requestedUser == null means security is disabled or user is superadmin. In this case we don't need to // check if request user have access to the detector or not. function.accept(null); } else { - getDetector(requestedUser, detectorId, listener, function, client, clusterService, xContentRegistry, filterByEnabled); + getConfig( + requestedUser, + configId, + listener, + function, + client, + clusterService, + xContentRegistry, + filterByEnabled, + configTypeClass + ); } } catch (Exception e) { listener.onFailure(e); @@ -467,45 +479,48 @@ public static void resolveUserAndExecute( } /** - * If filterByEnabled is true, get detector and check if the user has permissions to access the detector, - * then execute function; otherwise, get detector and execute function + * If filterByEnabled is true, get config and check if the user has permissions to access the config, + * then execute function; otherwise, get config and execute function * @param requestUser user from request - * @param detectorId detector id + * @param configId config id * @param listener action listener * @param function consumer function * @param client client * @param clusterService cluster service * @param xContentRegistry XContent registry * @param filterByBackendRole filter by backend role or not + * @param configTypeClass the class of the ConfigType, used by the ConfigFactory to parse the correct type of Config */ - public static void getDetector( + public static void getConfig( User requestUser, - String detectorId, + String configId, ActionListener listener, - Consumer function, + Consumer function, Client client, ClusterService clusterService, NamedXContentRegistry xContentRegistry, - boolean filterByBackendRole + boolean filterByBackendRole, + Class configTypeClass ) { if (clusterService.state().metadata().indices().containsKey(CommonName.CONFIG_INDEX)) { - GetRequest request = new GetRequest(CommonName.CONFIG_INDEX).id(detectorId); + GetRequest request = new GetRequest(CommonName.CONFIG_INDEX).id(configId); client .get( request, ActionListener .wrap( - response -> onGetAdResponse( + response -> onGetConfigResponse( response, requestUser, - detectorId, + configId, listener, function, xContentRegistry, - 
filterByBackendRole + filterByBackendRole, + configTypeClass ), exception -> { - logger.error("Failed to get anomaly detector: " + detectorId, exception); + logger.error("Failed to get anomaly detector: " + configId, exception); listener.onFailure(exception); } ) @@ -515,34 +530,57 @@ public static void getDetector( } } - public static void onGetAdResponse( + /** + * Processes a GetResponse by leveraging the factory method Config.parseConfig to + * appropriately parse the specified type of Config. The execution of the provided + * consumer function depends on the state of the 'filterByBackendRole' setting: + * + * - If 'filterByBackendRole' is disabled, the consumer function will be invoked + * irrespective of the user's permissions. + * + * - If 'filterByBackendRole' is enabled, the consumer function will only be invoked + * provided the user holds the requisite permissions. + * + * @param The type of Config to be processed in this method, which extends from the Config base type. + * @param response The GetResponse from the getConfig request. This contains the information about the config that is to be processed. + * @param requestUser The User from the request. This user's permissions will be checked to ensure they have access to the config. + * @param configId The ID of the config. This is used for logging and error messages. + * @param listener The ActionListener to call if an error occurs. Any errors that occur during the processing of the config will be passed to this listener. + * @param function The Consumer function to apply to the ConfigType. If the user has permission to access the config, this function will be applied. + * @param xContentRegistry The XContentRegistry used to create the XContentParser. This is used to parse the response into a ConfigType. + * @param filterByBackendRole A boolean indicating whether to filter by backend role. If true, the user's backend roles will be checked to ensure they have access to the config. + * @param configTypeClass The class of the ConfigType, used by the ConfigFactory to parse the correct type of Config. 
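The extra configTypeClass argument lets the same user-resolution path serve detectors and forecasters: resolveUserAndExecute defers to getConfig, which parses the stored document with Config.parseConfig(configTypeClass, parser) before the permission check. A hedged sketch of a transport action wiring it up; the helper method and its parameters are illustrative:

    import org.opensearch.action.ActionListener;
    import org.opensearch.ad.model.AnomalyDetector;
    import org.opensearch.client.Client;
    import org.opensearch.cluster.service.ClusterService;
    import org.opensearch.commons.authuser.User;
    import org.opensearch.core.xcontent.NamedXContentRegistry;
    import org.opensearch.timeseries.util.ParseUtils;

    // Hypothetical helper inside a transport action; a null user means security is disabled.
    void executeWithUserCheck(User requestedUser, String detectorId, boolean filterByEnabled, ActionListener<?> listener,
            Client client, ClusterService clusterService, NamedXContentRegistry xContentRegistry) {
        ParseUtils.resolveUserAndExecute(
            requestedUser,
            detectorId,
            filterByEnabled,
            listener,
            (AnomalyDetector detector) -> { /* run the protected operation with the parsed detector */ },
            client,
            clusterService,
            xContentRegistry,
            AnomalyDetector.class
        );
    }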
+ */ + public static void onGetConfigResponse( GetResponse response, User requestUser, - String detectorId, + String configId, ActionListener listener, - Consumer function, + Consumer function, NamedXContentRegistry xContentRegistry, - boolean filterByBackendRole + boolean filterByBackendRole, + Class configTypeClass ) { if (response.isExists()) { try ( XContentParser parser = RestHandlerUtils.createXContentParserFromRegistry(xContentRegistry, response.getSourceAsBytesRef()) ) { ensureExpectedToken(XContentParser.Token.START_OBJECT, parser.nextToken(), parser); - AnomalyDetector detector = AnomalyDetector.parse(parser); - User resourceUser = detector.getUser(); + @SuppressWarnings("unchecked") + ConfigType config = (ConfigType) Config.parseConfig(configTypeClass, parser); + User resourceUser = config.getUser(); - if (!filterByBackendRole || checkUserPermissions(requestUser, resourceUser, detectorId) || isAdmin(requestUser)) { - function.accept(detector); + if (!filterByBackendRole || checkUserPermissions(requestUser, resourceUser, configId) || isAdmin(requestUser)) { + function.accept(config); } else { - logger.debug("User: " + requestUser.getName() + " does not have permissions to access detector: " + detectorId); - listener.onFailure(new TimeSeriesException(NO_PERMISSION_TO_ACCESS_DETECTOR + detectorId)); + logger.debug("User: " + requestUser.getName() + " does not have permissions to access config: " + configId); + listener.onFailure(new TimeSeriesException(CommonMessages.NO_PERMISSION_TO_ACCESS_CONFIG + configId)); } } catch (Exception e) { - listener.onFailure(new TimeSeriesException(FAIL_TO_GET_USER_INFO + detectorId)); + listener.onFailure(new TimeSeriesException(CommonMessages.FAIL_TO_GET_USER_INFO + configId)); } } else { - listener.onFailure(new ResourceNotFoundException(detectorId, FAIL_TO_FIND_CONFIG_MSG + detectorId)); + listener.onFailure(new ResourceNotFoundException(configId, FAIL_TO_FIND_CONFIG_MSG + configId)); } } @@ -710,12 +748,12 @@ public static boolean listEqualsWithoutConsideringOrder(List list1, List } public static double[] parseDoubleArray(XContentParser parser) throws IOException { - DoubleArrayList oldValList = new DoubleArrayList(); + final List oldValList = new ArrayList<>(); ensureExpectedToken(XContentParser.Token.START_ARRAY, parser.currentToken(), parser); while (parser.nextToken() != XContentParser.Token.END_ARRAY) { oldValList.add(parser.doubleValue()); } - return oldValList.toArray(); + return oldValList.stream().mapToDouble(Double::doubleValue).toArray(); } public static List parseAggregationRequest(XContentParser parser) throws IOException { diff --git a/src/main/java/org/opensearch/ad/util/SafeSecurityInjector.java b/src/main/java/org/opensearch/timeseries/util/SafeSecurityInjector.java similarity index 98% rename from src/main/java/org/opensearch/ad/util/SafeSecurityInjector.java rename to src/main/java/org/opensearch/timeseries/util/SafeSecurityInjector.java index 612ea4d5c..671aa0466 100644 --- a/src/main/java/org/opensearch/ad/util/SafeSecurityInjector.java +++ b/src/main/java/org/opensearch/timeseries/util/SafeSecurityInjector.java @@ -9,7 +9,7 @@ * GitHub history for details. 
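The parseDoubleArray change above drops the hppc DoubleArrayList in favor of a plain java.util list plus a stream unboxing step; the arrays here hold a handful of feature values, so the boxing overhead is presumably negligible. The replacement idiom, self-contained:

    import java.util.ArrayList;
    import java.util.Arrays;
    import java.util.List;

    public final class DoubleListDemo {
        public static void main(String[] args) {
            List<Double> values = new ArrayList<>();
            values.add(1.5);
            values.add(-2.0);
            values.add(0.25);
            // Equivalent of the old DoubleArrayList.toArray(): unbox into a primitive double[].
            double[] primitive = values.stream().mapToDouble(Double::doubleValue).toArray();
            System.out.println(Arrays.toString(primitive));
        }
    }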
*/ -package org.opensearch.ad.util; +package org.opensearch.timeseries.util; import java.util.List; import java.util.Locale; diff --git a/src/main/java/org/opensearch/ad/util/SecurityClientUtil.java b/src/main/java/org/opensearch/timeseries/util/SecurityClientUtil.java similarity index 82% rename from src/main/java/org/opensearch/ad/util/SecurityClientUtil.java rename to src/main/java/org/opensearch/timeseries/util/SecurityClientUtil.java index 8e9b97b57..82d72e1ab 100644 --- a/src/main/java/org/opensearch/ad/util/SecurityClientUtil.java +++ b/src/main/java/org/opensearch/timeseries/util/SecurityClientUtil.java @@ -9,7 +9,7 @@ * GitHub history for details. */ -package org.opensearch.ad.util; +package org.opensearch.timeseries.util; import java.util.function.BiConsumer; @@ -17,12 +17,13 @@ import org.opensearch.action.ActionRequest; import org.opensearch.action.ActionResponse; import org.opensearch.action.ActionType; -import org.opensearch.ad.NodeStateManager; import org.opensearch.client.Client; import org.opensearch.common.inject.Inject; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.commons.authuser.User; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; public class SecurityClientUtil { private static final String INJECTION_ID = "direct"; @@ -51,12 +52,21 @@ public void asy BiConsumer> consumer, String detectorId, Client client, + AnalysisType context, ActionListener listener ) { ThreadContext threadContext = client.threadPool().getThreadContext(); - try (ADSafeSecurityInjector injectSecurity = new ADSafeSecurityInjector(detectorId, settings, threadContext, nodeStateManager)) { + try ( + TimeSeriesSafeSecurityInjector injectSecurity = new TimeSeriesSafeSecurityInjector( + detectorId, + settings, + threadContext, + nodeStateManager, + context + ) + ) { injectSecurity - .injectUserRolesFromDetector( + .injectUserRolesFromConfig( ActionListener .wrap( success -> consumer.accept(request, ActionListener.runBefore(listener, () -> injectSecurity.close())), @@ -82,6 +92,7 @@ public void asy BiConsumer> consumer, User user, Client client, + AnalysisType context, ActionListener listener ) { ThreadContext threadContext = client.threadPool().getThreadContext(); @@ -95,7 +106,15 @@ public void asy // client.execute/client.search and handles the responses (this can be a thread in the search thread pool). // Auto-close in try will restore the context in one thread; the explicit close injectSecurity will restore // the context in another thread. So we still need to put the injectSecurity inside try. 
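Every SecurityClientUtil entry point now threads an AnalysisType through to TimeSeriesSafeSecurityInjector, so the run-as user can be read from either an AD detector or a forecaster. A hedged caller sketch matching the SearchFeatureDao call sites earlier in this patch; the wrapper method is illustrative:

    import org.opensearch.action.ActionListener;
    import org.opensearch.action.search.SearchRequest;
    import org.opensearch.action.search.SearchResponse;
    import org.opensearch.client.Client;
    import org.opensearch.timeseries.AnalysisType;
    import org.opensearch.timeseries.util.SecurityClientUtil;

    void searchAsConfigUser(SecurityClientUtil clientUtil, Client client, SearchRequest request,
            String configId, AnalysisType context, ActionListener<SearchResponse> listener) {
        // Injects the roles stored on the config identified by configId before running the search,
        // and restores the thread context once the listener completes.
        clientUtil.asyncRequestWithInjectedSecurity(request, client::search, configId, client, context, listener);
    }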
- try (ADSafeSecurityInjector injectSecurity = new ADSafeSecurityInjector(INJECTION_ID, settings, threadContext, nodeStateManager)) { + try ( + TimeSeriesSafeSecurityInjector injectSecurity = new TimeSeriesSafeSecurityInjector( + INJECTION_ID, + settings, + threadContext, + nodeStateManager, + context + ) + ) { injectSecurity.injectUserRoles(user); consumer.accept(request, ActionListener.runBefore(listener, () -> injectSecurity.close())); } @@ -117,12 +136,21 @@ public void exe Request request, User user, Client client, + AnalysisType context, ActionListener listener ) { ThreadContext threadContext = client.threadPool().getThreadContext(); // use a hardcoded string as detector id that is only used in logging - try (ADSafeSecurityInjector injectSecurity = new ADSafeSecurityInjector(INJECTION_ID, settings, threadContext, nodeStateManager)) { + try ( + TimeSeriesSafeSecurityInjector injectSecurity = new TimeSeriesSafeSecurityInjector( + INJECTION_ID, + settings, + threadContext, + nodeStateManager, + context + ) + ) { injectSecurity.injectUserRoles(user); client.execute(action, request, ActionListener.runBefore(listener, () -> injectSecurity.close())); } diff --git a/src/main/java/org/opensearch/ad/util/SecurityUtil.java b/src/main/java/org/opensearch/timeseries/util/SecurityUtil.java similarity index 86% rename from src/main/java/org/opensearch/ad/util/SecurityUtil.java rename to src/main/java/org/opensearch/timeseries/util/SecurityUtil.java index d72d345ab..135116c34 100644 --- a/src/main/java/org/opensearch/ad/util/SecurityUtil.java +++ b/src/main/java/org/opensearch/timeseries/util/SecurityUtil.java @@ -9,15 +9,15 @@ * GitHub history for details. */ -package org.opensearch.ad.util; +package org.opensearch.timeseries.util; import java.util.Collections; import java.util.List; -import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.common.settings.Settings; import org.opensearch.commons.authuser.User; +import org.opensearch.timeseries.model.Config; +import org.opensearch.timeseries.model.Job; import com.google.common.collect.ImmutableList; @@ -57,12 +57,12 @@ private static User getAdjustedUserBWC(User userObj, Settings settings) { /** * * - * @param detector Detector config + * @param config analysis config * @param settings Node settings * @return user recorded by a detector. Made adjstument for BWC (backward-compatibility) if necessary. 
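SecurityUtil now operates on the shared Config and Job types, and the injector changes below rely on it to pick the run-as user from whichever config type was fetched. A small hedged sketch; the method name is illustrative:

    import org.opensearch.common.settings.Settings;
    import org.opensearch.commons.authuser.User;
    import org.opensearch.timeseries.model.Config;
    import org.opensearch.timeseries.util.SecurityUtil;

    // Returns the user recorded on the config, with the BWC adjustment applied for docs written before security support.
    static User resolveRunAsUser(Config config, Settings settings) {
        return SecurityUtil.getUserFromConfig(config, settings);
    }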
*/ - public static User getUserFromDetector(AnomalyDetector detector, Settings settings) { - return getAdjustedUserBWC(detector.getUser(), settings); + public static User getUserFromConfig(Config config, Settings settings) { + return getAdjustedUserBWC(config.getUser(), settings); } /** @@ -71,7 +71,7 @@ public static User getUserFromDetector(AnomalyDetector detector, Settings settin * @param settings Node settings * @return user recorded by a detector job */ - public static User getUserFromJob(AnomalyDetectorJob detectorJob, Settings settings) { + public static User getUserFromJob(Job detectorJob, Settings settings) { return getAdjustedUserBWC(detectorJob.getUser(), settings); } } diff --git a/src/main/java/org/opensearch/ad/util/ADSafeSecurityInjector.java b/src/main/java/org/opensearch/timeseries/util/TimeSeriesSafeSecurityInjector.java similarity index 54% rename from src/main/java/org/opensearch/ad/util/ADSafeSecurityInjector.java rename to src/main/java/org/opensearch/timeseries/util/TimeSeriesSafeSecurityInjector.java index 749a7434c..4bb73b323 100644 --- a/src/main/java/org/opensearch/ad/util/ADSafeSecurityInjector.java +++ b/src/main/java/org/opensearch/timeseries/util/TimeSeriesSafeSecurityInjector.java @@ -9,31 +9,40 @@ * GitHub history for details. */ -package org.opensearch.ad.util; +package org.opensearch.timeseries.util; import java.util.Optional; import org.apache.logging.log4j.LogManager; import org.apache.logging.log4j.Logger; import org.opensearch.action.ActionListener; -import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.concurrent.ThreadContext; import org.opensearch.commons.authuser.User; import org.opensearch.core.common.Strings; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.common.exception.EndRunException; +import org.opensearch.timeseries.model.Config; -public class ADSafeSecurityInjector extends SafeSecurityInjector { - private static final Logger LOG = LogManager.getLogger(ADSafeSecurityInjector.class); +public class TimeSeriesSafeSecurityInjector extends SafeSecurityInjector { + private static final Logger LOG = LogManager.getLogger(TimeSeriesSafeSecurityInjector.class); private NodeStateManager nodeStateManager; + private AnalysisType context; - public ADSafeSecurityInjector(String detectorId, Settings settings, ThreadContext tc, NodeStateManager stateManager) { - super(detectorId, settings, tc); + public TimeSeriesSafeSecurityInjector( + String configId, + Settings settings, + ThreadContext tc, + NodeStateManager stateManager, + AnalysisType context + ) { + super(configId, settings, tc); this.nodeStateManager = stateManager; + this.context = context; } - public void injectUserRolesFromDetector(ActionListener injectListener) { + public void injectUserRolesFromConfig(ActionListener injectListener) { // if id is null, we cannot fetch a detector if (Strings.isEmpty(id)) { LOG.debug("Empty id"); @@ -48,21 +57,21 @@ public void injectUserRolesFromDetector(ActionListener injectListener) { return; } - ActionListener> getDetectorListener = ActionListener.wrap(detectorOp -> { - if (!detectorOp.isPresent()) { - injectListener.onFailure(new EndRunException(id, "AnomalyDetector is not available.", false)); + ActionListener> getConfigListener = ActionListener.wrap(configOp -> { + if (!configOp.isPresent()) { + injectListener.onFailure(new EndRunException(id, "Config 
is not available.", false)); return; } - AnomalyDetector detector = detectorOp.get(); - User userInfo = SecurityUtil.getUserFromDetector(detector, settings); + Config config = configOp.get(); + User userInfo = SecurityUtil.getUserFromConfig(config, settings); inject(userInfo.getName(), userInfo.getRoles()); injectListener.onResponse(null); }, injectListener::onFailure); - // Since we are gonna read user from detector, make sure the anomaly detector exists and fetched from disk or cached memory - // We don't accept a passed-in AnomalyDetector because the caller might mistakenly not insert any user info in the - // constructed AnomalyDetector and thus poses risks. In the case, if the user is null, we will give admin role. - nodeStateManager.getAnomalyDetector(id, getDetectorListener); + // Since we are gonna read user from config, make sure the config exists and fetched from disk or cached memory + // We don't accept a passed-in Config because the caller might mistakenly not insert any user info in the + // constructed Config and thus poses risks. In the case, if the user is null, we will give admin role. + nodeStateManager.getConfig(id, context, getConfigListener); } public void injectUserRoles(User user) { diff --git a/src/main/resources/mappings/anomaly-detectors.json b/src/main/resources/mappings/config.json similarity index 100% rename from src/main/resources/mappings/anomaly-detectors.json rename to src/main/resources/mappings/config.json diff --git a/src/main/resources/mappings/anomaly-detector-jobs.json b/src/main/resources/mappings/job.json similarity index 100% rename from src/main/resources/mappings/anomaly-detector-jobs.json rename to src/main/resources/mappings/job.json diff --git a/src/test/java/org/opensearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java b/src/test/java/org/opensearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java index e2904c319..9a5fdcea6 100644 --- a/src/test/java/org/opensearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java +++ b/src/test/java/org/opensearch/action/admin/indices/mapping/get/IndexAnomalyDetectorActionHandlerTests.java @@ -44,14 +44,11 @@ import org.opensearch.action.search.SearchRequest; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.rest.handler.IndexAnomalyDetectorActionHandler; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.ad.transport.IndexAnomalyDetectorResponse; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.node.NodeClient; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.ClusterState; @@ -64,9 +61,12 @@ import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.ValidationException; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.feature.SearchFeatureDao; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; /** diff --git 
a/src/test/java/org/opensearch/action/admin/indices/mapping/get/ValidateAnomalyDetectorActionHandlerTests.java b/src/test/java/org/opensearch/action/admin/indices/mapping/get/ValidateAnomalyDetectorActionHandlerTests.java index 2869943b6..79021714e 100644 --- a/src/test/java/org/opensearch/action/admin/indices/mapping/get/ValidateAnomalyDetectorActionHandlerTests.java +++ b/src/test/java/org/opensearch/action/admin/indices/mapping/get/ValidateAnomalyDetectorActionHandlerTests.java @@ -32,8 +32,6 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.search.SearchResponse; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.rest.handler.AbstractAnomalyDetectorActionHandler; @@ -41,7 +39,6 @@ import org.opensearch.ad.rest.handler.ValidateAnomalyDetectorActionHandler; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.ad.transport.ValidateAnomalyDetectorResponse; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.client.node.NodeClient; import org.opensearch.cluster.service.ClusterService; @@ -51,9 +48,12 @@ import org.opensearch.rest.RestRequest; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.ValidationException; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.model.ValidationAspect; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableList; diff --git a/src/test/java/org/opensearch/ad/AbstractProfileRunnerTests.java b/src/test/java/org/opensearch/ad/AbstractProfileRunnerTests.java index f28de4547..2353f69b9 100644 --- a/src/test/java/org/opensearch/ad/AbstractProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/AbstractProfileRunnerTests.java @@ -36,7 +36,6 @@ import org.opensearch.ad.model.DetectorProfileName; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.ad.transport.AnomalyResultTests; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.ClusterState; @@ -46,6 +45,7 @@ import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; public class AbstractProfileRunnerTests extends AbstractTimeSeriesTest { diff --git a/src/test/java/org/opensearch/ad/AnomalyDetectorJobRunnerTests.java b/src/test/java/org/opensearch/ad/AnomalyDetectorJobRunnerTests.java index 074b6ee86..8b47f7135 100644 --- a/src/test/java/org/opensearch/ad/AnomalyDetectorJobRunnerTests.java +++ b/src/test/java/org/opensearch/ad/AnomalyDetectorJobRunnerTests.java @@ -56,7 +56,6 @@ import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.AnomalyResult; import 
org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.task.ADTaskCacheManager; @@ -64,7 +63,6 @@ import org.opensearch.ad.transport.AnomalyResultAction; import org.opensearch.ad.transport.AnomalyResultResponse; import org.opensearch.ad.transport.handler.AnomalyIndexHandler; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; @@ -85,12 +83,16 @@ import org.opensearch.jobscheduler.spi.utils.LockService; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.FeatureData; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.util.ClientUtil; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; import com.google.common.collect.ImmutableList; @@ -109,7 +111,7 @@ public class AnomalyDetectorJobRunnerTests extends AbstractTimeSeriesTest { private LockService lockService; @Mock - private AnomalyDetectorJob jobParameter; + private Job jobParameter; @Mock private JobExecutionContext context; @@ -194,7 +196,7 @@ public void setup() throws Exception { ActionListener listener = (ActionListener) args[1]; if (request.index().equals(CommonName.JOB_INDEX)) { - AnomalyDetectorJob job = TestHelpers.randomAnomalyDetectorJob(true); + Job job = TestHelpers.randomAnomalyDetectorJob(true); listener.onResponse(TestHelpers.createGetResponse(job, randomAlphaOfLength(5), CommonName.JOB_INDEX)); } return null; @@ -227,10 +229,10 @@ public void setup() throws Exception { detector = TestHelpers.randomAnomalyDetectorWithEmptyFeature(); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); runner.setNodeStateManager(nodeStateManager); recorder = new ExecuteADResultResponseRecorder( @@ -260,7 +262,7 @@ public void tearDown() throws Exception { @Test public void testRunJobWithWrongParameterType() { expectedEx.expect(IllegalArgumentException.class); - expectedEx.expectMessage("Job parameter is not instance of AnomalyDetectorJob, type: "); + expectedEx.expectMessage("Job parameter is not instance of Job, type: "); ScheduledJobParameter parameter = mock(ScheduledJobParameter.class); when(jobParameter.getLockDurationSeconds()).thenReturn(null); @@ -392,7 +394,7 @@ private void testRunAdJobWithEndRunExceptionNowAndStopAdJob(boolean jobExists, b jobExists, BytesReference .bytes( - new AnomalyDetectorJob( + new Job( jobParameter.getName(), jobParameter.getSchedule(), jobParameter.getWindowDelay(), @@ -575,10 +577,10 @@ public void testFailtoFindDetector() { Instant executionStartTime = confirmInitializedSetup(); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = 
invocation.getArgument(2); listener.onFailure(new RuntimeException()); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); LockModel lock = new LockModel(CommonName.JOB_INDEX, jobParameter.getName(), Instant.now(), 10, false); @@ -586,8 +588,8 @@ public void testFailtoFindDetector() { verify(client, times(1)).execute(eq(AnomalyResultAction.INSTANCE), any(), any()); verify(adTaskCacheManager, times(1)).hasQueriedResultIndex(anyString()); - verify(nodeStateManager, times(1)).getAnomalyDetector(any(String.class), any(ActionListener.class)); - verify(nodeStateManager, times(0)).getAnomalyDetectorJob(any(String.class), any(ActionListener.class)); + verify(nodeStateManager, times(1)).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); + verify(nodeStateManager, times(0)).getJob(any(String.class), any(ActionListener.class)); verify(adTaskManager, times(1)).updateLatestRealtimeTaskOnCoordinatingNode(any(), any(), any(), any(), any(), any()); assertEquals(1, testAppender.countMessage("Fail to confirm rcf update")); assertTrue(testAppender.containExceptionMsg(TimeSeriesException.class, "fail to get detector")); @@ -598,16 +600,16 @@ public void testFailtoFindJob() { Instant executionStartTime = confirmInitializedSetup(); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(1); listener.onFailure(new RuntimeException()); return null; - }).when(nodeStateManager).getAnomalyDetectorJob(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getJob(any(String.class), any(ActionListener.class)); LockModel lock = new LockModel(CommonName.JOB_INDEX, jobParameter.getName(), Instant.now(), 10, false); @@ -615,8 +617,8 @@ public void testFailtoFindJob() { verify(client, times(1)).execute(eq(AnomalyResultAction.INSTANCE), any(), any()); verify(adTaskCacheManager, times(1)).hasQueriedResultIndex(anyString()); - verify(nodeStateManager, times(1)).getAnomalyDetector(any(String.class), any(ActionListener.class)); - verify(nodeStateManager, times(1)).getAnomalyDetectorJob(any(String.class), any(ActionListener.class)); + verify(nodeStateManager, times(1)).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); + verify(nodeStateManager, times(1)).getJob(any(String.class), any(ActionListener.class)); verify(adTaskManager, times(1)).updateLatestRealtimeTaskOnCoordinatingNode(any(), any(), any(), any(), any(), any()); assertEquals(1, testAppender.countMessage("Fail to confirm rcf update")); assertTrue(testAppender.containExceptionMsg(TimeSeriesException.class, "fail to get job")); @@ -627,10 +629,10 @@ public void testEmptyDetector() { Instant executionStartTime = confirmInitializedSetup(); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.empty()); return null; - 
}).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); LockModel lock = new LockModel(CommonName.JOB_INDEX, jobParameter.getName(), Instant.now(), 10, false); @@ -638,8 +640,8 @@ public void testEmptyDetector() { verify(client, times(1)).execute(eq(AnomalyResultAction.INSTANCE), any(), any()); verify(adTaskCacheManager, times(1)).hasQueriedResultIndex(anyString()); - verify(nodeStateManager, times(1)).getAnomalyDetector(any(String.class), any(ActionListener.class)); - verify(nodeStateManager, times(0)).getAnomalyDetectorJob(any(String.class), any(ActionListener.class)); + verify(nodeStateManager, times(1)).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); + verify(nodeStateManager, times(0)).getJob(any(String.class), any(ActionListener.class)); verify(adTaskManager, times(1)).updateLatestRealtimeTaskOnCoordinatingNode(any(), any(), any(), any(), any(), any()); assertEquals(1, testAppender.countMessage("Fail to confirm rcf update")); assertTrue(testAppender.containExceptionMsg(TimeSeriesException.class, "fail to get detector")); @@ -650,16 +652,16 @@ public void testEmptyJob() { Instant executionStartTime = confirmInitializedSetup(); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(1); listener.onResponse(Optional.empty()); return null; - }).when(nodeStateManager).getAnomalyDetectorJob(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getJob(any(String.class), any(ActionListener.class)); LockModel lock = new LockModel(CommonName.JOB_INDEX, jobParameter.getName(), Instant.now(), 10, false); @@ -667,8 +669,8 @@ public void testEmptyJob() { verify(client, times(1)).execute(eq(AnomalyResultAction.INSTANCE), any(), any()); verify(adTaskCacheManager, times(1)).hasQueriedResultIndex(anyString()); - verify(nodeStateManager, times(1)).getAnomalyDetector(any(String.class), any(ActionListener.class)); - verify(nodeStateManager, times(1)).getAnomalyDetectorJob(any(String.class), any(ActionListener.class)); + verify(nodeStateManager, times(1)).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); + verify(nodeStateManager, times(1)).getJob(any(String.class), any(ActionListener.class)); verify(adTaskManager, times(1)).updateLatestRealtimeTaskOnCoordinatingNode(any(), any(), any(), any(), any(), any()); assertEquals(1, testAppender.countMessage("Fail to confirm rcf update")); assertTrue(testAppender.containExceptionMsg(TimeSeriesException.class, "fail to get job")); @@ -685,16 +687,16 @@ public void testMarkResultIndexQueried() throws IOException { Instant executionStartTime = confirmInitializedSetup(); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + 
}).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(1); listener.onResponse(Optional.of(TestHelpers.randomAnomalyDetectorJob(true, Instant.ofEpochMilli(1602401500000L), null))); return null; - }).when(nodeStateManager).getAnomalyDetectorJob(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getJob(any(String.class), any(ActionListener.class)); doAnswer(invocation -> { Object[] args = invocation.getArguments(); @@ -755,9 +757,9 @@ public void testMarkResultIndexQueried() throws IOException { runner.runAdJob(jobParameter, lockService, lock, Instant.now().minusSeconds(60), executionStartTime, recorder, detector); verify(client, times(1)).execute(eq(AnomalyResultAction.INSTANCE), any(), any()); + verify(nodeStateManager, times(1)).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); + verify(nodeStateManager, times(1)).getJob(any(String.class), any(ActionListener.class)); verify(client, times(1)).search(any(), any()); - verify(nodeStateManager, times(1)).getAnomalyDetector(any(String.class), any(ActionListener.class)); - verify(nodeStateManager, times(1)).getAnomalyDetectorJob(any(String.class), any(ActionListener.class)); ArgumentCaptor totalUpdates = ArgumentCaptor.forClass(Long.class); verify(adTaskManager, times(1)) diff --git a/src/test/java/org/opensearch/ad/AnomalyDetectorProfileRunnerTests.java b/src/test/java/org/opensearch/ad/AnomalyDetectorProfileRunnerTests.java index 5d3c54541..7c619fda1 100644 --- a/src/test/java/org/opensearch/ad/AnomalyDetectorProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/AnomalyDetectorProfileRunnerTests.java @@ -41,7 +41,6 @@ import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.DetectorInternalState; import org.opensearch.ad.model.DetectorProfile; import org.opensearch.ad.model.DetectorProfileName; @@ -53,19 +52,22 @@ import org.opensearch.ad.transport.ProfileResponse; import org.opensearch.ad.transport.RCFPollingAction; import org.opensearch.ad.transport.RCFPollingResponse; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; import org.opensearch.core.common.io.stream.NotSerializableExceptionWrapper; import org.opensearch.index.IndexNotFoundException; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.RemoteTransportException; public class AnomalyDetectorProfileRunnerTests extends AbstractProfileRunnerTests { @@ -100,10 +102,10 @@ private void setUpClientGet( detector = 
TestHelpers.randomAnomalyDetectorWithInterval(new IntervalTimeConfiguration(detectorIntervalMin, ChronoUnit.MINUTES)); NodeStateManager nodeStateManager = mock(NodeStateManager.class); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(anyString(), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(anyString(), eq(AnalysisType.AD), any(ActionListener.class)); clientUtil = new SecurityClientUtil(nodeStateManager, Settings.EMPTY); runner = new AnomalyDetectorProfileRunner( client, @@ -137,7 +139,7 @@ private void setUpClientGet( break; } } else if (request.index().equals(CommonName.JOB_INDEX)) { - AnomalyDetectorJob job = null; + Job job = null; switch (jobStatus) { case INDEX_NOT_EXIT: listener.onFailure(new IndexNotFoundException(CommonName.JOB_INDEX)); diff --git a/src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java b/src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java index 6ff4d604d..288c0e3dc 100644 --- a/src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java +++ b/src/test/java/org/opensearch/ad/AnomalyDetectorRestTestCase.java @@ -26,7 +26,6 @@ import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyDetectorExecutionInput; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.client.Request; import org.opensearch.client.Response; import org.opensearch.client.RestClient; @@ -45,6 +44,7 @@ import org.opensearch.test.rest.OpenSearchRestTestCase; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.DateRange; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.RestHandlerUtils; import com.google.common.collect.ImmutableList; @@ -124,9 +124,9 @@ protected AnomalyDetector createRandomAnomalyDetector( AnomalyDetector createdDetector = createAnomalyDetector(detector, refresh, client); if (withMetadata) { - return getAnomalyDetector(createdDetector.getId(), new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"), client); + return getConfig(createdDetector.getId(), new BasicHeader(HttpHeaders.USER_AGENT, "Kibana"), client); } - return getAnomalyDetector(createdDetector.getId(), new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), client); + return getConfig(createdDetector.getId(), new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"), client); } protected AnomalyDetector createAnomalyDetector(AnomalyDetector detector, Boolean refresh, RestClient client) throws IOException { @@ -143,7 +143,7 @@ protected AnomalyDetector createAnomalyDetector(AnomalyDetector detector, Boolea do { i++; try { - detectorInIndex = getAnomalyDetector(detectorId, client); + detectorInIndex = getConfig(detectorId, client); assertNotNull(detectorInIndex); break; } catch (Exception e) { @@ -208,8 +208,8 @@ protected Response previewAnomalyDetector(String detectorId, RestClient client, ); } - public AnomalyDetector getAnomalyDetector(String detectorId, RestClient client) throws IOException { - return (AnomalyDetector) getAnomalyDetector(detectorId, false, client)[0]; + public AnomalyDetector getConfig(String detectorId, RestClient client) throws IOException { + return (AnomalyDetector) getConfig(detectorId, false, client)[0]; } public Response updateAnomalyDetector(String detectorId, AnomalyDetector newDetector, RestClient 
client) throws IOException { @@ -225,22 +225,17 @@ public Response updateAnomalyDetector(String detectorId, AnomalyDetector newDete ); } - public AnomalyDetector getAnomalyDetector(String detectorId, BasicHeader header, RestClient client) throws IOException { - return (AnomalyDetector) getAnomalyDetector(detectorId, header, false, false, client)[0]; + public AnomalyDetector getConfig(String detectorId, BasicHeader header, RestClient client) throws IOException { + return (AnomalyDetector) getConfig(detectorId, header, false, false, client)[0]; } - public ToXContentObject[] getAnomalyDetector(String detectorId, boolean returnJob, RestClient client) throws IOException { + public ToXContentObject[] getConfig(String detectorId, boolean returnJob, RestClient client) throws IOException { BasicHeader header = new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"); - return getAnomalyDetector(detectorId, header, returnJob, false, client); + return getConfig(detectorId, header, returnJob, false, client); } - public ToXContentObject[] getAnomalyDetector( - String detectorId, - BasicHeader header, - boolean returnJob, - boolean returnTask, - RestClient client - ) throws IOException { + public ToXContentObject[] getConfig(String detectorId, BasicHeader header, boolean returnJob, boolean returnTask, RestClient client) + throws IOException { Response response = TestHelpers .makeRequest( client, @@ -258,7 +253,7 @@ public ToXContentObject[] getAnomalyDetector( String id = null; Long version = null; AnomalyDetector detector = null; - AnomalyDetectorJob detectorJob = null; + Job detectorJob = null; ADTask realtimeAdTask = null; ADTask historicalAdTask = null; while (parser.nextToken() != XContentParser.Token.END_OBJECT) { @@ -275,7 +270,7 @@ public ToXContentObject[] getAnomalyDetector( detector = AnomalyDetector.parse(parser); break; case "anomaly_detector_job": - detectorJob = AnomalyDetectorJob.parse(parser); + detectorJob = Job.parse(parser); break; case "realtime_detection_task": if (parser.currentToken() != XContentParser.Token.VALUE_NULL) { diff --git a/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java b/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java index 1004b01a4..3de006435 100644 --- a/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/EntityProfileRunnerTests.java @@ -35,7 +35,6 @@ import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.EntityProfile; import org.opensearch.ad.model.EntityProfileName; import org.opensearch.ad.model.EntityState; @@ -44,7 +43,6 @@ import org.opensearch.ad.model.ModelProfileOnNode; import org.opensearch.ad.transport.EntityProfileAction; import org.opensearch.ad.transport.EntityProfileResponse; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.common.settings.Settings; @@ -57,10 +55,14 @@ import org.opensearch.search.aggregations.metrics.InternalMax; import org.opensearch.search.internal.InternalSearchResponse; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import 
org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.util.SecurityClientUtil; public class EntityProfileRunnerTests extends AbstractTimeSeriesTest { private AnomalyDetector detector; @@ -74,7 +76,7 @@ public class EntityProfileRunnerTests extends AbstractTimeSeriesTest { private String detectorId; private String entityValue; private int requiredSamples; - private AnomalyDetectorJob job; + private Job job; private int smallUpdates; private String categoryField; @@ -131,10 +133,10 @@ public void setUp() throws Exception { when(client.threadPool()).thenReturn(threadPool); NodeStateManager nodeStateManager = mock(NodeStateManager.class); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); clientUtil = new SecurityClientUtil(nodeStateManager, Settings.EMPTY); runner = new EntityProfileRunner(client, clientUtil, xContentRegistry(), requiredSamples); diff --git a/src/test/java/org/opensearch/ad/HistoricalAnalysisIntegTestCase.java b/src/test/java/org/opensearch/ad/HistoricalAnalysisIntegTestCase.java index b19eb7242..a419b8719 100644 --- a/src/test/java/org/opensearch/ad/HistoricalAnalysisIntegTestCase.java +++ b/src/test/java/org/opensearch/ad/HistoricalAnalysisIntegTestCase.java @@ -40,7 +40,6 @@ import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.transport.AnomalyDetectorJobAction; import org.opensearch.ad.transport.AnomalyDetectorJobRequest; import org.opensearch.ad.transport.AnomalyDetectorJobResponse; @@ -57,6 +56,7 @@ import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Feature; +import org.opensearch.timeseries.model.Job; import com.google.common.collect.ImmutableList; @@ -212,7 +212,7 @@ public ADTask getADTask(String taskId) throws IOException { return adTask; } - public AnomalyDetectorJob getADJob(String detectorId) throws IOException { + public Job getADJob(String detectorId) throws IOException { return toADJob(getDoc(CommonName.JOB_INDEX, detectorId)); } @@ -220,8 +220,8 @@ public ADTask toADTask(GetResponse doc) throws IOException { return ADTask.parse(TestHelpers.parser(doc.getSourceAsString())); } - public AnomalyDetectorJob toADJob(GetResponse doc) throws IOException { - return AnomalyDetectorJob.parse(TestHelpers.parser(doc.getSourceAsString())); + public Job toADJob(GetResponse doc) throws IOException { + return Job.parse(TestHelpers.parser(doc.getSourceAsString())); } public ADTask startHistoricalAnalysis(Instant startTime, Instant endTime) throws IOException { diff --git a/src/test/java/org/opensearch/ad/HistoricalAnalysisRestTestCase.java b/src/test/java/org/opensearch/ad/HistoricalAnalysisRestTestCase.java index 17ecaa216..35bf1a29f 100644 --- a/src/test/java/org/opensearch/ad/HistoricalAnalysisRestTestCase.java +++ b/src/test/java/org/opensearch/ad/HistoricalAnalysisRestTestCase.java @@ -61,7 +61,7 @@ public void setUp() throws Exception { public 
ToXContentObject[] getHistoricalAnomalyDetector(String detectorId, boolean returnTask, RestClient client) throws IOException { BasicHeader header = new BasicHeader(HttpHeaders.CONTENT_TYPE, "application/json"); - return getAnomalyDetector(detectorId, header, false, returnTask, client); + return getConfig(detectorId, header, false, returnTask, client); } public ADTaskProfile getADTaskProfile(String detectorId) throws IOException, ParseException { diff --git a/src/test/java/org/opensearch/ad/MultiEntityProfileRunnerTests.java b/src/test/java/org/opensearch/ad/MultiEntityProfileRunnerTests.java index 80ef180ed..e90783d0f 100644 --- a/src/test/java/org/opensearch/ad/MultiEntityProfileRunnerTests.java +++ b/src/test/java/org/opensearch/ad/MultiEntityProfileRunnerTests.java @@ -46,7 +46,6 @@ import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.model.DetectorInternalState; import org.opensearch.ad.model.DetectorProfile; @@ -64,9 +63,12 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.transport.TransportAddress; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; public class MultiEntityProfileRunnerTests extends AbstractTimeSeriesTest { @@ -92,7 +94,7 @@ public class MultiEntityProfileRunnerTests extends AbstractTimeSeriesTest { private String model0Id; private int shingleSize; - private AnomalyDetectorJob job; + private Job job; private TransportService transportService; private ADTaskManager adTaskManager; diff --git a/src/test/java/org/opensearch/ad/ODFERestTestCase.java b/src/test/java/org/opensearch/ad/ODFERestTestCase.java index e7b69e388..5ee0fec98 100644 --- a/src/test/java/org/opensearch/ad/ODFERestTestCase.java +++ b/src/test/java/org/opensearch/ad/ODFERestTestCase.java @@ -59,10 +59,10 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.common.util.concurrent.ThreadContext; -import org.opensearch.common.xcontent.XContentType; import org.opensearch.commons.rest.SecureRestClientBuilder; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.DeprecationHandler; +import org.opensearch.core.xcontent.MediaType; import org.opensearch.core.xcontent.NamedXContentRegistry; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.test.rest.OpenSearchRestTestCase; @@ -156,7 +156,7 @@ protected RestClient buildClient(Settings settings, HttpHost[] hosts) throws IOE @After protected void wipeAllODFEIndices() throws IOException { Response response = adminClient().performRequest(new Request("GET", "/_cat/indices?format=json&expand_wildcards=all")); - XContentType xContentType = XContentType.fromMediaType(response.getEntity().getContentType()); + MediaType xContentType = MediaType.fromMediaType(response.getEntity().getContentType()); try ( XContentParser parser = xContentType .xContent() diff --git a/src/test/java/org/opensearch/ad/bwc/ADBackwardsCompatibilityIT.java 
b/src/test/java/org/opensearch/ad/bwc/ADBackwardsCompatibilityIT.java index d1dde1654..0f513b502 100644 --- a/src/test/java/org/opensearch/ad/bwc/ADBackwardsCompatibilityIT.java +++ b/src/test/java/org/opensearch/ad/bwc/ADBackwardsCompatibilityIT.java @@ -43,14 +43,14 @@ import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.rest.ADRestTestUtils; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.client.Response; import org.opensearch.common.settings.Settings; import org.opensearch.core.rest.RestStatus; import org.opensearch.test.rest.OpenSearchRestTestCase; import org.opensearch.timeseries.TestHelpers; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.util.ExceptionUtil; import org.opensearch.timeseries.util.RestHandlerUtils; import com.google.common.collect.ImmutableList; @@ -70,6 +70,7 @@ public class ADBackwardsCompatibilityIT extends OpenSearchRestTestCase { private List runningRealtimeDetectors; private List historicalDetectors; + @Override @Before public void setUp() throws Exception { super.setUp(); @@ -188,7 +189,7 @@ public void testBackwardsCompatibility() throws Exception { case UPGRADED: // This branch is for testing full upgraded cluster. That means all nodes in cluster are running // latest AD version. - Assert.assertTrue(pluginNames.contains("opensearch-anomaly-detection")); + Assert.assertTrue(pluginNames.contains("opensearch-time-series-analytics")); Assert.assertTrue(pluginNames.contains("opensearch-job-scheduler")); Map detectors = new HashMap<>(); @@ -258,7 +259,7 @@ private void verifyAdTasks() throws InterruptedException, IOException { i++; for (String detectorId : runningRealtimeDetectors) { Map jobAndTask = getDetectorWithJobAndTask(client(), detectorId); - AnomalyDetectorJob job = (AnomalyDetectorJob) jobAndTask.get(ANOMALY_DETECTOR_JOB); + Job job = (Job) jobAndTask.get(ANOMALY_DETECTOR_JOB); ADTask historicalTask = (ADTask) jobAndTask.get(HISTORICAL_ANALYSIS_TASK); ADTask realtimeTask = (ADTask) jobAndTask.get(REALTIME_TASK); assertTrue(job.isEnabled()); @@ -291,7 +292,7 @@ private void stopAndDeleteDetectors() throws Exception { } } Map jobAndTask = getDetectorWithJobAndTask(client(), detectorId); - AnomalyDetectorJob job = (AnomalyDetectorJob) jobAndTask.get(ANOMALY_DETECTOR_JOB); + Job job = (Job) jobAndTask.get(ANOMALY_DETECTOR_JOB); ADTask historicalAdTask = (ADTask) jobAndTask.get(HISTORICAL_ANALYSIS_TASK); if (!job.isEnabled() && historicalAdTask.isDone()) { Response deleteDetectorResponse = deleteDetector(client(), detectorId); @@ -320,7 +321,7 @@ private void startRealtimeJobForHistoricalDetectorOnNewNode() throws IOException String jobId = startAnomalyDetectorDirectly(client(), detectorId); assertEquals(detectorId, jobId); Map jobAndTask = getDetectorWithJobAndTask(client(), detectorId); - AnomalyDetectorJob detectorJob = (AnomalyDetectorJob) jobAndTask.get(ANOMALY_DETECTOR_JOB); + Job detectorJob = (Job) jobAndTask.get(ANOMALY_DETECTOR_JOB); assertTrue(detectorJob.isEnabled()); runningRealtimeDetectors.add(detectorId); } @@ -329,7 +330,7 @@ private void startRealtimeJobForHistoricalDetectorOnNewNode() throws IOException private void verifyAllRealtimeJobsRunning() throws IOException { for (String detectorId : runningRealtimeDetectors) { Map jobAndTask = getDetectorWithJobAndTask(client(), detectorId); - AnomalyDetectorJob detectorJob = (AnomalyDetectorJob) 
jobAndTask.get(ANOMALY_DETECTOR_JOB); + Job detectorJob = (Job) jobAndTask.get(ANOMALY_DETECTOR_JOB); assertTrue(detectorJob.isEnabled()); } } @@ -452,7 +453,7 @@ private List startAnomalyDetector(Response response, boolean historicalD if (!historicalDetector) { Map jobAndTask = getDetectorWithJobAndTask(client(), detectorId); - AnomalyDetectorJob job = (AnomalyDetectorJob) jobAndTask.get(ANOMALY_DETECTOR_JOB); + Job job = (Job) jobAndTask.get(ANOMALY_DETECTOR_JOB); assertTrue(job.isEnabled()); runningRealtimeDetectors.add(detectorId); } else { diff --git a/src/test/java/org/opensearch/ad/cluster/ClusterManagerEventListenerTests.java b/src/test/java/org/opensearch/ad/cluster/ClusterManagerEventListenerTests.java index 637c5e10e..125d58d2c 100644 --- a/src/test/java/org/opensearch/ad/cluster/ClusterManagerEventListenerTests.java +++ b/src/test/java/org/opensearch/ad/cluster/ClusterManagerEventListenerTests.java @@ -30,7 +30,6 @@ import org.opensearch.ad.cluster.diskcleanup.ModelCheckpointIndexRetention; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.settings.AnomalyDetectorSettings; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.component.LifecycleListener; @@ -40,6 +39,7 @@ import org.opensearch.threadpool.Scheduler.Cancellable; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.util.ClientUtil; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; public class ClusterManagerEventListenerTests extends AbstractTimeSeriesTest { diff --git a/src/test/java/org/opensearch/ad/cluster/DailyCronTests.java b/src/test/java/org/opensearch/ad/cluster/DailyCronTests.java index 63d48ef3c..23446b0e7 100644 --- a/src/test/java/org/opensearch/ad/cluster/DailyCronTests.java +++ b/src/test/java/org/opensearch/ad/cluster/DailyCronTests.java @@ -24,11 +24,11 @@ import org.opensearch.OpenSearchException; import org.opensearch.action.ActionListener; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.util.ClientUtil; public class DailyCronTests extends AbstractTimeSeriesTest { diff --git a/src/test/java/org/opensearch/ad/cluster/HashRingTests.java b/src/test/java/org/opensearch/ad/cluster/HashRingTests.java index e85051dd9..c7cf43125 100644 --- a/src/test/java/org/opensearch/ad/cluster/HashRingTests.java +++ b/src/test/java/org/opensearch/ad/cluster/HashRingTests.java @@ -19,7 +19,7 @@ import static org.mockito.Mockito.mock; import static org.mockito.Mockito.spy; import static org.mockito.Mockito.when; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.COOLDOWN_MINUTES; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_COOLDOWN_MINUTES; import java.net.UnknownHostException; import java.time.Clock; @@ -88,8 +88,8 @@ public void setUp() throws Exception { warmNodeId = "warmNode"; warmNode = createNode(warmNodeId, "127.0.0.3", 9202, ImmutableMap.of(ADCommonName.BOX_TYPE_KEY, ADCommonName.WARM_BOX_TYPE)); - settings = Settings.builder().put(COOLDOWN_MINUTES.getKey(), TimeValue.timeValueSeconds(5)).build(); - ClusterSettings clusterSettings = clusterSetting(settings, COOLDOWN_MINUTES); + settings = 
Settings.builder().put(AD_COOLDOWN_MINUTES.getKey(), TimeValue.timeValueSeconds(5)).build(); + ClusterSettings clusterSettings = clusterSetting(settings, AD_COOLDOWN_MINUTES); clusterService = spy(new ClusterService(settings, clusterSettings, null)); nodeFilter = spy(new DiscoveryNodeFilterer(clusterService)); diff --git a/src/test/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanupTests.java b/src/test/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanupTests.java index 1425a5ec3..23c17ae23 100644 --- a/src/test/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanupTests.java +++ b/src/test/java/org/opensearch/ad/cluster/diskcleanup/IndexCleanupTests.java @@ -29,7 +29,6 @@ import org.opensearch.action.admin.indices.stats.CommonStats; import org.opensearch.action.admin.indices.stats.IndicesStatsResponse; import org.opensearch.action.admin.indices.stats.ShardStats; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import org.opensearch.client.IndicesAdminClient; import org.opensearch.cluster.service.ClusterService; @@ -38,6 +37,7 @@ import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.index.store.StoreStats; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.util.ClientUtil; public class IndexCleanupTests extends AbstractTimeSeriesTest { diff --git a/src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java b/src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java index 919b3e068..39344a0b6 100644 --- a/src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java +++ b/src/test/java/org/opensearch/ad/e2e/AbstractSyntheticDataTest.java @@ -11,9 +11,9 @@ package org.opensearch.ad.e2e; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE; import static org.opensearch.timeseries.TestHelpers.toHttpEntity; +import static org.opensearch.timeseries.settings.TimeSeriesSettings.BACKOFF_MINUTES; +import static org.opensearch.timeseries.settings.TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE; import java.io.File; import java.io.FileReader; diff --git a/src/test/java/org/opensearch/ad/feature/FeatureManagerTests.java b/src/test/java/org/opensearch/ad/feature/FeatureManagerTests.java index b5ce70d05..25f3a3d4c 100644 --- a/src/test/java/org/opensearch/ad/feature/FeatureManagerTests.java +++ b/src/test/java/org/opensearch/ad/feature/FeatureManagerTests.java @@ -59,10 +59,12 @@ import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.util.ArrayEqMatcher; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.dataprocessor.Imputer; import org.opensearch.timeseries.dataprocessor.LinearUniformImputer; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.IntervalTimeConfiguration; @@ -204,10 +206,12 @@ public void getColdStartData_returnExpectedToListener( }).when(searchFeatureDao).getLatestDataTime(eq(detector), any(ActionListener.class)); if (latestTime != null) { doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(2); + ActionListener>> listener = invocation.getArgument(3); listener.onResponse(samples); return null; - 
}).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), eq(sampleRanges), any(ActionListener.class)); + }) + .when(searchFeatureDao) + .getFeatureSamplesForPeriods(eq(detector), eq(sampleRanges), eq(AnalysisType.AD), any(ActionListener.class)); } ActionListener> listener = mock(ActionListener.class); @@ -260,7 +264,9 @@ public void getColdStartData_throwToListener_onQueryCreationError() throws Excep listener.onResponse(Optional.ofNullable(0L)); return null; }).when(searchFeatureDao).getLatestDataTime(eq(detector), any(ActionListener.class)); - doThrow(IOException.class).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), any(), any(ActionListener.class)); + doThrow(IOException.class) + .when(searchFeatureDao) + .getFeatureSamplesForPeriods(eq(detector), any(), eq(AnalysisType.AD), any(ActionListener.class)); ActionListener> listener = mock(ActionListener.class); featureManager.getColdStartData(detector, listener); @@ -319,7 +325,7 @@ public void clear_deleteFeatures() throws IOException { AtomicBoolean firstQuery = new AtomicBoolean(true); doAnswer(invocation -> { - ActionListener>> daoListener = invocation.getArgument(2); + ActionListener>> daoListener = invocation.getArgument(3); if (firstQuery.get()) { firstQuery.set(false); daoListener @@ -328,7 +334,9 @@ public void clear_deleteFeatures() throws IOException { daoListener.onResponse(asList(Optional.ofNullable(null), Optional.ofNullable(null), Optional.of(new double[] { 1 }))); } return null; - }).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + }) + .when(searchFeatureDao) + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); featureManager.getCurrentFeatures(detector, start, end, mock(ActionListener.class)); SinglePointFeatures beforeMaintenance = getCurrentFeatures(detector, start, end); @@ -358,7 +366,7 @@ public void maintenance_removeStaleData() throws IOException { AtomicBoolean firstQuery = new AtomicBoolean(true); doAnswer(invocation -> { - ActionListener>> daoListener = invocation.getArgument(2); + ActionListener>> daoListener = invocation.getArgument(3); if (firstQuery.get()) { firstQuery.set(false); daoListener @@ -367,7 +375,9 @@ public void maintenance_removeStaleData() throws IOException { daoListener.onResponse(asList(Optional.ofNullable(null), Optional.ofNullable(null), Optional.of(new double[] { 1 }))); } return null; - }).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + }) + .when(searchFeatureDao) + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); featureManager.getCurrentFeatures(detector, start, end, mock(ActionListener.class)); SinglePointFeatures beforeMaintenance = getCurrentFeatures(detector, start, end); @@ -390,7 +400,7 @@ public void maintenance_keepRecentData() throws IOException { AtomicBoolean firstQuery = new AtomicBoolean(true); doAnswer(invocation -> { - ActionListener>> daoListener = invocation.getArgument(2); + ActionListener>> daoListener = invocation.getArgument(3); if (firstQuery.get()) { firstQuery.set(false); daoListener @@ -399,7 +409,9 @@ public void maintenance_keepRecentData() throws IOException { daoListener.onResponse(asList(Optional.ofNullable(null), Optional.ofNullable(null), Optional.of(new double[] { 1 }))); } return null; - }).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), any(List.class), 
any(ActionListener.class)); + }) + .when(searchFeatureDao) + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); featureManager.getCurrentFeatures(detector, start, end, mock(ActionListener.class)); SinglePointFeatures beforeMaintenance = getCurrentFeatures(detector, start, end); @@ -435,8 +447,8 @@ private void getPreviewFeaturesTemplate(List> samplesResults, ActionListener>> listener = null; - if (args[2] instanceof ActionListener) { - listener = (ActionListener>>) args[2]; + if (args[3] instanceof ActionListener) { + listener = (ActionListener>>) args[3]; } if (querySuccess) { @@ -446,7 +458,7 @@ private void getPreviewFeaturesTemplate(List> samplesResults, } return null; - }).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), eq(sampleRanges), any()); + }).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), eq(sampleRanges), eq(AnalysisType.AD), any()); when(imputer.impute(argThat(new ArrayEqMatcher<>(new double[][] { { 1, 3 } })), eq(3))).thenReturn(new double[][] { { 1, 2, 3 } }); when(imputer.impute(argThat(new ArrayEqMatcher<>(new double[][] { { 0, 120000 } })), eq(3))) @@ -496,10 +508,10 @@ public void getPreviewFeatureForEntity() throws IOException { coldStartSamples.add(Optional.of(new double[] { 30.0 })); doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); ActionListener listener = mock(ActionListener.class); @@ -520,10 +532,10 @@ public void getPreviewFeatureForEntity_noDataToPreview() throws IOException { Entity entity = Entity.createSingleAttributeEntity("fieldName", "value"); doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(new ArrayList<>()); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); ActionListener listener = mock(ActionListener.class); @@ -560,7 +572,7 @@ private void setupSearchFeatureDaoForGetCurrentFeatures( AtomicBoolean isPreQuery = new AtomicBoolean(true); doAnswer(invocation -> { - ActionListener>> daoListener = invocation.getArgument(2); + ActionListener>> daoListener = invocation.getArgument(3); if (isPreQuery.get()) { isPreQuery.set(false); daoListener.onResponse(preQueryResponse); @@ -572,7 +584,9 @@ private void setupSearchFeatureDaoForGetCurrentFeatures( } } return null; - }).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + }) + .when(searchFeatureDao) + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); } private Object[] getCurrentFeaturesTestData_whenAfterQueryResultsFormFullShingle() { @@ -615,7 +629,7 @@ public void getCurrentFeatures_returnExpectedProcessedFeatures_whenAfterQueryRes // Start test SinglePointFeatures listenerResponse = getCurrentFeatures(detector, testStartTime, testEndTime); verify(searchFeatureDao, times(expectedNumQueriesToSearchFeatureDao)) - 
.getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); assertTrue(listenerResponse.getUnprocessedFeatures().isPresent()); assertTrue(listenerResponse.getProcessedFeatures().isPresent()); @@ -652,7 +666,7 @@ public void getCurrentFeatures_returnExpectedProcessedFeatures_whenNoQueryNeeded // Start test SinglePointFeatures listenerResponse = getCurrentFeatures(detector, start, end); verify(searchFeatureDao, times(expectedNumQueriesToSearchFeatureDao)) - .getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); assertTrue(listenerResponse.getUnprocessedFeatures().isPresent()); assertTrue(listenerResponse.getProcessedFeatures().isPresent()); @@ -712,7 +726,7 @@ public void getCurrentFeatures_returnExpectedProcessedFeatures_whenAfterQueryRes // Start test SinglePointFeatures listenerResponse = getCurrentFeatures(detector, testStartTime, testEndTime); verify(searchFeatureDao, times(expectedNumQueriesToSearchFeatureDao)) - .getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); assertTrue(listenerResponse.getUnprocessedFeatures().isPresent()); assertTrue(listenerResponse.getProcessedFeatures().isPresent()); @@ -758,7 +772,7 @@ public void getCurrentFeatures_returnNoProcessedOrUnprocessedFeatures_whenMissin // Start test SinglePointFeatures listenerResponse = getCurrentFeatures(detector, testStartTime, testEndTime); verify(searchFeatureDao, times(expectedNumQueriesToSearchFeatureDao)) - .getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); assertFalse(listenerResponse.getUnprocessedFeatures().isPresent()); assertFalse(listenerResponse.getProcessedFeatures().isPresent()); } @@ -795,7 +809,7 @@ public void getCurrentFeatures_returnNoProcessedFeatures_whenAfterQueryResultsCa // Start test SinglePointFeatures listenerResponse = getCurrentFeatures(detector, testStartTime, testEndTime); verify(searchFeatureDao, times(expectedNumQueriesToSearchFeatureDao)) - .getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); assertTrue(listenerResponse.getUnprocessedFeatures().isPresent()); assertFalse(listenerResponse.getProcessedFeatures().isPresent()); } @@ -826,7 +840,7 @@ public void getCurrentFeatures_returnExceptionToListener_whenQueryThrowsIOExcept ActionListener listener = mock(ActionListener.class); featureManager.getCurrentFeatures(detector, testStartTime, testEndTime, listener); verify(searchFeatureDao, times(expectedNumQueriesToSearchFeatureDao)) - .getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); verify(listener).onFailure(any(IOException.class)); } @@ -859,12 +873,17 @@ public void getCurrentFeatures_returnExpectedFeatures_cacheMissingData( // first call to cache missing points featureManager.getCurrentFeatures(detector, firstStartTime, firstEndTime, 
mock(ActionListener.class)); verify(searchFeatureDao, times(1)) - .getFeatureSamplesForPeriods(eq(detector), argThat(list -> list.size() == shingleSize), any(ActionListener.class)); + .getFeatureSamplesForPeriods( + eq(detector), + argThat(list -> list.size() == shingleSize), + eq(AnalysisType.AD), + any(ActionListener.class) + ); // second call should only fetch current point even if previous points missing SinglePointFeatures listenerResponse = getCurrentFeatures(detector, secondStartTime, secondEndTime); verify(searchFeatureDao, times(1)) - .getFeatureSamplesForPeriods(eq(detector), argThat(list -> list.size() == 1), any(ActionListener.class)); + .getFeatureSamplesForPeriods(eq(detector), argThat(list -> list.size() == 1), eq(AnalysisType.AD), any(ActionListener.class)); assertTrue(listenerResponse.getUnprocessedFeatures().isPresent()); if (expectedProcessedFeaturesOptional.isPresent()) { @@ -942,7 +961,7 @@ public void getCurrentFeatures_returnExpectedFeatures_withTimeJitterUpToHalfInte // Start test SinglePointFeatures listenerResponse = getCurrentFeatures(detector, testStartTime, testEndTime); verify(searchFeatureDao, times(expectedNumQueriesToSearchFeatureDao)) - .getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); assertTrue(listenerResponse.getUnprocessedFeatures().isPresent()); assertTrue(listenerResponse.getProcessedFeatures().isPresent()); @@ -977,14 +996,16 @@ public void getCurrentFeatures_setsShingleSizeFromDetectorConfig(int shingleSize List> ranges = invocation.getArgument(1); assertEquals(ranges.size(), shingleSize); - ActionListener>> daoListener = invocation.getArgument(2); + ActionListener>> daoListener = invocation.getArgument(3); List> response = new ArrayList>(); for (int i = 0; i < ranges.size(); i++) { response.add(Optional.of(new double[] { i })); } daoListener.onResponse(response); return null; - }).when(searchFeatureDao).getFeatureSamplesForPeriods(eq(detector), any(List.class), any(ActionListener.class)); + }) + .when(searchFeatureDao) + .getFeatureSamplesForPeriods(eq(detector), any(List.class), eq(AnalysisType.AD), any(ActionListener.class)); SinglePointFeatures listenerResponse = getCurrentFeatures(detector, 0, intervalInMilliseconds); assertTrue(listenerResponse.getProcessedFeatures().isPresent()); diff --git a/src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java b/src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java index e7d74f28a..0cf65e63d 100644 --- a/src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java +++ b/src/test/java/org/opensearch/ad/ml/AbstractCosineDataTest.java @@ -15,9 +15,7 @@ import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES; import static org.opensearch.ad.settings.AnomalyDetectorSettings.CHECKPOINT_SAVING_FREQ; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE; import java.time.Clock; import java.time.Instant; @@ -34,13 +32,10 @@ import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; import org.opensearch.ad.MemoryTracker; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.feature.FeatureManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.model.AnomalyDetector; import 
org.opensearch.ad.ratelimit.CheckpointWriteWorker; import org.opensearch.ad.settings.AnomalyDetectorSettings; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodeRole; @@ -52,13 +47,17 @@ import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.dataprocessor.Imputer; import org.opensearch.timeseries.dataprocessor.LinearUniformImputer; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.settings.TimeSeriesSettings; +import org.opensearch.timeseries.util.ClientUtil; import com.google.common.collect.ImmutableList; @@ -125,8 +124,8 @@ public void setUp() throws Exception { }).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class)); nodestateSetting = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - nodestateSetting.add(MAX_RETRY_FOR_UNRESPONSIVE_NODE); - nodestateSetting.add(BACKOFF_MINUTES); + nodestateSetting.add(TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE); + nodestateSetting.add(TimeSeriesSettings.BACKOFF_MINUTES); nodestateSetting.add(CHECKPOINT_SAVING_FREQ); clusterSettings = new ClusterSettings(Settings.EMPTY, nodestateSetting); @@ -147,7 +146,9 @@ public void setUp() throws Exception { clientUtil, clock, AnomalyDetectorSettings.HOURLY_MAINTENANCE, - clusterService + clusterService, + TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, + TimeSeriesSettings.BACKOFF_MINUTES ); imputer = new LinearUniformImputer(true); diff --git a/src/test/java/org/opensearch/ad/ml/CheckpointDaoTests.java b/src/test/java/org/opensearch/ad/ml/CheckpointDaoTests.java index 8c3e6c472..4adf9cc29 100644 --- a/src/test/java/org/opensearch/ad/ml/CheckpointDaoTests.java +++ b/src/test/java/org/opensearch/ad/ml/CheckpointDaoTests.java @@ -97,13 +97,13 @@ import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.settings.AnomalyDetectorSettings; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import org.opensearch.core.index.shard.ShardId; import org.opensearch.index.IndexNotFoundException; import org.opensearch.index.engine.VersionConflictEngineException; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.util.ClientUtil; import test.org.opensearch.ad.util.JsonDeserializer; import test.org.opensearch.ad.util.MLUtil; diff --git a/src/test/java/org/opensearch/ad/ml/CheckpointDeleteTests.java b/src/test/java/org/opensearch/ad/ml/CheckpointDeleteTests.java index c94c145cb..12c85ba65 100644 --- a/src/test/java/org/opensearch/ad/ml/CheckpointDeleteTests.java +++ b/src/test/java/org/opensearch/ad/ml/CheckpointDeleteTests.java @@ -29,13 +29,13 @@ import org.opensearch.action.ActionListener; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.ADIndexManagement; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.client.Client; import 
org.opensearch.index.IndexNotFoundException; import org.opensearch.index.reindex.BulkByScrollResponse; import org.opensearch.index.reindex.DeleteByQueryAction; import org.opensearch.index.reindex.ScrollableHitSource; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.util.ClientUtil; import com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestMapper; import com.amazon.randomcutforest.parkservices.state.ThresholdedRandomCutForestState; diff --git a/src/test/java/org/opensearch/ad/ml/EntityColdStarterTests.java b/src/test/java/org/opensearch/ad/ml/EntityColdStarterTests.java index 34265b0e6..9852b5d97 100644 --- a/src/test/java/org/opensearch/ad/ml/EntityColdStarterTests.java +++ b/src/test/java/org/opensearch/ad/ml/EntityColdStarterTests.java @@ -13,6 +13,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.never; @@ -54,6 +55,7 @@ import org.opensearch.common.settings.Setting; import org.opensearch.common.settings.Settings; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonName; @@ -119,10 +121,10 @@ public void testColdStart() throws InterruptedException, IOException { modelState = new ModelState<>(model, modelId, detectorId, ModelType.ENTITY.getName(), clock, priority); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(1602269260000L)); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); List> coldStartSamples = new ArrayList<>(); @@ -134,10 +136,10 @@ public void testColdStart() throws InterruptedException, IOException { coldStartSamples.add(Optional.of(sample2)); coldStartSamples.add(Optional.of(sample3)); doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); checkSemaphoreRelease(); @@ -183,14 +185,14 @@ public void testMissMin() throws IOException, InterruptedException { modelState = new ModelState<>(model, modelId, detectorId, ModelType.ENTITY.getName(), clock, priority); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.empty()); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); - verify(searchFeatureDao, never()).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + verify(searchFeatureDao, 
never()).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); assertTrue(!model.getTrcf().isPresent()); checkSemaphoreRelease(); @@ -268,10 +270,10 @@ public void testTwoSegmentsWithSingleSample() throws InterruptedException, IOExc modelState = new ModelState<>(model, modelId, detectorId, ModelType.ENTITY.getName(), clock, priority); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(1602269260000L)); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); List> coldStartSamples = new ArrayList<>(); double[] sample1 = new double[] { 57.0 }; @@ -284,10 +286,10 @@ public void testTwoSegmentsWithSingleSample() throws InterruptedException, IOExc coldStartSamples.add(Optional.empty()); coldStartSamples.add(Optional.of(sample5)); doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); checkSemaphoreRelease(); @@ -322,10 +324,10 @@ public void testTwoSegments() throws InterruptedException, IOException { modelState = new ModelState<>(model, modelId, detectorId, ModelType.ENTITY.getName(), clock, priority); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(1602269260000L)); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); List> coldStartSamples = new ArrayList<>(); double[] sample1 = new double[] { 57.0 }; @@ -340,10 +342,10 @@ public void testTwoSegments() throws InterruptedException, IOException { coldStartSamples.add(Optional.of(new double[] { -17.0 })); coldStartSamples.add(Optional.of(new double[] { -38.0 })); doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); checkSemaphoreRelease(); @@ -378,17 +380,17 @@ public void testThrottledColdStart() throws InterruptedException { modelState = new ModelState<>(model, modelId, detectorId, ModelType.ENTITY.getName(), clock, priority); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onFailure(new OpenSearchRejectedExecutionException("")); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); 
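For readers updating their own test doubles, the repeated hunks above reduce to the stubbing pattern sketched below. This is a minimal illustration, not part of the patch: it assumes a Mockito-mocked `searchFeatureDao`, a pre-built `coldStartSamples` list of type `List<Optional<double[]>>`, and the static imports (`doAnswer`, `any`, `eq`, `anyBoolean`) already used in these test classes. The new `AnalysisType` parameter shifts the `ActionListener` from argument index 2 to 3 for `getMinDataTime` and from 4 to 5 for `getColdStartSamplesForPeriods`:

// Illustrative consolidation of the new mock wiring used throughout these tests.
doAnswer(invocation -> {
    // AnalysisType is now the third parameter, so the listener moves to index 3.
    ActionListener<Optional<Long>> minTimeListener = invocation.getArgument(3);
    minTimeListener.onResponse(Optional.of(1602269260000L));
    return null;
}).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any());

doAnswer(invocation -> {
    // Same shift here: the listener is now argument index 5 instead of 4.
    ActionListener<List<Optional<double[]>>> samplesListener = invocation.getArgument(5);
    samplesListener.onResponse(coldStartSamples);
    return null;
}).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any());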
entityColdStarter.trainModel(entity, "456", modelState, listener); // only the first one makes the call - verify(searchFeatureDao, times(1)).getEntityMinDataTime(any(), any(), any()); + verify(searchFeatureDao, times(1)).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); checkSemaphoreRelease(); } @@ -398,14 +400,14 @@ public void testColdStartException() throws InterruptedException { modelState = new ModelState<>(model, modelId, detectorId, ModelType.ENTITY.getName(), clock, priority); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onFailure(new TimeSeriesException(detectorId, "")); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); - assertTrue(stateManager.getLastDetectionError(detectorId) != null); + assertTrue(stateManager.fetchExceptionAndClear(detectorId).isPresent()); checkSemaphoreRelease(); } @@ -429,19 +431,19 @@ public void testNotEnoughSamples() throws InterruptedException, IOException { }).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class)); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(1602269260000L)); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); List> coldStartSamples = new ArrayList<>(); coldStartSamples.add(Optional.of(new double[] { 57.0 })); coldStartSamples.add(Optional.of(new double[] { 1.0 })); doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); checkSemaphoreRelease(); @@ -482,10 +484,10 @@ public void testEmptyDataRange() throws InterruptedException { }).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class)); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(894056973000L)); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); checkSemaphoreRelease(); @@ -594,10 +596,10 @@ private void accuracyTemplate(int detectorIntervalMins, float precisionThreshold }).when(clientUtil).asyncRequest(any(GetRequest.class), any(), any(ActionListener.class)); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(timestamps[0])); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); doAnswer(invocation -> { List> ranges = 
invocation.getArgument(1); @@ -616,10 +618,10 @@ public int compare(Entry p1, Entry p2) { coldStartSamples.add(Optional.of(data[valueIndex])); } - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); EntityModel model = new EntityModel(entity, new ArrayDeque<>(), null); modelState = new ModelState<>(model, modelId, detector.getId(), ModelType.ENTITY.getName(), clock, priority); @@ -749,10 +751,10 @@ private ModelState createStateForCacheRelease() { public void testCacheReleaseAfterMaintenance() throws IOException, InterruptedException { ModelState modelState = createStateForCacheRelease(); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(1602269260000L)); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); List> coldStartSamples = new ArrayList<>(); @@ -764,10 +766,10 @@ public void testCacheReleaseAfterMaintenance() throws IOException, InterruptedEx coldStartSamples.add(Optional.of(sample2)); coldStartSamples.add(Optional.of(sample3)); doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); checkSemaphoreRelease(); @@ -794,10 +796,10 @@ public void testCacheReleaseAfterMaintenance() throws IOException, InterruptedEx public void testCacheReleaseAfterClear() throws IOException, InterruptedException { ModelState modelState = createStateForCacheRelease(); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(1602269260000L)); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); List> coldStartSamples = new ArrayList<>(); @@ -809,10 +811,10 @@ public void testCacheReleaseAfterClear() throws IOException, InterruptedExceptio coldStartSamples.add(Optional.of(sample2)); coldStartSamples.add(Optional.of(sample3)); doAnswer(invocation -> { - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); entityColdStarter.trainModel(entity, detectorId, modelState, listener); checkSemaphoreRelease(); diff --git a/src/test/java/org/opensearch/ad/ml/HCADModelPerfTests.java b/src/test/java/org/opensearch/ad/ml/HCADModelPerfTests.java index 6fd32c2c9..5e940392b 100644 --- 
a/src/test/java/org/opensearch/ad/ml/HCADModelPerfTests.java +++ b/src/test/java/org/opensearch/ad/ml/HCADModelPerfTests.java @@ -13,6 +13,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyBoolean; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.reset; @@ -36,15 +37,16 @@ import org.opensearch.action.get.GetResponse; import org.opensearch.ad.MemoryTracker; import org.opensearch.ad.feature.FeatureManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.ml.ModelManager.ModelType; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.common.settings.ClusterSettings; import org.opensearch.common.settings.Settings; import org.opensearch.test.ClusterServiceUtils; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.feature.SearchFeatureDao; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.model.IntervalTimeConfiguration; import org.opensearch.timeseries.settings.TimeSeriesSettings; @@ -188,10 +190,10 @@ private void averageAccuracyTemplate( when(clock.millis()).thenReturn(timestamps[trainTestSplit - 1]); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(2); + ActionListener> listener = invocation.getArgument(3); listener.onResponse(Optional.of(timestamps[0])); return null; - }).when(searchFeatureDao).getEntityMinDataTime(any(), any(), any()); + }).when(searchFeatureDao).getMinDataTime(any(), any(), eq(AnalysisType.AD), any()); doAnswer(invocation -> { List> ranges = invocation.getArgument(1); @@ -210,10 +212,10 @@ public int compare(Entry p1, Entry p2) { coldStartSamples.add(Optional.of(data[valueIndex])); } - ActionListener>> listener = invocation.getArgument(4); + ActionListener>> listener = invocation.getArgument(5); listener.onResponse(coldStartSamples); return null; - }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), any()); + }).when(searchFeatureDao).getColdStartSamplesForPeriods(any(), any(), any(), anyBoolean(), eq(AnalysisType.AD), any()); entity = Entity.createSingleAttributeEntity("field", entityName + z); EntityModel model = new EntityModel(entity, new ArrayDeque<>(), null); diff --git a/src/test/java/org/opensearch/ad/ml/ModelManagerTests.java b/src/test/java/org/opensearch/ad/ml/ModelManagerTests.java index 7d981a297..bf59d761b 100644 --- a/src/test/java/org/opensearch/ad/ml/ModelManagerTests.java +++ b/src/test/java/org/opensearch/ad/ml/ModelManagerTests.java @@ -56,11 +56,9 @@ import org.mockito.MockitoAnnotations; import org.opensearch.action.ActionListener; import org.opensearch.ad.MemoryTracker; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.EntityCache; import org.opensearch.ad.feature.FeatureManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.ml.ModelManager.ModelType; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.ratelimit.CheckpointWriteWorker; @@ -72,10 +70,13 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.monitor.jvm.JvmService; import org.opensearch.threadpool.ThreadPool; +import 
org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.common.exception.LimitExceededException; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; import org.opensearch.timeseries.dataprocessor.LinearUniformImputer; +import org.opensearch.timeseries.feature.SearchFeatureDao; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; import org.powermock.modules.junit4.PowerMockRunner; diff --git a/src/test/java/org/opensearch/ad/ml/SingleStreamModelIdMapperTests.java b/src/test/java/org/opensearch/ad/ml/SingleStreamModelIdMapperTests.java index 59a0d02da..bda02043a 100644 --- a/src/test/java/org/opensearch/ad/ml/SingleStreamModelIdMapperTests.java +++ b/src/test/java/org/opensearch/ad/ml/SingleStreamModelIdMapperTests.java @@ -12,6 +12,7 @@ package org.opensearch.ad.ml; import org.opensearch.test.OpenSearchTestCase; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; public class SingleStreamModelIdMapperTests extends OpenSearchTestCase { public void testGetThresholdModelIdFromRCFModelId() { diff --git a/src/test/java/org/opensearch/ad/mock/transport/MockAnomalyDetectorJobTransportActionWithUser.java b/src/test/java/org/opensearch/ad/mock/transport/MockAnomalyDetectorJobTransportActionWithUser.java index 15d37c89d..69e15e4d1 100644 --- a/src/test/java/org/opensearch/ad/mock/transport/MockAnomalyDetectorJobTransportActionWithUser.java +++ b/src/test/java/org/opensearch/ad/mock/transport/MockAnomalyDetectorJobTransportActionWithUser.java @@ -11,8 +11,8 @@ package org.opensearch.ad.mock.transport; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_REQUEST_TIMEOUT; import static org.opensearch.ad.settings.AnomalyDetectorSettings.FILTER_BY_BACKEND_ROLES; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; import static org.opensearch.timeseries.util.ParseUtils.resolveUserAndExecute; import org.apache.logging.log4j.LogManager; @@ -22,6 +22,7 @@ import org.opensearch.action.support.HandledTransportAction; import org.opensearch.ad.ExecuteADResultResponseRecorder; import org.opensearch.ad.indices.ADIndexManagement; +import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.rest.handler.IndexAnomalyDetectorJobActionHandler; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.ad.transport.AnomalyDetectorJobRequest; @@ -91,7 +92,7 @@ protected void doExecute(Task task, AnomalyDetectorJobRequest request, ActionLis long seqNo = request.getSeqNo(); long primaryTerm = request.getPrimaryTerm(); String rawPath = request.getRawPath(); - TimeValue requestTimeout = REQUEST_TIMEOUT.get(settings); + TimeValue requestTimeout = AD_REQUEST_TIMEOUT.get(settings); String userStr = "user_name|backendrole1,backendrole2|roles1,role2"; // By the time request reaches here, the user permissions are validated by Security plugin. 
User user = User.parse(userStr); @@ -114,7 +115,8 @@ protected void doExecute(Task task, AnomalyDetectorJobRequest request, ActionLis ), client, clusterService, - xContentRegistry + xContentRegistry, + AnomalyDetector.class ); } catch (Exception e) { logger.error(e); diff --git a/src/test/java/org/opensearch/ad/model/AnomalyDetectorJobTests.java b/src/test/java/org/opensearch/ad/model/AnomalyDetectorJobTests.java index 75d821507..df506b010 100644 --- a/src/test/java/org/opensearch/ad/model/AnomalyDetectorJobTests.java +++ b/src/test/java/org/opensearch/ad/model/AnomalyDetectorJobTests.java @@ -24,6 +24,7 @@ import org.opensearch.test.OpenSearchSingleNodeTestCase; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; +import org.opensearch.timeseries.model.Job; public class AnomalyDetectorJobTests extends OpenSearchSingleNodeTestCase { @@ -38,22 +39,22 @@ protected NamedWriteableRegistry writableRegistry() { } public void testParseAnomalyDetectorJob() throws IOException { - AnomalyDetectorJob anomalyDetectorJob = TestHelpers.randomAnomalyDetectorJob(); + Job anomalyDetectorJob = TestHelpers.randomAnomalyDetectorJob(); String anomalyDetectorJobString = TestHelpers .xContentBuilderToString(anomalyDetectorJob.toXContent(TestHelpers.builder(), ToXContent.EMPTY_PARAMS)); anomalyDetectorJobString = anomalyDetectorJobString .replaceFirst("\\{", String.format(Locale.ROOT, "{\"%s\":\"%s\",", randomAlphaOfLength(5), randomAlphaOfLength(5))); - AnomalyDetectorJob parsedAnomalyDetectorJob = AnomalyDetectorJob.parse(TestHelpers.parser(anomalyDetectorJobString)); + Job parsedAnomalyDetectorJob = Job.parse(TestHelpers.parser(anomalyDetectorJobString)); assertEquals("Parsing anomaly detect result doesn't work", anomalyDetectorJob, parsedAnomalyDetectorJob); } public void testSerialization() throws IOException { - AnomalyDetectorJob anomalyDetectorJob = TestHelpers.randomAnomalyDetectorJob(); + Job anomalyDetectorJob = TestHelpers.randomAnomalyDetectorJob(); BytesStreamOutput output = new BytesStreamOutput(); anomalyDetectorJob.writeTo(output); NamedWriteableAwareStreamInput input = new NamedWriteableAwareStreamInput(output.bytes().streamInput(), writableRegistry()); - AnomalyDetectorJob parsedAnomalyDetectorJob = new AnomalyDetectorJob(input); + Job parsedAnomalyDetectorJob = new Job(input); assertNotNull(parsedAnomalyDetectorJob); } } diff --git a/src/test/java/org/opensearch/ad/model/MergeableListTests.java b/src/test/java/org/opensearch/ad/model/MergeableListTests.java index f9d794da6..1375d72a7 100644 --- a/src/test/java/org/opensearch/ad/model/MergeableListTests.java +++ b/src/test/java/org/opensearch/ad/model/MergeableListTests.java @@ -15,6 +15,7 @@ import java.util.List; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.model.MergeableList; public class MergeableListTests extends AbstractTimeSeriesTest { diff --git a/src/test/java/org/opensearch/ad/ratelimit/AbstractRateLimitingTest.java b/src/test/java/org/opensearch/ad/ratelimit/AbstractRateLimitingTest.java index 075cf46c3..2f35ed295 100644 --- a/src/test/java/org/opensearch/ad/ratelimit/AbstractRateLimitingTest.java +++ b/src/test/java/org/opensearch/ad/ratelimit/AbstractRateLimitingTest.java @@ -12,6 +12,7 @@ package org.opensearch.ad.ratelimit; import static org.mockito.ArgumentMatchers.any; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static 
org.mockito.Mockito.when; @@ -22,10 +23,11 @@ import java.util.Optional; import org.opensearch.action.ActionListener; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.Entity; @@ -54,10 +56,10 @@ public void setUp() throws Exception { nodeStateManager = mock(NodeStateManager.class); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); entity = Entity.createSingleAttributeEntity(categoryField, "value"); entity2 = Entity.createSingleAttributeEntity(categoryField, "value2"); diff --git a/src/test/java/org/opensearch/ad/ratelimit/CheckpointReadWorkerTests.java b/src/test/java/org/opensearch/ad/ratelimit/CheckpointReadWorkerTests.java index 76090cce9..3d0b5cbf6 100644 --- a/src/test/java/org/opensearch/ad/ratelimit/CheckpointReadWorkerTests.java +++ b/src/test/java/org/opensearch/ad/ratelimit/CheckpointReadWorkerTests.java @@ -71,6 +71,7 @@ import org.opensearch.index.seqno.SequenceNumbers; import org.opensearch.threadpool.ThreadPoolStats; import org.opensearch.threadpool.ThreadPoolStats.Stats; +import org.opensearch.timeseries.AnalysisType; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.common.exception.LimitExceededException; @@ -733,16 +734,16 @@ public void testHostException() throws IOException { AnomalyDetector detector2 = TestHelpers.randomAnomalyDetectorUsingCategoryFields(detectorId2, Arrays.asList(categoryField)); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector2)); return null; - }).when(nodeStateManager).getAnomalyDetector(eq(detectorId2), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(eq(detectorId2), eq(AnalysisType.AD), any(ActionListener.class)); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(eq(detectorId), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(eq(detectorId), eq(AnalysisType.AD), any(ActionListener.class)); doAnswer(invocation -> { MultiGetItemResponse[] items = new MultiGetItemResponse[2]; diff --git a/src/test/java/org/opensearch/ad/ratelimit/CheckpointWriteWorkerTests.java b/src/test/java/org/opensearch/ad/ratelimit/CheckpointWriteWorkerTests.java index 97e8370bf..eb731ae10 100644 --- a/src/test/java/org/opensearch/ad/ratelimit/CheckpointWriteWorkerTests.java +++ b/src/test/java/org/opensearch/ad/ratelimit/CheckpointWriteWorkerTests.java @@ -62,6 +62,7 @@ import org.opensearch.core.rest.RestStatus; import org.opensearch.index.engine.VersionConflictEngineException; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.AnalysisType; import 
org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; import org.opensearch.timeseries.constant.CommonName; @@ -376,10 +377,10 @@ public void testEmptyDetectorId() { @SuppressWarnings("unchecked") public void testDetectorNotAvailableSingleWrite() { doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.empty()); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); worker.write(state, true, RequestPriority.MEDIUM); verify(checkpoint, never()).batchWrite(any(), any()); @@ -388,10 +389,10 @@ public void testDetectorNotAvailableSingleWrite() { @SuppressWarnings("unchecked") public void testDetectorNotAvailableWriteAll() { doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.empty()); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); List> states = new ArrayList<>(); states.add(state); @@ -402,10 +403,10 @@ public void testDetectorNotAvailableWriteAll() { @SuppressWarnings("unchecked") public void testDetectorFetchException() { doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onFailure(new RuntimeException()); return null; - }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); worker.write(state, true, RequestPriority.MEDIUM); verify(checkpoint, never()).batchWrite(any(), any()); diff --git a/src/test/java/org/opensearch/ad/rest/ADRestTestUtils.java b/src/test/java/org/opensearch/ad/rest/ADRestTestUtils.java index fb1ccc1e4..e9923a24e 100644 --- a/src/test/java/org/opensearch/ad/rest/ADRestTestUtils.java +++ b/src/test/java/org/opensearch/ad/rest/ADRestTestUtils.java @@ -40,13 +40,13 @@ import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskProfile; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.client.Response; import org.opensearch.client.RestClient; import org.opensearch.core.xcontent.XContentParser; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.Job; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -351,12 +351,12 @@ public static Map getDetectorWithJobAndTask(RestClient client, S Map jobMap = (Map) responseMap.get(ANOMALY_DETECTOR_JOB); if (jobMap != null) { - String jobName = (String) jobMap.get(AnomalyDetectorJob.NAME_FIELD); - boolean enabled = (boolean) jobMap.get(AnomalyDetectorJob.IS_ENABLED_FIELD); - long enabledTime = (long) jobMap.get(AnomalyDetectorJob.ENABLED_TIME_FIELD); - long lastUpdateTime = (long) jobMap.get(AnomalyDetectorJob.LAST_UPDATE_TIME_FIELD); + String jobName = (String) jobMap.get(Job.NAME_FIELD); + boolean enabled = (boolean) jobMap.get(Job.IS_ENABLED_FIELD); + long enabledTime = (long) 
jobMap.get(Job.ENABLED_TIME_FIELD); + long lastUpdateTime = (long) jobMap.get(Job.LAST_UPDATE_TIME_FIELD); - AnomalyDetectorJob job = new AnomalyDetectorJob( + Job job = new Job( jobName, null, null, diff --git a/src/test/java/org/opensearch/ad/rest/AnomalyDetectorRestApiIT.java b/src/test/java/org/opensearch/ad/rest/AnomalyDetectorRestApiIT.java index 390e68ef7..691e6b439 100644 --- a/src/test/java/org/opensearch/ad/rest/AnomalyDetectorRestApiIT.java +++ b/src/test/java/org/opensearch/ad/rest/AnomalyDetectorRestApiIT.java @@ -36,7 +36,6 @@ import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.AnomalyDetectorExecutionInput; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.rest.handler.AbstractAnomalyDetectorActionHandler; import org.opensearch.ad.settings.ADEnabledSetting; @@ -54,6 +53,7 @@ import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Feature; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.settings.TimeSeriesSettings; import com.google.common.collect.ImmutableList; @@ -240,18 +240,18 @@ public void testGetAnomalyDetector() throws Exception { updateClusterSettings(ADEnabledSetting.AD_ENABLED, false); - Exception ex = expectThrows(ResponseException.class, () -> getAnomalyDetector(detector.getId(), client())); + Exception ex = expectThrows(ResponseException.class, () -> getConfig(detector.getId(), client())); assertThat(ex.getMessage(), containsString(ADCommonMessages.DISABLED_ERR_MSG)); updateClusterSettings(ADEnabledSetting.AD_ENABLED, true); - AnomalyDetector createdDetector = getAnomalyDetector(detector.getId(), client()); + AnomalyDetector createdDetector = getConfig(detector.getId(), client()); assertEquals("Incorrect Location header", detector, createdDetector); } public void testGetNotExistingAnomalyDetector() throws Exception { createRandomAnomalyDetector(true, true, client()); - TestHelpers.assertFailWith(ResponseException.class, null, () -> getAnomalyDetector(randomAlphaOfLength(5), client())); + TestHelpers.assertFailWith(ResponseException.class, null, () -> getConfig(randomAlphaOfLength(5), client())); } public void testUpdateAnomalyDetector() throws Exception { @@ -311,7 +311,7 @@ public void testUpdateAnomalyDetector() throws Exception { assertEquals("Updated anomaly detector id doesn't match", detector.getId(), responseBody.get("_id")); assertEquals("Version not incremented", (detector.getVersion().intValue() + 1), (int) responseBody.get("_version")); - AnomalyDetector updatedDetector = getAnomalyDetector(detector.getId(), client()); + AnomalyDetector updatedDetector = getConfig(detector.getId(), client()); assertNotEquals("Anomaly detector last update time not changed", updatedDetector.getLastUpdateTime(), detector.getLastUpdateTime()); assertEquals("Anomaly detector description not updated", newDescription, updatedDetector.getDescription()); } @@ -389,7 +389,7 @@ public void testUpdateAnomalyDetectorNameToNew() throws Exception { null ); - AnomalyDetector resultDetector = getAnomalyDetector(detectorWithNewName.getId(), client()); + AnomalyDetector resultDetector = getConfig(detectorWithNewName.getId(), client()); assertEquals("Detector name updating failed", detectorWithNewName.getName(), resultDetector.getName()); assertEquals("Updated anomaly detector id doesn't match", detectorWithNewName.getId(), 
resultDetector.getId()); assertNotEquals( @@ -818,12 +818,12 @@ public void testGetDetectorWithAdJob() throws Exception { assertEquals("Fail to start AD job", RestStatus.OK, TestHelpers.restStatus(startAdJobResponse)); - ToXContentObject[] results = getAnomalyDetector(detector.getId(), true, client()); + ToXContentObject[] results = getConfig(detector.getId(), true, client()); assertEquals("Incorrect Location header", detector, results[0]); - assertEquals("Incorrect detector job name", detector.getId(), ((AnomalyDetectorJob) results[1]).getName()); - assertTrue(((AnomalyDetectorJob) results[1]).isEnabled()); + assertEquals("Incorrect detector job name", detector.getId(), ((Job) results[1]).getName()); + assertTrue(((Job) results[1]).isEnabled()); - results = getAnomalyDetector(detector.getId(), false, client()); + results = getConfig(detector.getId(), false, client()); assertEquals("Incorrect Location header", detector, results[0]); assertEquals("Should not return detector job", null, results[1]); } @@ -1197,7 +1197,7 @@ public void testBackwardCompatibilityWithOpenDistro() throws IOException { assertTrue("incorrect version", version > 0); // Get the detector using new _plugins API - AnomalyDetector createdDetector = getAnomalyDetector(id, client()); + AnomalyDetector createdDetector = getConfig(id, client()); assertEquals("Get anomaly detector failed", createdDetector.getId(), id); // Delete the detector using legacy _opendistro API diff --git a/src/test/java/org/opensearch/ad/rest/HistoricalAnalysisRestApiIT.java b/src/test/java/org/opensearch/ad/rest/HistoricalAnalysisRestApiIT.java index 7d0be2ae9..c083ea25f 100644 --- a/src/test/java/org/opensearch/ad/rest/HistoricalAnalysisRestApiIT.java +++ b/src/test/java/org/opensearch/ad/rest/HistoricalAnalysisRestApiIT.java @@ -36,12 +36,12 @@ import org.opensearch.ad.model.ADTaskProfile; import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.client.Response; import org.opensearch.client.ResponseException; import org.opensearch.core.rest.RestStatus; import org.opensearch.core.xcontent.ToXContentObject; import org.opensearch.timeseries.TestHelpers; +import org.opensearch.timeseries.model.Job; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; @@ -146,7 +146,7 @@ private List startHistoricalAnalysis(int categoryFieldSize, String resul // get detector with AD task ToXContentObject[] result = getHistoricalAnomalyDetector(detectorId, true, client()); AnomalyDetector parsedDetector = (AnomalyDetector) result[0]; - AnomalyDetectorJob parsedJob = (AnomalyDetectorJob) result[1]; + Job parsedJob = (Job) result[1]; ADTask parsedADTask = (ADTask) result[2]; assertNull(parsedJob); assertNotNull(parsedDetector); @@ -212,7 +212,7 @@ public void testUpdateHistoricalAnalysis() throws IOException, IllegalAccessExce assertEquals((detector.getVersion().intValue() + 1), (int) responseBody.get("_version")); // get historical detector - AnomalyDetector updatedDetector = getAnomalyDetector(detector.getId(), client()); + AnomalyDetector updatedDetector = getConfig(detector.getId(), client()); assertNotEquals(updatedDetector.getLastUpdateTime(), detector.getLastUpdateTime()); assertEquals(newDetector.getName(), updatedDetector.getName()); assertEquals(newDetector.getDescription(), updatedDetector.getDescription()); diff --git a/src/test/java/org/opensearch/ad/rest/SecureADRestIT.java 
b/src/test/java/org/opensearch/ad/rest/SecureADRestIT.java index 1c0758ebf..9dab61e84 100644 --- a/src/test/java/org/opensearch/ad/rest/SecureADRestIT.java +++ b/src/test/java/org/opensearch/ad/rest/SecureADRestIT.java @@ -191,7 +191,7 @@ public void testFilterByDisabled() throws IOException { // User Alice has AD full access, should be able to create a detector AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); // User Cat has AD full access, should be able to get a detector - AnomalyDetector detector = getAnomalyDetector(aliceDetector.getId(), catClient); + AnomalyDetector detector = getConfig(aliceDetector.getId(), catClient); Assert.assertEquals(aliceDetector.getId(), detector.getId()); } @@ -201,7 +201,7 @@ public void testGetApiFilterByEnabled() throws IOException { enableFilterBy(); // User Cat has AD full access, but is part of different backend role so Cat should not be able to access // Alice detector - Exception exception = expectThrows(IOException.class, () -> { getAnomalyDetector(aliceDetector.getId(), catClient); }); + Exception exception = expectThrows(IOException.class, () -> { getConfig(aliceDetector.getId(), catClient); }); Assert.assertTrue(exception.getMessage().contains("User does not have permissions to access detector: " + aliceDetector.getId())); } @@ -225,7 +225,7 @@ public void testGetApiFilterByEnabledForAdmin() throws IOException { AnomalyDetector aliceDetector = createRandomAnomalyDetector(false, false, aliceClient); enableFilterBy(); confirmingClientIsAdmin(); - AnomalyDetector detector = getAnomalyDetector(aliceDetector.getId(), client()); + AnomalyDetector detector = getConfig(aliceDetector.getId(), client()); Assert .assertArrayEquals( "User backend role of detector doesn't change", @@ -268,7 +268,7 @@ public void testUpdateApiFilterByEnabledForAdmin() throws IOException { // But the detector's backend role should not be replaced as client's backend roles (all_access). Response response = updateAnomalyDetector(aliceDetector.getId(), newDetector, client()); Assert.assertEquals(response.getStatusLine().getStatusCode(), 200); - AnomalyDetector anomalyDetector = getAnomalyDetector(aliceDetector.getId(), aliceClient); + AnomalyDetector anomalyDetector = getConfig(aliceDetector.getId(), aliceClient); Assert .assertArrayEquals( "odfe is still the backendrole, not opensearch", @@ -317,7 +317,7 @@ public void testUpdateApiFilterByEnabled() throws IOException { // not be replaced as Fish's backend roles. 
Response response = updateAnomalyDetector(aliceDetector.getId(), newDetector, fishClient); Assert.assertEquals(response.getStatusLine().getStatusCode(), 200); - AnomalyDetector anomalyDetector = getAnomalyDetector(aliceDetector.getId(), aliceClient); + AnomalyDetector anomalyDetector = getConfig(aliceDetector.getId(), aliceClient); Assert .assertArrayEquals( "Wrong user roles", diff --git a/src/test/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandlerTests.java b/src/test/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandlerTests.java index 59eba777c..da04488a5 100644 --- a/src/test/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandlerTests.java +++ b/src/test/java/org/opensearch/ad/rest/handler/IndexAnomalyDetectorJobActionHandlerTests.java @@ -35,7 +35,6 @@ import org.opensearch.action.index.IndexResponse; import org.opensearch.action.update.UpdateResponse; import org.opensearch.ad.ExecuteADResultResponseRecorder; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.ADIndexManagement; @@ -56,6 +55,7 @@ import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.InternalFailure; import org.opensearch.timeseries.common.exception.ResourceNotFoundException; diff --git a/src/test/java/org/opensearch/ad/settings/AnomalyDetectorSettingsTests.java b/src/test/java/org/opensearch/ad/settings/AnomalyDetectorSettingsTests.java index 72e336ea7..18dba3df4 100644 --- a/src/test/java/org/opensearch/ad/settings/AnomalyDetectorSettingsTests.java +++ b/src/test/java/org/opensearch/ad/settings/AnomalyDetectorSettingsTests.java @@ -20,6 +20,7 @@ import org.opensearch.common.unit.TimeValue; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin; +import org.opensearch.timeseries.settings.TimeSeriesSettings; @SuppressWarnings({ "rawtypes" }) public class AnomalyDetectorSettingsTests extends OpenSearchTestCase { @@ -79,14 +80,14 @@ public void testAllOpenSearchSettingsReturned() { AnomalyDetectorSettings.MAX_SINGLE_ENTITY_ANOMALY_DETECTORS, AnomalyDetectorSettings.MAX_MULTI_ENTITY_ANOMALY_DETECTORS, AnomalyDetectorSettings.MAX_ANOMALY_FEATURES, - AnomalyDetectorSettings.REQUEST_TIMEOUT, + AnomalyDetectorSettings.AD_REQUEST_TIMEOUT, AnomalyDetectorSettings.DETECTION_INTERVAL, AnomalyDetectorSettings.DETECTION_WINDOW_DELAY, AnomalyDetectorSettings.AD_RESULT_HISTORY_ROLLOVER_PERIOD, AnomalyDetectorSettings.AD_RESULT_HISTORY_MAX_DOCS_PER_SHARD, - AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, - AnomalyDetectorSettings.COOLDOWN_MINUTES, - AnomalyDetectorSettings.BACKOFF_MINUTES, + AnomalyDetectorSettings.AD_MAX_RETRY_FOR_UNRESPONSIVE_NODE, + AnomalyDetectorSettings.AD_COOLDOWN_MINUTES, + AnomalyDetectorSettings.AD_BACKOFF_MINUTES, AnomalyDetectorSettings.AD_BACKOFF_INITIAL_DELAY, AnomalyDetectorSettings.AD_MAX_RETRY_FOR_BACKOFF, AnomalyDetectorSettings.AD_RESULT_HISTORY_RETENTION_PERIOD, @@ -116,7 +117,9 @@ public void testAllOpenSearchSettingsReturned() { AnomalyDetectorSettings.ENTITY_COLD_START_QUEUE_MAX_HEAP_PERCENT, AnomalyDetectorSettings.EXPECTED_COLD_ENTITY_EXECUTION_TIME_IN_MILLISECS, AnomalyDetectorSettings.MAX_ENTITIES_PER_QUERY, 
- AnomalyDetectorSettings.PAGE_SIZE + AnomalyDetectorSettings.PAGE_SIZE, + TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, + TimeSeriesSettings.BACKOFF_MINUTES ) ) ); @@ -136,7 +139,7 @@ public void testAllLegacyOpenDistroSettingsFallback() { LegacyOpenDistroAnomalyDetectorSettings.MAX_ANOMALY_FEATURES.get(Settings.EMPTY) ); assertEquals( - AnomalyDetectorSettings.REQUEST_TIMEOUT.get(Settings.EMPTY), + AnomalyDetectorSettings.AD_REQUEST_TIMEOUT.get(Settings.EMPTY), LegacyOpenDistroAnomalyDetectorSettings.REQUEST_TIMEOUT.get(Settings.EMPTY) ); assertEquals( @@ -152,15 +155,15 @@ public void testAllLegacyOpenDistroSettingsFallback() { LegacyOpenDistroAnomalyDetectorSettings.AD_RESULT_HISTORY_ROLLOVER_PERIOD.get(Settings.EMPTY) ); assertEquals( - AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(Settings.EMPTY), + TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(Settings.EMPTY), LegacyOpenDistroAnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(Settings.EMPTY) ); assertEquals( - AnomalyDetectorSettings.COOLDOWN_MINUTES.get(Settings.EMPTY), + AnomalyDetectorSettings.AD_COOLDOWN_MINUTES.get(Settings.EMPTY), LegacyOpenDistroAnomalyDetectorSettings.COOLDOWN_MINUTES.get(Settings.EMPTY) ); assertEquals( - AnomalyDetectorSettings.BACKOFF_MINUTES.get(Settings.EMPTY), + TimeSeriesSettings.BACKOFF_MINUTES.get(Settings.EMPTY), LegacyOpenDistroAnomalyDetectorSettings.BACKOFF_MINUTES.get(Settings.EMPTY) ); assertEquals( @@ -211,7 +214,7 @@ public void testAllLegacyOpenDistroSettingsFallback() { public void testSettingsGetValue() { Settings settings = Settings.builder().put("plugins.anomaly_detection.request_timeout", "42s").build(); - assertEquals(AnomalyDetectorSettings.REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(42)); + assertEquals(AnomalyDetectorSettings.AD_REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(42)); assertEquals(LegacyOpenDistroAnomalyDetectorSettings.REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(10)); settings = Settings.builder().put("plugins.anomaly_detection.max_anomaly_detectors", 99).build(); @@ -253,17 +256,23 @@ public void testSettingsGetValue() { assertEquals(LegacyOpenDistroAnomalyDetectorSettings.AD_RESULT_HISTORY_RETENTION_PERIOD.get(settings), TimeValue.timeValueDays(30)); settings = Settings.builder().put("plugins.anomaly_detection.max_retry_for_unresponsive_node", 91).build(); - assertEquals(AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(settings), Integer.valueOf(91)); + assertEquals(AnomalyDetectorSettings.AD_MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(settings), Integer.valueOf(91)); assertEquals(LegacyOpenDistroAnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(settings), Integer.valueOf(5)); + settings = Settings.builder().put("plugins.timeseries.max_retry_for_unresponsive_node", 91).build(); + assertEquals(TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(settings), Integer.valueOf(91)); + settings = Settings.builder().put("plugins.anomaly_detection.cooldown_minutes", TimeValue.timeValueMinutes(90)).build(); - assertEquals(AnomalyDetectorSettings.COOLDOWN_MINUTES.get(settings), TimeValue.timeValueMinutes(90)); + assertEquals(AnomalyDetectorSettings.AD_COOLDOWN_MINUTES.get(settings), TimeValue.timeValueMinutes(90)); assertEquals(LegacyOpenDistroAnomalyDetectorSettings.COOLDOWN_MINUTES.get(settings), TimeValue.timeValueMinutes(5)); settings = Settings.builder().put("plugins.anomaly_detection.backoff_minutes", TimeValue.timeValueMinutes(89)).build(); - 
assertEquals(AnomalyDetectorSettings.BACKOFF_MINUTES.get(settings), TimeValue.timeValueMinutes(89)); + assertEquals(AnomalyDetectorSettings.AD_BACKOFF_MINUTES.get(settings), TimeValue.timeValueMinutes(89)); assertEquals(LegacyOpenDistroAnomalyDetectorSettings.BACKOFF_MINUTES.get(settings), TimeValue.timeValueMinutes(15)); + settings = Settings.builder().put("plugins.timeseries.backoff_minutes", TimeValue.timeValueMinutes(89)).build(); + assertEquals(TimeSeriesSettings.BACKOFF_MINUTES.get(settings), TimeValue.timeValueMinutes(89)); + settings = Settings.builder().put("plugins.anomaly_detection.backoff_initial_delay", TimeValue.timeValueMillis(88)).build(); assertEquals(AnomalyDetectorSettings.AD_BACKOFF_INITIAL_DELAY.get(settings), TimeValue.timeValueMillis(88)); assertEquals(LegacyOpenDistroAnomalyDetectorSettings.BACKOFF_INITIAL_DELAY.get(settings), TimeValue.timeValueMillis(1000)); @@ -333,8 +342,10 @@ public void testSettingsGetValueWithLegacyFallback() { .put("opendistro.anomaly_detection.ad_result_history_max_docs", 8L) .put("opendistro.anomaly_detection.ad_result_history_retention_period", "9d") .put("opendistro.anomaly_detection.max_retry_for_unresponsive_node", 10) + .put("plugins.timeseries.max_retry_for_unresponsive_node", 10) .put("opendistro.anomaly_detection.cooldown_minutes", "11m") .put("opendistro.anomaly_detection.backoff_minutes", "12m") + .put("plugins.timeseries.backoff_minutes", "12m") .put("opendistro.anomaly_detection.backoff_initial_delay", "13ms") // .put("opendistro.anomaly_detection.max_retry_for_backoff", 14) .put("opendistro.anomaly_detection.max_retry_for_end_run_exception", 15) @@ -353,16 +364,18 @@ public void testSettingsGetValueWithLegacyFallback() { assertEquals(AnomalyDetectorSettings.MAX_SINGLE_ENTITY_ANOMALY_DETECTORS.get(settings), Integer.valueOf(1)); assertEquals(AnomalyDetectorSettings.MAX_MULTI_ENTITY_ANOMALY_DETECTORS.get(settings), Integer.valueOf(2)); assertEquals(AnomalyDetectorSettings.MAX_ANOMALY_FEATURES.get(settings), Integer.valueOf(3)); - assertEquals(AnomalyDetectorSettings.REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(4)); + assertEquals(AnomalyDetectorSettings.AD_REQUEST_TIMEOUT.get(settings), TimeValue.timeValueSeconds(4)); assertEquals(AnomalyDetectorSettings.DETECTION_INTERVAL.get(settings), TimeValue.timeValueMinutes(5)); assertEquals(AnomalyDetectorSettings.DETECTION_WINDOW_DELAY.get(settings), TimeValue.timeValueMinutes(6)); assertEquals(AnomalyDetectorSettings.AD_RESULT_HISTORY_ROLLOVER_PERIOD.get(settings), TimeValue.timeValueHours(7)); // AD_RESULT_HISTORY_MAX_DOCS is removed in the new release assertEquals(LegacyOpenDistroAnomalyDetectorSettings.AD_RESULT_HISTORY_MAX_DOCS.get(settings), Long.valueOf(8L)); assertEquals(AnomalyDetectorSettings.AD_RESULT_HISTORY_RETENTION_PERIOD.get(settings), TimeValue.timeValueDays(9)); - assertEquals(AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(settings), Integer.valueOf(10)); - assertEquals(AnomalyDetectorSettings.COOLDOWN_MINUTES.get(settings), TimeValue.timeValueMinutes(11)); - assertEquals(AnomalyDetectorSettings.BACKOFF_MINUTES.get(settings), TimeValue.timeValueMinutes(12)); + assertEquals(AnomalyDetectorSettings.AD_MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(settings), Integer.valueOf(10)); + assertEquals(TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.get(settings), Integer.valueOf(10)); + assertEquals(AnomalyDetectorSettings.AD_COOLDOWN_MINUTES.get(settings), TimeValue.timeValueMinutes(11)); + 
assertEquals(AnomalyDetectorSettings.AD_BACKOFF_MINUTES.get(settings), TimeValue.timeValueMinutes(12)); + assertEquals(TimeSeriesSettings.BACKOFF_MINUTES.get(settings), TimeValue.timeValueMinutes(12)); assertEquals(AnomalyDetectorSettings.AD_BACKOFF_INITIAL_DELAY.get(settings), TimeValue.timeValueMillis(13)); assertEquals(AnomalyDetectorSettings.AD_MAX_RETRY_FOR_BACKOFF.get(settings), Integer.valueOf(14)); assertEquals(AnomalyDetectorSettings.MAX_RETRY_FOR_END_RUN_EXCEPTION.get(settings), Integer.valueOf(15)); diff --git a/src/test/java/org/opensearch/ad/stats/ADStatsTests.java b/src/test/java/org/opensearch/ad/stats/ADStatsTests.java index 0d8150683..fb5bf220e 100644 --- a/src/test/java/org/opensearch/ad/stats/ADStatsTests.java +++ b/src/test/java/org/opensearch/ad/stats/ADStatsTests.java @@ -100,7 +100,6 @@ public void setup() { IndexUtils indexUtils = mock(IndexUtils.class); when(indexUtils.getIndexHealthStatus(anyString())).thenReturn("yellow"); - when(indexUtils.getNumberOfDocumentsInIndex(anyString())).thenReturn(100L); clusterStatName1 = "clusterStat1"; clusterStatName2 = "clusterStat2"; diff --git a/src/test/java/org/opensearch/ad/task/ADTaskManagerTests.java b/src/test/java/org/opensearch/ad/task/ADTaskManagerTests.java index f1b67e71e..cbdb72646 100644 --- a/src/test/java/org/opensearch/ad/task/ADTaskManagerTests.java +++ b/src/test/java/org/opensearch/ad/task/ADTaskManagerTests.java @@ -30,12 +30,12 @@ import static org.mockito.Mockito.when; import static org.opensearch.ad.constant.ADCommonName.ANOMALY_RESULT_INDEX_ALIAS; import static org.opensearch.ad.constant.ADCommonName.DETECTION_STATE_INDEX; +import static org.opensearch.ad.settings.AnomalyDetectorSettings.AD_REQUEST_TIMEOUT; import static org.opensearch.ad.settings.AnomalyDetectorSettings.BATCH_TASK_PIECE_INTERVAL_SECONDS; import static org.opensearch.ad.settings.AnomalyDetectorSettings.DELETE_AD_RESULT_WHEN_DELETE_DETECTOR; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_BATCH_TASK_PER_NODE; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_OLD_AD_TASK_DOCS_PER_DETECTOR; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RUNNING_ENTITIES_PER_DETECTOR_FOR_HISTORICAL_ANALYSIS; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.REQUEST_TIMEOUT; import static org.opensearch.index.seqno.SequenceNumbers.UNASSIGNED_SEQ_NO; import static org.opensearch.timeseries.TestHelpers.randomAdTask; import static org.opensearch.timeseries.TestHelpers.randomAnomalyDetector; @@ -90,7 +90,6 @@ import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.rest.handler.IndexAnomalyDetectorJobActionHandler; import org.opensearch.ad.stats.InternalStatNames; import org.opensearch.ad.transport.ADStatsNodeResponse; @@ -129,6 +128,7 @@ import org.opensearch.timeseries.function.ExecutorFunction; import org.opensearch.timeseries.model.DateRange; import org.opensearch.timeseries.model.Entity; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; import org.opensearch.transport.TransportResponseHandler; import org.opensearch.transport.TransportService; @@ -213,14 +213,14 @@ public void setUp() throws Exception { .builder() .put(MAX_OLD_AD_TASK_DOCS_PER_DETECTOR.getKey(), 2) .put(BATCH_TASK_PIECE_INTERVAL_SECONDS.getKey(), 1) - .put(REQUEST_TIMEOUT.getKey(), 
TimeValue.timeValueSeconds(10)) + .put(AD_REQUEST_TIMEOUT.getKey(), TimeValue.timeValueSeconds(10)) .build(); clusterSettings = clusterSetting( settings, MAX_OLD_AD_TASK_DOCS_PER_DETECTOR, BATCH_TASK_PIECE_INTERVAL_SECONDS, - REQUEST_TIMEOUT, + AD_REQUEST_TIMEOUT, DELETE_AD_RESULT_WHEN_DELETE_DETECTOR, MAX_BATCH_TASK_PER_NODE, MAX_RUNNING_ENTITIES_PER_DETECTOR_FOR_HISTORICAL_ANALYSIS @@ -847,7 +847,7 @@ public void testCleanADResultOfDeletedDetectorWithException() { .builder() .put(MAX_OLD_AD_TASK_DOCS_PER_DETECTOR.getKey(), 2) .put(BATCH_TASK_PIECE_INTERVAL_SECONDS.getKey(), 1) - .put(REQUEST_TIMEOUT.getKey(), TimeValue.timeValueSeconds(10)) + .put(AD_REQUEST_TIMEOUT.getKey(), TimeValue.timeValueSeconds(10)) .put(DELETE_AD_RESULT_WHEN_DELETE_DETECTOR.getKey(), true) .build(); @@ -855,7 +855,7 @@ public void testCleanADResultOfDeletedDetectorWithException() { settings, MAX_OLD_AD_TASK_DOCS_PER_DETECTOR, BATCH_TASK_PIECE_INTERVAL_SECONDS, - REQUEST_TIMEOUT, + AD_REQUEST_TIMEOUT, DELETE_AD_RESULT_WHEN_DELETE_DETECTOR, MAX_BATCH_TASK_PER_NODE, MAX_RUNNING_ENTITIES_PER_DETECTOR_FOR_HISTORICAL_ANALYSIS @@ -1237,7 +1237,7 @@ private void setupGetAndExecuteOnLatestADTasks(ADTaskProfile adTaskProfile) { true, BytesReference .bytes( - new AnomalyDetectorJob( + new Job( detectorId, randomIntervalSchedule(), randomIntervalTimeConfiguration(), diff --git a/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultTransportActionTests.java index 1e3a3506e..8ce30df12 100644 --- a/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADBatchAnomalyResultTransportActionTests.java @@ -28,13 +28,13 @@ import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.common.settings.Settings; import org.opensearch.core.common.io.stream.NotSerializableExceptionWrapper; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.model.DateRange; +import org.opensearch.timeseries.util.ExceptionUtil; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/transport/ADStatsNodesTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/ADStatsNodesTransportActionTests.java index 95799f911..79ca32d77 100644 --- a/src/test/java/org/opensearch/ad/transport/ADStatsNodesTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/ADStatsNodesTransportActionTests.java @@ -37,9 +37,7 @@ import org.opensearch.ad.stats.suppliers.ModelsOnNodeSupplier; import org.opensearch.ad.stats.suppliers.SettableSupplier; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.ad.util.IndexUtils; -import org.opensearch.ad.util.Throttler; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.service.ClusterService; @@ -49,6 +47,7 @@ import org.opensearch.monitor.jvm.JvmStats; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.util.ClientUtil; import 
org.opensearch.transport.TransportService; public class ADStatsNodesTransportActionTests extends OpenSearchIntegTestCase { @@ -67,15 +66,9 @@ public void setUp() throws Exception { Client client = client(); Clock clock = mock(Clock.class); - Throttler throttler = new Throttler(clock); ThreadPool threadPool = mock(ThreadPool.class); IndexNameExpressionResolver indexNameResolver = mock(IndexNameExpressionResolver.class); - IndexUtils indexUtils = new IndexUtils( - client, - new ClientUtil(Settings.EMPTY, client, throttler, threadPool), - clusterService(), - indexNameResolver - ); + IndexUtils indexUtils = new IndexUtils(client, new ClientUtil(client), clusterService(), indexNameResolver); ModelManager modelManager = mock(ModelManager.class); CacheProvider cacheProvider = mock(CacheProvider.class); EntityCache cache = mock(EntityCache.class); diff --git a/src/test/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportActionTests.java index daf86ab7c..87a5853f9 100644 --- a/src/test/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/AnomalyDetectorJobTransportActionTests.java @@ -49,7 +49,6 @@ import org.opensearch.ad.model.ADTaskState; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.client.Client; import org.opensearch.common.lucene.uid.Versions; import org.opensearch.common.settings.Settings; @@ -58,6 +57,7 @@ import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.DateRange; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.stats.StatNames; import com.google.common.collect.ImmutableList; @@ -354,7 +354,7 @@ public void testStartRealtimeDetector() throws IOException { String detectorId = realtimeResult.get(0); String jobId = realtimeResult.get(1); GetResponse jobDoc = getDoc(CommonName.JOB_INDEX, detectorId); - AnomalyDetectorJob job = toADJob(jobDoc); + Job job = toADJob(jobDoc); assertTrue(job.isEnabled()); assertEquals(detectorId, job.getName()); @@ -423,7 +423,7 @@ public void testStopRealtimeDetector() throws IOException { AnomalyDetectorJobRequest request = stopDetectorJobRequest(detectorId, false); client().execute(AnomalyDetectorJobAction.INSTANCE, request).actionGet(10000); GetResponse doc = getDoc(CommonName.JOB_INDEX, detectorId); - AnomalyDetectorJob job = toADJob(doc); + Job job = toADJob(doc); assertFalse(job.isEnabled()); assertEquals(detectorId, job.getName()); diff --git a/src/test/java/org/opensearch/ad/transport/AnomalyResultTests.java b/src/test/java/org/opensearch/ad/transport/AnomalyResultTests.java index 6b671d6e2..6c8634959 100644 --- a/src/test/java/org/opensearch/ad/transport/AnomalyResultTests.java +++ b/src/test/java/org/opensearch/ad/transport/AnomalyResultTests.java @@ -20,6 +20,7 @@ import static org.hamcrest.Matchers.nullValue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.anyDouble; import static org.mockito.Mockito.anyLong; import static org.mockito.Mockito.doAnswer; @@ -65,7 +66,6 @@ import org.opensearch.action.index.IndexResponse; import org.opensearch.action.support.ActionFilters; import 
org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.cluster.HashRing; import org.opensearch.ad.common.exception.JsonPathNotFoundException; @@ -74,7 +74,6 @@ import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.feature.SinglePointFeatures; import org.opensearch.ad.ml.ModelManager; -import org.opensearch.ad.ml.SingleStreamModelIdMapper; import org.opensearch.ad.ml.ThresholdingResult; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.model.DetectorInternalState; @@ -83,7 +82,6 @@ import org.opensearch.ad.stats.ADStats; import org.opensearch.ad.stats.suppliers.CounterSupplier; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.ClusterState; @@ -107,6 +105,8 @@ import org.opensearch.index.IndexNotFoundException; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.InternalFailure; @@ -115,8 +115,10 @@ import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; import org.opensearch.timeseries.model.FeatureData; import org.opensearch.timeseries.stats.StatNames; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.NodeNotConnectedException; import org.opensearch.transport.RemoteTransportException; import org.opensearch.transport.Transport; @@ -194,10 +196,10 @@ public void setUp() throws Exception { when(detector.getId()).thenReturn(adID); when(detector.getCategoryFields()).thenReturn(null); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(stateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(stateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); when(detector.getIntervalInMinutes()).thenReturn(1L); hashRing = mock(HashRing.class); @@ -867,10 +869,10 @@ public void testMute() { NodeStateManager muteStateManager = mock(NodeStateManager.class); when(muteStateManager.isMuted(any(String.class), any(String.class))).thenReturn(true); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(muteStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(muteStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); AnomalyResultTransportAction action = new AnomalyResultTransportAction( new ActionFilters(Collections.emptySet()), transportService, @@ -1597,10 +1599,10 @@ public void testNullPointerRCFResult() { @SuppressWarnings("unchecked") public void testAllFeaturesDisabled() throws IOException { doAnswer(invocation -> { - 
ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onFailure(new EndRunException(adID, CommonMessages.ALL_FEATURES_DISABLED_ERR_MSG, true)); return null; - }).when(stateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(stateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); AnomalyResultTransportAction action = new AnomalyResultTransportAction( new ActionFilters(Collections.emptySet()), diff --git a/src/test/java/org/opensearch/ad/transport/AnomalyResultTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/AnomalyResultTransportActionTests.java index 0cd9218f0..78ffca8dd 100644 --- a/src/test/java/org/opensearch/ad/transport/AnomalyResultTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/AnomalyResultTransportActionTests.java @@ -25,7 +25,6 @@ import org.opensearch.action.get.GetResponse; import org.opensearch.ad.ADIntegTestCase; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.util.ExceptionUtil; import org.opensearch.core.common.io.stream.NotSerializableExceptionWrapper; import org.opensearch.search.aggregations.AggregationBuilder; import org.opensearch.test.rest.OpenSearchRestTestCase; @@ -33,6 +32,7 @@ import org.opensearch.timeseries.common.exception.TimeSeriesException; import org.opensearch.timeseries.model.Feature; import org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.util.ExceptionUtil; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java index a65c35839..9adf7b387 100644 --- a/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/CronTransportActionTests.java @@ -23,7 +23,6 @@ import org.junit.Before; import org.opensearch.Version; import org.opensearch.action.support.ActionFilters; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; import org.opensearch.ad.common.exception.JsonPathNotFoundException; @@ -42,6 +41,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.transport.TransportService; import test.org.opensearch.ad.util.JsonDeserializer; diff --git a/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTests.java b/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTests.java index 7b67843f1..69a4586e9 100644 --- a/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTests.java +++ b/src/test/java/org/opensearch/ad/transport/DeleteAnomalyDetectorTests.java @@ -37,7 +37,6 @@ import org.opensearch.action.support.PlainActionFuture; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.client.Client; @@ -58,6 +57,7 @@ import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.function.ExecutorFunction; import 
org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.model.Job; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportService; @@ -71,7 +71,7 @@ public class DeleteAnomalyDetectorTests extends AbstractTimeSeriesTest { private DeleteResponse deleteResponse; private GetResponse getResponse; ClusterService clusterService; - private AnomalyDetectorJob jobParameter; + private Job jobParameter; @BeforeClass public static void setUpBeforeClass() { @@ -117,7 +117,7 @@ public void setUp() throws Exception { adTaskManager ); - jobParameter = mock(AnomalyDetectorJob.class); + jobParameter = mock(Job.class); when(jobParameter.getName()).thenReturn(randomAlphaOfLength(10)); IntervalSchedule schedule = new IntervalSchedule(Instant.now(), 1, ChronoUnit.MINUTES); when(jobParameter.getSchedule()).thenReturn(schedule); @@ -288,7 +288,7 @@ private void setupMocks( true, BytesReference .bytes( - new AnomalyDetectorJob( + new Job( "1234", jobParameter.getSchedule(), jobParameter.getWindowDelay(), diff --git a/src/test/java/org/opensearch/ad/transport/DeleteModelTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/DeleteModelTransportActionTests.java index fd74a2802..b0fc233f6 100644 --- a/src/test/java/org/opensearch/ad/transport/DeleteModelTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/DeleteModelTransportActionTests.java @@ -27,7 +27,6 @@ import org.opensearch.Version; import org.opensearch.action.ActionRequestValidationException; import org.opensearch.action.support.ActionFilters; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; import org.opensearch.ad.common.exception.JsonPathNotFoundException; @@ -47,6 +46,7 @@ import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.transport.TransportService; import test.org.opensearch.ad.util.JsonDeserializer; diff --git a/src/test/java/org/opensearch/ad/transport/EntityResultTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/EntityResultTransportActionTests.java index 30177220b..cdee8f68e 100644 --- a/src/test/java/org/opensearch/ad/transport/EntityResultTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/EntityResultTransportActionTests.java @@ -17,6 +17,7 @@ import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyInt; import static org.mockito.ArgumentMatchers.anyString; +import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.doThrow; import static org.mockito.Mockito.eq; @@ -50,7 +51,6 @@ import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.ad.AnomalyDetectorJobRunnerTests; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; @@ -81,6 +81,8 @@ import org.opensearch.core.xcontent.ToXContent; import org.opensearch.core.xcontent.XContentBuilder; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import 
org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.LimitExceededException; @@ -171,7 +173,7 @@ public void setUp() throws Exception { settings = Settings .builder() - .put(AnomalyDetectorSettings.COOLDOWN_MINUTES.getKey(), TimeValue.timeValueMinutes(5)) + .put(AnomalyDetectorSettings.AD_COOLDOWN_MINUTES.getKey(), TimeValue.timeValueMinutes(5)) .put(AnomalyDetectorSettings.CHECKPOINT_SAVING_FREQ.getKey(), TimeValue.timeValueHours(12)) .build(); @@ -207,10 +209,10 @@ public void setUp() throws Exception { detector = TestHelpers.randomAnomalyDetectorUsingCategoryFields(detectorId, Arrays.asList(field)); stateManager = mock(NodeStateManager.class); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(stateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(stateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); cacheMissEntity = "0.0.0.1"; cacheMissData = new double[] { 0.1 }; @@ -302,10 +304,10 @@ public void testNormal() { @SuppressWarnings("unchecked") public void testFailtoGetDetector() { doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.empty()); return null; - }).when(stateManager).getAnomalyDetector(any(String.class), any(ActionListener.class)); + }).when(stateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class)); PlainActionFuture future = PlainActionFuture.newFuture(); diff --git a/src/test/java/org/opensearch/ad/transport/ForwardADTaskTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/ForwardADTaskTransportActionTests.java index f2da82c36..dae5f8713 100644 --- a/src/test/java/org/opensearch/ad/transport/ForwardADTaskTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/ForwardADTaskTransportActionTests.java @@ -32,13 +32,13 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; import org.opensearch.ad.ADUnitTestCase; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.feature.FeatureManager; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.task.ADTaskCacheManager; import org.opensearch.ad.task.ADTaskManager; import org.opensearch.tasks.Task; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.transport.TransportService; diff --git a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorActionTests.java b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorActionTests.java index 60144c63c..9cc58bb8a 100644 --- a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorActionTests.java @@ -20,11 +20,11 @@ import org.mockito.Mockito; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.DetectorProfile; import org.opensearch.common.io.stream.BytesStreamOutput; import org.opensearch.core.common.io.stream.StreamInput; import org.opensearch.core.rest.RestStatus; +import 
org.opensearch.timeseries.model.Job; import org.powermock.api.mockito.PowerMockito; import org.powermock.core.classloader.annotations.PrepareForTest; import org.powermock.modules.junit4.PowerMockRunner; @@ -52,7 +52,7 @@ public void testGetRequest() throws IOException { public void testGetResponse() throws Exception { BytesStreamOutput out = new BytesStreamOutput(); AnomalyDetector detector = Mockito.mock(AnomalyDetector.class); - AnomalyDetectorJob detectorJob = Mockito.mock(AnomalyDetectorJob.class); + Job detectorJob = Mockito.mock(Job.class); Mockito.doNothing().when(detector).writeTo(out); GetAnomalyDetectorResponse response = new GetAnomalyDetectorResponse( 1234, diff --git a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTests.java b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTests.java index 4a3f2a89c..f37e79a5b 100644 --- a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTests.java +++ b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTests.java @@ -23,7 +23,6 @@ import java.io.IOException; import java.nio.ByteBuffer; -import java.time.Clock; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; @@ -40,14 +39,11 @@ import org.opensearch.action.get.MultiGetResponse; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.constant.ADCommonMessages; import org.opensearch.ad.model.ADTask; import org.opensearch.ad.model.ADTaskType; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.SecurityClientUtil; -import org.opensearch.ad.util.Throttler; import org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.ClusterSettings; @@ -55,10 +51,12 @@ import org.opensearch.core.common.bytes.BytesReference; import org.opensearch.index.get.GetResult; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.constant.CommonMessages; import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.Entity; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportService; @@ -116,9 +114,6 @@ public void setUp() throws Exception { client = mock(Client.class); when(client.threadPool()).thenReturn(threadPool); - Clock clock = mock(Clock.class); - Throttler throttler = new Throttler(clock); - NodeStateManager nodeStateManager = mock(NodeStateManager.class); clientUtil = new SecurityClientUtil(nodeStateManager, Settings.EMPTY); diff --git a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportActionTests.java index 34f1485c2..322b140b0 100644 --- a/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportActionTests.java +++ b/src/test/java/org/opensearch/ad/transport/GetAnomalyDetectorTransportActionTests.java @@ -26,11 +26,9 @@ import org.mockito.Mockito; import org.opensearch.action.ActionListener; import org.opensearch.action.support.ActionFilters; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.ADTask; import 
org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.EntityProfile; import org.opensearch.ad.model.InitProgressProfile; import org.opensearch.ad.settings.AnomalyDetectorSettings; @@ -50,10 +48,13 @@ import org.opensearch.test.OpenSearchSingleNodeTestCase; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.model.Entity; +import org.opensearch.timeseries.model.Job; import org.opensearch.timeseries.util.DiscoveryNodeFilterer; import org.opensearch.timeseries.util.RestHandlerUtils; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableMap; @@ -125,32 +126,14 @@ protected NamedWriteableRegistry writableRegistry() { @Test public void testGetTransportAction() throws IOException { - GetAnomalyDetectorRequest getAnomalyDetectorRequest = new GetAnomalyDetectorRequest( - "1234", - 4321, - false, - false, - "nonempty", - "", - false, - null - ); - action.doExecute(task, getAnomalyDetectorRequest, response); + GetAnomalyDetectorRequest getConfigRequest = new GetAnomalyDetectorRequest("1234", 4321, false, false, "nonempty", "", false, null); + action.doExecute(task, getConfigRequest, response); } @Test public void testGetTransportActionWithReturnJob() throws IOException { - GetAnomalyDetectorRequest getAnomalyDetectorRequest = new GetAnomalyDetectorRequest( - "1234", - 4321, - true, - false, - "", - "abcd", - false, - null - ); - action.doExecute(task, getAnomalyDetectorRequest, response); + GetAnomalyDetectorRequest getConfigRequest = new GetAnomalyDetectorRequest("1234", 4321, true, false, "", "abcd", false, null); + action.doExecute(task, getConfigRequest, response); } @Test @@ -186,7 +169,7 @@ public void testGetAnomalyDetectorRequestNoEntityValue() throws IOException { public void testGetAnomalyDetectorResponse() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); - AnomalyDetectorJob adJob = TestHelpers.randomAnomalyDetectorJob(); + Job adJob = TestHelpers.randomAnomalyDetectorJob(); GetAnomalyDetectorResponse response = new GetAnomalyDetectorResponse( 4321, "1234", @@ -220,7 +203,7 @@ public void testGetAnomalyDetectorResponse() throws IOException { public void testGetAnomalyDetectorProfileResponse() throws IOException { BytesStreamOutput out = new BytesStreamOutput(); AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); - AnomalyDetectorJob adJob = TestHelpers.randomAnomalyDetectorJob(); + Job adJob = TestHelpers.randomAnomalyDetectorJob(); InitProgressProfile initProgress = new InitProgressProfile("99%", 2L, 2); EntityProfile entityProfile = new EntityProfile.Builder().initProgress(initProgress).build(); GetAnomalyDetectorResponse response = new GetAnomalyDetectorResponse( diff --git a/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java b/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java index 0a3859bc2..e93af3ca5 100644 --- a/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java +++ 
b/src/test/java/org/opensearch/ad/transport/IndexAnomalyDetectorTransportActionTests.java @@ -37,13 +37,10 @@ import org.opensearch.action.search.ShardSearchFailure; import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.WriteRequest; -import org.opensearch.ad.NodeStateManager; -import org.opensearch.ad.feature.SearchFeatureDao; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyDetector; import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterName; import org.opensearch.cluster.ClusterState; @@ -61,8 +58,11 @@ import org.opensearch.tasks.Task; import org.opensearch.test.OpenSearchIntegTestCase; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.feature.SearchFeatureDao; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.TransportService; import com.google.common.collect.ImmutableMap; diff --git a/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java b/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java index 9ec5aa9d5..fe8877c2a 100644 --- a/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java +++ b/src/test/java/org/opensearch/ad/transport/MultiEntityResultTests.java @@ -24,9 +24,7 @@ import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES; import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_ENTITIES_PER_QUERY; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE; import static org.opensearch.ad.settings.AnomalyDetectorSettings.PAGE_SIZE; import java.io.IOException; @@ -69,7 +67,6 @@ import org.opensearch.action.support.ActionFilters; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.action.support.master.AcknowledgedResponse; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.breaker.ADCircuitBreakerService; import org.opensearch.ad.caching.CacheProvider; import org.opensearch.ad.caching.EntityCache; @@ -90,9 +87,6 @@ import org.opensearch.ad.stats.ADStats; import org.opensearch.ad.stats.suppliers.CounterSupplier; import org.opensearch.ad.task.ADTaskManager; -import org.opensearch.ad.util.ClientUtil; -import org.opensearch.ad.util.SecurityClientUtil; -import org.opensearch.ad.util.Throttler; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; import org.opensearch.cluster.node.DiscoveryNode; @@ -115,6 +109,8 @@ import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.EndRunException; import org.opensearch.timeseries.common.exception.InternalFailure; @@ -123,7 +119,10 @@ import org.opensearch.timeseries.constant.CommonName; import org.opensearch.timeseries.model.Entity; import 
org.opensearch.timeseries.model.IntervalTimeConfiguration; +import org.opensearch.timeseries.settings.TimeSeriesSettings; import org.opensearch.timeseries.stats.StatNames; +import org.opensearch.timeseries.util.ClientUtil; +import org.opensearch.timeseries.util.SecurityClientUtil; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportException; import org.opensearch.transport.TransportInterceptor; @@ -201,12 +200,12 @@ public void setUp() throws Exception { stateManager = mock(NodeStateManager.class); // make sure parameters are not null, otherwise this mock won't get invoked doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(stateManager).getAnomalyDetector(anyString(), any(ActionListener.class)); + }).when(stateManager).getConfig(anyString(), eq(AnalysisType.AD), any(ActionListener.class)); - settings = Settings.builder().put(AnomalyDetectorSettings.COOLDOWN_MINUTES.getKey(), TimeValue.timeValueMinutes(5)).build(); + settings = Settings.builder().put(AnomalyDetectorSettings.AD_COOLDOWN_MINUTES.getKey(), TimeValue.timeValueMinutes(5)).build(); // make sure end time is larger enough than Clock.systemUTC().millis() to get PageIterator.hasNext() to pass request = new AnomalyResultRequest(detectorId, 100, Clock.systemUTC().millis() + 100_000); @@ -230,8 +229,8 @@ public void setUp() throws Exception { Set> anomalyResultSetting = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); anomalyResultSetting.add(MAX_ENTITIES_PER_QUERY); anomalyResultSetting.add(PAGE_SIZE); - anomalyResultSetting.add(MAX_RETRY_FOR_UNRESPONSIVE_NODE); - anomalyResultSetting.add(BACKOFF_MINUTES); + anomalyResultSetting.add(TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE); + anomalyResultSetting.add(TimeSeriesSettings.BACKOFF_MINUTES); ClusterSettings clusterSettings = new ClusterSettings(Settings.EMPTY, anomalyResultSetting); DiscoveryNode discoveryNode = new DiscoveryNode( @@ -440,10 +439,12 @@ public void setUpNormlaStateManager() throws IOException { client, xContentRegistry(), settings, - new ClientUtil(settings, client, new Throttler(mock(Clock.class)), threadPool), + new ClientUtil(client), clock, AnomalyDetectorSettings.HOURLY_MAINTENANCE, - clusterService + clusterService, + TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, + TimeSeriesSettings.BACKOFF_MINUTES ); clientUtil = new SecurityClientUtil(stateManager, settings); @@ -754,7 +755,9 @@ public void testCircuitBreakerOpen() throws InterruptedException, IOException { clientUtil, clock, AnomalyDetectorSettings.HOURLY_MAINTENANCE, - clusterService + clusterService, + TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, + TimeSeriesSettings.BACKOFF_MINUTES ); NodeStateManager spyStateManager = spy(stateManager); @@ -1208,11 +1211,11 @@ private NodeStateManager setUpTestExceptionTestingInModelNode() throws IOExcepti CountDownLatch modelNodeInProgress = new CountDownLatch(1); // make sure parameters are not null, otherwise this mock won't get invoked doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); modelNodeInProgress.countDown(); return null; - }).when(modelNodeStateManager).getAnomalyDetector(anyString(), any(ActionListener.class)); + }).when(modelNodeStateManager).getConfig(anyString(), eq(AnalysisType.AD), any(ActionListener.class)); return 
modelNodeStateManager; } diff --git a/src/test/java/org/opensearch/ad/transport/RCFPollingTests.java b/src/test/java/org/opensearch/ad/transport/RCFPollingTests.java index edb480dd1..0ed3fd1ee 100644 --- a/src/test/java/org/opensearch/ad/transport/RCFPollingTests.java +++ b/src/test/java/org/opensearch/ad/transport/RCFPollingTests.java @@ -33,7 +33,6 @@ import org.opensearch.ad.common.exception.JsonPathNotFoundException; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.ml.ModelManager; -import org.opensearch.ad.ml.SingleStreamModelIdMapper; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -44,6 +43,7 @@ import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; +import org.opensearch.timeseries.ml.SingleStreamModelIdMapper; import org.opensearch.transport.ConnectTransportException; import org.opensearch.transport.Transport; import org.opensearch.transport.TransportException; diff --git a/src/test/java/org/opensearch/ad/transport/handler/AbstractIndexHandlerTest.java b/src/test/java/org/opensearch/ad/transport/handler/AbstractIndexHandlerTest.java index 4d1c1ed44..901c58e60 100644 --- a/src/test/java/org/opensearch/ad/transport/handler/AbstractIndexHandlerTest.java +++ b/src/test/java/org/opensearch/ad/transport/handler/AbstractIndexHandlerTest.java @@ -30,9 +30,7 @@ import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.transport.AnomalyResultTests; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.ad.util.IndexUtils; -import org.opensearch.ad.util.Throttler; import org.opensearch.client.Client; import org.opensearch.cluster.ClusterState; import org.opensearch.cluster.metadata.IndexMetadata; @@ -43,6 +41,7 @@ import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.AbstractTimeSeriesTest; import org.opensearch.timeseries.TestHelpers; +import org.opensearch.timeseries.util.ClientUtil; public abstract class AbstractIndexHandlerTest extends AbstractTimeSeriesTest { enum IndexCreation { @@ -64,9 +63,6 @@ enum IndexCreation { @Mock protected ADIndexManagement anomalyDetectionIndices; - @Mock - protected Throttler throttler; - @Mock protected ClusterService clusterService; @@ -95,7 +91,7 @@ public void setUp() throws Exception { MockitoAnnotations.initMocks(this); setWriteBlockAdResultIndex(false); context = TestHelpers.createThreadPool(); - clientUtil = new ClientUtil(settings, client, throttler, context); + clientUtil = new ClientUtil(client); indexUtil = new IndexUtils(client, clientUtil, clusterService, indexNameResolver); } diff --git a/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandlerTests.java b/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandlerTests.java index a2635ed8f..a87c6d2ea 100644 --- a/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandlerTests.java +++ b/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultBulkIndexHandlerTests.java @@ -37,9 +37,7 @@ import org.opensearch.ad.ADUnitTestCase; import org.opensearch.ad.indices.ADIndexManagement; import org.opensearch.ad.model.AnomalyResult; -import org.opensearch.ad.util.ClientUtil; import org.opensearch.ad.util.IndexUtils; -import org.opensearch.ad.util.Throttler; import 
org.opensearch.client.Client; import org.opensearch.cluster.service.ClusterService; import org.opensearch.common.settings.Settings; @@ -48,6 +46,7 @@ import org.opensearch.index.engine.VersionConflictEngineException; import org.opensearch.threadpool.ThreadPool; import org.opensearch.timeseries.TestHelpers; +import org.opensearch.timeseries.util.ClientUtil; import com.google.common.collect.ImmutableList; @@ -66,9 +65,8 @@ public void setUp() throws Exception { client = mock(Client.class); Settings settings = Settings.EMPTY; Clock clock = mock(Clock.class); - Throttler throttler = new Throttler(clock); ThreadPool threadpool = mock(ThreadPool.class); - ClientUtil clientUtil = new ClientUtil(Settings.EMPTY, client, throttler, threadpool); + ClientUtil clientUtil = new ClientUtil(client); indexUtils = mock(IndexUtils.class); ClusterService clusterService = mock(ClusterService.class); ThreadPool threadPool = mock(ThreadPool.class); diff --git a/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultHandlerTests.java b/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultHandlerTests.java index 89367a72b..fc400004c 100644 --- a/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultHandlerTests.java +++ b/src/test/java/org/opensearch/ad/transport/handler/AnomalyResultHandlerTests.java @@ -34,12 +34,12 @@ import org.opensearch.action.ActionListener; import org.opensearch.action.index.IndexRequest; import org.opensearch.action.index.IndexResponse; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.concurrency.OpenSearchRejectedExecutionException; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.common.exception.TimeSeriesException; diff --git a/src/test/java/org/opensearch/ad/util/ExceptionUtilsTests.java b/src/test/java/org/opensearch/ad/util/ExceptionUtilsTests.java index 8d64ba08e..3a9ff1047 100644 --- a/src/test/java/org/opensearch/ad/util/ExceptionUtilsTests.java +++ b/src/test/java/org/opensearch/ad/util/ExceptionUtilsTests.java @@ -18,6 +18,7 @@ import org.opensearch.core.rest.RestStatus; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.timeseries.common.exception.TimeSeriesException; +import org.opensearch.timeseries.util.ExceptionUtil; public class ExceptionUtilsTests extends OpenSearchTestCase { diff --git a/src/test/java/org/opensearch/ad/util/IndexUtilsTests.java b/src/test/java/org/opensearch/ad/util/IndexUtilsTests.java index bea6abf95..7234f6feb 100644 --- a/src/test/java/org/opensearch/ad/util/IndexUtilsTests.java +++ b/src/test/java/org/opensearch/ad/util/IndexUtilsTests.java @@ -13,17 +13,13 @@ import static org.mockito.Mockito.mock; -import java.time.Clock; - import org.junit.Before; import org.junit.Test; import org.opensearch.action.support.master.AcknowledgedResponse; import org.opensearch.client.Client; import org.opensearch.cluster.metadata.IndexNameExpressionResolver; -import org.opensearch.common.settings.Settings; import org.opensearch.test.OpenSearchIntegTestCase; -import org.opensearch.threadpool.ThreadPool; -import org.opensearch.timeseries.TestHelpers; +import org.opensearch.timeseries.util.ClientUtil; public class IndexUtilsTests extends OpenSearchIntegTestCase { @@ -34,10 +30,7 @@ public class IndexUtilsTests extends 
OpenSearchIntegTestCase { @Before public void setup() { Client client = client(); - Clock clock = mock(Clock.class); - Throttler throttler = new Throttler(clock); - ThreadPool context = TestHelpers.createThreadPool(); - clientUtil = new ClientUtil(Settings.EMPTY, client, throttler, context); + clientUtil = new ClientUtil(client); indexNameResolver = mock(IndexNameExpressionResolver.class); } @@ -70,25 +63,4 @@ public void testGetIndexHealth_Alias() { String status = indexUtils.getIndexHealthStatus(aliasName); assertTrue(status.equals("green") || status.equals("yellow")); } - - @Test - public void testGetNumberOfDocumentsInIndex_NonExistentIndex() { - IndexUtils indexUtils = new IndexUtils(client(), clientUtil, clusterService(), indexNameResolver); - assertEquals((Long) 0L, indexUtils.getNumberOfDocumentsInIndex("index")); - } - - @Test - public void testGetNumberOfDocumentsInIndex_RegularIndex() { - String indexName = "test-2"; - createIndex(indexName); - flush(); - - long count = 2100; - for (int i = 0; i < count; i++) { - index(indexName, "_doc", String.valueOf(i), "{}"); - } - flushAndRefresh(indexName); - IndexUtils indexUtils = new IndexUtils(client(), clientUtil, clusterService(), indexNameResolver); - assertEquals((Long) count, indexUtils.getNumberOfDocumentsInIndex(indexName)); - } } diff --git a/src/test/java/org/opensearch/ad/util/ThrottlerTests.java b/src/test/java/org/opensearch/ad/util/ThrottlerTests.java deleted file mode 100644 index 61bb19ec8..000000000 --- a/src/test/java/org/opensearch/ad/util/ThrottlerTests.java +++ /dev/null @@ -1,67 +0,0 @@ -/* - * SPDX-License-Identifier: Apache-2.0 - * - * The OpenSearch Contributors require contributions made to - * this file be licensed under the Apache-2.0 license or a - * compatible open source license. - * - * Modifications Copyright OpenSearch Contributors. See - * GitHub history for details. 
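/*
 * Illustrative sketch, not part of the patch: the hunks above replace the old
 * four-argument ClientUtil construction (Settings, Client, Throttler, ThreadPool)
 * with the single-argument form, since Throttler and the synchronous ClientUtil
 * code paths are removed by this change. Assumed context: a test class with a
 * Client `client`, a ClusterService from clusterService(), and a mocked
 * IndexNameExpressionResolver `indexNameResolver`, as in the tests shown above.
 */
// Before this patch (removed): new ClientUtil(Settings.EMPTY, client, new Throttler(clock), threadPool)
// After this patch: only the client is needed.
ClientUtil clientUtil = new ClientUtil(client);
IndexUtils indexUtils = new IndexUtils(client, clientUtil, clusterService(), indexNameResolver);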
- */ - -package org.opensearch.ad.util; - -import static org.mockito.Mockito.mock; -import static org.powermock.api.mockito.PowerMockito.when; - -import java.time.Clock; - -import org.junit.Before; -import org.junit.Test; -import org.opensearch.action.search.SearchRequest; -import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.test.OpenSearchTestCase; - -public class ThrottlerTests extends OpenSearchTestCase { - private Throttler throttler; - - @Before - public void setup() { - Clock clock = mock(Clock.class); - this.throttler = new Throttler(clock); - } - - @Test - public void testGetFilteredQuery() { - AnomalyDetector detector = mock(AnomalyDetector.class); - when(detector.getId()).thenReturn("test detector Id"); - SearchRequest dummySearchRequest = new SearchRequest(); - throttler.insertFilteredQuery(detector.getId(), dummySearchRequest); - // case 1: key exists - assertTrue(throttler.getFilteredQuery(detector.getId()).isPresent()); - // case 2: key doesn't exist - assertFalse(throttler.getFilteredQuery("different test detector Id").isPresent()); - } - - @Test - public void testInsertFilteredQuery() { - AnomalyDetector detector = mock(AnomalyDetector.class); - when(detector.getId()).thenReturn("test detector Id"); - SearchRequest dummySearchRequest = new SearchRequest(); - // first time: key doesn't exist - assertTrue(throttler.insertFilteredQuery(detector.getId(), dummySearchRequest)); - // second time: key exists - assertFalse(throttler.insertFilteredQuery(detector.getId(), dummySearchRequest)); - } - - @Test - public void testClearFilteredQuery() { - AnomalyDetector detector = mock(AnomalyDetector.class); - when(detector.getId()).thenReturn("test detector Id"); - SearchRequest dummySearchRequest = new SearchRequest(); - assertTrue(throttler.insertFilteredQuery(detector.getId(), dummySearchRequest)); - throttler.clearFilteredQuery(detector.getId()); - assertTrue(throttler.insertFilteredQuery(detector.getId(), dummySearchRequest)); - } - -} diff --git a/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityProfileTests.java b/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityProfileTests.java index 7d9f9b1b2..654664287 100644 --- a/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityProfileTests.java +++ b/src/test/java/org/opensearch/search/aggregations/metrics/CardinalityProfileTests.java @@ -31,24 +31,24 @@ import org.opensearch.action.search.SearchResponse; import org.opensearch.ad.AbstractProfileRunnerTests; import org.opensearch.ad.AnomalyDetectorProfileRunner; -import org.opensearch.ad.NodeStateManager; import org.opensearch.ad.constant.ADCommonName; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.transport.ProfileAction; import org.opensearch.ad.transport.ProfileNodeResponse; import org.opensearch.ad.transport.ProfileResponse; -import org.opensearch.ad.util.SecurityClientUtil; import org.opensearch.cluster.ClusterName; import org.opensearch.common.settings.Settings; import org.opensearch.common.util.BigArrays; +import org.opensearch.common.util.BitMixer; import org.opensearch.search.aggregations.InternalAggregation; import org.opensearch.search.aggregations.InternalAggregations; +import org.opensearch.timeseries.AnalysisType; +import org.opensearch.timeseries.NodeStateManager; import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; import 
org.opensearch.timeseries.model.IntervalTimeConfiguration; - -import com.carrotsearch.hppc.BitMixer; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.util.SecurityClientUtil; /** * Run tests in ES package since InternalCardinality has only package private constructors @@ -73,10 +73,10 @@ private void setUpMultiEntityClientGet(DetectorStatus detectorStatus, JobStatus .randomAnomalyDetectorWithInterval(new IntervalTimeConfiguration(detectorIntervalMin, ChronoUnit.MINUTES), true); NodeStateManager nodeStateManager = mock(NodeStateManager.class); doAnswer(invocation -> { - ActionListener> listener = invocation.getArgument(1); + ActionListener> listener = invocation.getArgument(2); listener.onResponse(Optional.of(detector)); return null; - }).when(nodeStateManager).getAnomalyDetector(anyString(), any(ActionListener.class)); + }).when(nodeStateManager).getConfig(anyString(), eq(AnalysisType.AD), any(ActionListener.class)); clientUtil = new SecurityClientUtil(nodeStateManager, Settings.EMPTY); runner = new AnomalyDetectorProfileRunner( client, @@ -103,7 +103,7 @@ private void setUpMultiEntityClientGet(DetectorStatus detectorStatus, JobStatus break; } } else if (request.index().equals(CommonName.JOB_INDEX)) { - AnomalyDetectorJob job = null; + Job job = null; switch (jobStatus) { case ENABLED: job = TestHelpers.randomAnomalyDetectorJob(true); diff --git a/src/test/java/org/opensearch/timeseries/AbstractTimeSeriesTest.java b/src/test/java/org/opensearch/timeseries/AbstractTimeSeriesTest.java index 8799b9be6..e2e2e2a76 100644 --- a/src/test/java/org/opensearch/timeseries/AbstractTimeSeriesTest.java +++ b/src/test/java/org/opensearch/timeseries/AbstractTimeSeriesTest.java @@ -44,7 +44,6 @@ import org.opensearch.action.ActionResponse; import org.opensearch.action.support.PlainActionFuture; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; import org.opensearch.ad.model.AnomalyResult; import org.opensearch.ad.model.DetectorInternalState; import org.opensearch.cluster.metadata.AliasMetadata; @@ -64,6 +63,7 @@ import org.opensearch.threadpool.FixedExecutorBuilder; import org.opensearch.threadpool.TestThreadPool; import org.opensearch.threadpool.ThreadPool; +import org.opensearch.timeseries.model.Job; import org.opensearch.transport.TransportInterceptor; import org.opensearch.transport.TransportService; @@ -351,7 +351,7 @@ protected NamedXContentRegistry xContentRegistry() { AnomalyDetector.XCONTENT_REGISTRY, AnomalyResult.XCONTENT_REGISTRY, DetectorInternalState.XCONTENT_REGISTRY, - AnomalyDetectorJob.XCONTENT_REGISTRY + Job.XCONTENT_REGISTRY ) ); return new NamedXContentRegistry(entries); diff --git a/src/test/java/org/opensearch/ad/NodeStateManagerTests.java b/src/test/java/org/opensearch/timeseries/NodeStateManagerTests.java similarity index 68% rename from src/test/java/org/opensearch/ad/NodeStateManagerTests.java rename to src/test/java/org/opensearch/timeseries/NodeStateManagerTests.java index 9cad7d5eb..be97f8f53 100644 --- a/src/test/java/org/opensearch/ad/NodeStateManagerTests.java +++ b/src/test/java/org/opensearch/timeseries/NodeStateManagerTests.java @@ -9,8 +9,10 @@ * GitHub history for details. 
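/*
 * Illustrative sketch, not part of the patch: the mock wiring that the tests in this
 * diff switch to for the refactored NodeStateManager#getConfig. The method now takes
 * an AnalysisType, so the listener is the third argument (getArgument(2)) rather than
 * the second. Generic types, stripped elsewhere in this rendering, are restored here
 * as ActionListener<Optional<AnomalyDetector>>; static imports of Mockito's
 * doAnswer/any/eq and a mocked NodeStateManager `stateManager` plus an
 * AnomalyDetector `detector` are assumed.
 */
doAnswer(invocation -> {
    // Listener index moved from 1 to 2 because AnalysisType is now the second parameter.
    ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(2);
    listener.onResponse(Optional.of(detector));
    return null;
}).when(stateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class));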
*/ -package org.opensearch.ad; +package org.opensearch.timeseries; +import static org.hamcrest.Matchers.equalTo; +import static org.mockito.ArgumentMatchers.any; import static org.mockito.Matchers.any; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; @@ -18,8 +20,6 @@ import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyZeroInteractions; import static org.mockito.Mockito.when; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.BACKOFF_MINUTES; -import static org.opensearch.ad.settings.AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE; import java.io.IOException; import java.time.Clock; @@ -29,9 +29,12 @@ import java.util.Collections; import java.util.HashSet; import java.util.Locale; +import java.util.Optional; import java.util.Set; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; +import java.util.concurrent.atomic.AtomicReference; +import java.util.function.Consumer; import java.util.stream.IntStream; import org.junit.After; @@ -40,15 +43,11 @@ import org.junit.BeforeClass; import org.opensearch.Version; import org.opensearch.action.ActionListener; +import org.opensearch.action.LatchedActionListener; import org.opensearch.action.get.GetRequest; import org.opensearch.action.get.GetResponse; -import org.opensearch.action.search.SearchRequest; import org.opensearch.ad.model.AnomalyDetector; -import org.opensearch.ad.model.AnomalyDetectorJob; -import org.opensearch.ad.settings.AnomalyDetectorSettings; import org.opensearch.ad.transport.AnomalyResultTests; -import org.opensearch.ad.util.ClientUtil; -import org.opensearch.ad.util.Throttler; import org.opensearch.client.Client; import org.opensearch.cluster.node.DiscoveryNode; import org.opensearch.cluster.node.DiscoveryNodeRole; @@ -58,13 +57,16 @@ import org.opensearch.common.settings.Settings; import org.opensearch.common.unit.TimeValue; import org.opensearch.core.xcontent.NamedXContentRegistry; +import org.opensearch.forecast.model.Forecaster; import org.opensearch.search.SearchModule; import org.opensearch.test.ClusterServiceUtils; import org.opensearch.test.OpenSearchTestCase; import org.opensearch.threadpool.ThreadPool; -import org.opensearch.timeseries.AbstractTimeSeriesTest; -import org.opensearch.timeseries.TestHelpers; import org.opensearch.timeseries.constant.CommonName; +import org.opensearch.timeseries.model.Config; +import org.opensearch.timeseries.model.Job; +import org.opensearch.timeseries.settings.TimeSeriesSettings; +import org.opensearch.timeseries.util.ClientUtil; import com.google.common.collect.ImmutableMap; @@ -74,7 +76,6 @@ public class NodeStateManagerTests extends AbstractTimeSeriesTest { private ClientUtil clientUtil; private Clock clock; private Duration duration; - private Throttler throttler; private ThreadPool context; private AnomalyDetector detectorToCheck; private Settings settings; @@ -84,7 +85,7 @@ public class NodeStateManagerTests extends AbstractTimeSeriesTest { private GetResponse checkpointResponse; private ClusterService clusterService; private ClusterSettings clusterSettings; - private AnomalyDetectorJob jobToCheck; + private Job jobToCheck; @Override protected NamedXContentRegistry xContentRegistry() { @@ -109,18 +110,17 @@ public void setUp() throws Exception { client = mock(Client.class); settings = Settings .builder() - .put("plugins.anomaly_detection.max_retry_for_unresponsive_node", 3) - .put("plugins.anomaly_detection.ad_mute_minutes", TimeValue.timeValueMinutes(10)) + 
.put("plugins.timeseries.max_retry_for_unresponsive_node", 3) + .put("plugins.timeseries.backoff_minutes", TimeValue.timeValueMinutes(10)) .build(); clock = mock(Clock.class); duration = Duration.ofHours(1); context = TestHelpers.createThreadPool(); - throttler = new Throttler(clock); - clientUtil = new ClientUtil(Settings.EMPTY, client, throttler, mock(ThreadPool.class)); + clientUtil = new ClientUtil(client); Set> nodestateSetting = new HashSet<>(ClusterSettings.BUILT_IN_CLUSTER_SETTINGS); - nodestateSetting.add(MAX_RETRY_FOR_UNRESPONSIVE_NODE); - nodestateSetting.add(BACKOFF_MINUTES); + nodestateSetting.add(TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE); + nodestateSetting.add(TimeSeriesSettings.BACKOFF_MINUTES); clusterSettings = new ClusterSettings(Settings.EMPTY, nodestateSetting); DiscoveryNode discoveryNode = new DiscoveryNode( @@ -132,7 +132,17 @@ public void setUp() throws Exception { ); clusterService = ClusterServiceUtils.createClusterService(threadPool, discoveryNode, clusterSettings); - stateManager = new NodeStateManager(client, xContentRegistry(), settings, clientUtil, clock, duration, clusterService); + stateManager = new NodeStateManager( + client, + xContentRegistry(), + settings, + clientUtil, + clock, + duration, + clusterService, + TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE, + TimeSeriesSettings.BACKOFF_MINUTES + ); checkpointResponse = mock(GetResponse.class); jobToCheck = TestHelpers.randomAnomalyDetectorJob(true, Instant.ofEpochMilli(1602401500000L), null); @@ -203,13 +213,6 @@ private void setupCheckpoint(boolean responseExists) throws IOException { }).when(client).get(any(), any(ActionListener.class)); } - public void testGetLastError() throws IOException, InterruptedException { - String error = "blah"; - assertEquals(NodeStateManager.NO_ERROR, stateManager.getLastDetectionError(adId)); - stateManager.setLastDetectionError(adId, error); - assertEquals(error, stateManager.getLastDetectionError(adId)); - } - public void testShouldMute() { assertTrue(!stateManager.isMuted(nodeId, adId)); @@ -235,29 +238,11 @@ public void testMaintenanceDoNothing() { verifyZeroInteractions(clock); } - public void testHasRunningQuery() throws IOException { - stateManager = new NodeStateManager( - client, - xContentRegistry(), - settings, - new ClientUtil(settings, client, throttler, context), - clock, - duration, - clusterService - ); - - AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of(), null); - SearchRequest dummySearchRequest = new SearchRequest(); - assertFalse(stateManager.hasRunningQuery(detector)); - throttler.insertFilteredQuery(detector.getId(), dummySearchRequest); - assertTrue(stateManager.hasRunningQuery(detector)); - } - public void testGetAnomalyDetector() throws IOException, InterruptedException { String detectorId = setupDetector(); final CountDownLatch inProgressLatch = new CountDownLatch(1); - stateManager.getAnomalyDetector(detectorId, ActionListener.wrap(asDetector -> { + stateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(asDetector -> { assertEquals(detectorToCheck, asDetector.get()); inProgressLatch.countDown(); }, exception -> { @@ -277,7 +262,7 @@ public void testRepeatedGetAnomalyDetector() throws IOException, InterruptedExce String detectorId = setupDetector(); final CountDownLatch inProgressLatch = new CountDownLatch(2); - stateManager.getAnomalyDetector(detectorId, ActionListener.wrap(asDetector -> { + stateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(asDetector -> { 
assertEquals(detectorToCheck, asDetector.get()); inProgressLatch.countDown(); }, exception -> { @@ -285,7 +270,7 @@ public void testRepeatedGetAnomalyDetector() throws IOException, InterruptedExce inProgressLatch.countDown(); })); - stateManager.getAnomalyDetector(detectorId, ActionListener.wrap(asDetector -> { + stateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(asDetector -> { assertEquals(detectorToCheck, asDetector.get()); inProgressLatch.countDown(); }, exception -> { @@ -363,7 +348,7 @@ public void testSettingUpdateMaxRetry() { // In setUp method, we mute after 3 tries assertTrue(!stateManager.isMuted(nodeId, adId)); - Settings newSettings = Settings.builder().put(AnomalyDetectorSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.getKey(), "1").build(); + Settings newSettings = Settings.builder().put(TimeSeriesSettings.MAX_RETRY_FOR_UNRESPONSIVE_NODE.getKey(), "1").build(); Settings.Builder target = Settings.builder(); clusterSettings.updateDynamicSettings(newSettings, target, Settings.builder(), "test"); clusterSettings.applySettings(target.build()); @@ -381,7 +366,7 @@ public void testSettingUpdateBackOffMin() { assertTrue(stateManager.isMuted(nodeId, adId)); - Settings newSettings = Settings.builder().put(AnomalyDetectorSettings.BACKOFF_MINUTES.getKey(), "1m").build(); + Settings newSettings = Settings.builder().put(TimeSeriesSettings.BACKOFF_MINUTES.getKey(), "1m").build(); Settings.Builder target = Settings.builder(); clusterSettings.updateDynamicSettings(newSettings, target, Settings.builder(), "test"); clusterSettings.applySettings(target.build()); @@ -412,7 +397,7 @@ private String setupJob() throws IOException { public void testGetAnomalyJob() throws IOException, InterruptedException { String detectorId = setupJob(); final CountDownLatch inProgressLatch = new CountDownLatch(1); - stateManager.getAnomalyDetectorJob(detectorId, ActionListener.wrap(asDetector -> { + stateManager.getJob(detectorId, ActionListener.wrap(asDetector -> { assertEquals(jobToCheck, asDetector.get()); inProgressLatch.countDown(); }, exception -> { @@ -432,7 +417,7 @@ public void testRepeatedGetAnomalyJob() throws IOException, InterruptedException String detectorId = setupJob(); final CountDownLatch inProgressLatch = new CountDownLatch(2); - stateManager.getAnomalyDetectorJob(detectorId, ActionListener.wrap(asDetector -> { + stateManager.getJob(detectorId, ActionListener.wrap(asDetector -> { assertEquals(jobToCheck, asDetector.get()); inProgressLatch.countDown(); }, exception -> { @@ -440,7 +425,7 @@ public void testRepeatedGetAnomalyJob() throws IOException, InterruptedException inProgressLatch.countDown(); })); - stateManager.getAnomalyDetectorJob(detectorId, ActionListener.wrap(asDetector -> { + stateManager.getJob(detectorId, ActionListener.wrap(asDetector -> { assertEquals(jobToCheck, asDetector.get()); inProgressLatch.countDown(); }, exception -> { @@ -452,4 +437,118 @@ public void testRepeatedGetAnomalyJob() throws IOException, InterruptedException verify(client, times(1)).get(any(), any(ActionListener.class)); } + + public void testGetConfigAD() throws IOException, InterruptedException { + String configId = "123"; + AnomalyDetector detector = TestHelpers.randomAnomalyDetector(ImmutableMap.of("testKey", "testValue"), Instant.now()); + GetResponse getResponse = TestHelpers.createGetResponse(detector, configId, CommonName.CONFIG_INDEX); + doAnswer(invocationOnMock -> { + ((ActionListener) invocationOnMock.getArguments()[1]).onResponse(getResponse); + return null; + 
}).when(client).get(any(GetRequest.class), any()); + + final AtomicReference actualResponse = new AtomicReference<>(); + final AtomicReference exception = new AtomicReference<>(); + ActionListener listener = new ActionListener<>() { + @Override + public void onResponse(AnomalyDetector resultResponse) { + actualResponse.set(resultResponse); + } + + @Override + public void onFailure(Exception e) { + exception.set(e); + } + }; + + CountDownLatch latch = new CountDownLatch(1); + ActionListener latchListener = new LatchedActionListener<>(listener, latch); + + Consumer> function = mock(Consumer.class); + doAnswer(invocationOnMock -> { + Optional receivedDetector = (Optional) invocationOnMock.getArguments()[0]; + latchListener.onResponse(receivedDetector.get()); + return null; + }).when(function).accept(any(Optional.class)); + + stateManager.getConfig(configId, AnalysisType.AD, function, latchListener); + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + assertNotNull(actualResponse.get()); + assertNull(exception.get()); + org.hamcrest.MatcherAssert.assertThat(actualResponse.get(), equalTo(detector)); + } + + public void testGetConfigForecaster() throws IOException, InterruptedException { + String configId = "123"; + Forecaster forecaster = TestHelpers.randomForecaster(); + GetResponse getResponse = TestHelpers.createGetResponse(forecaster, configId, CommonName.CONFIG_INDEX); + doAnswer(invocationOnMock -> { + ((ActionListener) invocationOnMock.getArguments()[1]).onResponse(getResponse); + return null; + }).when(client).get(any(GetRequest.class), any()); + + final AtomicReference actualResponse = new AtomicReference<>(); + final AtomicReference exception = new AtomicReference<>(); + ActionListener listener = new ActionListener<>() { + @Override + public void onResponse(Forecaster resultResponse) { + actualResponse.set(resultResponse); + } + + @Override + public void onFailure(Exception e) { + exception.set(e); + } + }; + + CountDownLatch latch = new CountDownLatch(1); + ActionListener latchListener = new LatchedActionListener<>(listener, latch); + + Consumer> function = mock(Consumer.class); + doAnswer(invocationOnMock -> { + Optional receivedDetector = (Optional) invocationOnMock.getArguments()[0]; + latchListener.onResponse(receivedDetector.get()); + return null; + }).when(function).accept(any(Optional.class)); + + stateManager.getConfig(configId, AnalysisType.FORECAST, function, latchListener); + assertTrue(latch.await(30L, TimeUnit.SECONDS)); + assertNotNull(actualResponse.get()); + assertNull(exception.get()); + org.hamcrest.MatcherAssert.assertThat(actualResponse.get(), equalTo(forecaster)); + } + + public void testGetConfigException() throws IOException, InterruptedException { + String configId = "123"; + Exception testException = new Exception("Test exception"); + doAnswer(invocationOnMock -> { + ((ActionListener) invocationOnMock.getArguments()[1]).onFailure(testException); + return null; + }).when(client).get(any(GetRequest.class), any()); + + final AtomicReference actualResponse = new AtomicReference<>(); + final AtomicReference exception = new AtomicReference<>(); + ActionListener listener = new ActionListener<>() { + @Override + public void onResponse(Forecaster resultResponse) { + actualResponse.set(resultResponse); + } + + @Override + public void onFailure(Exception e) { + exception.set(e); + } + }; + + CountDownLatch latch = new CountDownLatch(1); + ActionListener latchListener = new LatchedActionListener<>(listener, latch); + + Consumer> function = mock(Consumer.class); + + 
+        stateManager.getConfig(configId, AnalysisType.FORECAST, function, latchListener);
+        assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        assertNull(actualResponse.get());
+        assertNotNull(exception.get());
+        assertEquals("Test exception", exception.get().getMessage());
+    }
 }
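For context, a minimal sketch of how the renamed lookup is expected to be called from production code paths after this change; the handleConfig/handleError callbacks and the errorListener variable are illustrative placeholders, not part of the patch:

    // Listener-style overload, mirroring the testRepeatedGetAnomalyDetector hunks above:
    // the listener receives an Optional wrapping the parsed config for the given analysis type.
    stateManager.getConfig(detectorId, AnalysisType.AD, ActionListener.wrap(configOptional -> {
        configOptional.ifPresent(config -> handleConfig(config));
    }, exception -> handleError(detectorId, exception)));

    // Consumer-style overload, mirroring testGetConfigAD/testGetConfigForecaster above:
    // the Consumer runs on a successful fetch, while failures are reported through the listener.
    stateManager.getConfig(configId, AnalysisType.FORECAST, configOptional -> {
        configOptional.ifPresent(config -> handleConfig(config));
    }, errorListener);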
diff --git a/src/test/java/org/opensearch/ad/NodeStateTests.java b/src/test/java/org/opensearch/timeseries/NodeStateTests.java
similarity index 86%
rename from src/test/java/org/opensearch/ad/NodeStateTests.java
rename to src/test/java/org/opensearch/timeseries/NodeStateTests.java
index c48afdb76..f97b288d7 100644
--- a/src/test/java/org/opensearch/ad/NodeStateTests.java
+++ b/src/test/java/org/opensearch/timeseries/NodeStateTests.java
@@ -9,7 +9,7 @@
  * GitHub history for details.
  */

-package org.opensearch.ad;
+package org.opensearch.timeseries;

 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -20,7 +20,6 @@
 import java.time.Instant;

 import org.opensearch.test.OpenSearchTestCase;
-import org.opensearch.timeseries.TestHelpers;
 import org.opensearch.timeseries.common.exception.TimeSeriesException;

 public class NodeStateTests extends OpenSearchTestCase {
@@ -38,7 +37,7 @@ public void setUp() throws Exception {

     public void testMaintenanceNotRemoveSingle() throws IOException {
         when(clock.instant()).thenReturn(Instant.ofEpochMilli(1000));
-        state.setDetectorDef(TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null));
+        state.setConfigDef(TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null));

         when(clock.instant()).thenReturn(Instant.MIN);
         assertTrue(!state.expired(duration));
@@ -46,8 +45,8 @@ public void testMaintenanceNotRemove() throws IOException {
         when(clock.instant()).thenReturn(Instant.ofEpochSecond(1000));
-        state.setDetectorDef(TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null));
-        state.setLastDetectionError(null);
+        state.setConfigDef(TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null));
+        state.setException(null);

         when(clock.instant()).thenReturn(Instant.ofEpochSecond(3700));
         assertTrue(!state.expired(duration));
@@ -56,11 +55,11 @@ public void testMaintenanceRemoveLastError() throws IOException {
         when(clock.instant()).thenReturn(Instant.ofEpochMilli(1000));
         state
-            .setDetectorDef(
+            .setConfigDef(
                 TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null)
             );
-        state.setLastDetectionError(null);
+        state.setException(null);

         when(clock.instant()).thenReturn(Instant.ofEpochSecond(3700));
         assertTrue(state.expired(duration));
@@ -68,7 +67,7 @@ public void testMaintenancRemoveDetector() throws IOException {
         when(clock.instant()).thenReturn(Instant.MIN);
-        state.setDetectorDef(TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null));
+        state.setConfigDef(TestHelpers.randomAnomalyDetector(TestHelpers.randomUiMetadata(), null));
         when(clock.instant()).thenReturn(Instant.MAX);

         assertTrue(state.expired(duration));
diff --git a/src/test/java/org/opensearch/timeseries/TestHelpers.java b/src/test/java/org/opensearch/timeseries/TestHelpers.java
index 33ceb54fa..3287f4118 100644
--- a/src/test/java/org/opensearch/timeseries/TestHelpers.java
+++ b/src/test/java/org/opensearch/timeseries/TestHelpers.java
@@ -71,7 +71,6 @@
 import org.opensearch.ad.model.ADTaskType;
 import org.opensearch.ad.model.AnomalyDetector;
 import org.opensearch.ad.model.AnomalyDetectorExecutionInput;
-import org.opensearch.ad.model.AnomalyDetectorJob;
 import org.opensearch.ad.model.AnomalyResult;
 import org.opensearch.ad.model.AnomalyResultBucket;
 import org.opensearch.ad.model.DetectorInternalState;
@@ -143,6 +142,7 @@
 import org.opensearch.timeseries.model.Feature;
 import org.opensearch.timeseries.model.FeatureData;
 import org.opensearch.timeseries.model.IntervalTimeConfiguration;
+import org.opensearch.timeseries.model.Job;
 import org.opensearch.timeseries.model.TimeConfiguration;
 import org.opensearch.timeseries.model.ValidationAspect;
 import org.opensearch.timeseries.model.ValidationIssueType;
@@ -963,12 +963,12 @@ public static AnomalyResult randomHCADAnomalyDetectResult(
         );
     }

-    public static AnomalyDetectorJob randomAnomalyDetectorJob() {
+    public static Job randomAnomalyDetectorJob() {
         return randomAnomalyDetectorJob(true);
     }

-    public static AnomalyDetectorJob randomAnomalyDetectorJob(boolean enabled, Instant enabledTime, Instant disabledTime) {
-        return new AnomalyDetectorJob(
+    public static Job randomAnomalyDetectorJob(boolean enabled, Instant enabledTime, Instant disabledTime) {
+        return new Job(
             randomAlphaOfLength(10),
             randomIntervalSchedule(),
             randomIntervalTimeConfiguration(),
@@ -982,7 +982,7 @@ public static AnomalyDetectorJob randomAnomalyDetectorJob(boolean enabled, Insta
         );
     }

-    public static AnomalyDetectorJob randomAnomalyDetectorJob(boolean enabled) {
+    public static Job randomAnomalyDetectorJob(boolean enabled) {
         return randomAnomalyDetectorJob(
             enabled,
             Instant.now().truncatedTo(ChronoUnit.SECONDS),
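The helper keeps its old name while its return type moves to the shared model package; a minimal sketch of the only change a test caller sees, assuming the former accessors carry over unchanged from AnomalyDetectorJob:

    // Before: org.opensearch.ad.model.AnomalyDetectorJob job = TestHelpers.randomAnomalyDetectorJob();
    // After the rename, only the declared type changes to the shared Job model:
    Job job = TestHelpers.randomAnomalyDetectorJob();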
diff --git a/src/test/java/org/opensearch/ad/feature/NoPowermockSearchFeatureDaoTests.java b/src/test/java/org/opensearch/timeseries/feature/NoPowermockSearchFeatureDaoTests.java
similarity index 97%
rename from src/test/java/org/opensearch/ad/feature/NoPowermockSearchFeatureDaoTests.java
rename to src/test/java/org/opensearch/timeseries/feature/NoPowermockSearchFeatureDaoTests.java
index 1d0da6d19..c689d3679 100644
--- a/src/test/java/org/opensearch/ad/feature/NoPowermockSearchFeatureDaoTests.java
+++ b/src/test/java/org/opensearch/timeseries/feature/NoPowermockSearchFeatureDaoTests.java
@@ -9,10 +9,11 @@
  * GitHub history for details.
  */

-package org.opensearch.ad.feature;
+package org.opensearch.timeseries.feature;

 import static org.hamcrest.core.IsInstanceOf.instanceOf;
 import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;
@@ -55,16 +56,15 @@
 import org.opensearch.action.search.SearchResponse.Clusters;
 import org.opensearch.action.search.SearchResponseSections;
 import org.opensearch.action.search.ShardSearchFailure;
-import org.opensearch.ad.NodeStateManager;
 import org.opensearch.ad.model.AnomalyDetector;
 import org.opensearch.ad.settings.AnomalyDetectorSettings;
-import org.opensearch.ad.util.SecurityClientUtil;
 import org.opensearch.client.Client;
 import org.opensearch.cluster.service.ClusterService;
 import org.opensearch.common.lease.Releasables;
 import org.opensearch.common.settings.ClusterSettings;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.time.DateFormatter;
+import org.opensearch.common.util.BitMixer;
 import org.opensearch.common.util.MockBigArrays;
 import org.opensearch.common.util.MockPageCacheRecycler;
 import org.opensearch.index.mapper.DateFieldMapper;
@@ -94,14 +94,16 @@
 import org.opensearch.search.aggregations.metrics.SumAggregationBuilder;
 import org.opensearch.search.internal.InternalSearchResponse;
 import org.opensearch.timeseries.AbstractTimeSeriesTest;
+import org.opensearch.timeseries.AnalysisType;
+import org.opensearch.timeseries.NodeStateManager;
 import org.opensearch.timeseries.TestHelpers;
 import org.opensearch.timeseries.dataprocessor.Imputer;
 import org.opensearch.timeseries.dataprocessor.LinearUniformImputer;
 import org.opensearch.timeseries.model.Entity;
 import org.opensearch.timeseries.model.Feature;
 import org.opensearch.timeseries.model.IntervalTimeConfiguration;
+import org.opensearch.timeseries.util.SecurityClientUtil;

-import com.carrotsearch.hppc.BitMixer;
 import com.google.common.collect.ImmutableList;

 /**
@@ -169,10 +171,10 @@ public void setUp() throws Exception {
         clock = mock(Clock.class);

         NodeStateManager nodeStateManager = mock(NodeStateManager.class);
         doAnswer(invocation -> {
-            ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(1);
+            ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(2);
             listener.onResponse(Optional.of(detector));
             return null;
-        }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class));
+        }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class));
         clientUtil = new SecurityClientUtil(nodeStateManager, settings);

         searchFeatureDao = new SearchFeatureDao(
@@ -525,8 +527,9 @@ public void getColdStartSamplesForPeriodsTemplate(DocValueFormat format) throws
             .getColdStartSamplesForPeriods(
                 detector,
                 sampleRanges,
-                Entity.createSingleAttributeEntity("field", "abc"),
+                Optional.of(Entity.createSingleAttributeEntity("field", "abc")),
                 true,
+                AnalysisType.AD,
                 ActionListener.wrap(samples -> {
                     assertEquals(3, samples.size());
                     for (int i = 0; i < samples.size(); i++) {
@@ -558,8 +561,9 @@ public void getColdStartSamplesForPeriodsTemplate(DocValueFormat format) throws
             .getColdStartSamplesForPeriods(
                 detector,
                 sampleRanges,
-                Entity.createSingleAttributeEntity("field", "abc"),
+                Optional.of(Entity.createSingleAttributeEntity("field", "abc")),
                 false,
+                AnalysisType.AD,
                 ActionListener.wrap(samples -> {
                     assertEquals(2, samples.size());
                     for (int i = 0; i < samples.size(); i++) {
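A hedged sketch of the call shape these cold-start hunks now exercise; the entity wrapper and the analysis type are the two new arguments, and handleSamples/handleFailure are placeholders rather than production methods:

    searchFeatureDao
        .getColdStartSamplesForPeriods(
            detector,                                                         // config whose features are sampled
            sampleRanges,                                                     // sampled time ranges, as in the tests above
            Optional.of(Entity.createSingleAttributeEntity("field", "abc")),  // entity is now passed as an Optional
            true,                                                             // same boolean flag as before this change
            AnalysisType.AD,                                                  // distinguishes AD from forecasting callers
            ActionListener.wrap(samples -> handleSamples(samples), e -> handleFailure(e))
        );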
diff --git a/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoParamTests.java b/src/test/java/org/opensearch/timeseries/feature/SearchFeatureDaoParamTests.java
similarity index 98%
rename from src/test/java/org/opensearch/ad/feature/SearchFeatureDaoParamTests.java
rename to src/test/java/org/opensearch/timeseries/feature/SearchFeatureDaoParamTests.java
index e00225ef0..ff5b6ab19 100644
--- a/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoParamTests.java
+++ b/src/test/java/org/opensearch/timeseries/feature/SearchFeatureDaoParamTests.java
@@ -9,7 +9,7 @@
  * GitHub history for details.
  */

-package org.opensearch.ad.feature;
+package org.opensearch.timeseries.feature;

 import static java.util.Arrays.asList;
 import static org.junit.Assert.assertEquals;
@@ -52,10 +52,8 @@
 import org.opensearch.action.search.MultiSearchResponse.Item;
 import org.opensearch.action.search.SearchRequest;
 import org.opensearch.action.search.SearchResponse;
-import org.opensearch.ad.NodeStateManager;
 import org.opensearch.ad.model.AnomalyDetector;
 import org.opensearch.ad.settings.AnomalyDetectorSettings;
-import org.opensearch.ad.util.SecurityClientUtil;
 import org.opensearch.client.Client;
 import org.opensearch.cluster.service.ClusterService;
 import org.opensearch.common.settings.Settings;
@@ -75,12 +73,15 @@
 import org.opensearch.search.aggregations.metrics.Percentile;
 import org.opensearch.search.builder.SearchSourceBuilder;
 import org.opensearch.threadpool.ThreadPool;
+import org.opensearch.timeseries.AnalysisType;
+import org.opensearch.timeseries.NodeStateManager;
 import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin;
 import org.opensearch.timeseries.constant.CommonName;
 import org.opensearch.timeseries.dataprocessor.Imputer;
 import org.opensearch.timeseries.dataprocessor.LinearUniformImputer;
 import org.opensearch.timeseries.model.IntervalTimeConfiguration;
 import org.opensearch.timeseries.util.ParseUtils;
+import org.opensearch.timeseries.util.SecurityClientUtil;
 import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.core.classloader.annotations.PrepareForTest;
@@ -172,10 +173,10 @@ public void setup() throws Exception {
         when(client.threadPool()).thenReturn(threadPool);

         NodeStateManager nodeStateManager = mock(NodeStateManager.class);
         doAnswer(invocation -> {
-            ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(1);
+            ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(2);
             listener.onResponse(Optional.of(detector));
             return null;
-        }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class));
+        }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class));
         clientUtil = new SecurityClientUtil(nodeStateManager, settings);
         searchFeatureDao = spy(
             new SearchFeatureDao(client, xContent, imputer, clientUtil, settings, null, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE)
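All three feature-DAO test classes stub the relocated NodeStateManager the same way; because getConfig takes the AnalysisType before the listener, the stub reads the listener from argument index 2 instead of 1, as in this sketch taken from the setup hunks:

    doAnswer(invocation -> {
        // AnalysisType now sits between the config id and the listener, so the listener is argument 2
        ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(2);
        listener.onResponse(Optional.of(detector));
        return null;
    }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class));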
diff --git a/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoTests.java b/src/test/java/org/opensearch/timeseries/feature/SearchFeatureDaoTests.java
similarity index 97%
rename from src/test/java/org/opensearch/ad/feature/SearchFeatureDaoTests.java
rename to src/test/java/org/opensearch/timeseries/feature/SearchFeatureDaoTests.java
index cf18b2fdd..3d23d61b7 100644
--- a/src/test/java/org/opensearch/ad/feature/SearchFeatureDaoTests.java
+++ b/src/test/java/org/opensearch/timeseries/feature/SearchFeatureDaoTests.java
@@ -9,7 +9,7 @@
  * GitHub history for details.
  */

-package org.opensearch.ad.feature;
+package org.opensearch.timeseries.feature;

 import static java.util.Arrays.asList;
 import static org.hamcrest.MatcherAssert.assertThat;
@@ -57,10 +57,8 @@
 import org.opensearch.action.search.SearchResponse;
 import org.opensearch.action.search.SearchResponseSections;
 import org.opensearch.action.search.ShardSearchFailure;
-import org.opensearch.ad.NodeStateManager;
 import org.opensearch.ad.model.AnomalyDetector;
 import org.opensearch.ad.settings.AnomalyDetectorSettings;
-import org.opensearch.ad.util.SecurityClientUtil;
 import org.opensearch.client.Client;
 import org.opensearch.common.settings.Settings;
 import org.opensearch.common.time.DateFormatter;
@@ -90,6 +88,8 @@
 import org.opensearch.search.aggregations.metrics.Percentile;
 import org.opensearch.search.builder.SearchSourceBuilder;
 import org.opensearch.threadpool.ThreadPool;
+import org.opensearch.timeseries.AnalysisType;
+import org.opensearch.timeseries.NodeStateManager;
 import org.opensearch.timeseries.TimeSeriesAnalyticsPlugin;
 import org.opensearch.timeseries.constant.CommonName;
 import org.opensearch.timeseries.dataprocessor.Imputer;
@@ -97,6 +97,7 @@
 import org.opensearch.timeseries.model.Entity;
 import org.opensearch.timeseries.model.IntervalTimeConfiguration;
 import org.opensearch.timeseries.util.ParseUtils;
+import org.opensearch.timeseries.util.SecurityClientUtil;
 import org.powermock.api.mockito.PowerMockito;
 import org.powermock.core.classloader.annotations.PowerMockIgnore;
 import org.powermock.core.classloader.annotations.PrepareForTest;
@@ -175,10 +176,10 @@ public void setup() throws Exception {
         when(client.threadPool()).thenReturn(threadPool);

         NodeStateManager nodeStateManager = mock(NodeStateManager.class);
         doAnswer(invocation -> {
-            ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(1);
+            ActionListener<Optional<AnomalyDetector>> listener = invocation.getArgument(2);
             listener.onResponse(Optional.of(detector));
             return null;
-        }).when(nodeStateManager).getAnomalyDetector(any(String.class), any(ActionListener.class));
+        }).when(nodeStateManager).getConfig(any(String.class), eq(AnalysisType.AD), any(ActionListener.class));
         clientUtil = new SecurityClientUtil(nodeStateManager, settings);
         searchFeatureDao = spy(
             new SearchFeatureDao(client, xContent, imputer, clientUtil, settings, null, AnomalyDetectorSettings.NUM_SAMPLES_PER_TREE)
@@ -378,7 +379,7 @@ public void testGetEntityMinDataTime() {
         ActionListener<Optional<Long>> listener = mock(ActionListener.class);

         Entity entity = Entity.createSingleAttributeEntity("field", "app_1");
-        searchFeatureDao.getEntityMinDataTime(detector, entity, listener);
+        searchFeatureDao.getMinDataTime(detector, Optional.ofNullable(entity), AnalysisType.AD, listener);

         ArgumentCaptor<Optional<Long>> captor = ArgumentCaptor.forClass(Optional.class);
         verify(listener).onResponse(captor.capture());
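The new ClientUtilTests below cover the two asynchronous entry points left in ClientUtil after the synchronous helpers were removed. A hedged sketch of how callers are expected to use them; request and listener are placeholders, while the transport action and response types are the AD ones exercised by the tests:

    // asyncRequest: the caller supplies the asynchronous operation as a BiConsumer of (request, listener)
    clientUtil.asyncRequest(request, (req, resultListener) -> client.execute(AnomalyResultAction.INSTANCE, req, resultListener), listener);

    // execute: ClientUtil issues the transport action itself and reports back through the listener
    clientUtil.execute(AnomalyResultAction.INSTANCE, request, listener);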
diff --git a/src/test/java/org/opensearch/timeseries/util/ClientUtilTests.java b/src/test/java/org/opensearch/timeseries/util/ClientUtilTests.java
new file mode 100644
index 000000000..d7847edd1
--- /dev/null
+++ b/src/test/java/org/opensearch/timeseries/util/ClientUtilTests.java
@@ -0,0 +1,153 @@
+/*
+ * Copyright OpenSearch Contributors
+ * SPDX-License-Identifier: Apache-2.0
+ */
+
+package org.opensearch.timeseries.util;
+
+import static org.hamcrest.Matchers.equalTo;
+import static org.mockito.ArgumentMatchers.any;
+import static org.mockito.ArgumentMatchers.eq;
+import static org.mockito.Mockito.doAnswer;
+import static org.mockito.Mockito.mock;
+
+import java.util.Collections;
+import java.util.concurrent.CountDownLatch;
+import java.util.concurrent.TimeUnit;
+import java.util.function.BiConsumer;
+
+import org.opensearch.action.ActionListener;
+import org.opensearch.action.LatchedActionListener;
+import org.opensearch.ad.transport.AnomalyResultAction;
+import org.opensearch.ad.transport.AnomalyResultRequest;
+import org.opensearch.ad.transport.AnomalyResultResponse;
+import org.opensearch.client.Client;
+import org.opensearch.test.OpenSearchTestCase;
+import org.opensearch.timeseries.model.FeatureData;
+
+public class ClientUtilTests extends OpenSearchTestCase {
+    private AnomalyResultRequest asyncRequest;
+
+    private ClientUtil clientUtil;
+
+    private Client client;
+    private CountDownLatch latch;
+    private ActionListener<AnomalyResultResponse> latchListener;
+    private AnomalyResultResponse actualResponse;
+    private Exception exception;
+    private ActionListener<AnomalyResultResponse> listener;
+
+    @Override
+    public void setUp() throws Exception {
+        super.setUp();
+        asyncRequest = new AnomalyResultRequest("abc123", 100, 200);
+
+        listener = new ActionListener<>() {
+            @Override
+            public void onResponse(AnomalyResultResponse resultResponse) {
+                actualResponse = resultResponse;
+            }
+
+            @Override
+            public void onFailure(Exception e) {
+                exception = e;
+            }
+        };
+        actualResponse = null;
+        exception = null;
+
+        latch = new CountDownLatch(1);
+        latchListener = new LatchedActionListener<>(listener, latch);
+
+        client = mock(Client.class);
+        clientUtil = new ClientUtil(client);
+    }
+
+    public void testAsyncRequestOnSuccess() throws InterruptedException {
+        AnomalyResultResponse expected = new AnomalyResultResponse(
+            4d,
+            0.993,
+            1.01,
+            Collections.singletonList(new FeatureData("xyz", "foo", 0d)),
+            randomAlphaOfLength(4),
+            randomLong(),
+            randomLong(),
+            randomBoolean(),
+            randomInt(),
+            new double[] { randomDoubleBetween(0, 1.0, true), randomDoubleBetween(0, 1.0, true) },
+            new double[] { randomDouble(), randomDouble() },
+            new double[][] { new double[] { randomDouble(), randomDouble() } },
+            new double[] { randomDouble() },
+            randomDoubleBetween(1.1, 10.0, true)
+        );
+        BiConsumer<AnomalyResultRequest, ActionListener<AnomalyResultResponse>> consumer = (request, actionListener) -> {
+            // simulate a successful operation
+            latchListener.onResponse(expected);
+        };
+        clientUtil.asyncRequest(asyncRequest, consumer, listener);
+
+        assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        assertNotNull(actualResponse);
+        assertNull(exception);
+        org.hamcrest.MatcherAssert.assertThat(actualResponse, equalTo(expected));
+    }
+
+    public void testAsyncRequestOnFailure() {
+        Exception testException = new Exception("Test exception");
+        BiConsumer<AnomalyResultRequest, ActionListener<AnomalyResultResponse>> consumer = (request, actionListener) -> {
+            // simulate a failed operation
+            latchListener.onFailure(testException);
+        };
+        clientUtil.asyncRequest(asyncRequest, consumer, listener);
+        assertNull(actualResponse);
+        assertNotNull(exception);
+        assertEquals("Test exception", exception.getMessage());
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testExecuteOnSuccess() throws InterruptedException {
+        AnomalyResultResponse expected = new AnomalyResultResponse(
+            4d,
+            0.993,
+            1.01,
+            Collections.singletonList(new FeatureData("xyz", "foo", 0d)),
+            randomAlphaOfLength(4),
+            randomLong(),
+            randomLong(),
+            randomBoolean(),
+            randomInt(),
+            new double[] { randomDoubleBetween(0, 1.0, true), randomDoubleBetween(0, 1.0, true) },
+            new double[] { randomDouble(), randomDouble() },
+            new double[][] { new double[] { randomDouble(), randomDouble() } },
+            new double[] { randomDouble() },
+            randomDoubleBetween(1.1, 10.0, true)
+        );
+        doAnswer(invocationOnMock -> {
+            ((ActionListener<AnomalyResultResponse>) invocationOnMock.getArguments()[2]).onResponse(expected);
+            latch.countDown();
+            return null;
+        }).when(client).execute(eq(AnomalyResultAction.INSTANCE), any(), any());
+        clientUtil.execute(AnomalyResultAction.INSTANCE, asyncRequest, latchListener);
+
+        assertTrue(latch.await(30L, TimeUnit.SECONDS));
+        assertNotNull(actualResponse);
+        assertNull(exception);
+        org.hamcrest.MatcherAssert.assertThat(actualResponse, equalTo(expected));
+    }
+
+    @SuppressWarnings("unchecked")
+    public void testExecuteOnFailure() {
+        Exception testException = new Exception("Test exception");
+        doAnswer(invocationOnMock -> {
+            ((ActionListener<AnomalyResultResponse>) invocationOnMock.getArguments()[2]).onFailure(testException);
+            latch.countDown();
+            return null;
+        }).when(client).execute(eq(AnomalyResultAction.INSTANCE), any(), any());
+        clientUtil.execute(AnomalyResultAction.INSTANCE, asyncRequest, latchListener);
+        assertNull(actualResponse);
+        assertNotNull(exception);
+        assertEquals("Test exception", exception.getMessage());
+    }
+}
diff --git a/src/test/java/org/opensearch/ad/util/MultiResponsesDelegateActionListenerTests.java b/src/test/java/org/opensearch/timeseries/util/MultiResponsesDelegateActionListenerTests.java
similarity index 98%
rename from src/test/java/org/opensearch/ad/util/MultiResponsesDelegateActionListenerTests.java
rename to src/test/java/org/opensearch/timeseries/util/MultiResponsesDelegateActionListenerTests.java
index b905ce623..a93b39ad7 100644
--- a/src/test/java/org/opensearch/ad/util/MultiResponsesDelegateActionListenerTests.java
+++ b/src/test/java/org/opensearch/timeseries/util/MultiResponsesDelegateActionListenerTests.java
@@ -9,7 +9,7 @@
  * GitHub history for details.
  */

-package org.opensearch.ad.util;
+package org.opensearch.timeseries.util;

 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.verify;