From 2ab1d202ca578150682dcf4eabe1e176e579480d Mon Sep 17 00:00:00 2001
From: JD Kurma
Date: Mon, 11 Jul 2022 16:44:41 -0400
Subject: [PATCH 01/61] [Security Solution] Add usage counter for num of
endpoints (#135953)
* add usage counter for num of endpoints
* unit tests for endpoint counter
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
---
.../server/lib/telemetry/__mocks__/index.ts | 3 +-
.../server/lib/telemetry/__mocks__/metrics.ts | 126 ++++++++++++++++++
.../server/lib/telemetry/receiver.ts | 5 +
.../lib/telemetry/tasks/endpoint.test.ts | 13 ++
.../server/lib/telemetry/tasks/endpoint.ts | 12 ++
.../server/lib/telemetry/types.ts | 1 +
6 files changed, 159 insertions(+), 1 deletion(-)
create mode 100644 x-pack/plugins/security_solution/server/lib/telemetry/__mocks__/metrics.ts
diff --git a/x-pack/plugins/security_solution/server/lib/telemetry/__mocks__/index.ts b/x-pack/plugins/security_solution/server/lib/telemetry/__mocks__/index.ts
index ea1caa3c97b69..eba224f477db4 100644
--- a/x-pack/plugins/security_solution/server/lib/telemetry/__mocks__/index.ts
+++ b/x-pack/plugins/security_solution/server/lib/telemetry/__mocks__/index.ts
@@ -13,6 +13,7 @@ import type { TelemetryReceiver } from '../receiver';
import type { SecurityTelemetryTaskConfig } from '../task';
import type { PackagePolicy } from '@kbn/fleet-plugin/common/types/models/package_policy';
import { stubEndpointAlertResponse, stubProcessTree, stubFetchTimelineEvents } from './timeline';
+import { stubEndpointMetricsResponse } from './metrics';
export const createMockTelemetryEventsSender = (
enableTelemetry?: boolean,
@@ -82,7 +83,7 @@ export const createMockTelemetryReceiver = (
copyLicenseFields: jest.fn(),
fetchFleetAgents: jest.fn(),
fetchDiagnosticAlerts: jest.fn().mockReturnValue(diagnosticsAlert ?? jest.fn()),
- fetchEndpointMetrics: jest.fn(),
+ fetchEndpointMetrics: jest.fn().mockReturnValue(stubEndpointMetricsResponse),
fetchEndpointPolicyResponses: jest.fn(),
fetchTrustedApplications: jest.fn(),
fetchEndpointList: jest.fn(),
diff --git a/x-pack/plugins/security_solution/server/lib/telemetry/__mocks__/metrics.ts b/x-pack/plugins/security_solution/server/lib/telemetry/__mocks__/metrics.ts
new file mode 100644
index 0000000000000..2cf94eccb78dd
--- /dev/null
+++ b/x-pack/plugins/security_solution/server/lib/telemetry/__mocks__/metrics.ts
@@ -0,0 +1,126 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+export const stubEndpointMetricsResponse = {
+ body: {
+ took: 0,
+ timed_out: false,
+ _shards: {
+ total: 1,
+ successful: 1,
+ skipped: 0,
+ failed: 0,
+ },
+ hits: {
+ total: {
+ value: 2,
+ relation: 'eq',
+ },
+ max_score: null,
+ hits: [],
+ },
+ aggregations: {
+ endpoint_count: {
+ value: 1,
+ },
+ endpoint_agents: {
+ doc_count_error_upper_bound: 0,
+ sum_other_doc_count: 0,
+ buckets: [
+ {
+ key: '7116aa6c-0bad-4edc-b954-860f9d487755',
+ doc_count: 2,
+ latest_metrics: {
+ hits: {
+ total: {
+ value: 2,
+ relation: 'eq',
+ },
+ max_score: null,
+ hits: [
+ {
+ _index: '.ds-metrics-endpoint.metadata-default-2022.07.08-000001',
+ _id: 'ChGf7YEBj3fALY0Ne9um',
+ _score: null,
+ _source: {
+ agent: {
+ id: '7116aa6c-0bad-4edc-b954-860f9d487755',
+ type: 'endpoint',
+ version: '7.16.11',
+ },
+ '@timestamp': 1657484259677,
+ Endpoint: {
+ capabilities: ['isolation'],
+ configuration: {
+ isolation: true,
+ },
+ state: {
+ isolation: true,
+ },
+ status: 'enrolled',
+ policy: {
+ applied: {
+ name: 'With Eventing',
+ id: 'C2A9093E-E289-4C0A-AA44-8C32A414FA7A',
+ endpoint_policy_version: 3,
+ version: 5,
+ status: 'failure',
+ },
+ },
+ },
+ data_stream: {
+ namespace: 'default',
+ type: 'metrics',
+ dataset: 'endpoint.metadata',
+ },
+ elastic: {
+ agent: {
+ id: '7116aa6c-0bad-4edc-b954-860f9d487755',
+ },
+ },
+ host: {
+ hostname: 'Host-x46nlluvd1',
+ os: {
+ Ext: {
+ variant: 'Windows Server Release 2',
+ },
+ name: 'Windows',
+ family: 'windows',
+ version: '6.3',
+ platform: 'Windows',
+ full: 'Windows Server 2012R2',
+ },
+ ip: ['10.198.33.76'],
+ name: 'Host-x46nlluvd1',
+ id: 'b85883ad-6f72-4ab5-9794-4e1f0593a15e',
+ mac: ['b6-f3-d2-3d-b4-95', '33-5c-95-1e-20-17', 'b0-90-8a-57-82-1f'],
+ architecture: '1dc25ub5gq',
+ },
+ event: {
+ agent_id_status: 'auth_metadata_missing',
+ ingested: '2022-07-11T14:17:41Z',
+ created: 1657484259677,
+ kind: 'metric',
+ module: 'endpoint',
+ action: 'endpoint_metadata',
+ id: 'c7648bef-b723-4925-b9a1-b3953fbb2a23',
+ category: ['host'],
+ type: ['info'],
+ dataset: 'endpoint.metadata',
+ },
+ },
+ sort: [1657484259677],
+ },
+ ],
+ },
+ },
+ },
+ ],
+ },
+ },
+ },
+};
diff --git a/x-pack/plugins/security_solution/server/lib/telemetry/receiver.ts b/x-pack/plugins/security_solution/server/lib/telemetry/receiver.ts
index d9cabfed0ceec..4d93834790b6c 100644
--- a/x-pack/plugins/security_solution/server/lib/telemetry/receiver.ts
+++ b/x-pack/plugins/security_solution/server/lib/telemetry/receiver.ts
@@ -304,6 +304,11 @@ export class TelemetryReceiver implements ITelemetryReceiver {
},
},
},
+ endpoint_count: {
+ cardinality: {
+ field: 'agent.id',
+ },
+ },
},
},
};
diff --git a/x-pack/plugins/security_solution/server/lib/telemetry/tasks/endpoint.test.ts b/x-pack/plugins/security_solution/server/lib/telemetry/tasks/endpoint.test.ts
index ce31247f075d7..f0c359b9e0b0a 100644
--- a/x-pack/plugins/security_solution/server/lib/telemetry/tasks/endpoint.test.ts
+++ b/x-pack/plugins/security_solution/server/lib/telemetry/tasks/endpoint.test.ts
@@ -8,6 +8,12 @@
import { loggingSystemMock } from '@kbn/core/server/mocks';
import { createTelemetryEndpointTaskConfig } from './endpoint';
import { createMockTelemetryEventsSender, createMockTelemetryReceiver } from '../__mocks__';
+import { usageCountersServiceMock } from '@kbn/usage-collection-plugin/server/usage_counters/usage_counters_service.mock';
+
+const usageCountersServiceSetup = usageCountersServiceMock.createSetupContract();
+const telemetryUsageCounter = usageCountersServiceSetup.createUsageCounter(
+ 'testTelemetryUsageCounter'
+);
describe('endpoint telemetry task test', () => {
let logger: ReturnType<typeof loggingSystemMock.createLogger>;
@@ -22,6 +28,9 @@ describe('endpoint telemetry task test', () => {
current: new Date().toISOString(),
};
const mockTelemetryEventsSender = createMockTelemetryEventsSender();
+ mockTelemetryEventsSender.getTelemetryUsageCluster = jest
+ .fn()
+ .mockReturnValue(telemetryUsageCounter);
const mockTelemetryReceiver = createMockTelemetryReceiver();
const telemetryEndpointTaskConfig = createTelemetryEndpointTaskConfig(1);
@@ -42,5 +51,9 @@ describe('endpoint telemetry task test', () => {
testTaskExecutionPeriod.last,
testTaskExecutionPeriod.current
);
+ expect(mockTelemetryEventsSender.getTelemetryUsageCluster).toHaveBeenCalled();
+ expect(mockTelemetryEventsSender.getTelemetryUsageCluster()?.incrementCounter).toBeCalledTimes(
+ 1
+ );
});
});
diff --git a/x-pack/plugins/security_solution/server/lib/telemetry/tasks/endpoint.ts b/x-pack/plugins/security_solution/server/lib/telemetry/tasks/endpoint.ts
index 66047113d90fd..b2a74d53e90c2 100644
--- a/x-pack/plugins/security_solution/server/lib/telemetry/tasks/endpoint.ts
+++ b/x-pack/plugins/security_solution/server/lib/telemetry/tasks/endpoint.ts
@@ -21,6 +21,7 @@ import type { ITelemetryReceiver } from '../receiver';
import type { TaskExecutionPeriod } from '../task';
import {
batchTelemetryRecords,
+ createUsageCounterLabel,
extractEndpointPolicyConfig,
getPreviousDailyTaskTimestamp,
isPackagePolicyList,
@@ -38,6 +39,8 @@ const EmptyFleetAgentResponse = {
perPage: 0,
};
+const usageLabelPrefix: string[] = ['security_telemetry', 'endpoint_task'];
+
export function createTelemetryEndpointTaskConfig(maxTelemetryBatch: number) {
return {
type: 'security:endpoint-meta-telemetry',
@@ -98,6 +101,15 @@ export function createTelemetryEndpointTaskConfig(maxTelemetryBatch: number) {
return 0;
}
+ const telemetryUsageCounter = sender.getTelemetryUsageCluster();
+ telemetryUsageCounter?.incrementCounter({
+ counterName: createUsageCounterLabel(
+ usageLabelPrefix.concat(['payloads', TELEMETRY_CHANNEL_ENDPOINT_META])
+ ),
+ counterType: 'num_endpoint',
+ incrementBy: endpointMetricsResponse.aggregations.endpoint_count.value,
+ });
+
const endpointMetrics = endpointMetricsResponse.aggregations.endpoint_agents.buckets.map(
(epMetrics) => {
return {
diff --git a/x-pack/plugins/security_solution/server/lib/telemetry/types.ts b/x-pack/plugins/security_solution/server/lib/telemetry/types.ts
index b1f154ac03654..859dbcbb7e58d 100644
--- a/x-pack/plugins/security_solution/server/lib/telemetry/types.ts
+++ b/x-pack/plugins/security_solution/server/lib/telemetry/types.ts
@@ -135,6 +135,7 @@ export interface EndpointMetricsAggregation {
endpoint_agents: {
buckets: Array<{ key: string; doc_count: number; latest_metrics: EndpointMetricHits }>;
};
+ endpoint_count: { value: number };
};
}
From a4d633ce96bb6ecbb3635be3d35465811b3fab06 Mon Sep 17 00:00:00 2001
From: Byron Hulcher
Date: Mon, 11 Jul 2022 16:46:02 -0400
Subject: [PATCH 02/61] [Enterprise Search] Migrate Crawler functionality into
Kibana Content app (#135719)
---
...m_settings_flyout_seed_urls_panel.test.tsx | 3 +-
...custom_settings_flyout_seed_urls_panel.tsx | 3 +-
.../crawl_details_preview.test.tsx | 3 +-
.../crawl_details_preview.tsx | 2 +-
.../simplified_selectable.tsx | 91 +--
.../components/custom_formatted_timestamp.tsx | 22 +-
.../components/sitemaps_table.test.tsx | 6 +-
.../app_search/components/data_panel/index.ts | 2 +-
.../utils/encode_path_params/index.ts | 33 +-
.../utils/formatted_date_time/index.tsx | 21 +-
.../api/crawler/_mocks_/crawl_events.mock.ts | 109 +++
.../api/crawler/_mocks_/crawler.mock.ts | 28 +
.../crawler/_mocks_/crawler_domains.mock.ts | 102 +++
.../create_crawler_index_api_logic.test.ts | 35 +
.../crawler/create_crawler_index_api_logic.ts | 2 +-
.../delete_crawler_domain_api_logic.test.ts | 33 +
.../delete_crawler_domain_api_logic.ts | 31 +
.../api/crawler/get_crawler_api_logic.test.ts | 35 +
.../api/crawler/get_crawler_api_logic.ts | 27 +
.../get_crawler_domain_api_logic.test.ts | 39 +
.../crawler/get_crawler_domain_api_logic.ts | 31 +
.../get_crawler_domains_api_logic.test.ts | 44 +
.../crawler/get_crawler_domains_api_logic.ts | 37 +
.../api/crawler/types.ts | 254 ++++++
.../api/crawler/utils.test.ts | 194 +++++
.../api/crawler/utils.ts | 231 ++++++
.../crawl_rules_table.test.tsx | 302 +++++++
.../crawl_rules_table.tsx | 258 ++++++
.../crawler_domain_detail.tsx | 116 +++
.../crawler_domain_detail_logic.ts | 167 ++++
.../deduplication_panel.scss | 14 +
.../deduplication_panel.test.tsx | 159 ++++
.../deduplication_panel.tsx | 204 +++++
.../deduplication_panel/utils.ts | 39 +
.../entry_points_table.test.tsx | 119 +++
.../entry_points_table.tsx | 137 ++++
.../entry_points_table_logic.test.ts | 76 ++
.../entry_points_table_logic.ts | 53 ++
.../sitemaps_table.test.tsx | 187 +++++
.../crawler_domain_detail/sitemaps_table.tsx | 120 +++
.../components/new_index/button_group.tsx | 3 +-
.../components/new_index/constants.ts | 30 +-
.../method_crawler/method_crawler_logic.ts | 11 +-
.../new_index/new_search_index_template.tsx | 1 +
.../index_name_logic.mock.ts} | 14 +-
.../automatic_crawl_scheduler.test.tsx | 80 ++
.../automatic_crawl_scheduler.tsx | 198 +++++
.../automatic_crawl_scheduler_logic.test.ts | 293 +++++++
.../automatic_crawl_scheduler_logic.ts | 189 +++++
.../crawl_custom_settings_flyout.test.tsx | 152 ++++
.../crawl_custom_settings_flyout.tsx | 108 +++
...settings_flyout_crawl_depth_panel.test.tsx | 45 ++
...stom_settings_flyout_crawl_depth_panel.tsx | 64 ++
...tom_settings_flyout_domains_panel.test.tsx | 77 ++
...l_custom_settings_flyout_domains_panel.tsx | 84 ++
...crawl_custom_settings_flyout_logic.test.ts | 443 +++++++++++
.../crawl_custom_settings_flyout_logic.ts | 239 ++++++
...m_settings_flyout_seed_urls_panel.test.tsx | 167 ++++
...custom_settings_flyout_seed_urls_panel.tsx | 205 +++++
.../crawl_detail_logic.test.ts | 158 ++++
.../crawl_detail_logic.ts | 99 +++
.../crawl_details_flyout.test.tsx | 131 +++
.../crawl_details_flyout.tsx | 82 ++
.../crawl_details_preview.test.tsx | 118 +++
.../crawl_details_preview.tsx | 70 ++
.../crawl_details_summary.test.tsx | 63 ++
.../crawl_details_summary.tsx | 251 ++++++
.../crawler/crawl_requests_panel/constants.ts | 62 ++
.../crawl_event_type_badge.test.tsx | 73 ++
.../crawl_event_type_badge.tsx | 35 +
.../crawl_requests_panel.tsx | 46 ++
.../crawl_requests_table.test.tsx | 131 +++
.../crawl_requests_table.tsx | 122 +++
.../search_index/crawler/crawler_logic.ts | 207 +++++
.../crawler_status_indicator.test.tsx | 130 +++
.../crawler_status_indicator.tsx | 82 ++
.../start_crawl_context_menu.test.tsx | 85 ++
.../start_crawl_context_menu.tsx | 93 +++
.../stop_crawl_popover_context_menu.test.tsx | 59 ++
.../stop_crawl_popover_context_menu.tsx | 84 ++
.../add_domain/add_domain_flyout.test.tsx | 69 ++
.../add_domain/add_domain_flyout.tsx | 85 ++
.../add_domain/add_domain_form.test.tsx | 142 ++++
.../add_domain/add_domain_form.tsx | 103 +++
.../add_domain_form_errors.test.tsx | 41 +
.../add_domain/add_domain_form_errors.tsx | 37 +
.../add_domain_form_submit_button.test.tsx | 59 ++
.../add_domain_form_submit_button.tsx | 30 +
.../add_domain/add_domain_logic.test.ts | 752 ++++++++++++++++++
.../add_domain/add_domain_logic.ts | 344 ++++++++
.../add_domain/add_domain_validation.test.tsx | 66 ++
.../add_domain/add_domain_validation.tsx | 127 +++
.../add_domain/utils.test.ts | 125 +++
.../domain_management/add_domain/utils.ts | 128 +++
.../add_domain/validation_state_icon.test.tsx | 40 +
.../add_domain/validation_state_icon.tsx | 27 +
.../add_domain/validation_step_panel.test.tsx | 79 ++
.../add_domain/validation_step_panel.tsx | 65 ++
.../crawler_status_banner.test.tsx | 59 ++
.../crawler_status_banner.tsx | 43 +
.../domain_management/domain_management.tsx | 42 +
.../domain_management_logic.ts | 129 +++
.../domain_management/domains_panel.tsx | 57 ++
.../domain_management/domains_table.test.tsx | 184 +++++
.../domain_management/domains_table.tsx | 130 +++
.../domain_management/empty_state_panel.tsx | 82 ++
.../components/search_index/crawler/utils.ts | 21 +
.../search_index/generate_api_key_panel.tsx | 66 ++
.../search_index/index_name_logic.ts | 35 +
.../components/search_index/overview.tsx | 67 +-
.../components/search_index/search_index.tsx | 27 +-
.../search_index/search_index_router.tsx | 47 ++
.../search_indices_router.test.tsx | 14 +-
.../search_indices/search_indices_router.tsx | 16 +-
.../enterprise_search_content/routes.ts | 1 +
.../accordion_list}/accordion_list.scss | 0
.../accordion_list}/accordion_list.test.tsx | 2 -
.../accordion_list}/accordion_list.tsx | 0
.../custom_formatted_timestamp.test.tsx | 2 +-
.../custom_formatted_timestamp.tsx | 28 +
.../data_panel/data_panel.scss | 0
.../data_panel/data_panel.test.tsx | 2 +-
.../data_panel/data_panel.tsx | 14 +-
.../shared/encode_path_params/index.test.ts | 49 ++
.../shared/encode_path_params/index.ts | 35 +
.../formatted_date_time/index.test.tsx | 2 +-
.../shared/formatted_date_time/index.tsx | 27 +
.../simplified_selectable.test.tsx | 2 +-
.../simplified_selectable.tsx | 97 +++
.../url_combo_box}/url_combo_box.scss | 0
.../url_combo_box}/url_combo_box.test.tsx | 4 +-
.../url_combo_box}/url_combo_box.tsx | 9 +-
.../url_combo_box_logic.test.ts | 2 +-
.../url_combo_box}/url_combo_box_logic.ts | 2 +-
.../server/lib/indices/fetch_index.ts | 22 +-
.../enterprise_search/server/plugin.ts | 2 +-
.../routes/enterprise_search/crawler.ts | 30 -
.../enterprise_search/crawler/crawler.test.ts | 660 +++++++++++++++
.../enterprise_search/crawler/crawler.ts | 294 +++++++
.../crawler/crawler_crawl_rules.ts | 82 ++
.../crawler/crawler_entry_points.ts | 77 ++
.../crawler/crawler_sitemaps.ts | 77 ++
.../enterprise_search/server/types/crawler.ts | 11 +
.../translations/translations/fr-FR.json | 154 +++-
.../translations/translations/ja-JP.json | 154 +++-
.../translations/translations/zh-CN.json | 154 +++-
146 files changed, 12962 insertions(+), 324 deletions(-)
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawl_events.mock.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawler.mock.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawler_domains.mock.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/create_crawler_index_api_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/delete_crawler_domain_api_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/delete_crawler_domain_api_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_api_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_api_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domain_api_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domain_api_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domains_api_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domains_api_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/types.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/utils.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/utils.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawl_rules_table.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawl_rules_table.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawler_domain_detail.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawler_domain_detail_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.scss
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/utils.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/sitemaps_table.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/sitemaps_table.tsx
rename x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/{domain_management.tsx => _mocks_/index_name_logic.mock.ts} (59%)
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_crawl_depth_panel.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_crawl_depth_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_domains_panel.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_domains_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_detail_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_detail_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_flyout.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_flyout.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_preview.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_preview.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_summary.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_summary.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/constants.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_event_type_badge.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_event_type_badge.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/crawler_status_indicator.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/crawler_status_indicator.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/start_crawl_context_menu.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/start_crawl_context_menu.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/stop_crawl_popover_context_menu.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/stop_crawl_popover_context_menu.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_flyout.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_flyout.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_errors.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_errors.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_submit_button.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_submit_button.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_logic.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_validation.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_validation.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/utils.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/utils.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_state_icon.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_state_icon.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_step_panel.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_step_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/crawler_status_banner.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/crawler_status_banner.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domain_management.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domain_management_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_table.test.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_table.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/empty_state_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/utils.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/generate_api_key_panel.tsx
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/index_name_logic.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/search_index_router.tsx
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_details_flyout => shared/accordion_list}/accordion_list.scss (100%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_details_flyout => shared/accordion_list}/accordion_list.test.tsx (97%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_details_flyout => shared/accordion_list}/accordion_list.tsx (100%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components => shared/custom_formatted_timestamp}/custom_formatted_timestamp.test.tsx (94%)
create mode 100644 x-pack/plugins/enterprise_search/public/applications/shared/custom_formatted_timestamp/custom_formatted_timestamp.tsx
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components => shared}/data_panel/data_panel.scss (100%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components => shared}/data_panel/data_panel.test.tsx (98%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components => shared}/data_panel/data_panel.tsx (86%)
create mode 100644 x-pack/plugins/enterprise_search/public/applications/shared/encode_path_params/index.test.ts
create mode 100644 x-pack/plugins/enterprise_search/public/applications/shared/encode_path_params/index.ts
rename x-pack/plugins/enterprise_search/public/applications/{app_search/utils => shared}/formatted_date_time/index.test.tsx (94%)
create mode 100644 x-pack/plugins/enterprise_search/public/applications/shared/formatted_date_time/index.tsx
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_select_domains_modal => shared/simplified_selectable}/simplified_selectable.test.tsx (98%)
create mode 100644 x-pack/plugins/enterprise_search/public/applications/shared/simplified_selectable/simplified_selectable.tsx
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_custom_settings_flyout => shared/url_combo_box}/url_combo_box.scss (100%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_custom_settings_flyout => shared/url_combo_box}/url_combo_box.test.tsx (95%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_custom_settings_flyout => shared/url_combo_box}/url_combo_box.tsx (89%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_custom_settings_flyout => shared/url_combo_box}/url_combo_box_logic.test.ts (93%)
rename x-pack/plugins/enterprise_search/public/applications/{app_search/components/crawler/components/crawl_custom_settings_flyout => shared/url_combo_box}/url_combo_box_logic.ts (90%)
delete mode 100644 x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler.ts
create mode 100644 x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler.test.ts
create mode 100644 x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler.ts
create mode 100644 x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_crawl_rules.ts
create mode 100644 x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_entry_points.ts
create mode 100644 x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_sitemaps.ts
create mode 100644 x-pack/plugins/enterprise_search/server/types/crawler.ts
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.test.tsx
index 71da3f8c596e4..7faf7696af497 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.test.tsx
@@ -13,11 +13,12 @@ import { shallow, ShallowWrapper } from 'enzyme';
import { EuiAccordion, EuiTabbedContent, EuiNotificationBadge, EuiCheckbox } from '@elastic/eui';
+import { UrlComboBox } from '../../../../../shared/url_combo_box/url_combo_box';
+
import { rerender } from '../../../../../test_helpers';
import { SimplifiedSelectable } from '../crawl_select_domains_modal/simplified_selectable';
import { CrawlCustomSettingsFlyoutSeedUrlsPanel } from './crawl_custom_settings_flyout_seed_urls_panel';
-import { UrlComboBox } from './url_combo_box';
const MOCK_VALUES = {
// CrawlCustomSettingsFlyoutLogic
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.tsx
index 457a544f91582..b72332fa4eeca 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.tsx
@@ -27,10 +27,11 @@ import {
import { i18n } from '@kbn/i18n';
import { FormattedMessage } from '@kbn/i18n-react';
+import { UrlComboBox } from '../../../../../shared/url_combo_box/url_combo_box';
+
import { SimplifiedSelectable } from '../crawl_select_domains_modal/simplified_selectable';
import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
-import { UrlComboBox } from './url_combo_box';
export const CrawlCustomSettingsFlyoutSeedUrlsPanel: React.FC = () => {
const {
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/crawl_details_preview.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/crawl_details_preview.test.tsx
index f97e2ff913150..b941b8d497850 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/crawl_details_preview.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/crawl_details_preview.test.tsx
@@ -11,10 +11,11 @@ import React from 'react';
import { shallow, ShallowWrapper } from 'enzyme';
import { set } from 'lodash/fp';
+import { AccordionList } from '../../../../../shared/accordion_list/accordion_list';
+
import { CrawlDetailValues } from '../../crawl_detail_logic';
import { CrawlerStatus, CrawlType } from '../../types';
-import { AccordionList } from './accordion_list';
import { CrawlDetailsPreview } from './crawl_details_preview';
import { CrawlDetailsSummary } from './crawl_details_summary';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/crawl_details_preview.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/crawl_details_preview.tsx
index a9f3d95edf1fa..6afe9badf1fd1 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/crawl_details_preview.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/crawl_details_preview.tsx
@@ -12,9 +12,9 @@ import { useValues } from 'kea';
import { EuiSpacer } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
+import { AccordionList } from '../../../../../shared/accordion_list/accordion_list';
import { CrawlDetailLogic } from '../../crawl_detail_logic';
-import { AccordionList } from './accordion_list';
import { CrawlDetailsSummary } from './crawl_details_summary';
interface CrawlDetailsPreviewProps {
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_select_domains_modal/simplified_selectable.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_select_domains_modal/simplified_selectable.tsx
index e13304b4a8f2f..5f57477b7ae5c 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_select_domains_modal/simplified_selectable.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_select_domains_modal/simplified_selectable.tsx
@@ -4,94 +4,5 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
-import React from 'react';
-import { EuiButtonEmpty, EuiFlexGroup, EuiFlexItem, EuiSelectable } from '@elastic/eui';
-import { EuiSelectableLIOption } from '@elastic/eui/src/components/selectable/selectable_option';
-import { i18n } from '@kbn/i18n';
-
-export interface Props {
- emptyMessage?: string;
- options: string[];
- selectedOptions: string[];
- onChange(selectedOptions: string[]): void;
-}
-
-export interface OptionMap {
- [key: string]: boolean;
-}
-
-export const SimplifiedSelectable: React.FC = ({
- emptyMessage,
- options,
- selectedOptions,
- onChange,
-}) => {
- const selectedOptionsMap: OptionMap = selectedOptions.reduce(
- (acc, selectedOption) => ({
- ...acc,
- [selectedOption]: true,
- }),
- {}
- );
-
- const selectableOptions: Array> = options.map((option) => ({
- label: option,
- checked: selectedOptionsMap[option] ? 'on' : undefined,
- }));
-
- return (
- <>
-
-
- onChange(options)}
- disabled={selectedOptions.length === options.length}
- >
- {i18n.translate(
- 'xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.selectAllButtonLabel',
- {
- defaultMessage: 'Select all',
- }
- )}
-
-
-
- onChange([])}
- disabled={selectedOptions.length === 0}
- >
- {i18n.translate(
- 'xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.deselectAllButtonLabel',
- {
- defaultMessage: 'Deselect all',
- }
- )}
-
-
-
- {
- onChange(
- newSelectableOptions.filter((option) => option.checked).map((option) => option.label)
- );
- }}
- emptyMessage={emptyMessage}
- >
- {(list, search) => (
- <>
- {search}
- {list}
- >
- )}
-
- >
- );
-};
+export { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/custom_formatted_timestamp.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/custom_formatted_timestamp.tsx
index 47098d844e7df..ec011e06e0c25 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/custom_formatted_timestamp.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/custom_formatted_timestamp.tsx
@@ -5,24 +5,4 @@
* 2.0.
*/
-import React from 'react';
-
-import { FormattedRelative } from '@kbn/i18n-react';
-
-import { FormattedDateTime } from '../../../utils/formatted_date_time';
-
-interface CustomFormattedTimestampProps {
- timestamp: string;
-}
-
-export const CustomFormattedTimestamp: React.FC = ({
- timestamp,
-}) => {
- const date = new Date(timestamp);
- const isDateToday = date >= new Date(new Date(Date.now()).toDateString());
- return isDateToday ? (
-
- ) : (
-
- );
-};
+export { CustomFormattedTimestamp } from '../../../../shared/custom_formatted_timestamp/custom_formatted_timestamp';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/sitemaps_table.test.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/sitemaps_table.test.tsx
index 8e1da07af1c8c..418157d38e756 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/sitemaps_table.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/sitemaps_table.test.tsx
@@ -27,14 +27,14 @@ describe('SitemapsTable', () => {
{ id: '2', url: 'http://www.example.com/whatever/sitemaps.xml' },
];
const domain = {
+ crawlRules: [],
createdOn: '2018-01-01T00:00:00.000Z',
+ deduplicationEnabled: true,
+ entryPoints: [],
documentCount: 10,
id: '6113e1407a2f2e6f42489794',
url: 'https://www.elastic.co',
- crawlRules: [],
- entryPoints: [],
sitemaps,
- deduplicationEnabled: true,
deduplicationFields: ['title'],
availableDeduplicationFields: ['title', 'description'],
};
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/index.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/index.ts
index 092a86180e95d..e483e28dcc2e8 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/index.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/index.ts
@@ -5,4 +5,4 @@
* 2.0.
*/
-export { DataPanel } from './data_panel';
+export { DataPanel } from '../../../shared/data_panel/data_panel';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/utils/encode_path_params/index.ts b/x-pack/plugins/enterprise_search/public/applications/app_search/utils/encode_path_params/index.ts
index 2f9d9bed30944..405d530ff20f3 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/utils/encode_path_params/index.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/utils/encode_path_params/index.ts
@@ -5,31 +5,8 @@
* 2.0.
*/
-import { generatePath, useParams } from 'react-router-dom';
-
-type PathParams = Record;
-
-export const encodePathParams = (pathParams: PathParams) => {
- const encodedParams: PathParams = {};
-
- Object.entries(pathParams).map(([key, value]) => {
- encodedParams[key] = encodeURIComponent(value);
- });
-
- return encodedParams;
-};
-
-export const generateEncodedPath = (path: string, pathParams: PathParams) => {
- return generatePath(path, encodePathParams(pathParams));
-};
-
-export const useDecodedParams = () => {
- const decodedParams: PathParams = {};
-
- const params = useParams();
- Object.entries(params).map(([key, value]) => {
- decodedParams[key] = decodeURIComponent(value as string);
- });
-
- return decodedParams;
-};
+export {
+ encodePathParams,
+ generateEncodedPath,
+ useDecodedParams,
+} from '../../../shared/encode_path_params';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/utils/formatted_date_time/index.tsx b/x-pack/plugins/enterprise_search/public/applications/app_search/utils/formatted_date_time/index.tsx
index 74ee71185f920..e794ad6e704fd 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/utils/formatted_date_time/index.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/app_search/utils/formatted_date_time/index.tsx
@@ -5,23 +5,4 @@
* 2.0.
*/
-import React from 'react';
-
-import { FormattedDate, FormattedTime } from '@kbn/i18n-react';
-
-interface Props {
- date: Date;
- hideTime?: boolean;
-}
-
-export const FormattedDateTime: React.FC = ({ date, hideTime = false }) => (
- <>
-
- {!hideTime && (
- <>
- {' '}
-
- >
- )}
- >
-);
+export { FormattedDateTime } from '../../../shared/formatted_date_time';
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawl_events.mock.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawl_events.mock.ts
new file mode 100644
index 0000000000000..cce2e3a6e2a82
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawl_events.mock.ts
@@ -0,0 +1,109 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ CrawlConfig,
+ CrawlConfigFromServer,
+ CrawlerStatus,
+ CrawlEvent,
+ CrawlEventFromServer,
+ CrawlRequest,
+ CrawlRequestFromServer,
+ CrawlRequestStats,
+ CrawlRequestStatsFromServer,
+ CrawlRequestWithDetails,
+ CrawlRequestWithDetailsFromServer,
+ CrawlType,
+} from '../types';
+
+// Server
+
+export const CRAWL_CONFIG_FROM_SERVER: CrawlConfigFromServer = {
+ domain_allowlist: ['https://elastic.co'],
+ max_crawl_depth: 2,
+ seed_urls: ['https://elastic.co/guide', 'https://elastic.co/blogs'],
+ sitemap_urls: ['https://elastic.co/sitemap.txt'],
+};
+
+export const CRAWL_REQUEST_FROM_SERVER: CrawlRequestFromServer = {
+ began_at: '1657235281',
+ completed_at: '1657235291',
+ created_at: '1657235271',
+ id: 'crawl-request-1',
+ status: CrawlerStatus.Success,
+};
+
+export const CRAWL_REQUEST_STATS_FROM_SERVER: CrawlRequestStatsFromServer = {
+ status: {
+ avg_response_time_msec: 100,
+ crawl_duration_msec: 5000,
+ pages_visited: 20,
+ status_codes: {
+ '200': 20,
+ },
+ urls_allowed: 10,
+ },
+};
+
+export const CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER: CrawlRequestWithDetailsFromServer = {
+ ...CRAWL_REQUEST_FROM_SERVER,
+ crawl_config: CRAWL_CONFIG_FROM_SERVER,
+ stats: CRAWL_REQUEST_STATS_FROM_SERVER,
+ type: CrawlType.Full,
+};
+
+export const CRAWL_EVENT_FROM_SERVER: CrawlEventFromServer = {
+ ...CRAWL_REQUEST_FROM_SERVER,
+ crawl_config: CRAWL_CONFIG_FROM_SERVER,
+ id: 'crawl-event-1',
+ stage: 'crawl',
+ type: CrawlType.Full,
+};
+
+// Client
+
+export const CRAWL_CONFIG: CrawlConfig = {
+ domainAllowlist: ['https://elastic.co'],
+ maxCrawlDepth: 2,
+ seedUrls: ['https://elastic.co/guide', 'https://elastic.co/blogs'],
+ sitemapUrls: ['https://elastic.co/sitemap.txt'],
+};
+
+export const CRAWL_REQUEST: CrawlRequest = {
+ beganAt: '1657235281',
+ completedAt: '1657235291',
+ createdAt: '1657235271',
+ id: 'crawl-request-1',
+ status: CrawlerStatus.Success,
+};
+
+export const CRAWL_REQUEST_STATS: CrawlRequestStats = {
+ status: {
+ avgResponseTimeMSec: 100,
+ crawlDurationMSec: 5000,
+ pagesVisited: 20,
+ statusCodes: {
+ '200': 20,
+ },
+ urlsAllowed: 10,
+ },
+};
+
+export const CRAWL_REQUEST_WITH_DETAILS: CrawlRequestWithDetails = {
+ ...CRAWL_REQUEST,
+ crawlConfig: CRAWL_CONFIG,
+ stats: CRAWL_REQUEST_STATS,
+ type: CrawlType.Full,
+};
+
+export const CRAWL_EVENT: CrawlEvent = {
+ ...CRAWL_REQUEST,
+ crawlConfig: CRAWL_CONFIG,
+ id: 'crawl-event-1',
+ stage: 'crawl',
+ type: CrawlType.Full,
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawler.mock.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawler.mock.ts
new file mode 100644
index 0000000000000..88e84e2fab531
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawler.mock.ts
@@ -0,0 +1,28 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ CRAWL_EVENT,
+ CRAWL_EVENT_FROM_SERVER,
+ CRAWL_REQUEST,
+ CRAWL_REQUEST_FROM_SERVER,
+} from './crawl_events.mock';
+import { CRAWLER_DOMAIN, CRAWLER_DOMAIN_FROM_SERVER } from './crawler_domains.mock';
+
+import { CrawlerData, CrawlerDataFromServer } from '../types';
+
+export const CRAWLER_DATA: CrawlerData = {
+ domains: [CRAWLER_DOMAIN],
+ events: [CRAWL_EVENT],
+ mostRecentCrawlRequest: CRAWL_REQUEST,
+};
+
+export const CRAWLER_DATA_FROM_SERVER: CrawlerDataFromServer = {
+ domains: [CRAWLER_DOMAIN_FROM_SERVER],
+ events: [CRAWL_EVENT_FROM_SERVER],
+ most_recent_crawl_request: CRAWL_REQUEST_FROM_SERVER,
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawler_domains.mock.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawler_domains.mock.ts
new file mode 100644
index 0000000000000..b6c329b78ac2a
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/_mocks_/crawler_domains.mock.ts
@@ -0,0 +1,102 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Meta } from '../../../../../../common/types';
+
+import {
+ CrawlerDomain,
+ CrawlerPolicies,
+ CrawlRule,
+ CrawlerRules,
+ EntryPoint,
+ Sitemap,
+ CrawlerDomainFromServer,
+ DomainConfigFromServer,
+ DomainConfig,
+ CrawlerDomainsWithMeta,
+ CrawlerDomainsWithMetaFromServer,
+} from '../types';
+
+export const CRAWL_RULE: CrawlRule = {
+ id: 'crawl-rule-1',
+ pattern: 'elasticsearch',
+ policy: CrawlerPolicies.allow,
+ rule: CrawlerRules.contains,
+};
+
+export const ENTRY_POINT: EntryPoint = {
+ id: 'entry-point-1',
+ value: '/guide',
+};
+
+export const SITEMAP: Sitemap = {
+ id: 'sitemap-1',
+ url: '/sitemap.txt',
+};
+
+export const META: Meta = {
+ page: {
+ current: 1,
+ size: 10,
+ total_pages: 1,
+ total_results: 8,
+ },
+};
+
+// Server
+
+export const CRAWLER_DOMAIN_CONFIG_FROM_SERVER: DomainConfigFromServer = {
+ id: 'crawler-domain-config-1',
+ name: 'https://www.elastic.co',
+ seed_urls: ['https://www.elastic.co/guide', 'https://www.elastic.co/blogs'],
+ sitemap_urls: ['https://www.elastic.co/sitemap.txt'],
+};
+
+export const CRAWLER_DOMAIN_FROM_SERVER: CrawlerDomainFromServer = {
+ available_deduplication_fields: ['title', 'url'],
+ crawl_rules: [CRAWL_RULE],
+ created_on: '1657234422',
+ deduplication_enabled: true,
+ deduplication_fields: ['url'],
+ document_count: 400,
+ entry_points: [ENTRY_POINT],
+ id: '123abc',
+ name: 'https://www.elastic.co',
+ sitemaps: [SITEMAP],
+};
+
+export const CRAWLER_DOMAINS_WITH_META_FROM_SERVER: CrawlerDomainsWithMetaFromServer = {
+ meta: META,
+ results: [CRAWLER_DOMAIN_FROM_SERVER],
+};
+
+// Client
+
+export const CRAWLER_DOMAIN_CONFIG: DomainConfig = {
+ id: 'crawler-domain-config-1',
+ name: 'https://www.elastic.co',
+ seedUrls: ['https://www.elastic.co/guide', 'https://www.elastic.co/blogs'],
+ sitemapUrls: ['https://www.elastic.co/sitemap.txt'],
+};
+
+export const CRAWLER_DOMAIN: CrawlerDomain = {
+ availableDeduplicationFields: ['title', 'url'],
+ crawlRules: [CRAWL_RULE],
+ createdOn: '1657234422',
+ deduplicationEnabled: true,
+ deduplicationFields: ['url'],
+ documentCount: 400,
+ entryPoints: [ENTRY_POINT],
+ id: '123abc',
+ sitemaps: [SITEMAP],
+ url: 'https://www.elastic.co',
+};
+
+export const CRAWLER_DOMAINS_WITH_META: CrawlerDomainsWithMeta = {
+ domains: [CRAWLER_DOMAIN],
+ meta: META,
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/create_crawler_index_api_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/create_crawler_index_api_logic.test.ts
new file mode 100644
index 0000000000000..dbb4e4939188b
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/create_crawler_index_api_logic.test.ts
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockHttpValues } from '../../../__mocks__/kea_logic';
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import { createCrawlerIndex } from './create_crawler_index_api_logic';
+
+describe('CreateCrawlerIndexApiLogic', () => {
+ const { http } = mockHttpValues;
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+ describe('createCrawlerIndex', () => {
+ it('calls correct api', async () => {
+ const indexName = 'elastic-co-crawler';
+ const language = 'Universal';
+ const promise = Promise.resolve({ created: true });
+ http.post.mockReturnValue(promise);
+
+ const result = createCrawlerIndex({ indexName, language });
+ await nextTick();
+
+ expect(http.post).toHaveBeenCalledWith('/internal/enterprise_search/crawler', {
+ body: JSON.stringify({ index_name: indexName, language }),
+ });
+ await expect(result).resolves.toEqual({ created: true });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/create_crawler_index_api_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/create_crawler_index_api_logic.ts
index ec664c52688d7..81194357faea1 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/create_crawler_index_api_logic.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/create_crawler_index_api_logic.ts
@@ -22,7 +22,7 @@ export interface CreateCrawlerIndexResponse {
created: string; // the name of the newly created index
}
-const createCrawlerIndex = async ({ indexName, language }: CreateCrawlerIndexArgs) => {
+export const createCrawlerIndex = async ({ indexName, language }: CreateCrawlerIndexArgs) => {
const route = '/internal/enterprise_search/crawler';
const params: CreateCrawlerIndexRequest = {
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/delete_crawler_domain_api_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/delete_crawler_domain_api_logic.test.ts
new file mode 100644
index 0000000000000..5db2fcdd1529f
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/delete_crawler_domain_api_logic.test.ts
@@ -0,0 +1,33 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockHttpValues } from '../../../__mocks__/kea_logic';
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import { CRAWLER_DOMAIN } from './_mocks_/crawler_domains.mock';
+import { deleteCrawlerDomain } from './delete_crawler_domain_api_logic';
+
+describe('DeleteCrawlerDomainApiLogic', () => {
+ const { http } = mockHttpValues;
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+ describe('deleteCrawlerDomain', () => {
+ it('calls correct api', async () => {
+ const indexName = 'elastic-co-crawler';
+ http.post.mockReturnValue(Promise.resolve());
+
+ const result = deleteCrawlerDomain({ domain: CRAWLER_DOMAIN, indexName });
+ await nextTick();
+ expect(http.delete).toHaveBeenCalledWith(
+ `/internal/enterprise_search/indices/elastic-co-crawler/crawler/domains/${CRAWLER_DOMAIN.id}`
+ );
+ await expect(result).resolves.toEqual({ domain: CRAWLER_DOMAIN });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/delete_crawler_domain_api_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/delete_crawler_domain_api_logic.ts
new file mode 100644
index 0000000000000..e7a959cedff47
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/delete_crawler_domain_api_logic.ts
@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
+import { HttpLogic } from '../../../shared/http';
+
+import { CrawlerDomain } from './types';
+
+export interface GetCrawlerDomainsArgs {
+ domain: CrawlerDomain;
+ indexName: string;
+}
+
+export const deleteCrawlerDomain = async ({ domain, indexName }: GetCrawlerDomainsArgs) => {
+ await HttpLogic.values.http.delete(
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domain.id}`
+ );
+
+ return {
+ domain,
+ };
+};
+
+export const DeleteCrawlerDomainApiLogic = createApiLogic(
+ ['delete_crawler_domain'],
+ deleteCrawlerDomain
+);
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_api_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_api_logic.test.ts
new file mode 100644
index 0000000000000..b525db135923e
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_api_logic.test.ts
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockHttpValues } from '../../../__mocks__/kea_logic';
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import { CRAWLER_DATA_FROM_SERVER } from './_mocks_/crawler.mock';
+import { getCrawler } from './get_crawler_api_logic';
+import { crawlerDataServerToClient } from './utils';
+
+describe('GetCrawlerApiLogic', () => {
+ const { http } = mockHttpValues;
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+ describe('getCrawler', () => {
+ it('calls correct api', async () => {
+ const indexName = 'elastic-co-crawler';
+ http.get.mockReturnValue(Promise.resolve(CRAWLER_DATA_FROM_SERVER));
+
+ const result = getCrawler({ indexName });
+ await nextTick();
+
+ expect(http.get).toHaveBeenCalledWith(
+ `/internal/enterprise_search/indices/${indexName}/crawler`
+ );
+ await expect(result).resolves.toEqual(crawlerDataServerToClient(CRAWLER_DATA_FROM_SERVER));
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_api_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_api_logic.ts
new file mode 100644
index 0000000000000..b2c960740b414
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_api_logic.ts
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
+import { HttpLogic } from '../../../shared/http';
+
+import { CrawlerData, CrawlerDataFromServer } from './types';
+
+import { crawlerDataServerToClient } from './utils';
+
+export interface GetCrawlerArgs {
+ indexName: string;
+}
+
+export const getCrawler = async ({ indexName }: GetCrawlerArgs): Promise<CrawlerData> => {
+  const response = await HttpLogic.values.http.get<CrawlerDataFromServer>(
+    `/internal/enterprise_search/indices/${indexName}/crawler`
+  );
+
+  return crawlerDataServerToClient(response);
+};
+
+export const GetCrawlerApiLogic = createApiLogic(['get_crawler'], getCrawler);
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domain_api_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domain_api_logic.test.ts
new file mode 100644
index 0000000000000..d8a9c73c5848e
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domain_api_logic.test.ts
@@ -0,0 +1,39 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockHttpValues } from '../../../__mocks__/kea_logic';
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import { CRAWLER_DOMAIN_FROM_SERVER } from './_mocks_/crawler_domains.mock';
+import { getCrawlerDomain } from './get_crawler_domain_api_logic';
+import { crawlerDomainServerToClient } from './utils';
+
+describe('GetCrawlerDomainApiLogic', () => {
+ const { http } = mockHttpValues;
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+ describe('getCrawlerDomain', () => {
+ it('calls correct api', async () => {
+ const indexName = 'elastic-co-crawler';
+ const domainId = CRAWLER_DOMAIN_FROM_SERVER.id;
+
+ http.get.mockReturnValue(Promise.resolve(CRAWLER_DOMAIN_FROM_SERVER));
+
+ const result = getCrawlerDomain({ domainId, indexName });
+ await nextTick();
+
+ expect(http.get).toHaveBeenCalledWith(
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}`
+ );
+ await expect(result).resolves.toEqual(
+ crawlerDomainServerToClient(CRAWLER_DOMAIN_FROM_SERVER)
+ );
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domain_api_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domain_api_logic.ts
new file mode 100644
index 0000000000000..762cd5d1fb00f
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domain_api_logic.ts
@@ -0,0 +1,31 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
+import { HttpLogic } from '../../../shared/http';
+
+import { CrawlerDomain, CrawlerDomainFromServer } from './types';
+
+import { crawlerDomainServerToClient } from './utils';
+
+export interface GetCrawlerDomainArgs {
+ domainId: string;
+ indexName: string;
+}
+
+export const getCrawlerDomain = async ({
+  indexName,
+  domainId,
+}: GetCrawlerDomainArgs): Promise<CrawlerDomain> => {
+  const response = await HttpLogic.values.http.get<CrawlerDomainFromServer>(
+    `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}`
+  );
+
+  return crawlerDomainServerToClient(response);
+};
+
+export const GetCrawlerDomainApiLogic = createApiLogic(['get_crawler_domain'], getCrawlerDomain);
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domains_api_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domains_api_logic.test.ts
new file mode 100644
index 0000000000000..222aa4ad4276c
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domains_api_logic.test.ts
@@ -0,0 +1,44 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockHttpValues } from '../../../__mocks__/kea_logic';
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import { CRAWLER_DOMAINS_WITH_META_FROM_SERVER, META } from './_mocks_/crawler_domains.mock';
+import { getCrawlerDomains } from './get_crawler_domains_api_logic';
+import { crawlerDomainsWithMetaServerToClient } from './utils';
+
+describe('GetCrawlerDomainsApiLogic', () => {
+ const { http } = mockHttpValues;
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+ describe('getCrawlerDomains', () => {
+ it('calls correct api', async () => {
+ const indexName = 'elastic-co-crawler';
+
+ http.get.mockReturnValue(Promise.resolve(CRAWLER_DOMAINS_WITH_META_FROM_SERVER));
+
+ const result = getCrawlerDomains({ indexName, meta: META });
+ await nextTick();
+
+ expect(http.get).toHaveBeenCalledWith(
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains`,
+ {
+ query: {
+ 'page[current]': META.page.current,
+ 'page[size]': META.page.size,
+ },
+ }
+ );
+ await expect(result).resolves.toEqual(
+ crawlerDomainsWithMetaServerToClient(CRAWLER_DOMAINS_WITH_META_FROM_SERVER)
+ );
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domains_api_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domains_api_logic.ts
new file mode 100644
index 0000000000000..4096ece3b0757
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/get_crawler_domains_api_logic.ts
@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { Meta } from '../../../../../common/types';
+import { createApiLogic } from '../../../shared/api_logic/create_api_logic';
+import { HttpLogic } from '../../../shared/http';
+
+import { CrawlerDomainsWithMetaFromServer } from './types';
+
+import { crawlerDomainsWithMetaServerToClient } from './utils';
+
+export interface GetCrawlerDomainsArgs {
+ indexName: string;
+ meta: Meta;
+}
+
+export const getCrawlerDomains = async ({ indexName, meta }: GetCrawlerDomainsArgs) => {
+ const query = {
+ 'page[current]': meta.page.current,
+ 'page[size]': meta.page.size,
+ };
+
+  const response = await HttpLogic.values.http.get<CrawlerDomainsWithMetaFromServer>(
+    `/internal/enterprise_search/indices/${indexName}/crawler/domains`,
+    {
+      query,
+    }
+  );
+
+ return crawlerDomainsWithMetaServerToClient(response);
+};
+
+export const GetCrawlerDomainsApiLogic = createApiLogic(['get_crawler_domains'], getCrawlerDomains);
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/types.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/types.ts
new file mode 100644
index 0000000000000..a9119d6fb9e02
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/types.ts
@@ -0,0 +1,254 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { Meta } from '../../../../../common/types';
+
+export enum CrawlerPolicies {
+ allow = 'allow',
+ deny = 'deny',
+}
+
+export enum CrawlerRules {
+ beginsWith = 'begins',
+ endsWith = 'ends',
+ contains = 'contains',
+ regex = 'regex',
+}
+
+export interface CrawlRule {
+ id: string;
+ pattern: string;
+ policy: CrawlerPolicies;
+ rule: CrawlerRules;
+}
+
+export interface EntryPoint {
+ id: string;
+ value: string;
+}
+
+export interface Sitemap {
+ id: string;
+ url: string;
+}
+
+export type CrawlerDomainValidationStepState = '' | 'loading' | 'valid' | 'warning' | 'invalid';
+
+// The BE uses a singular form of each unit
+// See shared_togo/app/models/shared_togo/crawler/crawl_schedule.rb
+export enum CrawlUnits {
+ hours = 'hour',
+ days = 'day',
+ weeks = 'week',
+ months = 'month',
+}
+
+export type CrawlerDomainValidationStepName =
+ | 'initialValidation'
+ | 'networkConnectivity'
+ | 'indexingRestrictions'
+ | 'contentVerification';
+// See SharedTogo::Crawler::Status for details on how these are generated
+export enum CrawlerStatus {
+ Pending = 'pending',
+ Suspended = 'suspended',
+ Starting = 'starting',
+ Running = 'running',
+ Suspending = 'suspending',
+ Canceling = 'canceling',
+ Success = 'success',
+ Failed = 'failed',
+ Canceled = 'canceled',
+ Skipped = 'skipped',
+}
+
+export type CrawlEventStage = 'crawl' | 'process';
+
+export enum CrawlType {
+ Full = 'full',
+ Partial = 'partial',
+}
+
+// Server
+
+export interface CrawlerDomainFromServer {
+ available_deduplication_fields: string[];
+ crawl_rules: CrawlRule[];
+ created_on: string;
+ deduplication_enabled: boolean;
+ deduplication_fields: string[];
+ default_crawl_rule?: CrawlRule;
+ document_count: number;
+ entry_points: EntryPoint[];
+ id: string;
+ last_visited_at?: string;
+ name: string;
+ sitemaps: Sitemap[];
+}
+
+export interface CrawlerDomainsWithMetaFromServer {
+ meta: Meta;
+ results: CrawlerDomainFromServer[];
+}
+
+export interface CrawlerDataFromServer {
+ domains: CrawlerDomainFromServer[];
+ events: CrawlEventFromServer[];
+ most_recent_crawl_request: CrawlRequestFromServer | null;
+}
+
+export interface CrawlerDomainValidationResultFromServer {
+ results: Array<{
+ comment: string;
+ name: string;
+ result: 'ok' | 'warning' | 'failure';
+ }>;
+ valid: boolean;
+}
+
+export interface CrawlRequestFromServer {
+ began_at: string | null;
+ completed_at: string | null;
+ created_at: string;
+ id: string;
+ status: CrawlerStatus;
+}
+
+export interface CrawlRequestStatsFromServer {
+ status: {
+ avg_response_time_msec?: number;
+ crawl_duration_msec?: number;
+ pages_visited?: number;
+ status_codes?: {
+ [code: string]: number;
+ };
+ urls_allowed?: number;
+ };
+}
+
+export interface CrawlConfigFromServer {
+ domain_allowlist: string[];
+ max_crawl_depth: number;
+ seed_urls: string[];
+ sitemap_urls: string[];
+}
+
+export type CrawlRequestWithDetailsFromServer = CrawlRequestFromServer & {
+ crawl_config: CrawlConfigFromServer;
+ stats: CrawlRequestStatsFromServer;
+ type: CrawlType;
+};
+
+export type CrawlEventFromServer = CrawlRequestFromServer & {
+ crawl_config: CrawlConfigFromServer;
+ stage: CrawlEventStage;
+ type: CrawlType;
+};
+
+export interface DomainConfigFromServer {
+ id: string;
+ name: string;
+ seed_urls: string[];
+ sitemap_urls: string[];
+}
+
+// Client
+
+export interface CrawlerDomain {
+ availableDeduplicationFields: string[];
+ crawlRules: CrawlRule[];
+ createdOn: string;
+ deduplicationEnabled: boolean;
+ deduplicationFields: string[];
+ defaultCrawlRule?: CrawlRule;
+ documentCount: number;
+ entryPoints: EntryPoint[];
+ id: string;
+ lastCrawl?: string;
+ sitemaps: Sitemap[];
+ url: string;
+}
+
+export interface CrawlerDomainsWithMeta {
+ domains: CrawlerDomain[];
+ meta: Meta;
+}
+
+export interface CrawlerData {
+ domains: CrawlerDomain[];
+ events: CrawlEvent[];
+ mostRecentCrawlRequest: CrawlRequest | null;
+}
+
+export interface CrawlerDomainValidationStep {
+ blockingFailure?: boolean;
+ message?: string;
+ state: CrawlerDomainValidationStepState;
+}
+
+interface CrawlerDomainValidationState {
+ contentVerification: CrawlerDomainValidationStep;
+ indexingRestrictions: CrawlerDomainValidationStep;
+ initialValidation: CrawlerDomainValidationStep;
+ networkConnectivity: CrawlerDomainValidationStep;
+}
+
+export interface CrawlerDomainValidationResult {
+ steps: CrawlerDomainValidationState;
+}
+
+export type CrawlerDomainValidationResultChange = Partial<CrawlerDomainValidationState>;
+
+export interface CrawlRequest {
+ beganAt: string | null;
+ completedAt: string | null;
+ createdAt: string;
+ id: string;
+ status: CrawlerStatus;
+}
+
+export interface CrawlRequestStats {
+ status: {
+ avgResponseTimeMSec?: number;
+ crawlDurationMSec?: number;
+ pagesVisited?: number;
+ statusCodes?: {
+ [code: string]: number;
+ };
+ urlsAllowed?: number;
+ };
+}
+
+export interface CrawlConfig {
+ domainAllowlist: string[];
+ maxCrawlDepth: number;
+ seedUrls: string[];
+ sitemapUrls: string[];
+}
+
+export type CrawlRequestWithDetails = CrawlRequest & {
+ crawlConfig: CrawlConfig;
+ stats: CrawlRequestStats | null;
+ type: CrawlType;
+};
+
+export type CrawlEvent = CrawlRequest & {
+ crawlConfig: CrawlConfig;
+ stage: CrawlEventStage;
+ type: CrawlType;
+};
+
+export interface CrawlSchedule {
+ frequency: number;
+ unit: CrawlUnits;
+}
+
+export interface DomainConfig {
+ id: string;
+ name: string;
+ seedUrls: string[];
+ sitemapUrls: string[];
+}
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/utils.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/utils.test.ts
new file mode 100644
index 0000000000000..1f03778e3ddb1
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/utils.test.ts
@@ -0,0 +1,194 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ CRAWL_EVENT,
+ CRAWL_EVENT_FROM_SERVER,
+ CRAWL_REQUEST,
+ CRAWL_REQUEST_FROM_SERVER,
+ CRAWL_REQUEST_WITH_DETAILS,
+ CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER,
+} from './_mocks_/crawl_events.mock';
+import { CRAWLER_DATA, CRAWLER_DATA_FROM_SERVER } from './_mocks_/crawler.mock';
+import {
+ CRAWLER_DOMAIN,
+ CRAWLER_DOMAINS_WITH_META,
+ CRAWLER_DOMAINS_WITH_META_FROM_SERVER,
+ CRAWLER_DOMAIN_CONFIG,
+ CRAWLER_DOMAIN_CONFIG_FROM_SERVER,
+ CRAWLER_DOMAIN_FROM_SERVER,
+ CRAWL_RULE,
+} from './_mocks_/crawler_domains.mock';
+
+import { CrawlerDomainValidationStep, CrawlerDomainValidationResultFromServer } from './types';
+
+import {
+ crawlerDomainServerToClient,
+ crawlerDataServerToClient,
+ crawlDomainValidationToResult,
+ crawlEventServerToClient,
+ crawlRequestServerToClient,
+ crawlRequestWithDetailsServerToClient,
+ domainConfigServerToClient,
+ crawlerDomainsWithMetaServerToClient,
+} from './utils';
+
+describe('crawlerDomainServerToClient', () => {
+ it('converts the API payload into properties matching our code style', () => {
+ expect(crawlerDomainServerToClient(CRAWLER_DOMAIN_FROM_SERVER)).toStrictEqual(CRAWLER_DOMAIN);
+ expect(
+ crawlerDomainServerToClient({
+ ...CRAWLER_DOMAIN_FROM_SERVER,
+ last_visited_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ })
+ ).toStrictEqual({ ...CRAWLER_DOMAIN, lastCrawl: 'Mon, 31 Aug 2020 17:00:00 +0000' });
+ expect(
+ crawlerDomainServerToClient({
+ ...CRAWLER_DOMAIN_FROM_SERVER,
+ default_crawl_rule: CRAWL_RULE,
+ })
+ ).toStrictEqual({ ...CRAWLER_DOMAIN, defaultCrawlRule: CRAWL_RULE });
+ });
+});
+
+describe('crawlRequestServerToClient', () => {
+ it('converts the API payload into properties matching our code style', () => {
+ expect(crawlRequestServerToClient(CRAWL_REQUEST_FROM_SERVER)).toStrictEqual(CRAWL_REQUEST);
+ expect(
+ crawlRequestServerToClient({
+ ...CRAWL_REQUEST_FROM_SERVER,
+ began_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ })
+ ).toStrictEqual({ ...CRAWL_REQUEST, beganAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
+ expect(
+ crawlRequestServerToClient({
+ ...CRAWL_REQUEST_FROM_SERVER,
+ completed_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ })
+ ).toStrictEqual({ ...CRAWL_REQUEST, completedAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
+ });
+});
+
+describe('crawlRequestWithDetailsServerToClient', () => {
+ it('converts the API payload into properties matching our code style', () => {
+ expect(
+ crawlRequestWithDetailsServerToClient(CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER)
+ ).toStrictEqual(CRAWL_REQUEST_WITH_DETAILS);
+ expect(
+ crawlRequestWithDetailsServerToClient({
+ ...CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER,
+ began_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ })
+ ).toStrictEqual({ ...CRAWL_REQUEST_WITH_DETAILS, beganAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
+ expect(
+ crawlRequestWithDetailsServerToClient({
+ ...CRAWL_REQUEST_WITH_DETAILS_FROM_SERVER,
+ completed_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ })
+ ).toStrictEqual({
+ ...CRAWL_REQUEST_WITH_DETAILS,
+ completedAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ });
+ });
+});
+
+describe('crawlEventServerToClient', () => {
+ it('converts the API payload into properties matching our code style', () => {
+ expect(crawlEventServerToClient(CRAWL_EVENT_FROM_SERVER)).toStrictEqual(CRAWL_EVENT);
+ expect(
+ crawlEventServerToClient({
+ ...CRAWL_EVENT_FROM_SERVER,
+ began_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ })
+ ).toStrictEqual({ ...CRAWL_EVENT, beganAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
+ expect(
+ crawlEventServerToClient({
+ ...CRAWL_EVENT_FROM_SERVER,
+ completed_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ })
+ ).toStrictEqual({ ...CRAWL_EVENT, completedAt: 'Mon, 31 Aug 2020 17:00:00 +0000' });
+ });
+});
+
+describe('crawlerDataServerToClient', () => {
+ it('converts all data from the server form to their client form', () => {
+ expect(crawlerDataServerToClient(CRAWLER_DATA_FROM_SERVER)).toStrictEqual(CRAWLER_DATA);
+ });
+});
+
+describe('crawlDomainValidationToResult', () => {
+ it('handles results with warnings', () => {
+ const data: CrawlerDomainValidationResultFromServer = {
+ results: [
+ {
+ comment: 'A warning, not failure',
+ name: '-',
+ result: 'warning',
+ },
+ ],
+ valid: true,
+ };
+
+ expect(crawlDomainValidationToResult(data)).toEqual({
+ blockingFailure: false,
+ message: 'A warning, not failure',
+ state: 'warning',
+ } as CrawlerDomainValidationStep);
+ });
+
+ it('handles valid results, without warnings', () => {
+ const data: CrawlerDomainValidationResultFromServer = {
+ results: [
+ {
+ comment: 'Something happened',
+ name: '-',
+ result: 'ok',
+ },
+ ],
+ valid: true,
+ };
+
+ expect(crawlDomainValidationToResult(data)).toEqual({
+ state: 'valid',
+ } as CrawlerDomainValidationStep);
+ });
+
+ it('handes invalid results', () => {
+ const data: CrawlerDomainValidationResultFromServer = {
+ results: [
+ {
+ comment: 'Something unexpected happened',
+ name: '-',
+ result: 'failure',
+ },
+ ],
+ valid: false,
+ };
+
+ expect(crawlDomainValidationToResult(data)).toEqual({
+ blockingFailure: true,
+ message: 'Something unexpected happened',
+ state: 'invalid',
+ } as CrawlerDomainValidationStep);
+ });
+});
+
+describe('domainConfigServerToClient', () => {
+ it('converts the domain config payload into properties matching our code style', () => {
+ expect(domainConfigServerToClient(CRAWLER_DOMAIN_CONFIG_FROM_SERVER)).toEqual(
+ CRAWLER_DOMAIN_CONFIG
+ );
+ });
+});
+
+describe('crawlerDomainsWithMetaServerToClient', () => {
+ it('converts the domain config payload into properties matching our code style', () => {
+ expect(crawlerDomainsWithMetaServerToClient(CRAWLER_DOMAINS_WITH_META_FROM_SERVER)).toEqual(
+ CRAWLER_DOMAINS_WITH_META
+ );
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/utils.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/utils.ts
new file mode 100644
index 0000000000000..1510e8c9afc3a
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/api/crawler/utils.ts
@@ -0,0 +1,231 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ CrawlerDomain,
+ CrawlerDomainFromServer,
+ CrawlerData,
+ CrawlerDataFromServer,
+ CrawlerDomainValidationResultFromServer,
+ CrawlerDomainValidationStep,
+ CrawlRequestFromServer,
+ CrawlRequest,
+ CrawlRequestStats,
+ CrawlRequestStatsFromServer,
+ CrawlEventFromServer,
+ CrawlEvent,
+ CrawlConfigFromServer,
+ CrawlConfig,
+ CrawlRequestWithDetailsFromServer,
+ CrawlRequestWithDetails,
+ DomainConfig,
+ DomainConfigFromServer,
+ CrawlerDomainsWithMetaFromServer,
+ CrawlerDomainsWithMeta,
+} from './types';
+
+export function crawlerDomainServerToClient(payload: CrawlerDomainFromServer): CrawlerDomain {
+ const {
+ id,
+ name,
+ sitemaps,
+ created_on: createdOn,
+ last_visited_at: lastCrawl,
+ document_count: documentCount,
+ crawl_rules: crawlRules,
+ default_crawl_rule: defaultCrawlRule,
+ entry_points: entryPoints,
+ deduplication_enabled: deduplicationEnabled,
+ deduplication_fields: deduplicationFields,
+ available_deduplication_fields: availableDeduplicationFields,
+ } = payload;
+
+ const clientPayload: CrawlerDomain = {
+ availableDeduplicationFields,
+ crawlRules,
+ createdOn,
+ deduplicationEnabled,
+ deduplicationFields,
+ documentCount,
+ entryPoints,
+ id,
+ sitemaps,
+ url: name,
+ };
+
+ if (lastCrawl) {
+ clientPayload.lastCrawl = lastCrawl;
+ }
+
+ if (defaultCrawlRule) {
+ clientPayload.defaultCrawlRule = defaultCrawlRule;
+ }
+
+ return clientPayload;
+}
+
+export function crawlRequestStatsServerToClient(
+ crawlStats: CrawlRequestStatsFromServer
+): CrawlRequestStats {
+ const {
+ status: {
+ avg_response_time_msec: avgResponseTimeMSec,
+ crawl_duration_msec: crawlDurationMSec,
+ pages_visited: pagesVisited,
+ urls_allowed: urlsAllowed,
+ status_codes: statusCodes,
+ },
+ } = crawlStats;
+
+ return {
+ status: {
+ avgResponseTimeMSec,
+ crawlDurationMSec,
+ pagesVisited,
+ statusCodes,
+ urlsAllowed,
+ },
+ };
+}
+
+export function crawlRequestServerToClient(crawlRequest: CrawlRequestFromServer): CrawlRequest {
+ const {
+ id,
+ status,
+ created_at: createdAt,
+ began_at: beganAt,
+ completed_at: completedAt,
+ } = crawlRequest;
+
+ return {
+ beganAt,
+ completedAt,
+ createdAt,
+ id,
+ status,
+ };
+}
+
+export function crawlConfigServerToClient(crawlConfig: CrawlConfigFromServer): CrawlConfig {
+ const {
+ domain_allowlist: domainAllowlist,
+ seed_urls: seedUrls,
+ sitemap_urls: sitemapUrls,
+ max_crawl_depth: maxCrawlDepth,
+ } = crawlConfig;
+
+ return {
+ domainAllowlist,
+ maxCrawlDepth,
+ seedUrls,
+ sitemapUrls,
+ };
+}
+
+export function crawlEventServerToClient(event: CrawlEventFromServer): CrawlEvent {
+ const {
+ id,
+ stage,
+ status,
+ created_at: createdAt,
+ began_at: beganAt,
+ completed_at: completedAt,
+ type,
+ crawl_config: crawlConfig,
+ } = event;
+
+ return {
+ beganAt,
+ completedAt,
+ crawlConfig: crawlConfigServerToClient(crawlConfig),
+ createdAt,
+ id,
+ stage,
+ status,
+ type,
+ };
+}
+
+export function crawlRequestWithDetailsServerToClient(
+ event: CrawlRequestWithDetailsFromServer
+): CrawlRequestWithDetails {
+ const {
+ began_at: beganAt,
+ completed_at: completedAt,
+ crawl_config: crawlConfig,
+ created_at: createdAt,
+ id,
+ stats: crawlStats,
+ status,
+ type,
+ } = event;
+
+ return {
+ beganAt,
+ completedAt,
+ crawlConfig: crawlConfigServerToClient(crawlConfig),
+ createdAt,
+ id,
+ stats: crawlStats && crawlRequestStatsServerToClient(crawlStats),
+ status,
+ type,
+ };
+}
+
+export function crawlerDataServerToClient(payload: CrawlerDataFromServer): CrawlerData {
+ const { domains, events, most_recent_crawl_request: mostRecentCrawlRequest } = payload;
+
+ return {
+ domains: domains.map((domain) => crawlerDomainServerToClient(domain)),
+ events: events.map((event) => crawlEventServerToClient(event)),
+ mostRecentCrawlRequest:
+ mostRecentCrawlRequest && crawlRequestServerToClient(mostRecentCrawlRequest),
+ };
+}
+
+export function crawlDomainValidationToResult(
+ data: CrawlerDomainValidationResultFromServer
+): CrawlerDomainValidationStep {
+ if (!data.valid) {
+ return {
+ blockingFailure: true,
+ message: data.results.find((result) => result.result === 'failure')?.comment,
+ state: 'invalid',
+ };
+ }
+
+ const warningResult = data.results.find((result) => result.result === 'warning');
+
+ if (warningResult) {
+ return {
+ blockingFailure: !data.valid,
+ message: warningResult.comment,
+ state: 'warning',
+ };
+ }
+
+ return {
+ state: 'valid',
+ };
+}
+
+export const domainConfigServerToClient = (
+ domainConfigFromServer: DomainConfigFromServer
+): DomainConfig => ({
+ id: domainConfigFromServer.id,
+ name: domainConfigFromServer.name,
+ seedUrls: domainConfigFromServer.seed_urls,
+ sitemapUrls: domainConfigFromServer.sitemap_urls,
+});
+
+export const crawlerDomainsWithMetaServerToClient = ({
+ results,
+ meta,
+}: CrawlerDomainsWithMetaFromServer): CrawlerDomainsWithMeta => ({
+ domains: results.map(crawlerDomainServerToClient),
+ meta,
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawl_rules_table.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawl_rules_table.test.tsx
new file mode 100644
index 0000000000000..c5d871a70cf20
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawl_rules_table.test.tsx
@@ -0,0 +1,302 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockFlashMessageHelpers, setMockActions } from '../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiFieldText, EuiSelect } from '@elastic/eui';
+
+import { GenericEndpointInlineEditableTable } from '../../../shared/tables/generic_endpoint_inline_editable_table';
+import { CrawlerPolicies, CrawlerRules } from '../../api/crawler/types';
+
+import { CrawlRulesTable, CrawlRulesTableProps } from './crawl_rules_table';
+
+describe('CrawlRulesTable', () => {
+ const { clearFlashMessages, flashSuccessToast } = mockFlashMessageHelpers;
+ const indexName = 'index-name';
+ const crawlRules = [
+ { id: '1', pattern: '*', policy: CrawlerPolicies.allow, rule: CrawlerRules.beginsWith },
+ { id: '2', pattern: '*', policy: CrawlerPolicies.deny, rule: CrawlerRules.endsWith },
+ ];
+
+ const DEFAULT_PROPS: CrawlRulesTableProps = {
+ crawlRules,
+ domainId: '6113e1407a2f2e6f42489794',
+ indexName,
+ };
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('renders', () => {
+    const wrapper = shallow(<CrawlRulesTable {...DEFAULT_PROPS} />);
+
+ expect(wrapper.find(GenericEndpointInlineEditableTable).exists()).toBe(true);
+ });
+
+ describe('columns', () => {
+ const crawlRule = {
+ id: '1',
+ pattern: '*',
+ policy: CrawlerPolicies.allow,
+ rule: CrawlerRules.beginsWith,
+ };
+ let wrapper: ShallowWrapper;
+
+ beforeEach(() => {
+      wrapper = shallow(<CrawlRulesTable {...DEFAULT_PROPS} />);
+ });
+
+    const renderColumn = (index: number) => {
+      const columns = wrapper.find(GenericEndpointInlineEditableTable).prop('columns');
+      return shallow(<div>{columns[index].render(crawlRule)}</div>);
+    };
+
+ const onChange = jest.fn();
+    const renderColumnInEditingMode = (index: number) => {
+      const columns = wrapper.find(GenericEndpointInlineEditableTable).prop('columns');
+      return shallow(
+        <div>
+          {columns[index].editingRender(crawlRule, onChange, {
+            isInvalid: false,
+            isLoading: false,
+          })}
+        </div>
+      );
+    };
+
+ describe('policy column', () => {
+ it('shows the policy of a crawl rule', () => {
+ expect(renderColumn(0).html()).toContain('Allow');
+ });
+
+ it('can show the policy of a crawl rule as editable', () => {
+ const column = renderColumnInEditingMode(0);
+
+ const selectField = column.find(EuiSelect);
+ expect(selectField.props()).toEqual(
+ expect.objectContaining({
+ disabled: false,
+ isInvalid: false,
+ options: [
+ { text: 'Allow', value: 'allow' },
+ { text: 'Disallow', value: 'deny' },
+ ],
+ value: 'allow',
+ })
+ );
+
+ selectField.simulate('change', { target: { value: 'deny' } });
+ expect(onChange).toHaveBeenCalledWith('deny');
+ });
+ });
+
+ describe('rule column', () => {
+ it('shows the rule of a crawl rule', () => {
+ expect(renderColumn(1).html()).toContain('Begins with');
+ });
+
+ it('can show the rule of a crawl rule as editable', () => {
+ const column = renderColumnInEditingMode(1);
+
+ const selectField = column.find(EuiSelect);
+ expect(selectField.props()).toEqual(
+ expect.objectContaining({
+ disabled: false,
+ isInvalid: false,
+ options: [
+ { text: 'Begins with', value: 'begins' },
+ { text: 'Ends with', value: 'ends' },
+ { text: 'Contains', value: 'contains' },
+ { text: 'Regex', value: 'regex' },
+ ],
+ value: 'begins',
+ })
+ );
+
+ selectField.simulate('change', { target: { value: 'ends' } });
+ expect(onChange).toHaveBeenCalledWith('ends');
+ });
+ });
+
+ describe('pattern column', () => {
+ it('shows the pattern of a crawl rule', () => {
+ expect(renderColumn(2).html()).toContain('*');
+ });
+
+ it('can show the pattern of a crawl rule as editable', () => {
+ const column = renderColumnInEditingMode(2);
+
+ const field = column.find(EuiFieldText);
+ expect(field.props()).toEqual(
+ expect.objectContaining({
+ disabled: false,
+ isInvalid: false,
+ value: '*',
+ })
+ );
+
+ field.simulate('change', { target: { value: 'foo' } });
+ expect(onChange).toHaveBeenCalledWith('foo');
+ });
+ });
+ });
+
+ describe('routes', () => {
+ it('can calculate an update and delete route correctly', () => {
+      const wrapper = shallow(<CrawlRulesTable {...DEFAULT_PROPS} />);
+
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const crawlRule = {
+ id: '1',
+ pattern: '*',
+ policy: CrawlerPolicies.allow,
+ rule: CrawlerRules.beginsWith,
+ };
+ expect(table.prop('deleteRoute')(crawlRule)).toEqual(
+ '/internal/enterprise_search/indices/index-name/crawler/domains/6113e1407a2f2e6f42489794/crawl_rules/1'
+ );
+ expect(table.prop('updateRoute')(crawlRule)).toEqual(
+ '/internal/enterprise_search/indices/index-name/crawler/domains/6113e1407a2f2e6f42489794/crawl_rules/1'
+ );
+ });
+ });
+
+ it('shows a custom description if one is provided', () => {
+    const wrapper = shallow(
+      <CrawlRulesTable {...DEFAULT_PROPS} description="I am a description" />
+    );
+
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+ expect(table.prop('description')).toEqual('I am a description');
+ });
+
+ it('shows a default crawl rule as uneditable if one is provided', () => {
+    const wrapper = shallow(
+      <CrawlRulesTable {...DEFAULT_PROPS} defaultCrawlRule={crawlRules[0]} />
+    );
+
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+ expect(table.prop('uneditableItems')).toEqual([crawlRules[0]]);
+ });
+
+ describe('when a crawl rule is added', () => {
+ it('should update the crawl rules for the current domain, and clear flash messages', () => {
+ const updateCrawlRules = jest.fn();
+ setMockActions({
+ updateCrawlRules,
+ });
+      const wrapper = shallow(
+        <CrawlRulesTable {...DEFAULT_PROPS} />
+      );
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const crawlRulesThatWasAdded = {
+ id: '2',
+ pattern: '*',
+ policy: CrawlerPolicies.deny,
+ rule: CrawlerRules.endsWith,
+ };
+ const updatedCrawlRules = [
+ { id: '1', pattern: '*', policy: CrawlerPolicies.allow, rule: CrawlerRules.beginsWith },
+ { id: '2', pattern: '*', policy: CrawlerPolicies.deny, rule: CrawlerRules.endsWith },
+ ];
+ table.prop('onAdd')(crawlRulesThatWasAdded, updatedCrawlRules);
+ expect(updateCrawlRules).toHaveBeenCalledWith(updatedCrawlRules);
+ expect(clearFlashMessages).toHaveBeenCalled();
+ });
+ });
+
+ describe('when a crawl rule is updated', () => {
+ it('should update the crawl rules for the current domain, and clear flash messages', () => {
+ const updateCrawlRules = jest.fn();
+ setMockActions({
+ updateCrawlRules,
+ });
+      const wrapper = shallow(
+        <CrawlRulesTable {...DEFAULT_PROPS} />
+      );
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const crawlRulesThatWasUpdated = {
+ id: '2',
+ pattern: '*',
+ policy: CrawlerPolicies.deny,
+ rule: CrawlerRules.endsWith,
+ };
+ const updatedCrawlRules = [
+ { id: '1', pattern: '*', policy: CrawlerPolicies.allow, rule: CrawlerRules.beginsWith },
+ {
+ id: '2',
+ pattern: 'newPattern',
+ policy: CrawlerPolicies.deny,
+ rule: CrawlerRules.endsWith,
+ },
+ ];
+ table.prop('onUpdate')(crawlRulesThatWasUpdated, updatedCrawlRules);
+ expect(updateCrawlRules).toHaveBeenCalledWith(updatedCrawlRules);
+ expect(clearFlashMessages).toHaveBeenCalled();
+ });
+ });
+
+ describe('when a crawl rule is deleted', () => {
+ it('should update the crawl rules for the current domain, clear flash messages, and show a success', () => {
+ const updateCrawlRules = jest.fn();
+ setMockActions({
+ updateCrawlRules,
+ });
+      const wrapper = shallow(
+        <CrawlRulesTable {...DEFAULT_PROPS} />
+      );
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const crawlRulesThatWasDeleted = {
+ id: '2',
+ pattern: '*',
+ policy: CrawlerPolicies.deny,
+ rule: CrawlerRules.endsWith,
+ };
+ const updatedCrawlRules = [
+ { id: '1', pattern: '*', policy: CrawlerPolicies.allow, rule: CrawlerRules.beginsWith },
+ ];
+ table.prop('onDelete')(crawlRulesThatWasDeleted, updatedCrawlRules);
+ expect(updateCrawlRules).toHaveBeenCalledWith(updatedCrawlRules);
+ expect(clearFlashMessages).toHaveBeenCalled();
+ expect(flashSuccessToast).toHaveBeenCalled();
+ });
+ });
+
+ describe('when a crawl rule is reordered', () => {
+ it('should update the crawl rules for the current domain and clear flash messages', () => {
+ const updateCrawlRules = jest.fn();
+ setMockActions({
+ updateCrawlRules,
+ });
+      const wrapper = shallow(
+        <CrawlRulesTable {...DEFAULT_PROPS} />
+      );
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const updatedCrawlRules = [
+ { id: '2', pattern: '*', policy: CrawlerPolicies.deny, rule: CrawlerRules.endsWith },
+ { id: '1', pattern: '*', policy: CrawlerPolicies.allow, rule: CrawlerRules.beginsWith },
+ ];
+ table.prop('onReorder')!(updatedCrawlRules);
+ expect(updateCrawlRules).toHaveBeenCalledWith(updatedCrawlRules);
+ expect(clearFlashMessages).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawl_rules_table.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawl_rules_table.tsx
new file mode 100644
index 0000000000000..3b3ee0282dfde
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawl_rules_table.tsx
@@ -0,0 +1,258 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions } from 'kea';
+
+import {
+ EuiCode,
+ EuiFieldText,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiIconTip,
+ EuiLink,
+ EuiSelect,
+ EuiText,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n-react';
+
+import { clearFlashMessages, flashSuccessToast } from '../../../shared/flash_messages';
+import { GenericEndpointInlineEditableTable } from '../../../shared/tables/generic_endpoint_inline_editable_table';
+
+import { InlineEditableTableColumn } from '../../../shared/tables/inline_editable_table/types';
+import { ItemWithAnID } from '../../../shared/tables/types';
+import { CrawlerPolicies, CrawlRule, CrawlerRules } from '../../api/crawler/types';
+
+import { CrawlerDomainDetailLogic } from './crawler_domain_detail_logic';
+
+export interface CrawlRulesTableProps {
+ crawlRules: CrawlRule[];
+ defaultCrawlRule?: CrawlRule;
+ description?: React.ReactNode;
+ domainId: string;
+ indexName: string;
+}
+
+export const getReadableCrawlerRule = (rule: CrawlerRules) => {
+ switch (rule) {
+ case CrawlerRules.beginsWith:
+ return i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.beginsWithLabel',
+ {
+ defaultMessage: 'Begins with',
+ }
+ );
+ case CrawlerRules.endsWith:
+ return i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.endsWithLabel', {
+ defaultMessage: 'Ends with',
+ });
+ case CrawlerRules.contains:
+ return i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.containsLabel', {
+ defaultMessage: 'Contains',
+ });
+ case CrawlerRules.regex:
+ return i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.regexLabel', {
+ defaultMessage: 'Regex',
+ });
+ }
+};
+
+export const getReadableCrawlerPolicy = (policy: CrawlerPolicies) => {
+ switch (policy) {
+ case CrawlerPolicies.allow:
+ return i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesPolicies.allowLabel', {
+ defaultMessage: 'Allow',
+ });
+ case CrawlerPolicies.deny:
+ return i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesPolicies.disallowLabel', {
+ defaultMessage: 'Disallow',
+ });
+ }
+};
+
+export const getCrawlRulePathPatternTooltip = (crawlRule: CrawlRule) => {
+ if (crawlRule.rule === CrawlerRules.regex) {
+ return i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlRulesTable.regexPathPatternTooltip',
+ {
+ defaultMessage:
+ 'The path pattern is a regular expression compatible with the Ruby language regular expression engine.',
+ }
+ );
+ }
+
+ return i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTooltip', {
+ defaultMessage:
+ 'The path pattern is a literal string except for the asterisk (*) character, which is a meta character that will match anything.',
+ });
+};
+
+const DEFAULT_DESCRIPTION = (
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesTable.descriptionLinkText', {
+ defaultMessage: 'Learn more about crawl rules',
+ })}
+
+ ),
+ }}
+ />
+
+);
+
+export const CrawlRulesTable: React.FC = ({
+ description = DEFAULT_DESCRIPTION,
+ domainId,
+ indexName,
+ crawlRules,
+ defaultCrawlRule,
+}) => {
+ const { updateCrawlRules } = useActions(CrawlerDomainDetailLogic);
+
+ const columns: Array> = [
+ {
+ editingRender: (crawlRule, onChange, { isInvalid, isLoading }) => (
+ onChange(e.target.value)}
+ disabled={isLoading}
+ isInvalid={isInvalid}
+ options={[CrawlerPolicies.allow, CrawlerPolicies.deny].map(
+ (policyOption: CrawlerPolicies) => ({
+ text: getReadableCrawlerPolicy(policyOption),
+ value: policyOption,
+ })
+ )}
+ />
+ ),
+ field: 'policy',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesTable.policyTableHead', {
+ defaultMessage: 'Policy',
+ }),
+ render: (crawlRule) => (
+ {getReadableCrawlerPolicy((crawlRule as CrawlRule).policy)}
+ ),
+ },
+ {
+ editingRender: (crawlRule, onChange, { isInvalid, isLoading }) => (
+ onChange(e.target.value)}
+ disabled={isLoading}
+ isInvalid={isInvalid}
+ options={[
+ CrawlerRules.beginsWith,
+ CrawlerRules.endsWith,
+ CrawlerRules.contains,
+ CrawlerRules.regex,
+ ].map((ruleOption: CrawlerRules) => ({
+ text: getReadableCrawlerRule(ruleOption),
+ value: ruleOption,
+ }))}
+ />
+ ),
+ field: 'rule',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesTable.ruleTableHead', {
+ defaultMessage: 'Rule',
+ }),
+ render: (crawlRule) => (
+ {getReadableCrawlerRule((crawlRule as CrawlRule).rule)}
+ ),
+ },
+ {
+ editingRender: (crawlRule, onChange, { isInvalid, isLoading }) => (
+
+
+ onChange(e.target.value)}
+ disabled={isLoading}
+ isInvalid={isInvalid}
+ />
+
+
+
+
+
+ ),
+ field: 'pattern',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTableHead', {
+ defaultMessage: 'Path pattern',
+ }),
+ render: (crawlRule) => {(crawlRule as CrawlRule).pattern} ,
+ },
+ ];
+
+ const crawlRulesRoute = `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/crawl_rules`;
+ const domainRoute = `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}`;
+ const getCrawlRuleRoute = (crawlRule: CrawlRule) =>
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}/crawl_rules/${crawlRule.id}`;
+
+ return (
+ {
+ updateCrawlRules(newCrawlRules as CrawlRule[]);
+ clearFlashMessages();
+ }}
+ onDelete={(_, newCrawlRules) => {
+ updateCrawlRules(newCrawlRules as CrawlRule[]);
+ clearFlashMessages();
+ flashSuccessToast(
+ i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlRulesTable.deleteSuccessToastMessage',
+ {
+ defaultMessage: 'The crawl rule has been deleted.',
+ }
+ )
+ );
+ }}
+ onUpdate={(_, newCrawlRules) => {
+ updateCrawlRules(newCrawlRules as CrawlRule[]);
+ clearFlashMessages();
+ }}
+ onReorder={(newCrawlRules) => {
+ updateCrawlRules(newCrawlRules as CrawlRule[]);
+ clearFlashMessages();
+ }}
+ title={i18n.translate('xpack.enterpriseSearch.crawler.crawlRulesTable.title', {
+ defaultMessage: 'Crawl rules',
+ })}
+ uneditableItems={defaultCrawlRule ? [defaultCrawlRule] : undefined}
+ canRemoveLastItem
+ />
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawler_domain_detail.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawler_domain_detail.tsx
new file mode 100644
index 0000000000000..c4303be878e9c
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawler_domain_detail.tsx
@@ -0,0 +1,116 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useEffect } from 'react';
+
+import { useParams } from 'react-router-dom';
+
+import { useActions, useValues } from 'kea';
+
+import { EuiButton, EuiPanel, EuiSpacer } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { generateEncodedPath } from '../../../shared/encode_path_params';
+import { EuiButtonTo } from '../../../shared/react_router_helpers';
+import { SEARCH_INDEX_TAB_PATH } from '../../routes';
+import { EnterpriseSearchContentPageTemplate } from '../layout/page_template';
+import { CrawlCustomSettingsFlyout } from '../search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout';
+import { CrawlerStatusIndicator } from '../search_index/crawler/crawler_status_indicator/crawler_status_indicator';
+import { CrawlerStatusBanner } from '../search_index/crawler/domain_management/crawler_status_banner';
+import { getDeleteDomainConfirmationMessage } from '../search_index/crawler/utils';
+import { IndexNameLogic } from '../search_index/index_name_logic';
+import { SearchIndexTabId } from '../search_index/search_index';
+import { baseBreadcrumbs } from '../search_indices';
+
+import { CrawlRulesTable } from './crawl_rules_table';
+import { CrawlerDomainDetailLogic } from './crawler_domain_detail_logic';
+import { DeduplicationPanel } from './deduplication_panel/deduplication_panel';
+import { EntryPointsTable } from './entry_points_table';
+import { SitemapsTable } from './sitemaps_table';
+
+export const CrawlerDomainDetail: React.FC = () => {
+ const { domainId } = useParams<{
+ domainId: string;
+ }>();
+
+ const { indexName } = useValues(IndexNameLogic);
+ const crawlerDomainDetailLogic = CrawlerDomainDetailLogic({ domainId });
+ const { deleteLoading, domain, getLoading } = useValues(crawlerDomainDetailLogic);
+ const { fetchDomainData, deleteDomain } = useActions(crawlerDomainDetailLogic);
+
+ useEffect(() => {
+ fetchDomainData(domainId);
+ }, [domainId]);
+
+ const domainUrl = domain?.url ?? '...';
+
+ return (
+ ,
+ {
+ if (window.confirm(getDeleteDomainConfirmationMessage(domainUrl))) {
+ deleteDomain();
+ }
+ }}
+ >
+ {i18n.translate('xpack.enterpriseSearch.crawler.domainDetail.deleteDomainButtonLabel', {
+ defaultMessage: 'Delete domain',
+ })}
+ ,
+ ],
+ }}
+ >
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.domainDetail.allDomainsButtonLabel', {
+ defaultMessage: 'All domains',
+ })}
+
+
+ {domain && (
+ <>
+
+
+
+
+
+
+
+
+
+
+
+
+
+ >
+ )}
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawler_domain_detail_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawler_domain_detail_logic.ts
new file mode 100644
index 0000000000000..5beb863ce579d
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/crawler_domain_detail_logic.ts
@@ -0,0 +1,167 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { i18n } from '@kbn/i18n';
+
+import { generateEncodedPath } from '../../../shared/encode_path_params';
+
+import { flashAPIErrors, flashSuccessToast } from '../../../shared/flash_messages';
+
+import { HttpLogic } from '../../../shared/http';
+import { KibanaLogic } from '../../../shared/kibana';
+import {
+ CrawlerDomain,
+ CrawlerDomainFromServer,
+ CrawlRule,
+ EntryPoint,
+ Sitemap,
+} from '../../api/crawler/types';
+import { crawlerDomainServerToClient } from '../../api/crawler/utils';
+import { SEARCH_INDEX_TAB_PATH } from '../../routes';
+import { IndexNameLogic } from '../search_index/index_name_logic';
+import { SearchIndexTabId } from '../search_index/search_index';
+
+export interface CrawlerDomainDetailProps {
+ domainId: string;
+}
+
+export interface CrawlerDomainDetailValues {
+ deleteLoading: boolean;
+ domain: CrawlerDomain | null;
+ domainId: string;
+ getLoading: boolean;
+}
+
+interface CrawlerDomainDetailActions {
+ deleteDomain(): void;
+ deleteDomainComplete(): void;
+ fetchDomainData(domainId: string): { domainId: string };
+ receiveDomainData(domain: CrawlerDomain): { domain: CrawlerDomain };
+ submitDeduplicationUpdate(payload: { enabled?: boolean; fields?: string[] }): {
+ enabled: boolean;
+ fields: string[];
+ };
+ updateCrawlRules(crawlRules: CrawlRule[]): { crawlRules: CrawlRule[] };
+ updateEntryPoints(entryPoints: EntryPoint[]): { entryPoints: EntryPoint[] };
+ updateSitemaps(entryPoints: Sitemap[]): { sitemaps: Sitemap[] };
+}
+
+export const CrawlerDomainDetailLogic = kea<
+ MakeLogicType
+>({
+ path: ['enterprise_search', 'crawler', 'crawler_domain_detail_logic'],
+ actions: {
+ deleteDomain: () => true,
+ deleteDomainComplete: () => true,
+ fetchDomainData: (domainId) => ({ domainId }),
+ receiveDomainData: (domain) => ({ domain }),
+ submitDeduplicationUpdate: ({ fields, enabled }) => ({ enabled, fields }),
+ updateCrawlRules: (crawlRules) => ({ crawlRules }),
+ updateEntryPoints: (entryPoints) => ({ entryPoints }),
+ updateSitemaps: (sitemaps) => ({ sitemaps }),
+ },
+ reducers: ({ props }) => ({
+ deleteLoading: [
+ false,
+ {
+ deleteDomain: () => true,
+ deleteDomainComplete: () => false,
+ },
+ ],
+ domain: [
+ null,
+ {
+ receiveDomainData: (_, { domain }) => domain,
+ updateCrawlRules: (currentDomain, { crawlRules }) =>
+ ({ ...currentDomain, crawlRules } as CrawlerDomain),
+ updateEntryPoints: (currentDomain, { entryPoints }) =>
+ ({ ...currentDomain, entryPoints } as CrawlerDomain),
+ updateSitemaps: (currentDomain, { sitemaps }) =>
+ ({ ...currentDomain, sitemaps } as CrawlerDomain),
+ },
+ ],
+ domainId: [props.domainId, { fetchDomainData: (_, { domainId }) => domainId }],
+ getLoading: [
+ true,
+ {
+ receiveDomainData: () => false,
+ },
+ ],
+ }),
+ listeners: ({ actions, values }) => ({
+ deleteDomain: async () => {
+ const { http } = HttpLogic.values;
+ const { domain, domainId } = values;
+ const { indexName } = IndexNameLogic.values;
+ try {
+ await http.delete(
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}`
+ );
+ flashSuccessToast(
+ i18n.translate('xpack.enterpriseSearch.crawler.action.deleteDomain.successMessage', {
+ defaultMessage: "Domain '{domainUrl}' was deleted",
+ values: {
+ domainUrl: domain?.url,
+ },
+ })
+ );
+ KibanaLogic.values.navigateToUrl(
+ generateEncodedPath(SEARCH_INDEX_TAB_PATH, {
+ indexName,
+ tabId: SearchIndexTabId.DOMAIN_MANAGEMENT,
+ })
+ );
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ actions.deleteDomainComplete();
+ },
+ fetchDomainData: async ({ domainId }) => {
+ const { http } = HttpLogic.values;
+ const { indexName } = IndexNameLogic.values;
+
+ try {
+ const response = await http.get(
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}`
+ );
+
+ const domainData = crawlerDomainServerToClient(response);
+
+ actions.receiveDomainData(domainData);
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ },
+ submitDeduplicationUpdate: async ({ fields, enabled }) => {
+ const { http } = HttpLogic.values;
+ const { indexName } = IndexNameLogic.values;
+ const { domainId } = values;
+
+ const payload = {
+ deduplication_enabled: enabled,
+ deduplication_fields: fields,
+ };
+
+ try {
+ const response = await http.put(
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domainId}`,
+ {
+ body: JSON.stringify(payload),
+ }
+ );
+
+ const domainData = crawlerDomainServerToClient(response);
+
+ actions.receiveDomainData(domainData);
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.scss b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.scss
new file mode 100644
index 0000000000000..6190a0beb91bc
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.scss
@@ -0,0 +1,14 @@
+.deduplicationPanel {
+ .selectableWrapper {
+ padding: $euiSize;
+ border-radius: $euiSize *.675;
+ border: $euiBorderThin solid $euiColorLightestShade;
+ }
+
+ .showAllFieldsPopoverToggle {
+ .euiButtonEmpty__content {
+ padding-left: $euiSizeM;
+ padding-right: $euiSizeM;
+ }
+ }
+}
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.test.tsx
new file mode 100644
index 0000000000000..bf6caaf0b03c6
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.test.tsx
@@ -0,0 +1,159 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { act } from 'react-dom/test-utils';
+
+import {
+ EuiButton,
+ EuiButtonEmpty,
+ EuiContextMenuItem,
+ EuiPopover,
+ EuiSelectable,
+ EuiSelectableList,
+ EuiSelectableSearch,
+ EuiSwitch,
+} from '@elastic/eui';
+
+import { mountWithIntl } from '@kbn/test-jest-helpers';
+
+import { DataPanel } from '../../../../shared/data_panel/data_panel';
+import { rerender } from '../../../../test_helpers';
+
+import { DeduplicationPanel } from './deduplication_panel';
+
+const MOCK_ACTIONS = {
+ submitDeduplicationUpdate: jest.fn(),
+};
+
+const MOCK_VALUES = {
+ domain: {
+ availableDeduplicationFields: ['title', 'description'],
+ deduplicationEnabled: true,
+ deduplicationFields: ['title'],
+ },
+};
+
+describe('DeduplicationPanel', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockActions(MOCK_ACTIONS);
+ setMockValues(MOCK_VALUES);
+ });
+
+ it('renders an empty component if no domain', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ domain: null,
+ });
+ const wrapper = shallow( );
+
+ expect(wrapper.isEmptyRender()).toBe(true);
+ });
+
+ it('contains a button to reset to defaults', () => {
+ const wrapper = shallow( );
+
+ wrapper.find(DataPanel).dive().find(EuiButton).simulate('click');
+
+ expect(MOCK_ACTIONS.submitDeduplicationUpdate).toHaveBeenCalledWith({
+ fields: [],
+ });
+ });
+
+ it('contains a switch to enable and disable deduplication', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ domain: {
+ ...MOCK_VALUES.domain,
+ deduplicationEnabled: false,
+ },
+ });
+ const wrapper = shallow( );
+
+ wrapper.find(EuiSwitch).simulate('change');
+
+ expect(MOCK_ACTIONS.submitDeduplicationUpdate).toHaveBeenNthCalledWith(1, {
+ enabled: true,
+ });
+
+ setMockValues({
+ ...MOCK_VALUES,
+ domain: {
+ ...MOCK_VALUES.domain,
+ deduplicationEnabled: true,
+ },
+ });
+ rerender(wrapper);
+
+ wrapper.find(EuiSwitch).simulate('change');
+
+ expect(MOCK_ACTIONS.submitDeduplicationUpdate).toHaveBeenNthCalledWith(2, {
+ enabled: false,
+ fields: [],
+ });
+ });
+
+ it('contains a popover to switch between displaying all fields or only selected ones', () => {
+ const fullRender = mountWithIntl( );
+
+ expect(fullRender.find(EuiButtonEmpty).text()).toEqual('All fields');
+ expect(fullRender.find(EuiPopover).prop('isOpen')).toEqual(false);
+
+ // Open the popover
+ fullRender.find(EuiButtonEmpty).simulate('click');
+ rerender(fullRender);
+
+ expect(fullRender.find(EuiPopover).prop('isOpen')).toEqual(true);
+
+ // Click "Show selected fields"
+ fullRender.find(EuiContextMenuItem).at(1).simulate('click');
+ rerender(fullRender);
+
+ expect(fullRender.find(EuiButtonEmpty).text()).toEqual('Selected fields');
+ expect(fullRender.find(EuiPopover).prop('isOpen')).toEqual(false);
+
+ // Open the popover and click "show all fields"
+ fullRender.find(EuiButtonEmpty).simulate('click');
+ fullRender.find(EuiContextMenuItem).at(0).simulate('click');
+ rerender(fullRender);
+
+ expect(fullRender.find(EuiButtonEmpty).text()).toEqual('All fields');
+ expect(fullRender.find(EuiPopover).prop('isOpen')).toEqual(false);
+
+ // Open the popover then simulate closing the popover
+ fullRender.find(EuiButtonEmpty).simulate('click');
+ act(() => {
+ fullRender.find(EuiPopover).prop('closePopover')();
+ });
+ rerender(fullRender);
+
+ expect(fullRender.find(EuiPopover).prop('isOpen')).toEqual(false);
+ });
+
+ it('contains a selectable to toggle fields for deduplication', () => {
+ const wrapper = shallow( );
+
+ wrapper
+ .find(EuiSelectable)
+ .simulate('change', [{ label: 'title' }, { label: 'description', checked: 'on' }]);
+
+ expect(MOCK_ACTIONS.submitDeduplicationUpdate).toHaveBeenCalledWith({
+ fields: ['description'],
+ });
+
+ const fullRender = mountWithIntl( );
+
+ expect(fullRender.find(EuiSelectableSearch)).toHaveLength(1);
+ expect(fullRender.find(EuiSelectableList)).toHaveLength(1);
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.tsx
new file mode 100644
index 0000000000000..fb99f8ab70e6c
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/deduplication_panel.tsx
@@ -0,0 +1,204 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useState } from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import {
+ EuiButton,
+ EuiButtonEmpty,
+ EuiContextMenuItem,
+ EuiContextMenuPanel,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiLink,
+ EuiPopover,
+ EuiSelectable,
+ EuiSpacer,
+ EuiSwitch,
+} from '@elastic/eui';
+
+import { EuiSelectableLIOption } from '@elastic/eui/src/components/selectable/selectable_option';
+
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n-react';
+
+import { DataPanel } from '../../../../shared/data_panel/data_panel';
+import { CrawlerDomainDetailLogic } from '../crawler_domain_detail_logic';
+
+import { getCheckedOptionLabels, getSelectableOptions } from './utils';
+
+import './deduplication_panel.scss';
+
+export const DeduplicationPanel: React.FC = () => {
+ const { domain } = useValues(CrawlerDomainDetailLogic);
+ const { submitDeduplicationUpdate } = useActions(CrawlerDomainDetailLogic);
+
+ const [showAllFields, setShowAllFields] = useState(true);
+ const [showAllFieldsPopover, setShowAllFieldsPopover] = useState(false);
+
+ if (!domain) {
+ return null;
+ }
+
+ const { deduplicationEnabled, deduplicationFields } = domain;
+
+ const selectableOptions = getSelectableOptions(domain, showAllFields);
+
+ return (
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.deduplicationPanel.title', {
+ defaultMessage: 'Duplicate document handling',
+ })}
+
+ }
+ action={
+ submitDeduplicationUpdate({ fields: [] })}
+ disabled={deduplicationFields.length === 0}
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.deduplicationPanel.resetToDefaultsButtonLabel',
+ {
+ defaultMessage: 'Reset to defaults',
+ }
+ )}
+
+ }
+ subtitle={
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.deduplicationPanel.learnMoreMessage',
+ {
+ defaultMessage: 'Learn more about content hashing',
+ }
+ )}
+
+ ),
+ }}
+ />
+ }
+ >
+
+ deduplicationEnabled
+ ? submitDeduplicationUpdate({ enabled: false, fields: [] })
+ : submitDeduplicationUpdate({ enabled: true })
+ }
+ />
+
+
+
+
+
+ submitDeduplicationUpdate({
+ fields: getCheckedOptionLabels(options as Array>),
+ })
+ }
+ searchable
+ searchProps={{
+ disabled: !deduplicationEnabled,
+ append: (
+ setShowAllFieldsPopover(!showAllFieldsPopover)}
+ className="showAllFieldsPopoverToggle"
+ disabled={!deduplicationEnabled}
+ >
+ {showAllFields
+ ? i18n.translate(
+ 'xpack.enterpriseSearch.crawler.deduplicationPanel.allFieldsLabel',
+ {
+ defaultMessage: 'All fields',
+ }
+ )
+ : i18n.translate(
+ 'xpack.enterpriseSearch.crawler.deduplicationPanel.selectedFieldsLabel',
+ {
+ defaultMessage: 'Selected fields',
+ }
+ )}
+
+ }
+ isOpen={showAllFieldsPopover}
+ closePopover={() => setShowAllFieldsPopover(false)}
+ panelPaddingSize="none"
+ anchorPosition="downLeft"
+ >
+ {
+ setShowAllFields(true);
+ setShowAllFieldsPopover(false);
+ }}
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.deduplicationPanel.showAllFieldsButtonLabel',
+ {
+ defaultMessage: 'Show all fields',
+ }
+ )}
+ ,
+ {
+ setShowAllFields(false);
+ setShowAllFieldsPopover(false);
+ }}
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusIndicator.showSelectedFieldsButtonLabel',
+ {
+ defaultMessage: 'Show only selected fields',
+ }
+ )}
+ ,
+ ]}
+ />
+
+ ),
+ }}
+ >
+ {(list, search) => (
+ <>
+ {search}
+ {list}
+ >
+ )}
+
+
+
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/utils.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/utils.ts
new file mode 100644
index 0000000000000..256385722c188
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/deduplication_panel/utils.ts
@@ -0,0 +1,39 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { EuiSelectableLIOption } from '@elastic/eui/src/components/selectable/selectable_option';
+
+import { CrawlerDomain } from '../../../api/crawler/types';
+
+export const getSelectableOptions = (
+ domain: CrawlerDomain,
+ showAllFields: boolean
+): Array> => {
+ const { availableDeduplicationFields, deduplicationFields, deduplicationEnabled } = domain;
+
+ let selectableOptions: Array>;
+
+ if (showAllFields) {
+ selectableOptions = availableDeduplicationFields.map((field) => ({
+ label: field,
+ checked: deduplicationFields.includes(field) ? 'on' : undefined,
+ }));
+ } else {
+ selectableOptions = availableDeduplicationFields
+ .filter((field) => deduplicationFields.includes(field))
+ .map((field) => ({ label: field, checked: 'on' }));
+ }
+
+ if (!deduplicationEnabled) {
+ selectableOptions = selectableOptions.map((option) => ({ ...option, disabled: true }));
+ }
+
+ return selectableOptions;
+};
+
+export const getCheckedOptionLabels = (options: Array>): string[] => {
+ return options.filter((option) => option.checked).map((option) => option.label);
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table.test.tsx
new file mode 100644
index 0000000000000..e57cd195946a5
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table.test.tsx
@@ -0,0 +1,119 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiFieldText } from '@elastic/eui';
+
+import { mountWithIntl } from '@kbn/test-jest-helpers';
+
+import { GenericEndpointInlineEditableTable } from '../../../shared/tables/generic_endpoint_inline_editable_table';
+import { CrawlerDomain } from '../../api/crawler/types';
+
+import { EntryPointsTable, EntryPointsTableProps } from './entry_points_table';
+
+describe('EntryPointsTable', () => {
+ const indexName = 'index-name';
+ const entryPoints = [
+ { id: '1', value: '/whatever' },
+ { id: '2', value: '/foo' },
+ ];
+ const domain: CrawlerDomain = {
+ availableDeduplicationFields: ['title', 'description'],
+ crawlRules: [],
+ createdOn: '2018-01-01T00:00:00.000Z',
+ deduplicationEnabled: true,
+ deduplicationFields: ['title'],
+ documentCount: 10,
+ entryPoints,
+ id: '6113e1407a2f2e6f42489794',
+ sitemaps: [],
+ url: 'https://www.elastic.co',
+ };
+
+ const DEFAULT_PROPS: EntryPointsTableProps = {
+ domain,
+ indexName,
+ items: domain.entryPoints,
+ };
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('renders', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.find(GenericEndpointInlineEditableTable).exists()).toBe(true);
+ });
+
+ describe('the first and only column in the table', () => {
+ it('shows the value of an entry point', () => {
+ const entryPoint = { id: '1', value: '/whatever' };
+
+ const wrapper = shallow( );
+
+ const columns = wrapper.find(GenericEndpointInlineEditableTable).prop('columns');
+ const column = shallow({columns[0].render(entryPoint)}
);
+ expect(column.html()).toContain('/whatever');
+ });
+
+ it('can show the value of an entry point as editable', () => {
+ const entryPoint = { id: '1', value: '/whatever' };
+ const onChange = jest.fn();
+
+ const wrapper = shallow( );
+
+ const columns = wrapper.find(GenericEndpointInlineEditableTable).prop('columns');
+ const column = shallow(
+
+ {columns[0].editingRender(entryPoint, onChange, { isInvalid: false, isLoading: false })}
+
+ );
+
+ const textField = column.find(EuiFieldText);
+ expect(textField.props()).toEqual(
+ expect.objectContaining({
+ disabled: false, // It would be disabled if isLoading is true
+ isInvalid: false,
+ prepend: 'https://www.elastic.co',
+ value: '/whatever',
+ })
+ );
+
+ textField.simulate('change', { target: { value: '/foo' } });
+ expect(onChange).toHaveBeenCalledWith('/foo');
+ });
+ });
+
+ describe('routes', () => {
+ it('can calculate an update and delete route correctly', () => {
+ const wrapper = shallow( );
+
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const entryPoint = { id: '1', value: '/whatever' };
+ expect(table.prop('deleteRoute')(entryPoint)).toEqual(
+ '/internal/enterprise_search/indices/index-name/crawler/domains/6113e1407a2f2e6f42489794/entry_points/1'
+ );
+ expect(table.prop('updateRoute')(entryPoint)).toEqual(
+ '/internal/enterprise_search/indices/index-name/crawler/domains/6113e1407a2f2e6f42489794/entry_points/1'
+ );
+ });
+ });
+
+ it('shows a no items message when there are no entry points to show', () => {
+ const wrapper = shallow( );
+
+ const editNewItems = jest.fn();
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+ const message = mountWithIntl({table.prop('noItemsMessage')!(editNewItems)}
);
+ expect(message.html()).toContain('There are no existing entry points.');
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table.tsx
new file mode 100644
index 0000000000000..b65ec56d05e8f
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table.tsx
@@ -0,0 +1,137 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions } from 'kea';
+
+import { EuiFieldText, EuiLink, EuiSpacer, EuiText, EuiTitle } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n-react';
+
+import { GenericEndpointInlineEditableTable } from '../../../shared/tables/generic_endpoint_inline_editable_table';
+
+import { InlineEditableTableColumn } from '../../../shared/tables/inline_editable_table/types';
+import { ItemWithAnID } from '../../../shared/tables/types';
+import { CrawlerDomain, EntryPoint } from '../../api/crawler/types';
+
+import { EntryPointsTableLogic } from './entry_points_table_logic';
+
+export interface EntryPointsTableProps {
+ domain: CrawlerDomain;
+ indexName: string;
+ items: EntryPoint[];
+}
+
+export const EntryPointsTable: React.FC = ({ domain, indexName, items }) => {
+ const { onAdd, onDelete, onUpdate } = useActions(EntryPointsTableLogic);
+ const field = 'value';
+
+ const columns: Array> = [
+ {
+ editingRender: (entryPoint, onChange, { isInvalid, isLoading }) => (
+ onChange(e.target.value)}
+ disabled={isLoading}
+ isInvalid={isInvalid}
+ prepend={domain.url}
+ />
+ ),
+ render: (entryPoint) => (
+
+ {domain.url}
+ {(entryPoint as EntryPoint)[field]}
+
+ ),
+ name: i18n.translate('xpack.enterpriseSearch.crawler.entryPointsTable.urlTableHead', {
+ defaultMessage: 'URL',
+ }),
+ field,
+ },
+ ];
+
+ const entryPointsRoute = `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domain.id}/entry_points`;
+
+ const getEntryPointRoute = (entryPoint: EntryPoint) =>
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domain.id}/entry_points/${entryPoint.id}`;
+
+ return (
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.entryPointsTable.description', {
+ defaultMessage:
+ 'Include the most important URLs for your website here. Entry point URLs will be the first pages to be indexed and processed for links to other pages.',
+ })}{' '}
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.entryPointsTable.learnMoreLinkText', {
+ defaultMessage: 'Learn more about entry points.',
+ })}
+
+
+ }
+ instanceId="EntryPointsTable"
+ items={items}
+ lastItemWarning={i18n.translate(
+ 'xpack.enterpriseSearch.crawler.entryPointsTable.lastItemMessage',
+ { defaultMessage: 'The crawler requires at least one entry point.' }
+ )}
+ // Since canRemoveLastItem is false, the only time noItemsMessage would be displayed is if the last entry point was deleted via the API.
+ noItemsMessage={(editNewItem) => (
+ <>
+
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageTitle', {
+ defaultMessage: 'There are no existing entry points.',
+ })}
+
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageLinkText',
+ { defaultMessage: 'Add an entry point' }
+ )}
+
+ ),
+ }}
+ />
+
+
+ >
+ )}
+ addRoute={entryPointsRoute}
+ canRemoveLastItem={false}
+ deleteRoute={getEntryPointRoute}
+ updateRoute={getEntryPointRoute}
+ dataProperty="entry_points"
+ onAdd={onAdd}
+ onDelete={onDelete}
+ onUpdate={onUpdate}
+ title={i18n.translate('xpack.enterpriseSearch.crawler.entryPointsTable.title', {
+ defaultMessage: 'Entry points',
+ })}
+ disableReordering
+ />
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table_logic.test.ts
new file mode 100644
index 0000000000000..b5a0a3aec37ce
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table_logic.test.ts
@@ -0,0 +1,76 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+jest.mock('./crawler_domain_detail_logic', () => ({
+ CrawlerDomainDetailLogic: {
+ actions: {
+ updateEntryPoints: jest.fn(),
+ },
+ },
+}));
+
+import { LogicMounter, mockFlashMessageHelpers } from '../../../__mocks__/kea_logic';
+
+import { CrawlerDomainDetailLogic } from './crawler_domain_detail_logic';
+import { EntryPointsTableLogic } from './entry_points_table_logic';
+
+describe('EntryPointsTableLogic', () => {
+ const { mount } = new LogicMounter(EntryPointsTableLogic);
+ const { clearFlashMessages, flashSuccessToast } = mockFlashMessageHelpers;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ describe('listeners', () => {
+ describe('onAdd', () => {
+ it('should update the entry points for the current domain, and clear flash messages', () => {
+ const entryThatWasAdded = { id: '2', value: 'bar' };
+ const updatedEntries = [
+ { id: '1', value: 'foo' },
+ { id: '2', value: 'bar' },
+ ];
+ mount();
+ EntryPointsTableLogic.actions.onAdd(entryThatWasAdded, updatedEntries);
+ expect(CrawlerDomainDetailLogic.actions.updateEntryPoints).toHaveBeenCalledWith(
+ updatedEntries
+ );
+ expect(clearFlashMessages).toHaveBeenCalled();
+ });
+ });
+
+ describe('onDelete', () => {
+ it('should update the entry points for the current domain, clear flash messages, and show a success toast', () => {
+ const entryThatWasDeleted = { id: '2', value: 'bar' };
+ const updatedEntries = [{ id: '1', value: 'foo' }];
+ mount();
+ EntryPointsTableLogic.actions.onDelete(entryThatWasDeleted, updatedEntries);
+ expect(CrawlerDomainDetailLogic.actions.updateEntryPoints).toHaveBeenCalledWith(
+ updatedEntries
+ );
+ expect(clearFlashMessages).toHaveBeenCalled();
+ expect(flashSuccessToast).toHaveBeenCalled();
+ });
+ });
+
+ describe('onUpdate', () => {
+ it('should update the entry points for the current domain, clear flash messages, and show a success toast', () => {
+ const entryThatWasUpdated = { id: '2', value: 'baz' };
+ const updatedEntries = [
+ { id: '1', value: 'foo' },
+ { id: '2', value: 'baz' },
+ ];
+ mount();
+ EntryPointsTableLogic.actions.onUpdate(entryThatWasUpdated, updatedEntries);
+ expect(CrawlerDomainDetailLogic.actions.updateEntryPoints).toHaveBeenCalledWith(
+ updatedEntries
+ );
+ expect(clearFlashMessages).toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table_logic.ts
new file mode 100644
index 0000000000000..b7b2daa525a8d
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/entry_points_table_logic.ts
@@ -0,0 +1,53 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { clearFlashMessages, flashSuccessToast } from '../../../shared/flash_messages';
+
+import { EntryPoint } from '../../api/crawler/types';
+
+import { CrawlerDomainDetailLogic } from './crawler_domain_detail_logic';
+
+interface EntryPointsTableActions {
+ onAdd(
+ entryPoint: EntryPoint,
+ entryPoints: EntryPoint[]
+ ): { entryPoint: EntryPoint; entryPoints: EntryPoint[] };
+ onDelete(
+ entryPoint: EntryPoint,
+ entryPoints: EntryPoint[]
+ ): { entryPoint: EntryPoint; entryPoints: EntryPoint[] };
+ onUpdate(
+ entryPoint: EntryPoint,
+ entryPoints: EntryPoint[]
+ ): { entryPoint: EntryPoint; entryPoints: EntryPoint[] };
+}
+
+export const EntryPointsTableLogic = kea>({
+ path: ['enterprise_search', 'app_search', 'crawler', 'entry_points_table'],
+ actions: () => ({
+ onAdd: (entryPoint, entryPoints) => ({ entryPoint, entryPoints }),
+ onDelete: (entryPoint, entryPoints) => ({ entryPoint, entryPoints }),
+ onUpdate: (entryPoint, entryPoints) => ({ entryPoint, entryPoints }),
+ }),
+ listeners: () => ({
+ onAdd: ({ entryPoints }) => {
+ CrawlerDomainDetailLogic.actions.updateEntryPoints(entryPoints);
+ clearFlashMessages();
+ },
+ onDelete: ({ entryPoint, entryPoints }) => {
+ CrawlerDomainDetailLogic.actions.updateEntryPoints(entryPoints);
+ clearFlashMessages();
+ flashSuccessToast(`Entry point "${entryPoint.value}" was removed.`);
+ },
+ onUpdate: ({ entryPoints }) => {
+ CrawlerDomainDetailLogic.actions.updateEntryPoints(entryPoints);
+ clearFlashMessages();
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/sitemaps_table.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/sitemaps_table.test.tsx
new file mode 100644
index 0000000000000..ded26ca4b1acf
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/sitemaps_table.test.tsx
@@ -0,0 +1,187 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockFlashMessageHelpers, setMockActions } from '../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiEmptyPrompt, EuiFieldText } from '@elastic/eui';
+import { mountWithIntl } from '@kbn/test-jest-helpers';
+
+import { GenericEndpointInlineEditableTable } from '../../../shared/tables/generic_endpoint_inline_editable_table';
+
+import { SitemapsTable } from './sitemaps_table';
+
+describe('SitemapsTable', () => {
+ const { clearFlashMessages, flashSuccessToast } = mockFlashMessageHelpers;
+ const indexName = 'index-name';
+ const sitemaps = [
+ { id: '1', url: 'http://www.example.com/sitemap.xml' },
+ { id: '2', url: 'http://www.example.com/whatever/sitemaps.xml' },
+ ];
+ const domain = {
+ createdOn: '2018-01-01T00:00:00.000Z',
+ documentCount: 10,
+ id: '6113e1407a2f2e6f42489794',
+ url: 'https://www.elastic.co',
+ crawlRules: [],
+ entryPoints: [],
+ sitemaps,
+ deduplicationEnabled: true,
+ deduplicationFields: ['title'],
+ availableDeduplicationFields: ['title', 'description'],
+ };
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('renders', () => {
+ const wrapper = shallow(
+
+ );
+
+ expect(wrapper.find(GenericEndpointInlineEditableTable).exists()).toBe(true);
+ });
+
+ describe('the first and only column in the table', () => {
+ it('shows the url of a sitemap', () => {
+ const sitemap = { id: '1', url: 'http://www.example.com/sitemap.xml' };
+
+ const wrapper = shallow(
+
+ );
+
+ const columns = wrapper.find(GenericEndpointInlineEditableTable).prop('columns');
+ const column = shallow({columns[0].render(sitemap)}
);
+ expect(column.html()).toContain('http://www.example.com/sitemap.xml');
+ });
+
+ it('can show the url of a sitemap as editable', () => {
+ const sitemap = { id: '1', url: 'http://www.example.com/sitemap.xml' };
+ const onChange = jest.fn();
+
+ const wrapper = shallow(
+
+ );
+
+ const columns = wrapper.find(GenericEndpointInlineEditableTable).prop('columns');
+ const column = shallow(
+
+ {columns[0].editingRender(sitemap, onChange, { isInvalid: false, isLoading: false })}
+
+ );
+
+ const textField = column.find(EuiFieldText);
+ expect(textField.props()).toEqual(
+ expect.objectContaining({
+ value: 'http://www.example.com/sitemap.xml',
+ disabled: false, // It would be disabled if isLoading is true
+ isInvalid: false,
+ })
+ );
+
+ textField.simulate('change', { target: { value: '/foo' } });
+ expect(onChange).toHaveBeenCalledWith('/foo');
+ });
+ });
+
+ describe('routes', () => {
+ it('can calculate an update and delete route correctly', () => {
+ const wrapper = shallow(
+
+ );
+
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const sitemap = { id: '1', url: '/whatever' };
+ expect(table.prop('deleteRoute')(sitemap)).toEqual(
+ '/internal/enterprise_search/indices/index-name/crawler/domains/6113e1407a2f2e6f42489794/sitemaps/1'
+ );
+ expect(table.prop('updateRoute')(sitemap)).toEqual(
+ '/internal/enterprise_search/indices/index-name/crawler/domains/6113e1407a2f2e6f42489794/sitemaps/1'
+ );
+ });
+ });
+
+  it('shows a no items message when there are no sitemaps to show', () => {
+ const wrapper = shallow(
+
+ );
+
+ const editNewItems = jest.fn();
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+ const message = mountWithIntl({table.prop('noItemsMessage')!(editNewItems)}
);
+ expect(message.find(EuiEmptyPrompt).exists()).toBe(true);
+ });
+
+ describe('when a sitemap is added', () => {
+ it('should update the sitemaps for the current domain, and clear flash messages', () => {
+ const updateSitemaps = jest.fn();
+ setMockActions({
+ updateSitemaps,
+ });
+ const wrapper = shallow(
+
+ );
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const sitemapThatWasAdded = { id: '2', value: 'bar' };
+ const updatedSitemaps = [
+ { id: '1', value: 'foo' },
+ { id: '2', value: 'bar' },
+ ];
+ table.prop('onAdd')(sitemapThatWasAdded, updatedSitemaps);
+ expect(updateSitemaps).toHaveBeenCalledWith(updatedSitemaps);
+ expect(clearFlashMessages).toHaveBeenCalled();
+ });
+ });
+
+ describe('when a sitemap is updated', () => {
+ it('should update the sitemaps for the current domain, and clear flash messages', () => {
+ const updateSitemaps = jest.fn();
+ setMockActions({
+ updateSitemaps,
+ });
+ const wrapper = shallow(
+
+ );
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const sitemapThatWasUpdated = { id: '2', value: 'bar' };
+ const updatedSitemaps = [
+ { id: '1', value: 'foo' },
+ { id: '2', value: 'baz' },
+ ];
+ table.prop('onUpdate')(sitemapThatWasUpdated, updatedSitemaps);
+ expect(updateSitemaps).toHaveBeenCalledWith(updatedSitemaps);
+ expect(clearFlashMessages).toHaveBeenCalled();
+ });
+ });
+
+ describe('when a sitemap is deleted', () => {
+    it('should update the sitemaps for the current domain, clear flash messages, and show a success toast', () => {
+ const updateSitemaps = jest.fn();
+ setMockActions({
+ updateSitemaps,
+ });
+ const wrapper = shallow(
+
+ );
+ const table = wrapper.find(GenericEndpointInlineEditableTable);
+
+ const sitemapThatWasDeleted = { id: '2', value: 'bar' };
+ const updatedSitemaps = [{ id: '1', value: 'foo' }];
+ table.prop('onDelete')(sitemapThatWasDeleted, updatedSitemaps);
+ expect(updateSitemaps).toHaveBeenCalledWith(updatedSitemaps);
+ expect(clearFlashMessages).toHaveBeenCalled();
+ expect(flashSuccessToast).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/sitemaps_table.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/sitemaps_table.tsx
new file mode 100644
index 0000000000000..f4a4102fcac07
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/crawler_domain_detail/sitemaps_table.tsx
@@ -0,0 +1,120 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions } from 'kea';
+
+import { EuiButton, EuiEmptyPrompt, EuiFieldText, EuiText } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { clearFlashMessages, flashSuccessToast } from '../../../shared/flash_messages';
+import { GenericEndpointInlineEditableTable } from '../../../shared/tables/generic_endpoint_inline_editable_table';
+
+import { InlineEditableTableColumn } from '../../../shared/tables/inline_editable_table/types';
+import { ItemWithAnID } from '../../../shared/tables/types';
+import { CrawlerDomain, Sitemap } from '../../api/crawler/types';
+
+import { CrawlerDomainDetailLogic } from './crawler_domain_detail_logic';
+
+const ADD_BUTTON_LABEL = i18n.translate(
+ 'xpack.enterpriseSearch.crawler.sitemapsTable.addButtonLabel',
+ { defaultMessage: 'Add sitemap' }
+);
+
+interface SitemapsTableProps {
+ domain: CrawlerDomain;
+ indexName: string;
+ items: Sitemap[];
+}
+
+export const SitemapsTable: React.FC = ({ domain, indexName, items }) => {
+ const { updateSitemaps } = useActions(CrawlerDomainDetailLogic);
+ const field = 'url';
+
+ const columns: Array> = [
+ {
+ editingRender: (sitemap, onChange, { isInvalid, isLoading }) => (
+ onChange(e.target.value)}
+ disabled={isLoading}
+ isInvalid={isInvalid}
+ />
+ ),
+ render: (sitemap) => {(sitemap as Sitemap)[field]} ,
+ name: i18n.translate('xpack.enterpriseSearch.crawler.sitemapsTable.urlTableHead', {
+ defaultMessage: 'URL',
+ }),
+ field,
+ },
+ ];
+
+ const sitemapsRoute = `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domain.id}/sitemaps`;
+ const getSitemapRoute = (sitemap: Sitemap) =>
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains/${domain.id}/sitemaps/${sitemap.id}`;
+
+ return (
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.sitemapsTable.description', {
+ defaultMessage: 'Specify sitemap URLs for the crawler on this domain.',
+ })}
+
+ }
+ instanceId="SitemapsTable"
+ items={items}
+ canRemoveLastItem
+ noItemsMessage={(editNewItem) => (
+ <>
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.sitemapsTable.emptyMessageTitle', {
+ defaultMessage: 'There are no existing sitemaps.',
+ })}
+
+ }
+ titleSize="s"
+ body={Add a sitemap to specify an entry point for the crawler. }
+ actions={{ADD_BUTTON_LABEL} }
+ />
+ >
+ )}
+ addRoute={sitemapsRoute}
+ deleteRoute={getSitemapRoute}
+ updateRoute={getSitemapRoute}
+ dataProperty="sitemaps"
+ onAdd={(_, newSitemaps) => {
+ updateSitemaps(newSitemaps as Sitemap[]);
+ clearFlashMessages();
+ }}
+ onDelete={(_, newSitemaps) => {
+ updateSitemaps(newSitemaps as Sitemap[]);
+ clearFlashMessages();
+ flashSuccessToast(
+ i18n.translate('xpack.enterpriseSearch.crawler.sitemapsTable.deleteSuccessToastMessage', {
+ defaultMessage: 'The sitemap has been deleted.',
+ })
+ );
+ }}
+ onUpdate={(_, newSitemaps) => {
+ updateSitemaps(newSitemaps as Sitemap[]);
+ clearFlashMessages();
+ }}
+ title={i18n.translate('xpack.enterpriseSearch.crawler.sitemapsTable.title', {
+ defaultMessage: 'Sitemaps',
+ })}
+ disableReordering
+ />
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/button_group.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/button_group.tsx
index 2b991998b45a1..4c9d9c081f82a 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/button_group.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/button_group.tsx
@@ -38,7 +38,7 @@ interface Props {
export const ButtonGroup: React.FC = ({ onChange, options, selected }) => (
- {options.map((option) => {
+ {options.map((option, index) => {
const isSelected = option === selected;
return (
= ({ onChange, options, selected }) =>
aria-label={option.label}
aria-checked={isSelected}
role="radio"
+ autoFocus={index === 0}
/>
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/constants.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/constants.ts
index e4c807b15247e..1377a80cd9b38 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/constants.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/constants.ts
@@ -64,7 +64,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'Universal',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.universalDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.universalDropDownOptionLabel',
{
defaultMessage: 'Universal',
}
@@ -77,7 +77,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'zh',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.chineseDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.chineseDropDownOptionLabel',
{
defaultMessage: 'Chinese',
}
@@ -86,7 +86,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'da',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.danishDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.danishDropDownOptionLabel',
{
defaultMessage: 'Danish',
}
@@ -95,7 +95,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'nl',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.dutchDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.dutchDropDownOptionLabel',
{
defaultMessage: 'Dutch',
}
@@ -104,7 +104,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'en',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.englishDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.englishDropDownOptionLabel',
{
defaultMessage: 'English',
}
@@ -113,7 +113,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'fr',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.frenchDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.frenchDropDownOptionLabel',
{
defaultMessage: 'French',
}
@@ -122,7 +122,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'de',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.germanDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.germanDropDownOptionLabel',
{
defaultMessage: 'German',
}
@@ -131,7 +131,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'it',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.italianDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.italianDropDownOptionLabel',
{
defaultMessage: 'Italian',
}
@@ -140,7 +140,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'ja',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.japaneseDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.japaneseDropDownOptionLabel',
{
defaultMessage: 'Japanese',
}
@@ -149,7 +149,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'ko',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.koreanDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.koreanDropDownOptionLabel',
{
defaultMessage: 'Korean',
}
@@ -158,7 +158,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'pt',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.portugueseDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.portugueseDropDownOptionLabel',
{
defaultMessage: 'Portuguese',
}
@@ -167,7 +167,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'pt-br',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.portugueseBrazilDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.portugueseBrazilDropDownOptionLabel',
{
defaultMessage: 'Portuguese (Brazil)',
}
@@ -176,7 +176,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'ru',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.russianDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.russianDropDownOptionLabel',
{
defaultMessage: 'Russian',
}
@@ -185,7 +185,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'es',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.spanishDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.spanishDropDownOptionLabel',
{
defaultMessage: 'Spanish',
}
@@ -194,7 +194,7 @@ export const SUPPORTED_LANGUAGES = [
{
value: 'th',
text: i18n.translate(
- 'xpack.enterpriseSearch.appSearch.engineCreation.supportedLanguages.thaiDropDownOptionLabel',
+ 'xpack.enterpriseSearch.content.newIndex.supportedLanguages.thaiDropDownOptionLabel',
{
defaultMessage: 'Thai',
}
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/method_crawler/method_crawler_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/method_crawler/method_crawler_logic.ts
index 8ec3bd8264ac9..57649ea710eef 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/method_crawler/method_crawler_logic.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/method_crawler/method_crawler_logic.ts
@@ -8,6 +8,7 @@
import { kea, MakeLogicType } from 'kea';
import { Actions } from '../../../../shared/api_logic/create_api_logic';
+import { generateEncodedPath } from '../../../../shared/encode_path_params';
import { clearFlashMessages, flashAPIErrors } from '../../../../shared/flash_messages';
@@ -17,7 +18,8 @@ import {
CreateCrawlerIndexArgs,
CreateCrawlerIndexResponse,
} from '../../../api/crawler/create_crawler_index_api_logic';
-import { SEARCH_INDEX_PATH } from '../../../routes';
+import { SEARCH_INDEX_TAB_PATH } from '../../../routes';
+import { SearchIndexTabId } from '../../search_index/search_index';
type MethodCrawlerActions = Pick<
Actions,
@@ -33,7 +35,12 @@ export const MethodCrawlerLogic = kea>({
flashAPIErrors(error);
},
apiSuccess: ({ created }) => {
- KibanaLogic.values.navigateToUrl(SEARCH_INDEX_PATH.replace(':indexName', created));
+ KibanaLogic.values.navigateToUrl(
+ generateEncodedPath(SEARCH_INDEX_TAB_PATH, {
+ indexName: created,
+ tabId: SearchIndexTabId.DOMAIN_MANAGEMENT,
+ })
+ );
},
makeRequest: () => clearFlashMessages(),
},
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/new_search_index_template.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/new_search_index_template.tsx
index 99267223907a4..b86f48f82859d 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/new_search_index_template.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/new_index/new_search_index_template.tsx
@@ -131,6 +131,7 @@ export const NewSearchIndexTemplate: React.FC = ({
isInvalid={false}
value={rawName}
onChange={handleNameChange}
+ autoFocus
/>
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/domain_management.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/_mocks_/index_name_logic.mock.ts
similarity index 59%
rename from x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/domain_management.tsx
rename to x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/_mocks_/index_name_logic.mock.ts
index 61821c116dc40..6aca4ddad2e19 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/domain_management.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/_mocks_/index_name_logic.mock.ts
@@ -5,10 +5,12 @@
* 2.0.
*/
-import React from 'react';
-
-export const SearchIndexDomainManagement: React.FC = () => {
- // TODO If index && !index.crawler then do something
-
- return <>Manage Domains>;
+export const mockIndexNameValues = {
+ indexName: 'index-name',
};
+
+jest.mock('../index_name_logic', () => ({
+ IndexNameLogic: {
+ values: mockIndexNameValues,
+ },
+}));
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler.test.tsx
new file mode 100644
index 0000000000000..a81ae20408aa0
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler.test.tsx
@@ -0,0 +1,80 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
+import '../../../../../__mocks__/shallow_useeffect.mock';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiButton, EuiFieldNumber, EuiForm, EuiSelect, EuiSwitch } from '@elastic/eui';
+
+import { CrawlUnits } from '../../../../api/crawler/types';
+
+import { AutomaticCrawlScheduler } from './automatic_crawl_scheduler';
+
+const MOCK_ACTIONS = {
+ // AutomaticCrawlSchedulerLogic
+ setCrawlFrequency: jest.fn(),
+ setCrawlUnit: jest.fn(),
+ saveChanges: jest.fn(),
+ toggleCrawlAutomatically: jest.fn(),
+};
+
+const MOCK_VALUES = {
+ crawlAutomatically: false,
+ crawlFrequency: 7,
+ crawlUnit: CrawlUnits.days,
+ isSubmitting: false,
+};
+
+describe('AutomaticCrawlScheduler', () => {
+ let wrapper: ShallowWrapper;
+
+ beforeEach(() => {
+ setMockActions(MOCK_ACTIONS);
+ setMockValues(MOCK_VALUES);
+
+ wrapper = shallow( );
+ });
+
+ it('renders', () => {
+ expect(wrapper.find(EuiForm)).toHaveLength(1);
+ expect(wrapper.find(EuiFieldNumber)).toHaveLength(1);
+ expect(wrapper.find(EuiSelect)).toHaveLength(1);
+ });
+
+ it('saves changes on form submit', () => {
+ const preventDefault = jest.fn();
+ wrapper.find(EuiForm).simulate('submit', { preventDefault });
+
+ expect(preventDefault).toHaveBeenCalled();
+ expect(MOCK_ACTIONS.saveChanges).toHaveBeenCalled();
+ });
+
+ it('contains a switch that toggles automatic crawling', () => {
+ wrapper.find(EuiSwitch).simulate('change');
+
+ expect(MOCK_ACTIONS.toggleCrawlAutomatically).toHaveBeenCalled();
+ });
+
+ it('contains a number field that updates the crawl frequency', () => {
+ wrapper.find(EuiFieldNumber).simulate('change', { target: { value: '10' } });
+
+ expect(MOCK_ACTIONS.setCrawlFrequency).toHaveBeenCalledWith(10);
+ });
+
+ it('contains a select field that updates the crawl unit', () => {
+ wrapper.find(EuiSelect).simulate('change', { target: { value: CrawlUnits.weeks } });
+
+ expect(MOCK_ACTIONS.setCrawlUnit).toHaveBeenCalledWith(CrawlUnits.weeks);
+ });
+
+ it('contains a submit button', () => {
+ expect(wrapper.find(EuiButton).prop('type')).toEqual('submit');
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler.tsx
new file mode 100644
index 0000000000000..3ce6cd2cc5efa
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler.tsx
@@ -0,0 +1,198 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import {
+ EuiButton,
+ EuiFieldNumber,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiForm,
+ EuiFormRow,
+ EuiLink,
+ EuiSelect,
+ EuiSpacer,
+ EuiSwitch,
+ EuiText,
+ htmlIdGenerator,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { FormattedMessage } from '@kbn/i18n-react';
+
+import {
+ HOURS_UNIT_LABEL,
+ DAYS_UNIT_LABEL,
+ WEEKS_UNIT_LABEL,
+ MONTHS_UNIT_LABEL,
+ SAVE_BUTTON_LABEL,
+} from '../../../../../shared/constants';
+import { DataPanel } from '../../../../../shared/data_panel/data_panel';
+
+import { CrawlUnits } from '../../../../api/crawler/types';
+
+import { AutomaticCrawlSchedulerLogic } from './automatic_crawl_scheduler_logic';
+
+export const AutomaticCrawlScheduler: React.FC = () => {
+ const { setCrawlFrequency, setCrawlUnit, saveChanges, toggleCrawlAutomatically } = useActions(
+ AutomaticCrawlSchedulerLogic
+ );
+
+ const { crawlAutomatically, crawlFrequency, crawlUnit, isSubmitting } = useValues(
+ AutomaticCrawlSchedulerLogic
+ );
+
+ const formId = htmlIdGenerator('AutomaticCrawlScheduler')();
+
+ return (
+ <>
+
+
+ {i18n.translate('xpack.enterpriseSearch.automaticCrawlSchedule.title', {
+ defaultMessage: 'Automated Crawl Scheduling',
+ })}
+
+ }
+ titleSize="s"
+ subtitle={
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.readMoreLink',
+ {
+ defaultMessage: 'Read more.',
+ }
+ )}
+
+ ),
+ }}
+ />
+ }
+ iconType="calendar"
+ >
+ {
+ event.preventDefault();
+ saveChanges();
+ }}
+ component="form"
+ id={formId}
+ >
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel',
+ {
+ defaultMessage: 'Crawl automatically',
+ }
+ )}
+
+ }
+ onChange={toggleCrawlAutomatically}
+ compressed
+ />
+
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlUnitsPrefix',
+ {
+ defaultMessage: 'Every',
+ }
+ )}
+
+
+
+ setCrawlFrequency(parseInt(e.target.value, 10))}
+ />
+
+
+ setCrawlUnit(e.target.value as CrawlUnits)}
+ />
+
+
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleDescription',
+ {
+ defaultMessage:
+ 'The crawl schedule will perform a full crawl on every domain on this index.',
+ }
+ )}
+
+
+
+
+ {SAVE_BUTTON_LABEL}
+
+
+
+
+ >
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler_logic.test.ts
new file mode 100644
index 0000000000000..0b1eceb379282
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler_logic.test.ts
@@ -0,0 +1,293 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ LogicMounter,
+ mockHttpValues,
+ mockFlashMessageHelpers,
+} from '../../../../../__mocks__/kea_logic';
+import '../../_mocks_/index_name_logic.mock';
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import { CrawlUnits } from '../../../../api/crawler/types';
+
+import { AutomaticCrawlSchedulerLogic } from './automatic_crawl_scheduler_logic';
+
+describe('AutomaticCrawlSchedulerLogic', () => {
+ const { mount } = new LogicMounter(AutomaticCrawlSchedulerLogic);
+ const { http } = mockHttpValues;
+ const { flashAPIErrors, flashSuccessToast } = mockFlashMessageHelpers;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('has expected default values', () => {
+ mount();
+
+ expect(AutomaticCrawlSchedulerLogic.values).toEqual({
+ crawlAutomatically: false,
+ crawlFrequency: 7,
+ crawlUnit: CrawlUnits.days,
+ isSubmitting: false,
+ });
+ });
+
+ describe('actions', () => {
+ describe('clearCrawlSchedule', () => {
+ it('sets crawl schedule related values to their defaults', () => {
+ mount({
+ crawlAutomatically: true,
+ crawlFrequency: 36,
+ crawlUnit: CrawlUnits.hours,
+ });
+
+ AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule();
+
+ expect(AutomaticCrawlSchedulerLogic.values).toMatchObject({
+ crawlAutomatically: false,
+ crawlFrequency: 7,
+ crawlUnit: CrawlUnits.days,
+ });
+ });
+ });
+
+ describe('toggleCrawlAutomatically', () => {
+ it('toggles the ability to crawl automatically', () => {
+ mount({
+ crawlAutomatically: false,
+ });
+
+ AutomaticCrawlSchedulerLogic.actions.toggleCrawlAutomatically();
+
+ expect(AutomaticCrawlSchedulerLogic.values.crawlAutomatically).toEqual(true);
+
+ AutomaticCrawlSchedulerLogic.actions.toggleCrawlAutomatically();
+
+ expect(AutomaticCrawlSchedulerLogic.values.crawlAutomatically).toEqual(false);
+ });
+ });
+
+ describe('onDoneSubmitting', () => {
+ mount({
+ isSubmitting: true,
+ });
+
+ AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting();
+
+ expect(AutomaticCrawlSchedulerLogic.values.isSubmitting).toEqual(false);
+ });
+
+ describe('setCrawlFrequency', () => {
+ it("sets the crawl schedule's frequency", () => {
+ mount({
+ crawlFrequency: 36,
+ });
+
+ AutomaticCrawlSchedulerLogic.actions.setCrawlFrequency(12);
+
+ expect(AutomaticCrawlSchedulerLogic.values.crawlFrequency).toEqual(12);
+ });
+ });
+
+ describe('setCrawlSchedule', () => {
+ it("sets the crawl schedule's frequency and unit, and enables crawling automatically", () => {
+ mount();
+
+ AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule({
+ frequency: 3,
+ unit: CrawlUnits.hours,
+ });
+
+ expect(AutomaticCrawlSchedulerLogic.values).toMatchObject({
+ crawlAutomatically: true,
+ crawlFrequency: 3,
+ crawlUnit: CrawlUnits.hours,
+ });
+ });
+ });
+
+ describe('setCrawlUnit', () => {
+ it("sets the crawl schedule's unit", () => {
+ mount({
+ crawlUnit: CrawlUnits.months,
+ });
+
+ AutomaticCrawlSchedulerLogic.actions.setCrawlUnit(CrawlUnits.weeks);
+
+ expect(AutomaticCrawlSchedulerLogic.values.crawlUnit).toEqual(CrawlUnits.weeks);
+ });
+ });
+ });
+
+ describe('listeners', () => {
+ describe('deleteCrawlSchedule', () => {
+ it('resets the states of the crawl scheduler and popover, and shows a toast, on success', async () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'clearCrawlSchedule');
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
+ http.delete.mockReturnValueOnce(Promise.resolve());
+
+ AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
+ await nextTick();
+
+ expect(AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule).toHaveBeenCalled();
+ expect(flashSuccessToast).toHaveBeenCalledWith(expect.any(String));
+ expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
+ });
+
+ describe('error paths', () => {
+      it('resets the states of the crawl scheduler and popover on a 404 response', async () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'clearCrawlSchedule');
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
+ http.delete.mockReturnValueOnce(
+ Promise.reject({
+ response: { status: 404 },
+ })
+ );
+
+ AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
+ await nextTick();
+
+ expect(AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule).toHaveBeenCalled();
+ expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
+ });
+
+      it('flashes an error on a non-404 response', async () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
+ http.delete.mockReturnValueOnce(
+ Promise.reject({
+ response: { status: 500 },
+ })
+ );
+
+ AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
+ await nextTick();
+
+ expect(flashAPIErrors).toHaveBeenCalledWith({
+ response: { status: 500 },
+ });
+ expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
+ });
+ });
+ });
+
+ describe('fetchCrawlSchedule', () => {
+ it('set the state of the crawl scheduler on success', async () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'setCrawlSchedule');
+ http.get.mockReturnValueOnce(
+ Promise.resolve({
+ unit: CrawlUnits.days,
+ frequency: '30',
+ })
+ );
+
+ AutomaticCrawlSchedulerLogic.actions.fetchCrawlSchedule();
+ await nextTick();
+
+ expect(AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule).toHaveBeenCalledWith({
+ unit: CrawlUnits.days,
+ frequency: '30',
+ });
+ });
+
+ describe('error paths', () => {
+      it('resets the states of the crawl scheduler on a 404 response', async () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'clearCrawlSchedule');
+ http.get.mockReturnValueOnce(
+ Promise.reject({
+ response: { status: 404 },
+ })
+ );
+
+ AutomaticCrawlSchedulerLogic.actions.fetchCrawlSchedule();
+ await nextTick();
+
+ expect(AutomaticCrawlSchedulerLogic.actions.clearCrawlSchedule).toHaveBeenCalled();
+ });
+
+      it('flashes an error on a non-404 response', async () => {
+ http.get.mockReturnValueOnce(
+ Promise.reject({
+ response: { status: 500 },
+ })
+ );
+
+ AutomaticCrawlSchedulerLogic.actions.fetchCrawlSchedule();
+ await nextTick();
+
+ expect(flashAPIErrors).toHaveBeenCalledWith({
+ response: { status: 500 },
+ });
+ });
+ });
+ });
+
+ describe('saveChanges', () => {
+ it('updates or creates a crawl schedule if the user has chosen to crawl automatically', () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'submitCrawlSchedule');
+ mount({
+ crawlAutomatically: true,
+ });
+
+ AutomaticCrawlSchedulerLogic.actions.saveChanges();
+
+ expect(AutomaticCrawlSchedulerLogic.actions.submitCrawlSchedule);
+ });
+
+ it('deletes the crawl schedule if the user has chosen to disable automatic crawling', () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'deleteCrawlSchedule');
+ mount({
+ crawlAutomatically: false,
+ });
+
+ AutomaticCrawlSchedulerLogic.actions.saveChanges();
+
+ expect(AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule);
+ });
+ });
+
+ describe('submitCrawlSchedule', () => {
+ it('sets the states of the crawl scheduler and closes the popover on success', async () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'setCrawlSchedule');
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
+ http.put.mockReturnValueOnce(
+ Promise.resolve({
+ unit: CrawlUnits.days,
+ frequency: 30,
+ })
+ );
+
+ AutomaticCrawlSchedulerLogic.actions.submitCrawlSchedule();
+ await nextTick();
+
+ expect(AutomaticCrawlSchedulerLogic.actions.setCrawlSchedule).toHaveBeenCalledWith({
+ unit: CrawlUnits.days,
+ frequency: 30,
+ });
+ expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
+ });
+
+ it('flashes an error callout if there is an error', async () => {
+ jest.spyOn(AutomaticCrawlSchedulerLogic.actions, 'onDoneSubmitting');
+ http.delete.mockReturnValueOnce(
+ Promise.reject({
+ response: { status: 500 },
+ })
+ );
+
+ AutomaticCrawlSchedulerLogic.actions.deleteCrawlSchedule();
+ await nextTick();
+
+ expect(flashAPIErrors).toHaveBeenCalledWith({
+ response: { status: 500 },
+ });
+ expect(AutomaticCrawlSchedulerLogic.actions.onDoneSubmitting).toHaveBeenCalled();
+ });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler_logic.ts
new file mode 100644
index 0000000000000..736027540dc37
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/automatic_crawl_scheduler/automatic_crawl_scheduler_logic.ts
@@ -0,0 +1,189 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { i18n } from '@kbn/i18n';
+
+import { flashAPIErrors, flashSuccessToast } from '../../../../../shared/flash_messages';
+import { HttpLogic } from '../../../../../shared/http';
+import { CrawlSchedule, CrawlUnits } from '../../../../api/crawler/types';
+import { IndexNameLogic } from '../../index_name_logic';
+
+export interface AutomaticCrawlSchedulerLogicValues {
+ crawlAutomatically: boolean;
+ crawlFrequency: CrawlSchedule['frequency'];
+ crawlUnit: CrawlSchedule['unit'];
+ isSubmitting: boolean;
+}
+
+const DEFAULT_VALUES: Pick = {
+ crawlFrequency: 7,
+ crawlUnit: CrawlUnits.days,
+};
+
+export interface AutomaticCrawlSchedulerLogicActions {
+ clearCrawlSchedule(): void;
+ deleteCrawlSchedule(): void;
+ disableCrawlAutomatically(): void;
+ onDoneSubmitting(): void;
+ enableCrawlAutomatically(): void;
+ fetchCrawlSchedule(): void;
+ saveChanges(): void;
+ setCrawlFrequency(crawlFrequency: CrawlSchedule['frequency']): {
+ crawlFrequency: CrawlSchedule['frequency'];
+ };
+ setCrawlSchedule(crawlSchedule: CrawlSchedule): { crawlSchedule: CrawlSchedule };
+ setCrawlUnit(crawlUnit: CrawlSchedule['unit']): { crawlUnit: CrawlSchedule['unit'] };
+ submitCrawlSchedule(): void;
+ toggleCrawlAutomatically(): void;
+}
+
+export const AutomaticCrawlSchedulerLogic = kea<
+ MakeLogicType
+>({
+ path: ['enterprise_search', 'crawler', 'automatic_crawl_scheduler_logic'],
+ actions: () => ({
+ clearCrawlSchedule: true,
+ deleteCrawlSchedule: true,
+ disableCrawlAutomatically: true,
+ onDoneSubmitting: true,
+ enableCrawlAutomatically: true,
+ fetchCrawlSchedule: true,
+ saveChanges: true,
+ setCrawlSchedule: (crawlSchedule: CrawlSchedule) => ({ crawlSchedule }),
+ submitCrawlSchedule: true,
+ setCrawlFrequency: (crawlFrequency: string) => ({ crawlFrequency }),
+ setCrawlUnit: (crawlUnit: CrawlUnits) => ({ crawlUnit }),
+ toggleCrawlAutomatically: true,
+ }),
+ reducers: () => ({
+ crawlAutomatically: [
+ false,
+ {
+ clearCrawlSchedule: () => false,
+ setCrawlSchedule: () => true,
+ toggleCrawlAutomatically: (crawlAutomatically) => !crawlAutomatically,
+ },
+ ],
+ crawlFrequency: [
+ DEFAULT_VALUES.crawlFrequency,
+ {
+ clearCrawlSchedule: () => DEFAULT_VALUES.crawlFrequency,
+ setCrawlSchedule: (_, { crawlSchedule: { frequency } }) => frequency,
+ setCrawlFrequency: (_, { crawlFrequency }) => crawlFrequency,
+ },
+ ],
+ crawlUnit: [
+ DEFAULT_VALUES.crawlUnit,
+ {
+ clearCrawlSchedule: () => DEFAULT_VALUES.crawlUnit,
+ setCrawlSchedule: (_, { crawlSchedule: { unit } }) => unit,
+ setCrawlUnit: (_, { crawlUnit }) => crawlUnit,
+ },
+ ],
+ isSubmitting: [
+ false,
+ {
+ deleteCrawlSchedule: () => true,
+ onDoneSubmitting: () => false,
+ submitCrawlSchedule: () => true,
+ },
+ ],
+ }),
+ listeners: ({ actions, values }) => ({
+ deleteCrawlSchedule: async () => {
+ const { http } = HttpLogic.values;
+ const { indexName } = IndexNameLogic.values;
+
+ try {
+ await http.delete(
+ `/internal/enterprise_search/indices/${indexName}/crawler/crawl_schedule`
+ );
+ actions.clearCrawlSchedule();
+ flashSuccessToast(
+ i18n.translate(
+ 'xpack.enterpriseSearch.crawler.automaticCrawlScheduler.disableCrawlSchedule.successMessage',
+ {
+ defaultMessage: 'Automatic crawling has been disabled.',
+ }
+ )
+ );
+ } catch (e) {
+ // A 404 is expected and means the user has no crawl schedule to delete
+ if (e.response?.status === 404) {
+ actions.clearCrawlSchedule();
+ } else {
+ flashAPIErrors(e);
+ // Keep the popover open
+ }
+ } finally {
+ actions.onDoneSubmitting();
+ }
+ },
+ fetchCrawlSchedule: async () => {
+ const { http } = HttpLogic.values;
+ const { indexName } = IndexNameLogic.values;
+
+ try {
+ const crawlSchedule: CrawlSchedule = await http.get(
+ `/internal/enterprise_search/indices/${indexName}/crawler/crawl_schedule`
+ );
+ actions.setCrawlSchedule(crawlSchedule);
+ } catch (e) {
+ // A 404 is expected and means the user does not have crawl schedule
+ // for this index. We continue to use the defaults.
+ if (e.response?.status === 404) {
+ actions.clearCrawlSchedule();
+ } else {
+ flashAPIErrors(e);
+ }
+ }
+ },
+ saveChanges: () => {
+ if (values.crawlAutomatically) {
+ actions.submitCrawlSchedule();
+ } else {
+ actions.deleteCrawlSchedule();
+ }
+ },
+ submitCrawlSchedule: async () => {
+ const { http } = HttpLogic.values;
+ const { indexName } = IndexNameLogic.values;
+
+ try {
+ const crawlSchedule: CrawlSchedule = await http.put(
+ `/internal/enterprise_search/indices/${indexName}/crawler/crawl_schedule`,
+ {
+ body: JSON.stringify({
+ unit: values.crawlUnit,
+ frequency: values.crawlFrequency,
+ }),
+ }
+ );
+ actions.setCrawlSchedule(crawlSchedule);
+ flashSuccessToast(
+ i18n.translate(
+ 'xpack.enterpriseSearch.crawler.automaticCrawlScheduler.submitCrawlSchedule.successMessage',
+ {
+ defaultMessage: 'Your automatic crawling schedule has been updated.',
+ }
+ )
+ );
+ } catch (e) {
+ flashAPIErrors(e);
+ } finally {
+ actions.onDoneSubmitting();
+ }
+ },
+ }),
+ events: ({ actions }) => ({
+ afterMount: () => {
+ actions.fetchCrawlSchedule();
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout.test.tsx
new file mode 100644
index 0000000000000..5ca0ea23c15b0
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout.test.tsx
@@ -0,0 +1,152 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiButton, EuiButtonEmpty, EuiFlyout, EuiFlyoutFooter } from '@elastic/eui';
+
+import { Loading } from '../../../../../shared/loading';
+import { rerender } from '../../../../../test_helpers';
+
+import { CrawlCustomSettingsFlyout } from './crawl_custom_settings_flyout';
+import { CrawlCustomSettingsFlyoutCrawlDepthPanel } from './crawl_custom_settings_flyout_crawl_depth_panel';
+import { CrawlCustomSettingsFlyoutDomainsPanel } from './crawl_custom_settings_flyout_domains_panel';
+import { CrawlCustomSettingsFlyoutSeedUrlsPanel } from './crawl_custom_settings_flyout_seed_urls_panel';
+
+const MOCK_VALUES = {
+ // CrawlCustomSettingsFlyoutLogic
+ isDataLoading: false,
+ isFormSubmitting: false,
+ isFlyoutVisible: true,
+ selectedDomainUrls: ['https://www.elastic.co'],
+};
+
+const MOCK_ACTIONS = {
+ // CrawlCustomSettingsFlyoutLogic
+ hideFlyout: jest.fn(),
+ onSelectDomainUrls: jest.fn(),
+ startCustomCrawl: jest.fn(),
+};
+
+describe('CrawlCustomSettingsFlyout', () => {
+ let wrapper: ShallowWrapper;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockValues(MOCK_VALUES);
+ setMockActions(MOCK_ACTIONS);
+
+ wrapper = shallow( );
+ });
+
+ it('is empty when the flyout is hidden', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ isFlyoutVisible: false,
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.isEmptyRender()).toBe(true);
+ });
+
+ it('renders as a modal when visible', () => {
+ expect(wrapper.is(EuiFlyout)).toBe(true);
+ });
+
+ it('can be closed', () => {
+ expect(wrapper.prop('onClose')).toEqual(MOCK_ACTIONS.hideFlyout);
+ expect(wrapper.find(EuiFlyoutFooter).find(EuiButtonEmpty).prop('onClick')).toEqual(
+ MOCK_ACTIONS.hideFlyout
+ );
+ });
+
+ it('lets the user customize their crawl', () => {
+ expect(wrapper.find(Loading)).toHaveLength(0);
+ for (const component of [
+ CrawlCustomSettingsFlyoutCrawlDepthPanel,
+ CrawlCustomSettingsFlyoutDomainsPanel,
+ CrawlCustomSettingsFlyoutSeedUrlsPanel,
+ ]) {
+ expect(wrapper.find(component)).toHaveLength(1);
+ }
+ });
+
+ it('shows a loading state', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ isDataLoading: true,
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(Loading)).toHaveLength(1);
+ for (const component of [
+ CrawlCustomSettingsFlyoutCrawlDepthPanel,
+ CrawlCustomSettingsFlyoutDomainsPanel,
+ CrawlCustomSettingsFlyoutSeedUrlsPanel,
+ ]) {
+ expect(wrapper.find(component)).toHaveLength(0);
+ }
+ });
+
+ describe('submit button', () => {
+ it('is enabled by default', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ selectedDomainUrls: [],
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(EuiFlyoutFooter).find(EuiButton).prop('disabled')).toEqual(true);
+ });
+
+ it('is disabled when no domains are selected', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ selectedDomainUrls: [],
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(EuiFlyoutFooter).find(EuiButton).prop('disabled')).toEqual(true);
+ });
+
+ it('is disabled when data is loading', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ isDataLoading: true,
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(EuiFlyoutFooter).find(EuiButton).prop('disabled')).toEqual(true);
+ });
+
+ it('shows a loading state when the user makes a request', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ isFormSubmitting: true,
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(EuiFlyoutFooter).find(EuiButton).prop('isLoading')).toEqual(true);
+ });
+
+ it('starts a crawl and hides the modal', () => {
+ wrapper.find(EuiFlyoutFooter).find(EuiButton).simulate('click');
+
+ expect(MOCK_ACTIONS.startCustomCrawl).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout.tsx
new file mode 100644
index 0000000000000..d3c486fa20ca5
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout.tsx
@@ -0,0 +1,108 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues, useActions } from 'kea';
+
+import {
+ EuiButton,
+ EuiButtonEmpty,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiFlyout,
+ EuiFlyoutBody,
+ EuiFlyoutFooter,
+ EuiFlyoutHeader,
+ EuiSpacer,
+ EuiText,
+ EuiTitle,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { CANCEL_BUTTON_LABEL } from '../../../../../shared/constants';
+import { Loading } from '../../../../../shared/loading';
+
+import { CrawlCustomSettingsFlyoutCrawlDepthPanel } from './crawl_custom_settings_flyout_crawl_depth_panel';
+import { CrawlCustomSettingsFlyoutDomainsPanel } from './crawl_custom_settings_flyout_domains_panel';
+import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
+import { CrawlCustomSettingsFlyoutSeedUrlsPanel } from './crawl_custom_settings_flyout_seed_urls_panel';
+
+export const CrawlCustomSettingsFlyout: React.FC = () => {
+ const { isDataLoading, isFormSubmitting, isFlyoutVisible, selectedDomainUrls } = useValues(
+ CrawlCustomSettingsFlyoutLogic
+ );
+ const { hideFlyout, startCustomCrawl } = useActions(CrawlCustomSettingsFlyoutLogic);
+
+ if (!isFlyoutVisible) {
+ return null;
+ }
+
+ return (
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeadTitle',
+ {
+ defaultMessage: 'Custom crawl configuration',
+ }
+ )}
+
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeaderDescription',
+ {
+ defaultMessage: 'Set up a one-time crawl with custom settings.',
+ }
+ )}
+
+
+
+
+ {isDataLoading ? (
+
+ ) : (
+ <>
+
+
+
+
+
+ >
+ )}
+
+
+
+
+ {CANCEL_BUTTON_LABEL}
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.startCrawlButtonLabel',
+ {
+ defaultMessage: 'Apply and crawl now',
+ }
+ )}
+
+
+
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_crawl_depth_panel.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_crawl_depth_panel.test.tsx
new file mode 100644
index 0000000000000..24932de7cfb36
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_crawl_depth_panel.test.tsx
@@ -0,0 +1,45 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiFieldNumber } from '@elastic/eui';
+
+import { CrawlCustomSettingsFlyoutCrawlDepthPanel } from './crawl_custom_settings_flyout_crawl_depth_panel';
+
+const MOCK_VALUES = {
+ // CrawlCustomSettingsFlyoutLogic
+ maxCrawlDepth: 5,
+};
+
+const MOCK_ACTIONS = {
+ // CrawlCustomSettingsFlyoutLogic
+ onSelectMaxCrawlDepth: jest.fn(),
+};
+
+describe('CrawlCustomSettingsFlyoutCrawlDepthPanel', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockValues(MOCK_VALUES);
+ setMockActions(MOCK_ACTIONS);
+ });
+
+ it('allows the user to set max crawl depth', () => {
+ const wrapper = shallow( );
+ const crawlDepthField = wrapper.find(EuiFieldNumber);
+
+ expect(crawlDepthField.prop('value')).toEqual(5);
+
+ crawlDepthField.simulate('change', { target: { value: '10' } });
+
+ expect(MOCK_ACTIONS.onSelectMaxCrawlDepth).toHaveBeenCalledWith(10);
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_crawl_depth_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_crawl_depth_panel.tsx
new file mode 100644
index 0000000000000..d6bcda310b623
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_crawl_depth_panel.tsx
@@ -0,0 +1,64 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { ChangeEvent } from 'react';
+
+import { useValues, useActions } from 'kea';
+
+import {
+ EuiFieldNumber,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiFormRow,
+ EuiPanel,
+ EuiText,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
+
+export const CrawlCustomSettingsFlyoutCrawlDepthPanel: React.FC = () => {
+ const { maxCrawlDepth } = useValues(CrawlCustomSettingsFlyoutLogic);
+ const { onSelectMaxCrawlDepth } = useActions(CrawlCustomSettingsFlyoutLogic);
+
+ return (
+
+
+
+
+ ) =>
+ onSelectMaxCrawlDepth(parseInt(e.target.value, 10))
+ }
+ />
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldDescription',
+ {
+ defaultMessage:
+ 'Set a max crawl depth to specify how many pages deep the crawler should traverse. Set the value to one (1) to limit the crawl to only the entry points.',
+ }
+ )}
+
+
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_domains_panel.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_domains_panel.test.tsx
new file mode 100644
index 0000000000000..2e74c598cbe6d
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_domains_panel.test.tsx
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiAccordion, EuiNotificationBadge } from '@elastic/eui';
+
+import { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
+
+import { rerender } from '../../../../../test_helpers';
+
+import { CrawlCustomSettingsFlyoutDomainsPanel } from './crawl_custom_settings_flyout_domains_panel';
+
+const MOCK_VALUES = {
+ // CrawlCustomSettingsFlyoutLogic
+ domainUrls: ['https://www.elastic.co', 'https://www.swiftype.com'],
+ selectedDomainUrls: ['https://www.elastic.co'],
+};
+
+const MOCK_ACTIONS = {
+ // CrawlCustomSettingsFlyoutLogic
+ onSelectDomainUrls: jest.fn(),
+};
+
+const getAccordionBadge = (wrapper: ShallowWrapper) => {
+ const accordionWrapper = wrapper.find(EuiAccordion);
+ const extraActionWrapper = shallow({accordionWrapper.prop('extraAction')}
);
+ return extraActionWrapper.find(EuiNotificationBadge);
+};
+
+describe('CrawlCustomSettingsFlyoutDomainsPanel', () => {
+ let wrapper: ShallowWrapper;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockValues(MOCK_VALUES);
+ setMockActions(MOCK_ACTIONS);
+
+ wrapper = shallow( );
+ });
+
+ it('allows the user to select domains', () => {
+ const domainAccordionWrapper = wrapper.find(EuiAccordion);
+
+ expect(domainAccordionWrapper.find(SimplifiedSelectable).props()).toEqual({
+ options: ['https://www.elastic.co', 'https://www.swiftype.com'],
+ selectedOptions: ['https://www.elastic.co'],
+ onChange: MOCK_ACTIONS.onSelectDomainUrls,
+ });
+ });
+
+ it('indicates how many domains are selected', () => {
+ let badge = getAccordionBadge(wrapper);
+
+ expect(badge.render().text()).toContain('1');
+ expect(badge.prop('color')).toEqual('accent');
+
+ setMockValues({
+ ...MOCK_VALUES,
+ selectedDomainUrls: [],
+ });
+
+ rerender(wrapper);
+ badge = getAccordionBadge(wrapper);
+
+ expect(badge.render().text()).toContain('0');
+ expect(badge.prop('color')).toEqual('subdued');
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_domains_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_domains_panel.tsx
new file mode 100644
index 0000000000000..3cdeefa758fd4
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_domains_panel.tsx
@@ -0,0 +1,84 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues, useActions } from 'kea';
+
+import {
+ EuiAccordion,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiIcon,
+ EuiNotificationBadge,
+ EuiPanel,
+ EuiTitle,
+ useGeneratedHtmlId,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
+
+import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
+
+export const CrawlCustomSettingsFlyoutDomainsPanel: React.FC = () => {
+ const { domainUrls, selectedDomainUrls } = useValues(CrawlCustomSettingsFlyoutLogic);
+ const { onSelectDomainUrls } = useActions(CrawlCustomSettingsFlyoutLogic);
+
+ return (
+
+
+
+
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.domainsAccordionButtonLabel',
+ {
+ defaultMessage: 'Add domains to your crawl',
+ }
+ )}
+
+
+
+
+ }
+ extraAction={
+
+ 0 ? 'accent' : 'subdued'}
+ >
+ {selectedDomainUrls.length}
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor',
+ {
+ defaultMessage: 'selected',
+ }
+ )}
+
+
+ }
+ >
+
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic.test.ts
new file mode 100644
index 0000000000000..eff81c832f317
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic.test.ts
@@ -0,0 +1,443 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { LogicMounter, mockHttpValues } from '../../../../../__mocks__/kea_logic';
+import '../../_mocks_/index_name_logic.mock';
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import { itShowsServerErrorAsFlashMessage } from '../../../../../test_helpers';
+
+import { DomainConfig } from '../../../../api/crawler/types';
+import { CrawlerLogic } from '../crawler_logic';
+
+import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
+
+describe('CrawlCustomSettingsFlyoutLogic', () => {
+ const { mount } = new LogicMounter(CrawlCustomSettingsFlyoutLogic);
+ const { http } = mockHttpValues;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mount();
+ });
+
+ it('has expected default values', () => {
+ expect(CrawlCustomSettingsFlyoutLogic.values).toEqual({
+ customEntryPointUrls: [],
+ customSitemapUrls: [],
+ domainConfigMap: {},
+ domainConfigs: [],
+ domainUrls: [],
+ entryPointUrls: [],
+ includeSitemapsInRobotsTxt: true,
+ isDataLoading: true,
+ isFlyoutVisible: false,
+ isFormSubmitting: false,
+ maxCrawlDepth: 2,
+ selectedDomainUrls: [],
+ selectedEntryPointUrls: [],
+ selectedSitemapUrls: [],
+ sitemapUrls: [],
+ });
+ });
+
+ describe('actions', () => {
+ describe('fetchDomainConfigData', () => {
+ it('updates logic with data that has been converted from server to client', async () => {
+ jest.spyOn(CrawlCustomSettingsFlyoutLogic.actions, 'onRecieveDomainConfigData');
+ http.get.mockReturnValueOnce(
+ Promise.resolve({
+ results: [
+ {
+ id: '1234',
+ name: 'https://www.elastic.co',
+ seed_urls: [],
+ sitemap_urls: [],
+ },
+ ],
+ })
+ );
+
+ CrawlCustomSettingsFlyoutLogic.actions.fetchDomainConfigData();
+ await nextTick();
+
+ expect(http.get).toHaveBeenCalledWith(
+ '/internal/enterprise_search/indices/index-name/crawler/domain_configs'
+ );
+ expect(
+ CrawlCustomSettingsFlyoutLogic.actions.onRecieveDomainConfigData
+ ).toHaveBeenCalledWith([
+ {
+ id: '1234',
+ name: 'https://www.elastic.co',
+ seedUrls: [],
+ sitemapUrls: [],
+ },
+ ]);
+ });
+
+ itShowsServerErrorAsFlashMessage(http.get, () => {
+ CrawlCustomSettingsFlyoutLogic.actions.fetchDomainConfigData();
+ });
+ });
+
+ describe('hideFlyout', () => {
+ it('hides the modal', () => {
+ CrawlCustomSettingsFlyoutLogic.actions.hideFlyout();
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.isFlyoutVisible).toBe(false);
+ });
+ });
+
+ describe('onRecieveDomainConfigData', () => {
+ it('saves the data', () => {
+ mount({
+ domainConfigs: [],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onRecieveDomainConfigData([
+ {
+ name: 'https://www.elastic.co',
+ },
+ ] as DomainConfig[]);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.domainConfigs).toEqual([
+ {
+ name: 'https://www.elastic.co',
+ },
+ ]);
+ });
+ });
+
+ describe('onSelectCustomSitemapUrls', () => {
+ it('saves the urls', () => {
+ mount({
+ customSitemapUrls: [],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onSelectCustomSitemapUrls([
+ 'https://www.elastic.co/custom-sitemap1.xml',
+ 'https://swiftype.com/custom-sitemap2.xml',
+ ]);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.customSitemapUrls).toEqual([
+ 'https://www.elastic.co/custom-sitemap1.xml',
+ 'https://swiftype.com/custom-sitemap2.xml',
+ ]);
+ });
+ });
+
+ describe('onSelectCustomEntryPointUrls', () => {
+ it('saves the urls', () => {
+ mount({
+ customEntryPointUrls: [],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onSelectCustomEntryPointUrls([
+ 'https://www.elastic.co/custom-entry-point',
+ 'https://swiftype.com/custom-entry-point',
+ ]);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.customEntryPointUrls).toEqual([
+ 'https://www.elastic.co/custom-entry-point',
+ 'https://swiftype.com/custom-entry-point',
+ ]);
+ });
+ });
+
+ describe('onSelectDomainUrls', () => {
+ it('saves the urls', () => {
+ mount({
+ selectedDomainUrls: [],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onSelectDomainUrls([
+ 'https://www.elastic.co',
+ 'https://swiftype.com',
+ ]);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.selectedDomainUrls).toEqual([
+ 'https://www.elastic.co',
+ 'https://swiftype.com',
+ ]);
+ });
+
+ it('filters selected sitemap urls by selected domains', () => {
+ mount({
+ selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
+ selectedSitemapUrls: [
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://swiftype.com/sitemap2.xml',
+ ],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onSelectDomainUrls(['https://swiftype.com']);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.selectedSitemapUrls).toEqual([
+ 'https://swiftype.com/sitemap2.xml',
+ ]);
+ });
+
+ it('filters selected entry point urls by selected domains', () => {
+ mount({
+ selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
+ selectedEntryPointUrls: [
+ 'https://www.elastic.co/guide',
+ 'https://swiftype.com/documentation',
+ ],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onSelectDomainUrls(['https://swiftype.com']);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.selectedEntryPointUrls).toEqual([
+ 'https://swiftype.com/documentation',
+ ]);
+ });
+ });
+
+ describe('onSelectEntryPointUrls', () => {
+ it('saves the urls', () => {
+ mount({
+ selectedEntryPointUrls: [],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onSelectEntryPointUrls([
+ 'https://www.elastic.co/guide',
+ 'https://swiftype.com/documentation',
+ ]);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.selectedEntryPointUrls).toEqual([
+ 'https://www.elastic.co/guide',
+ 'https://swiftype.com/documentation',
+ ]);
+ });
+ });
+
+ describe('onSelectMaxCrawlDepth', () => {
+ it('saves the crawl depth', () => {
+ mount({
+ maxCrawlDepth: 5,
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onSelectMaxCrawlDepth(10);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.maxCrawlDepth).toEqual(10);
+ });
+ });
+
+ describe('onSelectSitemapUrls', () => {
+ it('saves the urls', () => {
+ mount({
+ selectedSitemapUrls: [],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.onSelectSitemapUrls([
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://swiftype.com/sitemap2.xml',
+ ]);
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.selectedSitemapUrls).toEqual([
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://swiftype.com/sitemap2.xml',
+ ]);
+ });
+ });
+
+ describe('showFlyout', () => {
+ it('shows the modal and resets the form', () => {
+ mount({
+ customEntryPointUrls: [
+ 'https://www.elastic.co/custom-entry-point',
+ 'https://swiftype.com/custom-entry-point',
+ ],
+ customSitemapUrls: [
+ 'https://www.elastic.co/custom-sitemap1.xml',
+ 'https://swiftype.com/custom-sitemap2.xml',
+ ],
+ includeSitemapsInRobotsTxt: false,
+ isDataLoading: false,
+ isFlyoutVisible: false,
+ selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
+ selectedEntryPointUrls: [
+ 'https://www.elastic.co/guide',
+ 'https://swiftype.com/documentation',
+ ],
+ selectedSitemapUrls: [
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://swiftype.com/sitemap2.xml',
+ ],
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.showFlyout();
+
+ expect(CrawlCustomSettingsFlyoutLogic.values).toEqual(
+ expect.objectContaining({
+ customEntryPointUrls: [],
+ customSitemapUrls: [],
+ includeSitemapsInRobotsTxt: true,
+ isDataLoading: true,
+ isFlyoutVisible: true,
+ selectedDomainUrls: [],
+ selectedEntryPointUrls: [],
+ selectedSitemapUrls: [],
+ })
+ );
+ });
+
+ it('fetches the latest data', () => {
+ jest.spyOn(CrawlCustomSettingsFlyoutLogic.actions, 'fetchDomainConfigData');
+
+ CrawlCustomSettingsFlyoutLogic.actions.showFlyout();
+
+ expect(CrawlCustomSettingsFlyoutLogic.actions.fetchDomainConfigData).toHaveBeenCalled();
+ });
+ });
+
+ describe('startCustomCrawl', () => {
+ it('can start a custom crawl for selected domains', async () => {
+ mount({
+ includeSitemapsInRobotsTxt: true,
+ maxCrawlDepth: 5,
+ selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
+ });
+ CrawlerLogic.mount();
+ jest.spyOn(CrawlerLogic.actions, 'startCrawl');
+
+ CrawlCustomSettingsFlyoutLogic.actions.startCustomCrawl();
+ await nextTick();
+
+ expect(CrawlerLogic.actions.startCrawl).toHaveBeenCalledWith({
+ domain_allowlist: ['https://www.elastic.co', 'https://swiftype.com'],
+ max_crawl_depth: 5,
+ sitemap_discovery_disabled: false,
+ });
+ });
+
+ it('can start a custom crawl selected domains, sitemaps, and seed urls', async () => {
+ mount({
+ includeSitemapsInRobotsTxt: true,
+ maxCrawlDepth: 5,
+ selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
+ selectedEntryPointUrls: [
+ 'https://www.elastic.co/guide',
+ 'https://swiftype.com/documentation',
+ ],
+ selectedSitemapUrls: [
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://swiftype.com/sitemap2.xml',
+ ],
+ });
+ CrawlerLogic.mount();
+ jest.spyOn(CrawlerLogic.actions, 'startCrawl');
+
+ CrawlCustomSettingsFlyoutLogic.actions.startCustomCrawl();
+ await nextTick();
+
+ expect(CrawlerLogic.actions.startCrawl).toHaveBeenCalledWith({
+ domain_allowlist: ['https://www.elastic.co', 'https://swiftype.com'],
+ max_crawl_depth: 5,
+ seed_urls: ['https://www.elastic.co/guide', 'https://swiftype.com/documentation'],
+ sitemap_urls: [
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://swiftype.com/sitemap2.xml',
+ ],
+ sitemap_discovery_disabled: false,
+ });
+ });
+ });
+
+ describe('toggleIncludeSitemapsInRobotsTxt', () => {
+ it('toggles the flag', () => {
+ mount({
+ includeSitemapsInRobotsTxt: false,
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.toggleIncludeSitemapsInRobotsTxt();
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.includeSitemapsInRobotsTxt).toEqual(true);
+
+ mount({
+ includeSitemapsInRobotsTxt: true,
+ });
+
+ CrawlCustomSettingsFlyoutLogic.actions.toggleIncludeSitemapsInRobotsTxt();
+
+ expect(CrawlCustomSettingsFlyoutLogic.values.includeSitemapsInRobotsTxt).toEqual(false);
+ });
+ });
+ });
+
+ describe('selectors', () => {
+ beforeEach(() => {
+ mount({
+ domainConfigs: [
+ {
+ name: 'https://www.elastic.co',
+ sitemapUrls: [
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://www.elastic.co/sitemap2.xml',
+ ],
+ seedUrls: ['https://www.elastic.co/', 'https://www.elastic.co/guide'],
+ },
+ {
+ name: 'https://swiftype.com',
+ sitemapUrls: ['https://swiftype.com/sitemap1.xml', 'https://swiftype.com/sitemap2.xml'],
+ seedUrls: ['https://swiftype.com/', 'https://swiftype.com/documentation'],
+ },
+ ],
+ selectedDomainUrls: ['https://swiftype.com'],
+ });
+ });
+
+ describe('domainUrls', () => {
+ it('contains all the domain urls from the domain config', () => {
+ expect(CrawlCustomSettingsFlyoutLogic.values.domainUrls).toEqual([
+ 'https://www.elastic.co',
+ 'https://swiftype.com',
+ ]);
+ });
+ });
+
+ describe('domainConfigMap', () => {
+ it('contains all the domain urls from the domain config', () => {
+ expect(CrawlCustomSettingsFlyoutLogic.values.domainConfigMap).toEqual({
+ 'https://www.elastic.co': {
+ name: 'https://www.elastic.co',
+ sitemapUrls: [
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://www.elastic.co/sitemap2.xml',
+ ],
+ seedUrls: ['https://www.elastic.co/', 'https://www.elastic.co/guide'],
+ },
+ 'https://swiftype.com': {
+ name: 'https://swiftype.com',
+ sitemapUrls: ['https://swiftype.com/sitemap1.xml', 'https://swiftype.com/sitemap2.xml'],
+ seedUrls: ['https://swiftype.com/', 'https://swiftype.com/documentation'],
+ },
+ });
+ });
+ });
+
+ describe('entryPointUrls', () => {
+ it('contains all the sitemap urls from selected domains', () => {
+ expect(CrawlCustomSettingsFlyoutLogic.values.entryPointUrls).toEqual([
+ 'https://swiftype.com/',
+ 'https://swiftype.com/documentation',
+ ]);
+ });
+ });
+
+ describe('sitemapUrls', () => {
+ it('contains all the sitemap urls from selected domains', () => {
+ expect(CrawlCustomSettingsFlyoutLogic.values.sitemapUrls).toEqual([
+ 'https://swiftype.com/sitemap1.xml',
+ 'https://swiftype.com/sitemap2.xml',
+ ]);
+ });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic.ts
new file mode 100644
index 0000000000000..7c0e2fcc33cc3
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic.ts
@@ -0,0 +1,239 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { flashAPIErrors } from '../../../../../shared/flash_messages';
+import { HttpLogic } from '../../../../../shared/http';
+import { GetCrawlerApiLogic } from '../../../../api/crawler/get_crawler_api_logic';
+import { DomainConfig, DomainConfigFromServer } from '../../../../api/crawler/types';
+import { domainConfigServerToClient } from '../../../../api/crawler/utils';
+import { IndexNameLogic } from '../../index_name_logic';
+import { CrawlerLogic, CrawlRequestOverrides } from '../crawler_logic';
+import { extractDomainAndEntryPointFromUrl } from '../domain_management/add_domain/utils';
+
+export interface CrawlCustomSettingsFlyoutLogicValues {
+ customEntryPointUrls: string[];
+ customSitemapUrls: string[];
+ domainUrls: string[];
+ domainConfigs: DomainConfig[];
+ domainConfigMap: {
+ [key: string]: DomainConfig;
+ };
+ entryPointUrls: string[];
+ includeSitemapsInRobotsTxt: boolean;
+ isDataLoading: boolean;
+ isFormSubmitting: boolean;
+ isFlyoutVisible: boolean;
+ maxCrawlDepth: number;
+ selectedDomainUrls: string[];
+ selectedEntryPointUrls: string[];
+ selectedSitemapUrls: string[];
+ sitemapUrls: string[];
+}
+
+export interface CrawlCustomSettingsFlyoutLogicActions {
+ fetchDomainConfigData(): void;
+ hideFlyout(): void;
+ onRecieveDomainConfigData(domainConfigs: DomainConfig[]): { domainConfigs: DomainConfig[] };
+ onSelectCustomEntryPointUrls(entryPointUrls: string[]): { entryPointUrls: string[] };
+ onSelectCustomSitemapUrls(sitemapUrls: string[]): { sitemapUrls: string[] };
+ onSelectDomainUrls(domainUrls: string[]): { domainUrls: string[] };
+ onSelectEntryPointUrls(entryPointUrls: string[]): { entryPointUrls: string[] };
+ onSelectMaxCrawlDepth(maxCrawlDepth: number): { maxCrawlDepth: number };
+ onSelectSitemapUrls(sitemapUrls: string[]): { sitemapUrls: string[] };
+ showFlyout(): void;
+ startCustomCrawl(): void;
+ toggleIncludeSitemapsInRobotsTxt(): void;
+}
+
+const filterSeedUrlsByDomainUrls = (seedUrls: string[], domainUrls: string[]): string[] => {
+ const domainUrlMap = domainUrls.reduce(
+ (acc, domainUrl) => ({ ...acc, [domainUrl]: true }),
+ {} as { [key: string]: boolean }
+ );
+
+ return seedUrls.filter((seedUrl) => {
+ const { domain } = extractDomainAndEntryPointFromUrl(seedUrl);
+ return !!domainUrlMap[domain];
+ });
+};
+
+export const CrawlCustomSettingsFlyoutLogic = kea<
+  MakeLogicType<CrawlCustomSettingsFlyoutLogicValues, CrawlCustomSettingsFlyoutLogicActions>
+>({
+ path: ['enterprise_search', 'crawler', 'crawl_custom_settings_flyout_logic'],
+ connect: {
+ actions: [GetCrawlerApiLogic, ['apiSuccess', 'apiError', 'makeRequest']],
+ },
+ actions: () => ({
+ fetchDomainConfigData: true,
+ hideFlyout: true,
+ onRecieveDomainConfigData: (domainConfigs) => ({ domainConfigs }),
+ onSelectCustomEntryPointUrls: (entryPointUrls) => ({ entryPointUrls }),
+ onSelectCustomSitemapUrls: (sitemapUrls) => ({ sitemapUrls }),
+ onSelectDomainUrls: (domainUrls) => ({ domainUrls }),
+ onSelectEntryPointUrls: (entryPointUrls) => ({ entryPointUrls }),
+ onSelectMaxCrawlDepth: (maxCrawlDepth) => ({ maxCrawlDepth }),
+ onSelectSitemapUrls: (sitemapUrls) => ({ sitemapUrls }),
+ startCustomCrawl: true,
+ toggleIncludeSitemapsInRobotsTxt: true,
+ showFlyout: true,
+ }),
+ reducers: () => ({
+ customEntryPointUrls: [
+ [],
+ {
+ showFlyout: () => [],
+ onSelectCustomEntryPointUrls: (_, { entryPointUrls }) => entryPointUrls,
+ },
+ ],
+ customSitemapUrls: [
+ [],
+ {
+ showFlyout: () => [],
+ onSelectCustomSitemapUrls: (_, { sitemapUrls }) => sitemapUrls,
+ },
+ ],
+ domainConfigs: [
+ [],
+ {
+ onRecieveDomainConfigData: (_, { domainConfigs }) => domainConfigs,
+ },
+ ],
+ includeSitemapsInRobotsTxt: [
+ true,
+ {
+ showFlyout: () => true,
+ toggleIncludeSitemapsInRobotsTxt: (includeSitemapsInRobotsTxt) =>
+ !includeSitemapsInRobotsTxt,
+ },
+ ],
+ isDataLoading: [
+ true,
+ {
+ showFlyout: () => true,
+ onRecieveDomainConfigData: () => false,
+ },
+ ],
+ isFormSubmitting: [
+ false,
+ {
+ makeRequest: () => true,
+ apiSuccess: () => false,
+ apiError: () => false,
+ },
+ ],
+ isFlyoutVisible: [
+ false,
+ {
+ showFlyout: () => true,
+ hideFlyout: () => false,
+ apiSuccess: () => false,
+ apiError: () => false,
+ },
+ ],
+ maxCrawlDepth: [
+ 2,
+ {
+ showFlyout: () => 2,
+ onSelectMaxCrawlDepth: (_, { maxCrawlDepth }) => maxCrawlDepth,
+ },
+ ],
+ selectedDomainUrls: [
+ [],
+ {
+ showFlyout: () => [],
+ onSelectDomainUrls: (_, { domainUrls }) => domainUrls,
+ },
+ ],
+ selectedEntryPointUrls: [
+ [],
+ {
+ showFlyout: () => [],
+ onSelectEntryPointUrls: (_, { entryPointUrls }) => entryPointUrls,
+ onSelectDomainUrls: (entryPointUrls, { domainUrls }) =>
+ filterSeedUrlsByDomainUrls(entryPointUrls, domainUrls),
+ },
+ ],
+ selectedSitemapUrls: [
+ [],
+ {
+ showFlyout: () => [],
+ onSelectSitemapUrls: (_, { sitemapUrls }) => sitemapUrls,
+ onSelectDomainUrls: (selectedSitemapUrls, { domainUrls }) =>
+ filterSeedUrlsByDomainUrls(selectedSitemapUrls, domainUrls),
+ },
+ ],
+ }),
+ selectors: () => ({
+ domainUrls: [
+ (selectors) => [selectors.domainConfigs],
+ (domainConfigs: DomainConfig[]) => domainConfigs.map((domainConfig) => domainConfig.name),
+ ],
+ domainConfigMap: [
+ (selectors) => [selectors.domainConfigs],
+ (domainConfigs: DomainConfig[]) =>
+ domainConfigs.reduce(
+ (acc, domainConfig) => ({ ...acc, [domainConfig.name]: domainConfig }),
+ {} as { [key: string]: DomainConfig }
+ ),
+ ],
+ entryPointUrls: [
+ (selectors) => [selectors.domainConfigMap, selectors.selectedDomainUrls],
+ (domainConfigMap: { [key: string]: DomainConfig }, selectedDomainUrls: string[]): string[] =>
+ selectedDomainUrls.flatMap(
+ (selectedDomainUrl) => domainConfigMap[selectedDomainUrl].seedUrls
+ ),
+ ],
+ sitemapUrls: [
+ (selectors) => [selectors.domainConfigMap, selectors.selectedDomainUrls],
+ (domainConfigMap: { [key: string]: DomainConfig }, selectedDomainUrls: string[]): string[] =>
+ selectedDomainUrls.flatMap(
+ (selectedDomainUrl) => domainConfigMap[selectedDomainUrl].sitemapUrls
+ ),
+ ],
+ }),
+ listeners: ({ actions, values }) => ({
+ fetchDomainConfigData: async () => {
+ const { http } = HttpLogic.values;
+ const { indexName } = IndexNameLogic.values;
+ try {
+ const { results } = await http.get<{
+ results: DomainConfigFromServer[];
+ }>(`/internal/enterprise_search/indices/${indexName}/crawler/domain_configs`);
+
+ const domainConfigs = results.map(domainConfigServerToClient);
+ actions.onRecieveDomainConfigData(domainConfigs);
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ },
+ showFlyout: () => {
+ actions.fetchDomainConfigData();
+ },
+ startCustomCrawl: () => {
+ const overrides: CrawlRequestOverrides = {
+ sitemap_discovery_disabled: !values.includeSitemapsInRobotsTxt,
+ max_crawl_depth: values.maxCrawlDepth,
+ domain_allowlist: values.selectedDomainUrls,
+ };
+
+ const seedUrls = [...values.selectedEntryPointUrls, ...values.customEntryPointUrls];
+ if (seedUrls.length > 0) {
+ overrides.seed_urls = seedUrls;
+ }
+
+ const sitemapUrls = [...values.selectedSitemapUrls, ...values.customSitemapUrls];
+ if (sitemapUrls.length > 0) {
+ overrides.sitemap_urls = sitemapUrls;
+ }
+
+ CrawlerLogic.actions.startCrawl(overrides);
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.test.tsx
new file mode 100644
index 0000000000000..edd7631a87986
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.test.tsx
@@ -0,0 +1,167 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiAccordion, EuiTabbedContent, EuiNotificationBadge, EuiCheckbox } from '@elastic/eui';
+
+import { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
+
+import { UrlComboBox } from '../../../../../shared/url_combo_box/url_combo_box';
+
+import { rerender } from '../../../../../test_helpers';
+
+import { CrawlCustomSettingsFlyoutSeedUrlsPanel } from './crawl_custom_settings_flyout_seed_urls_panel';
+
+const MOCK_VALUES = {
+ // CrawlCustomSettingsFlyoutLogic
+ customEntryPointUrls: ['https://www.elastic.co/custom-entry-point'],
+ customSitemapUrls: [
+ 'https://www.elastic.co/custom-sitemap1.xml',
+ 'https://swiftype.com/custom-sitemap2.xml',
+ ],
+ entryPointUrls: ['https://www.elastic.co/guide', 'https://swiftype.com/documentation'],
+ selectedDomainUrls: ['https://www.elastic.co', 'https://swiftype.com'],
+ selectedEntryPointUrls: ['https://swiftype.com/documentation'],
+ selectedSitemapUrls: ['https://www.elastic.co/sitemap1.xml', 'https://swiftype.com/sitemap2.xml'],
+ sitemapUrls: [
+ 'https://www.elastic.co/sitemap1.xml',
+ 'https://www.elastic.co/sitemap2.xml',
+ 'https://swiftype.com/sitemap1.xml',
+ 'https://swiftype.com/sitemap2.xml',
+ ],
+ includeSitemapsInRobotsTxt: true,
+};
+
+const MOCK_ACTIONS = {
+ // CrawlCustomSettingsFlyoutLogic
+ onSelectCustomEntryPointUrls: jest.fn(),
+ onSelectCustomSitemapUrls: jest.fn(),
+ onSelectEntryPointUrls: jest.fn(),
+ onSelectSitemapUrls: jest.fn(),
+ toggleIncludeSitemapsInRobotsTxt: jest.fn(),
+};
+
+const getAccordionBadge = (wrapper: ShallowWrapper) => {
+  const accordionWrapper = wrapper.find(EuiAccordion);
+  const extraActionWrapper = shallow(<div>{accordionWrapper.prop('extraAction')}</div>);
+  return extraActionWrapper.find(EuiNotificationBadge);
+};
+
+describe('CrawlCustomSettingsFlyoutSeedUrlsPanel', () => {
+ let wrapper: ShallowWrapper;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockValues(MOCK_VALUES);
+ setMockActions(MOCK_ACTIONS);
+
+    wrapper = shallow(<CrawlCustomSettingsFlyoutSeedUrlsPanel />);
+ });
+
+ describe('sitemaps tab', () => {
+ let sitemapTab: ShallowWrapper;
+
+ beforeEach(() => {
+ const tabs = wrapper.find(EuiTabbedContent).prop('tabs');
+      sitemapTab = shallow(<div>{tabs[0].content}</div>);
+ });
+
+ it('allows the user to select sitemap urls', () => {
+ expect(sitemapTab.find(SimplifiedSelectable).props()).toEqual({
+ options: MOCK_VALUES.sitemapUrls,
+ selectedOptions: MOCK_VALUES.selectedSitemapUrls,
+ onChange: MOCK_ACTIONS.onSelectSitemapUrls,
+ });
+ });
+
+ it('allows the user to toggle whether to include robots.txt sitemaps', () => {
+ expect(sitemapTab.find(EuiCheckbox).props()).toEqual(
+ expect.objectContaining({
+ onChange: MOCK_ACTIONS.toggleIncludeSitemapsInRobotsTxt,
+ checked: true,
+ })
+ );
+ });
+
+ it('allows the user to add custom sitemap urls', () => {
+ expect(sitemapTab.find(UrlComboBox).props()).toEqual(
+ expect.objectContaining({
+ selectedUrls: MOCK_VALUES.customSitemapUrls,
+ onChange: MOCK_ACTIONS.onSelectCustomSitemapUrls,
+ })
+ );
+ });
+ });
+
+ describe('entry points tab', () => {
+ let entryPointsTab: ShallowWrapper;
+
+ beforeEach(() => {
+ const tabs = wrapper.find(EuiTabbedContent).prop('tabs');
+      entryPointsTab = shallow(<div>{tabs[1].content}</div>);
+ });
+
+ it('allows the user to select entry point urls', () => {
+ expect(entryPointsTab.find(SimplifiedSelectable).props()).toEqual({
+ options: MOCK_VALUES.entryPointUrls,
+ selectedOptions: MOCK_VALUES.selectedEntryPointUrls,
+ onChange: MOCK_ACTIONS.onSelectEntryPointUrls,
+ });
+ });
+
+ it('allows the user to add custom entry point urls', () => {
+ expect(entryPointsTab.find(UrlComboBox).props()).toEqual(
+ expect.objectContaining({
+ selectedUrls: MOCK_VALUES.customEntryPointUrls,
+ onChange: MOCK_ACTIONS.onSelectCustomEntryPointUrls,
+ })
+ );
+ });
+ });
+
+ it('indicates how many seed urls are selected', () => {
+ let badge = getAccordionBadge(wrapper);
+
+ expect(badge.render().text()).toContain('6');
+ expect(badge.prop('color')).toEqual('accent');
+
+ setMockValues({
+ ...MOCK_VALUES,
+ customEntryPointUrls: [],
+ customSitemapUrls: [],
+ selectedEntryPointUrls: [],
+ selectedSitemapUrls: [],
+ });
+
+ rerender(wrapper);
+ badge = getAccordionBadge(wrapper);
+
+ expect(badge.render().text()).toContain('0');
+ expect(badge.prop('color')).toEqual('subdued');
+ });
+
+ it('shows empty messages when the user has not selected any domains', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ selectedDomainUrls: [],
+ });
+
+ rerender(wrapper);
+
+ const tabs = wrapper.find(EuiTabbedContent).prop('tabs');
+    const sitemapsTab = shallow(<div>{tabs[0].content}</div>);
+    const entryPointsTab = shallow(<div>{tabs[1].content}</div>);
+
+ expect(sitemapsTab.find(SimplifiedSelectable).prop('emptyMessage')).toBeDefined();
+ expect(entryPointsTab.find(SimplifiedSelectable).prop('emptyMessage')).toBeDefined();
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.tsx
new file mode 100644
index 0000000000000..27f3543835d11
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout_seed_urls_panel.tsx
@@ -0,0 +1,205 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues, useActions } from 'kea';
+
+import {
+ EuiAccordion,
+ EuiCheckbox,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiHorizontalRule,
+ EuiIcon,
+ EuiNotificationBadge,
+ EuiPanel,
+ EuiSpacer,
+ EuiTabbedContent,
+ EuiTitle,
+ useGeneratedHtmlId,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n-react';
+
+import { SimplifiedSelectable } from '../../../../../shared/simplified_selectable/simplified_selectable';
+import { UrlComboBox } from '../../../../../shared/url_combo_box/url_combo_box';
+
+import { CrawlCustomSettingsFlyoutLogic } from './crawl_custom_settings_flyout_logic';
+
+export const CrawlCustomSettingsFlyoutSeedUrlsPanel: React.FC = () => {
+ const {
+ customEntryPointUrls,
+ customSitemapUrls,
+ entryPointUrls,
+ includeSitemapsInRobotsTxt,
+ selectedDomainUrls,
+ selectedEntryPointUrls,
+ selectedSitemapUrls,
+ sitemapUrls,
+ } = useValues(CrawlCustomSettingsFlyoutLogic);
+ const {
+ onSelectCustomEntryPointUrls,
+ onSelectCustomSitemapUrls,
+ onSelectEntryPointUrls,
+ onSelectSitemapUrls,
+ toggleIncludeSitemapsInRobotsTxt,
+ } = useActions(CrawlCustomSettingsFlyoutLogic);
+
+ const totalSeedUrls =
+ customEntryPointUrls.length +
+ customSitemapUrls.length +
+ selectedEntryPointUrls.length +
+ selectedSitemapUrls.length;
+
+ return (
+
+
+
+
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.seedUrlsAccordionButtonLabel',
+ {
+ defaultMessage: 'Seed URLs',
+ }
+ )}
+
+
+
+
+ }
+ extraAction={
+
+ 0 ? 'accent' : 'subdued'}>
+ {totalSeedUrls}
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor',
+ {
+ defaultMessage: 'selected',
+ }
+ )}
+
+
+ }
+ >
+
+
+
+ robots.txt, // this is a technical term and shouldn't be translated
+ }}
+ />
+ }
+ checked={includeSitemapsInRobotsTxt}
+ onChange={toggleIncludeSitemapsInRobotsTxt}
+ />
+
+
+
+
+ >
+ ),
+ },
+ {
+ id: useGeneratedHtmlId({ prefix: 'entryPointsTab' }),
+ name: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.entryPointsTabLabel',
+ {
+ defaultMessage: 'Entry points',
+ }
+ ),
+ content: (
+ <>
+
+
+
+
+ >
+ ),
+ },
+ ]}
+ autoFocus="selected"
+ />
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_detail_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_detail_logic.test.ts
new file mode 100644
index 0000000000000..e32b8f2c23f32
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_detail_logic.test.ts
@@ -0,0 +1,158 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { LogicMounter, mockHttpValues } from '../../../../../__mocks__/kea_logic';
+import '../../_mocks_/index_name_logic.mock';
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import { itShowsServerErrorAsFlashMessage } from '../../../../../test_helpers';
+import {
+ CrawlRequestWithDetailsFromServer,
+ CrawlerStatus,
+ CrawlType,
+} from '../../../../api/crawler/types';
+import { crawlRequestWithDetailsServerToClient } from '../../../../api/crawler/utils';
+
+import { CrawlDetailLogic, CrawlDetailValues } from './crawl_detail_logic';
+
+const DEFAULT_VALUES: CrawlDetailValues = {
+ dataLoading: true,
+ flyoutClosed: true,
+ crawlRequest: null,
+ crawlRequestFromServer: null,
+ selectedTab: 'preview',
+};
+
+const crawlRequestResponse: CrawlRequestWithDetailsFromServer = {
+ id: '12345',
+ status: CrawlerStatus.Pending,
+ created_at: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ began_at: null,
+ completed_at: null,
+ type: CrawlType.Full,
+ crawl_config: {
+ domain_allowlist: [],
+ seed_urls: [],
+ sitemap_urls: [],
+ max_crawl_depth: 10,
+ },
+ stats: {
+ status: {
+ urls_allowed: 4,
+ pages_visited: 4,
+ crawl_duration_msec: 100,
+ avg_response_time_msec: 10,
+ status_codes: {
+ 200: 4,
+ 404: 0,
+ },
+ },
+ },
+};
+
+const clientCrawlRequest = crawlRequestWithDetailsServerToClient(crawlRequestResponse);
+
+describe('CrawlDetailLogic', () => {
+ const { mount } = new LogicMounter(CrawlDetailLogic);
+ const { http } = mockHttpValues;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('has expected default values', () => {
+ mount();
+ expect(CrawlDetailLogic.values).toEqual(DEFAULT_VALUES);
+ });
+
+ describe('actions', () => {
+ describe('closeFlyout', () => {
+ it('closes the flyout', () => {
+ mount({ flyoutClosed: false });
+
+ CrawlDetailLogic.actions.closeFlyout();
+
+ expect(CrawlDetailLogic.values).toEqual({
+ ...DEFAULT_VALUES,
+ flyoutClosed: true,
+ });
+ });
+ });
+
+ describe('onRecieveCrawlRequest', () => {
+ it('saves the crawl request and sets data loading to false', () => {
+ mount({
+ dataLoading: true,
+ request: null,
+ });
+
+ CrawlDetailLogic.actions.onRecieveCrawlRequest(crawlRequestResponse);
+
+ expect(CrawlDetailLogic.values).toEqual({
+ ...DEFAULT_VALUES,
+ dataLoading: false,
+ crawlRequestFromServer: crawlRequestResponse,
+ crawlRequest: clientCrawlRequest,
+ });
+ });
+ });
+
+ describe('setSelectedTab', () => {
+ it('sets the select tab', () => {
+ mount({
+ selectedTab: 'preview',
+ });
+
+ CrawlDetailLogic.actions.setSelectedTab('json');
+
+ expect(CrawlDetailLogic.values).toEqual({
+ ...DEFAULT_VALUES,
+ selectedTab: 'json',
+ });
+ });
+ });
+
+ describe('fetchCrawlRequest', () => {
+ it('sets loading to true and opens the flyout', () => {
+ mount({
+ dataLoading: false,
+ });
+
+ CrawlDetailLogic.actions.fetchCrawlRequest('12345');
+
+ expect(CrawlDetailLogic.values).toEqual({
+ ...DEFAULT_VALUES,
+ dataLoading: true,
+ flyoutClosed: false,
+ });
+ });
+
+ it('updates logic with data that has been converted from server to client', async () => {
+ mount();
+ jest.spyOn(CrawlDetailLogic.actions, 'onRecieveCrawlRequest');
+
+ http.get.mockReturnValueOnce(Promise.resolve(crawlRequestResponse));
+
+ CrawlDetailLogic.actions.fetchCrawlRequest('12345');
+ await nextTick();
+
+ expect(http.get).toHaveBeenCalledWith(
+ '/internal/enterprise_search/indices/index-name/crawler/crawl_requests/12345'
+ );
+ expect(CrawlDetailLogic.actions.onRecieveCrawlRequest).toHaveBeenCalledWith(
+ crawlRequestResponse
+ );
+ });
+
+ itShowsServerErrorAsFlashMessage(http.get, () => {
+ mount();
+ CrawlDetailLogic.actions.fetchCrawlRequest('12345');
+ });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_detail_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_detail_logic.ts
new file mode 100644
index 0000000000000..d21084e4783ad
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_detail_logic.ts
@@ -0,0 +1,99 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { flashAPIErrors } from '../../../../../shared/flash_messages';
+import { HttpLogic } from '../../../../../shared/http';
+
+import {
+ CrawlRequestWithDetails,
+ CrawlRequestWithDetailsFromServer,
+} from '../../../../api/crawler/types';
+import { crawlRequestWithDetailsServerToClient } from '../../../../api/crawler/utils';
+import { IndexNameLogic } from '../../index_name_logic';
+
+type CrawlDetailFlyoutTabs = 'preview' | 'json';
+
+export interface CrawlDetailValues {
+ crawlRequest: CrawlRequestWithDetails | null;
+ crawlRequestFromServer: CrawlRequestWithDetailsFromServer | null;
+ dataLoading: boolean;
+ flyoutClosed: boolean;
+ selectedTab: CrawlDetailFlyoutTabs;
+}
+
+export interface CrawlDetailActions {
+ closeFlyout(): void;
+ fetchCrawlRequest(requestId: string): { requestId: string };
+ onRecieveCrawlRequest(crawlRequestFromServer: CrawlRequestWithDetailsFromServer): {
+ crawlRequestFromServer: CrawlRequestWithDetailsFromServer;
+ };
+ setSelectedTab(selectedTab: CrawlDetailFlyoutTabs): { selectedTab: CrawlDetailFlyoutTabs };
+}
+
+export const CrawlDetailLogic = kea>({
+ path: ['enterprise_search', 'crawler', 'crawl_detail_logic'],
+ actions: {
+ closeFlyout: true,
+ fetchCrawlRequest: (requestId) => ({ requestId }),
+ onRecieveCrawlRequest: (crawlRequestFromServer) => ({ crawlRequestFromServer }),
+ setSelectedTab: (selectedTab) => ({ selectedTab }),
+ },
+ reducers: {
+ crawlRequest: [
+ null,
+ {
+ onRecieveCrawlRequest: (_, { crawlRequestFromServer }) =>
+ crawlRequestWithDetailsServerToClient(crawlRequestFromServer),
+ },
+ ],
+ crawlRequestFromServer: [
+ null,
+ {
+ onRecieveCrawlRequest: (_, { crawlRequestFromServer }) => crawlRequestFromServer,
+ },
+ ],
+ dataLoading: [
+ true,
+ {
+ fetchCrawlRequest: () => true,
+ onRecieveCrawlRequest: () => false,
+ },
+ ],
+ flyoutClosed: [
+ true,
+ {
+ fetchCrawlRequest: () => false,
+ closeFlyout: () => true,
+ },
+ ],
+ selectedTab: [
+ 'preview',
+ {
+ fetchCrawlRequest: () => 'preview',
+ setSelectedTab: (_, { selectedTab }) => selectedTab,
+ },
+ ],
+ },
+ listeners: ({ actions }) => ({
+ fetchCrawlRequest: async ({ requestId }) => {
+ const { http } = HttpLogic.values;
+ const { indexName } = IndexNameLogic.values;
+
+ try {
+ const response = await http.get(
+ `/internal/enterprise_search/indices/${indexName}/crawler/crawl_requests/${requestId}`
+ );
+
+ actions.onRecieveCrawlRequest(response);
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_flyout.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_flyout.test.tsx
new file mode 100644
index 0000000000000..915b6283d8f70
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_flyout.test.tsx
@@ -0,0 +1,131 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
+import '../../../../../__mocks__/shallow_useeffect.mock';
+import '../../_mocks_/index_name_logic.mock';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiCodeBlock, EuiFlyout, EuiTab, EuiTabs } from '@elastic/eui';
+
+import { Loading } from '../../../../../shared/loading';
+import { CrawlRequestWithDetailsFromServer } from '../../../../api/crawler/types';
+
+import { CrawlDetailsFlyout } from './crawl_details_flyout';
+import { CrawlDetailsPreview } from './crawl_details_preview';
+
+const MOCK_VALUES = {
+ dataLoading: false,
+ flyoutClosed: false,
+ crawlRequestFromServer: {} as CrawlRequestWithDetailsFromServer,
+};
+
+const MOCK_ACTIONS = {
+ setSelectedTab: jest.fn(),
+ fetchLogRetention: jest.fn(),
+};
+
+describe('CrawlDetailsFlyout', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('renders a flyout ', () => {
+ setMockActions(MOCK_ACTIONS);
+ setMockValues(MOCK_VALUES);
+
+ const wrapper = shallow( );
+
+ expect(wrapper.is(EuiFlyout)).toBe(true);
+ });
+
+ it('contains a tab group to control displayed content inside the flyout', () => {
+ setMockActions(MOCK_ACTIONS);
+ setMockValues(MOCK_VALUES);
+
+ const wrapper = shallow( );
+ const tabs = wrapper.find(EuiTabs).find(EuiTab);
+
+ expect(tabs).toHaveLength(2);
+
+ tabs.at(0).simulate('click');
+
+ expect(MOCK_ACTIONS.setSelectedTab).toHaveBeenCalledWith('preview');
+
+ tabs.at(1).simulate('click');
+
+ expect(MOCK_ACTIONS.setSelectedTab).toHaveBeenCalledWith('json');
+ });
+
+ describe('when the preview tab is selected', () => {
+ beforeEach(() => {
+ setMockValues({
+ ...MOCK_VALUES,
+ selectedTab: 'preview',
+ });
+ });
+
+ it('shows the correct tab is selected in the UX', () => {
+ const wrapper = shallow( );
+ const tabs = wrapper.find(EuiTabs).find(EuiTab);
+
+ expect(tabs.at(0).prop('isSelected')).toBe(true);
+ expect(tabs.at(1).prop('isSelected')).toBe(false);
+ });
+
+ it('shows the human readable version of the crawl details', () => {
+ const wrapper = shallow( );
+
+ const crawlDetailsPreview = wrapper.find(CrawlDetailsPreview);
+ expect(crawlDetailsPreview).toHaveLength(1);
+ });
+ });
+
+ describe('when the json tab is selected', () => {
+ beforeEach(() => {
+ setMockValues({
+ ...MOCK_VALUES,
+ selectedTab: 'json',
+ });
+ });
+
+ it('shows the correct tab is selected in the UX', () => {
+ const wrapper = shallow( );
+ const tabs = wrapper.find(EuiTabs).find(EuiTab);
+
+ expect(tabs.at(0).prop('isSelected')).toBe(false);
+ expect(tabs.at(1).prop('isSelected')).toBe(true);
+ });
+
+ it('shows the raw json of the crawl details', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.find(EuiCodeBlock)).toHaveLength(1);
+ });
+ });
+
+ it('renders a loading screen when loading', () => {
+ setMockValues({ ...MOCK_VALUES, dataLoading: true });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.is(EuiFlyout)).toBe(true);
+ expect(wrapper.find(Loading)).toHaveLength(1);
+ });
+
+ it('is empty when the flyout is hidden', () => {
+ setMockValues({
+ flyoutClosed: true,
+ });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.isEmptyRender()).toBe(true);
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_flyout.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_flyout.tsx
new file mode 100644
index 0000000000000..29111fb8598af
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_flyout.tsx
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import {
+ EuiFlyout,
+ EuiFlyoutHeader,
+ EuiTitle,
+ EuiFlyoutBody,
+ EuiCodeBlock,
+ EuiTab,
+ EuiTabs,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { Loading } from '../../../../../shared/loading';
+
+import { CrawlDetailLogic } from './crawl_detail_logic';
+
+import { CrawlDetailsPreview } from './crawl_details_preview';
+
+export const CrawlDetailsFlyout: React.FC = () => {
+ const { closeFlyout, setSelectedTab } = useActions(CrawlDetailLogic);
+ const { crawlRequestFromServer, dataLoading, flyoutClosed, selectedTab } =
+ useValues(CrawlDetailLogic);
+
+ if (flyoutClosed) {
+ return null;
+ }
+
+ return (
+
+
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsFlyout.title', {
+ defaultMessage: 'Crawl request details',
+ })}
+
+
+
+ setSelectedTab('preview')}>
+ {i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsFlyout.previewTabLabel', {
+ defaultMessage: 'Preview',
+ })}
+
+ setSelectedTab('json')}>
+ {i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsFlyout.rawJSONTabLabel', {
+ defaultMessage: 'Raw JSON',
+ })}
+
+
+
+
+ {dataLoading ? (
+
+ ) : (
+ <>
+ {selectedTab === 'preview' && }
+ {selectedTab === 'json' && (
+
+ {JSON.stringify(crawlRequestFromServer, null, 2)}
+
+ )}
+ >
+ )}
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_preview.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_preview.test.tsx
new file mode 100644
index 0000000000000..590989db59d02
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_preview.test.tsx
@@ -0,0 +1,118 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { setMockValues } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+import { set } from 'lodash/fp';
+
+import { AccordionList } from '../../../../../shared/accordion_list/accordion_list';
+
+import { CrawlerStatus, CrawlType } from '../../../../api/crawler/types';
+
+import { CrawlDetailValues } from './crawl_detail_logic';
+import { CrawlDetailsPreview } from './crawl_details_preview';
+import { CrawlDetailsSummary } from './crawl_details_summary';
+
+const MOCK_VALUES: Partial = {
+ crawlRequest: {
+ id: '507f1f77bcf86cd799439011',
+ status: CrawlerStatus.Pending,
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ type: CrawlType.Full,
+ crawlConfig: {
+ domainAllowlist: ['https://www.elastic.co', 'https://www.swiftype.com'],
+ seedUrls: ['https://www.elastic.co/docs', 'https://www.swiftype.com/documentation'],
+ sitemapUrls: ['https://www.elastic.co/sitemap.xml', 'https://www.swiftype.com/sitemap.xml'],
+ maxCrawlDepth: 10,
+ },
+ stats: {
+ status: {
+ urlsAllowed: 10,
+ pagesVisited: 10,
+ crawlDurationMSec: 36000,
+ avgResponseTimeMSec: 100,
+ },
+ },
+ },
+};
+
+describe('CrawlDetailsPreview', () => {
+ it('is empty when a crawl request has not been loaded', () => {
+ setMockValues({
+ crawlRequest: null,
+ });
+
+ const wrapper = shallow( );
+ expect(wrapper.isEmptyRender()).toBe(true);
+ });
+
+ describe('when a crawl request has been loaded', () => {
+ let wrapper: ShallowWrapper;
+
+ beforeEach(() => {
+ setMockValues(MOCK_VALUES);
+ wrapper = shallow( );
+ });
+
+ it('contains a summary', () => {
+ const summary = wrapper.find(CrawlDetailsSummary);
+ expect(summary.props()).toEqual({
+ crawlDepth: 10,
+ crawlType: 'full',
+ domainCount: 2,
+ stats: {
+ status: {
+ avgResponseTimeMSec: 100,
+ crawlDurationMSec: 36000,
+ pagesVisited: 10,
+ urlsAllowed: 10,
+ },
+ },
+ });
+ });
+
+ it('will default values on summary if missing', () => {
+ const values = set('crawlRequest.stats', undefined, MOCK_VALUES);
+ setMockValues(values);
+ wrapper = shallow( );
+
+ const summary = wrapper.find(CrawlDetailsSummary);
+ expect(summary.prop('stats')).toEqual(null);
+ });
+
+ it('contains a list of domains', () => {
+ const domainList = wrapper.find(AccordionList).at(0);
+
+ expect(domainList.prop('items')).toEqual([
+ 'https://www.elastic.co',
+ 'https://www.swiftype.com',
+ ]);
+ });
+
+ it('contains a list of seed urls', () => {
+ const seedUrlList = wrapper.find(AccordionList).at(1);
+
+ expect(seedUrlList.prop('items')).toEqual([
+ 'https://www.elastic.co/docs',
+ 'https://www.swiftype.com/documentation',
+ ]);
+ });
+
+ it('contains a list of sitemap urls', () => {
+ const sitemapUrlList = wrapper.find(AccordionList).at(2);
+
+ expect(sitemapUrlList.prop('items')).toEqual([
+ 'https://www.elastic.co/sitemap.xml',
+ 'https://www.swiftype.com/sitemap.xml',
+ ]);
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_preview.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_preview.tsx
new file mode 100644
index 0000000000000..299511fb8ce10
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_preview.tsx
@@ -0,0 +1,70 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues } from 'kea';
+
+import { EuiSpacer } from '@elastic/eui';
+import { i18n } from '@kbn/i18n';
+
+import { AccordionList } from '../../../../../shared/accordion_list/accordion_list';
+
+import { CrawlDetailLogic } from './crawl_detail_logic';
+import { CrawlDetailsSummary } from './crawl_details_summary';
+
+export const CrawlDetailsPreview: React.FC = () => {
+ const { crawlRequest } = useValues(CrawlDetailLogic);
+
+ if (crawlRequest === null) {
+ return null;
+ }
+
+ return (
+ <>
+
+
+ 0}
+ title={i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsPreview.domainsTitle', {
+ defaultMessage: 'Domains',
+ })}
+ iconType="globe"
+ items={crawlRequest.crawlConfig.domainAllowlist}
+ />
+
+ 0}
+ title={i18n.translate('xpack.enterpriseSearch.crawler.crawlDetailsPreview.seedUrlsTitle', {
+ defaultMessage: 'Seed URLs',
+ })}
+ iconType="crosshairs"
+ items={crawlRequest.crawlConfig.seedUrls}
+ />
+
+ 0}
+ title={i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlDetailsPreview.sitemapUrlsTitle',
+ {
+ defaultMessage: 'Sitemap URLs',
+ }
+ )}
+ iconType="visMapRegion"
+ items={crawlRequest.crawlConfig.sitemapUrls}
+ />
+ >
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_summary.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_summary.test.tsx
new file mode 100644
index 0000000000000..1d1befde0329b
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_summary.test.tsx
@@ -0,0 +1,63 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import '../../_mocks_/index_name_logic.mock';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiPanel } from '@elastic/eui';
+
+import { CrawlDetailsSummary, CrawlerDetailsSummaryProps } from './crawl_details_summary';
+
+const MOCK_PROPS: CrawlerDetailsSummaryProps = {
+ crawlDepth: 8,
+ crawlType: 'full',
+ domainCount: 15,
+ stats: {
+ status: {
+ urlsAllowed: 108,
+ crawlDurationMSec: 748382,
+ pagesVisited: 108,
+ avgResponseTimeMSec: 42,
+ statusCodes: {
+ 401: 4,
+ 404: 8,
+ 500: 0,
+ 503: 3,
+ },
+ },
+ },
+};
+
+describe('CrawlDetailsSummary', () => {
+ let wrapper: ShallowWrapper;
+
+ beforeAll(() => {
+ wrapper = shallow( );
+ });
+
+ it('renders as a panel with all fields', () => {
+ expect(wrapper.is(EuiPanel)).toBe(true);
+ });
+
+ it('renders the proper count for errors', () => {
+ const serverErrors = wrapper.find({ 'data-test-subj': 'serverErrors' });
+ const clientErrors = wrapper.find({ 'data-test-subj': 'clientErrors' });
+
+ expect(serverErrors.prop('title')).toEqual(3);
+ expect(clientErrors.prop('title')).toEqual(12);
+ });
+
+ it('handles missing stats gracefully', () => {
+ wrapper.setProps({ stats: {} });
+ expect(wrapper.find({ 'data-test-subj': 'crawlDuration' }).prop('title')).toEqual('--');
+ expect(wrapper.find({ 'data-test-subj': 'pagesVisited' }).prop('title')).toEqual('--');
+ expect(wrapper.find({ 'data-test-subj': 'avgResponseTime' }).prop('title')).toEqual('--');
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_summary.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_summary.tsx
new file mode 100644
index 0000000000000..43f0b1d570177
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_details_flyout/crawl_details_summary.tsx
@@ -0,0 +1,251 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import moment from 'moment';
+
+import {
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiHorizontalRule,
+ EuiIconTip,
+ EuiPanel,
+ EuiSpacer,
+ EuiStat,
+ EuiText,
+} from '@elastic/eui';
+import { i18n } from '@kbn/i18n';
+
+import { CrawlRequestStats } from '../../../../api/crawler/types';
+
+export interface CrawlerDetailsSummaryProps {
+ crawlDepth: number;
+ crawlType: string;
+ domainCount: number;
+ stats: CrawlRequestStats | null;
+}
+
+export const CrawlDetailsSummary: React.FC = ({
+ crawlDepth,
+ crawlType,
+ domainCount,
+ stats,
+}) => {
+ const duration = () => {
+ if (stats?.status?.crawlDurationMSec) {
+ const milliseconds = moment.duration(stats.status.crawlDurationMSec, 'milliseconds');
+ const hours = milliseconds.hours();
+ const minutes = milliseconds.minutes();
+ const seconds = milliseconds.seconds();
+ return `${hours}h ${minutes}m ${seconds}s`;
+ } else {
+ return '--';
+ }
+ };
+
+ const getStatusCount = (code: string, codes: { [code: string]: number }) => {
+ return Object.entries(codes).reduce((count, [k, v]) => {
+ if (k[0] !== code) return count;
+ return v + count;
+ }, 0);
+ };
+
+ const statusCounts = {
+ clientErrorCount: stats?.status?.statusCodes
+ ? getStatusCount('4', stats.status.statusCodes)
+ : 0,
+ serverErrorCount: stats?.status?.statusCodes
+ ? getStatusCount('5', stats.status.statusCodes)
+ : 0,
+ };
+
+ const shouldHideStats = !stats;
+
+ return (
+
+
+
+
+
+
+
+
+ {!shouldHideStats && (
+
+
+
+ )}
+
+
+ {!shouldHideStats ? (
+
+
+
+ URLs{' '}
+
+
+ }
+ />
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesVisitedTooltipTitle',
+ {
+ defaultMessage: 'Pages',
+ }
+ )}{' '}
+
+
+ }
+ />
+
+
+
+
+
+
+
+
+
+
+
+ ) : (
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlDetailsSummary.logsDisabledMessage',
+ {
+ defaultMessage:
+ 'Enable Web Crawler logs in settings for more detailed crawl statistics.',
+ }
+ )}
+
+
+ )}
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/constants.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/constants.ts
new file mode 100644
index 0000000000000..d736e04981ba1
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/constants.ts
@@ -0,0 +1,62 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { i18n } from '@kbn/i18n';
+
+import { CrawlerStatus, CrawlType } from '../../../../api/crawler/types';
+
+export const readableCrawlTypes: { [key in CrawlType]: string } = {
+ [CrawlType.Full]: i18n.translate('xpack.enterpriseSearch.crawler.crawlTypeOptions.full', {
+ defaultMessage: 'Full',
+ }),
+ [CrawlType.Partial]: i18n.translate('xpack.enterpriseSearch.crawler.crawlTypeOptions.partial', {
+ defaultMessage: 'Partial',
+ }),
+};
+
+export const readableCrawlerStatuses: { [key in CrawlerStatus]: string } = {
+ [CrawlerStatus.Pending]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.pending',
+ { defaultMessage: 'Pending' }
+ ),
+ [CrawlerStatus.Suspended]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspended',
+ { defaultMessage: 'Suspended' }
+ ),
+ [CrawlerStatus.Starting]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.starting',
+ { defaultMessage: 'Starting' }
+ ),
+ [CrawlerStatus.Running]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.running',
+ { defaultMessage: 'Running' }
+ ),
+ [CrawlerStatus.Suspending]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspending',
+ { defaultMessage: 'Suspending' }
+ ),
+ [CrawlerStatus.Canceling]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceling',
+ { defaultMessage: 'Canceling' }
+ ),
+ [CrawlerStatus.Success]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.success',
+ { defaultMessage: 'Success' }
+ ),
+ [CrawlerStatus.Failed]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.failed',
+ { defaultMessage: 'Failed' }
+ ),
+ [CrawlerStatus.Canceled]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceled',
+ { defaultMessage: 'Canceled' }
+ ),
+ [CrawlerStatus.Skipped]: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusOptions.skipped',
+ { defaultMessage: 'Skipped' }
+ ),
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_event_type_badge.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_event_type_badge.test.tsx
new file mode 100644
index 0000000000000..5caa0ffdb8e94
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_event_type_badge.test.tsx
@@ -0,0 +1,73 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import '../../_mocks_/index_name_logic.mock';
+
+import { shallow } from 'enzyme';
+
+import { EuiBadge } from '@elastic/eui';
+import { mountWithIntl } from '@kbn/test-jest-helpers';
+
+import { CrawlEvent, CrawlerStatus, CrawlType } from '../../../../api/crawler/types';
+
+import { CrawlEventTypeBadge } from './crawl_event_type_badge';
+
+const MOCK_EVENT: CrawlEvent = {
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Pending,
+ stage: 'crawl',
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ type: CrawlType.Full,
+ crawlConfig: {
+ domainAllowlist: ['https://www.elastic.co'],
+ seedUrls: [],
+ sitemapUrls: [],
+ maxCrawlDepth: 10,
+ },
+};
+
+describe('CrawlEventTypeBadge', () => {
+ it('renders a badge for process crawls', () => {
+ const wrapper = mountWithIntl(
+
+ );
+
+ const badge = wrapper.find(EuiBadge);
+ expect(badge.prop('color')).toEqual('hollow');
+ expect(badge.text()).toEqual('Re-applied crawl rules');
+ });
+
+ it('renders a badge for partial crawls', () => {
+ const wrapper = mountWithIntl(
+
+ );
+
+ const badge = wrapper.find(EuiBadge);
+ expect(badge.prop('color')).toEqual('hollow');
+ expect(badge.text()).toEqual('Partial');
+ });
+
+ it('renders a badge for full crawls', () => {
+ const wrapper = mountWithIntl(
+
+ );
+
+ const badge = wrapper.find(EuiBadge);
+ expect(badge.prop('color')).toBeUndefined();
+ expect(badge.text()).toEqual('Full');
+ });
+
+ it('is empty by default', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.isEmptyRender()).toBe(true);
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_event_type_badge.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_event_type_badge.tsx
new file mode 100644
index 0000000000000..0f2e753a0c5b2
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_event_type_badge.tsx
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { EuiBadge } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { CrawlEvent, CrawlType } from '../../../../api/crawler/types';
+
+import { readableCrawlTypes } from './constants';
+
+export const CrawlEventTypeBadge: React.FC<{ event: CrawlEvent }> = ({ event }) => {
+ if (event.stage === 'process') {
+ return (
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.crawlTypeOptions.reAppliedCrawlRules', {
+ defaultMessage: 'Re-applied crawl rules',
+ })}
+
+ );
+ }
+ if (event.type === CrawlType.Full) {
+ return {readableCrawlTypes[CrawlType.Full]} ;
+ }
+ if (event.type === CrawlType.Partial) {
+ return {readableCrawlTypes[CrawlType.Partial]} ;
+ }
+ return null;
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_panel.tsx
new file mode 100644
index 0000000000000..e807b6eb65af6
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_panel.tsx
@@ -0,0 +1,46 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { EuiButton } from '@elastic/eui';
+import { i18n } from '@kbn/i18n';
+
+import { DataPanel } from '../../../../../shared/data_panel/data_panel';
+
+import { CrawlRequestsTable } from './crawl_requests_table';
+
+export const CrawlRequestsPanel: React.FC = () => (
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.crawlRequestsPanel.title', {
+ defaultMessage: 'Crawl requests',
+ })}
+
+ }
+ titleSize="s"
+ iconType="documents"
+ subtitle={i18n.translate('xpack.enterpriseSearch.crawler.crawlRequestsPanel.description', {
+ defaultMessage:
+ "Recent crawl requests are logged here. You can track progress and examine crawl events in Kibana's Discover or Logs user interfaces",
+ })}
+ action={
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlRequestsPanel.viewInDiscoverButtonLabel',
+ {
+ defaultMessage: 'View in Discover',
+ }
+ )}
+
+ }
+ >
+
+
+);
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.test.tsx
new file mode 100644
index 0000000000000..71f37fcaf19cd
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.test.tsx
@@ -0,0 +1,131 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
+import '../../_mocks_/index_name_logic.mock';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiBasicTable, EuiEmptyPrompt } from '@elastic/eui';
+import { mountWithIntl } from '@kbn/test-jest-helpers';
+
+import { CrawlEvent, CrawlerStatus, CrawlType } from '../../../../api/crawler/types';
+
+import { CrawlRequestsTable } from './crawl_requests_table';
+
+const values: { events: CrawlEvent[] } = {
+ // CrawlerLogic
+ events: [
+ {
+ id: '618d0e66abe97bc688328900',
+ status: CrawlerStatus.Pending,
+ stage: 'crawl',
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ type: CrawlType.Full,
+ crawlConfig: {
+ domainAllowlist: ['https://www.elastic.co'],
+ seedUrls: [],
+ sitemapUrls: [],
+ maxCrawlDepth: 10,
+ },
+ },
+ {
+ id: '54325423aef7890543',
+ status: CrawlerStatus.Success,
+ stage: 'process',
+ createdAt: 'Mon, 31 Aug 2020 17:00:00 +0000',
+ beganAt: null,
+ completedAt: null,
+ type: CrawlType.Full,
+ crawlConfig: {
+ domainAllowlist: ['https://www.elastic.co'],
+ seedUrls: [],
+ sitemapUrls: [],
+ maxCrawlDepth: 10,
+ },
+ },
+ ],
+};
+
+const actions = {
+ fetchCrawlRequest: jest.fn(),
+};
+
+describe('CrawlRequestsTable', () => {
+ let wrapper: ShallowWrapper;
+ let tableContent: string;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ describe('columns', () => {
+ beforeAll(() => {
+ setMockActions(actions);
+ setMockValues(values);
+ wrapper = shallow( );
+ tableContent = mountWithIntl( )
+ .find(EuiBasicTable)
+ .text();
+ });
+
+ it('renders a id column ', () => {
+ expect(tableContent).toContain('Request ID');
+
+ const table = wrapper.find(EuiBasicTable);
+ const columns = table.prop('columns');
+
+ // @ts-expect-error 4.3.5 upgrade
+ const crawlID = shallow(columns[0].render('618d0e66abe97bc688328900', { stage: 'crawl' }));
+ expect(crawlID.text()).toContain('618d0e66abe97bc688328900');
+
+ crawlID.simulate('click');
+ expect(actions.fetchCrawlRequest).toHaveBeenCalledWith('618d0e66abe97bc688328900');
+
+ // @ts-expect-error 4.3.5 upgrade
+ const processCrawlID = shallow(columns[0].render('54325423aef7890543', { stage: 'process' }));
+ expect(processCrawlID.text()).toContain('54325423aef7890543');
+ });
+
+ it('renders a created at column', () => {
+ expect(tableContent).toContain('Created');
+ expect(tableContent).toContain('Aug 31, 2020');
+ });
+
+ it('renders a type column', () => {
+ expect(tableContent).toContain('Crawl type');
+ expect(tableContent).toContain('Full');
+ });
+
+ it('renders a domains column', () => {
+ expect(tableContent).toContain('Domains');
+ // TODO How to test for the contents of this badge?
+ });
+
+ it('renders a status column', () => {
+ expect(tableContent).toContain('Status');
+ expect(tableContent).toContain('Pending');
+ });
+ });
+
+ describe('no items message', () => {
+ it('displays an empty prompt when there are no crawl requests', () => {
+ setMockValues({
+ ...values,
+ events: [],
+ });
+
+ wrapper = shallow( );
+
+ expect(wrapper.find(EuiBasicTable).dive().find(EuiEmptyPrompt)).toHaveLength(1);
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.tsx
new file mode 100644
index 0000000000000..b72d93d0f2cd4
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawl_requests_panel/crawl_requests_table.tsx
@@ -0,0 +1,122 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import {
+ EuiBadge,
+ EuiBasicTable,
+ EuiTableFieldDataColumnType,
+ EuiTableComputedColumnType,
+ EuiEmptyPrompt,
+ EuiLink,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { CustomFormattedTimestamp } from '../../../../../shared/custom_formatted_timestamp/custom_formatted_timestamp';
+import { CrawlEvent } from '../../../../api/crawler/types';
+import { CrawlDetailLogic } from '../crawl_details_flyout/crawl_detail_logic';
+import { CrawlerLogic } from '../crawler_logic';
+
+import { readableCrawlerStatuses } from './constants';
+import { CrawlEventTypeBadge } from './crawl_event_type_badge';
+
+export const CrawlRequestsTable: React.FC = () => {
+ const { events } = useValues(CrawlerLogic);
+ const { fetchCrawlRequest } = useActions(CrawlDetailLogic);
+
+ const columns: Array<
+ EuiTableFieldDataColumnType | EuiTableComputedColumnType
+ > = [
+ {
+ field: 'id',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domainURL', {
+ defaultMessage: 'Request ID',
+ }),
+ render: (id: string, event: CrawlEvent) => {
+ if (event.stage === 'crawl') {
+ return (
+ {
+ fetchCrawlRequest(id);
+ }}
+ >
+ {id}
+
+ );
+ }
+ return {id} ;
+ },
+ },
+ {
+ field: 'createdAt',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRequestsTable.column.created', {
+ defaultMessage: 'Created',
+ }),
+ render: (createdAt: CrawlEvent['createdAt']) => (
+
+ ),
+ },
+ {
+ field: 'type',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRequestsTable.column.crawlType', {
+ defaultMessage: 'Crawl type',
+ }),
+ render: (_, event: CrawlEvent) => ,
+ },
+ {
+ name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domains', {
+ defaultMessage: 'Domains',
+ }),
+ render: (event: CrawlEvent) => (
+ {event.crawlConfig.domainAllowlist.length}
+ ),
+ },
+ {
+ field: 'status',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.crawlRequestsTable.column.status', {
+ defaultMessage: 'Status',
+ }),
+ render: (status: CrawlEvent['status']) => readableCrawlerStatuses[status],
+ },
+ ];
+
+ return (
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.title',
+ {
+ defaultMessage: 'No recent crawl requests',
+ }
+ )}
+
+ }
+ body={
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.body',
+ {
+ defaultMessage: "You haven't started any crawls yet.",
+ }
+ )}
+
+ }
+ />
+ }
+ />
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_logic.ts
new file mode 100644
index 0000000000000..1b9d30985c4d9
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_logic.ts
@@ -0,0 +1,207 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { i18n } from '@kbn/i18n';
+
+import { Actions } from '../../../../shared/api_logic/create_api_logic';
+import { flashAPIErrors, flashSuccessToast } from '../../../../shared/flash_messages';
+import { HttpLogic } from '../../../../shared/http';
+import { GetCrawlerApiLogic, GetCrawlerArgs } from '../../../api/crawler/get_crawler_api_logic';
+import {
+ CrawlerData,
+ CrawlerDomain,
+ CrawlerStatus,
+ CrawlEvent,
+ CrawlRequest,
+} from '../../../api/crawler/types';
+import { IndexNameLogic } from '../index_name_logic';
+
+const POLLING_DURATION = 1000;
+const POLLING_DURATION_ON_FAILURE = 5000;
+const ACTIVE_STATUSES = [
+ CrawlerStatus.Pending,
+ CrawlerStatus.Starting,
+ CrawlerStatus.Running,
+ CrawlerStatus.Canceling,
+];
+
+export interface CrawlRequestOverrides {
+ domain_allowlist?: string[];
+ max_crawl_depth?: number;
+ seed_urls?: string[];
+ sitemap_discovery_disabled?: boolean;
+ sitemap_urls?: string[];
+}
+
+export interface CrawlerValues {
+ data: CrawlerData | null;
+ dataLoading: boolean;
+ domains: CrawlerDomain[];
+ events: CrawlEvent[];
+ mostRecentCrawlRequest: CrawlRequest | null;
+ mostRecentCrawlRequestStatus: CrawlerStatus | null;
+ timeoutId: NodeJS.Timeout | null;
+}
+
+export type CrawlerActions = Pick<
+ Actions,
+ 'apiError' | 'apiSuccess'
+> & {
+ clearTimeoutId(): void;
+ createNewTimeoutForCrawlerData(duration: number): { duration: number };
+ fetchCrawlerData(): void;
+ onCreateNewTimeout(timeoutId: NodeJS.Timeout): { timeoutId: NodeJS.Timeout };
+ reApplyCrawlRules(domain?: CrawlerDomain): { domain?: CrawlerDomain };
+ startCrawl(overrides?: CrawlRequestOverrides): { overrides?: CrawlRequestOverrides };
+ stopCrawl(): void;
+};
+
+export const CrawlerLogic = kea>({
+ path: ['enterprise_search', 'crawler_logic'],
+ connect: {
+ actions: [GetCrawlerApiLogic, ['apiError', 'apiSuccess']],
+ values: [GetCrawlerApiLogic, ['status', 'data']],
+ },
+ actions: {
+ clearTimeoutId: true,
+ createNewTimeoutForCrawlerData: (duration) => ({ duration }),
+ fetchCrawlerData: true,
+ onCreateNewTimeout: (timeoutId) => ({ timeoutId }),
+ reApplyCrawlRules: (domain) => ({ domain }),
+ startCrawl: (overrides) => ({ overrides }),
+ stopCrawl: () => null,
+ },
+ reducers: {
+ dataLoading: [
+ true,
+ {
+ apiError: () => false,
+ apiSuccess: () => false,
+ },
+ ],
+ timeoutId: [
+ null,
+ {
+ apiError: () => null,
+ apiSuccess: () => null,
+ onCreateNewTimeout: (_, { timeoutId }) => timeoutId,
+ },
+ ],
+ },
+ selectors: ({ selectors }) => ({
+ domains: [() => [selectors.data], (data: CrawlerValues['data']) => data?.domains ?? []],
+ events: [() => [selectors.data], (data: CrawlerValues['data']) => data?.events ?? []],
+ mostRecentCrawlRequest: [
+ () => [selectors.data],
+ (data: CrawlerValues['data']) => data?.mostRecentCrawlRequest ?? null,
+ ],
+ mostRecentCrawlRequestStatus: [
+ () => [selectors.mostRecentCrawlRequest],
+ (crawlRequest: CrawlerValues['mostRecentCrawlRequest']) => crawlRequest?.status ?? null,
+ ],
+ }),
+ listeners: ({ actions, values }) => ({
+ apiError: (error) => {
+ flashAPIErrors(error);
+ actions.createNewTimeoutForCrawlerData(POLLING_DURATION_ON_FAILURE);
+ },
+ apiSuccess: ({ mostRecentCrawlRequest }) => {
+ const continuePoll =
+ mostRecentCrawlRequest && ACTIVE_STATUSES.includes(mostRecentCrawlRequest.status);
+
+ if (continuePoll) {
+ actions.createNewTimeoutForCrawlerData(POLLING_DURATION);
+ }
+ },
+
+ createNewTimeoutForCrawlerData: ({ duration }) => {
+ if (values.timeoutId) {
+ clearTimeout(values.timeoutId);
+ }
+
+ const timeoutIdId = setTimeout(() => {
+ actions.fetchCrawlerData();
+ }, duration);
+
+ actions.onCreateNewTimeout(timeoutIdId);
+ },
+ reApplyCrawlRules: async ({ domain }) => {
+ const { indexName } = IndexNameLogic.values;
+ const { http } = HttpLogic.values;
+ const requestBody: { domains?: string[] } = {};
+
+ if (domain) {
+ requestBody.domains = [domain.url];
+ }
+
+ try {
+ await http.post(`/internal/enterprise_search/indices/${indexName}/crawler/process_crawls`, {
+ body: JSON.stringify(requestBody),
+ });
+
+ flashSuccessToast(
+ i18n.translate(
+ 'xpack.enterpriseSearch.crawler.manageCrawlsPopover.reApplyCrawlRules.successMessage',
+ {
+ defaultMessage: 'Crawl rules are being re-applied in the background',
+ }
+ )
+ );
+
+ CrawlerLogic.actions.fetchCrawlerData();
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ },
+ fetchCrawlerData: () => {
+ const { indexName } = IndexNameLogic.values;
+
+ if (values.timeoutId) {
+ clearTimeout(values.timeoutId);
+ }
+ GetCrawlerApiLogic.actions.makeRequest({ indexName });
+ },
+ startCrawl: async ({ overrides = {} }) => {
+ const { indexName } = IndexNameLogic.values;
+ const { http } = HttpLogic.values;
+
+ try {
+ await http.post(`/internal/enterprise_search/indices/${indexName}/crawler/crawl_requests`, {
+ body: JSON.stringify({ overrides }),
+ });
+ actions.fetchCrawlerData();
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ },
+ stopCrawl: async () => {
+ const { indexName } = IndexNameLogic.values;
+ const { http } = HttpLogic.values;
+
+ try {
+ await http.post(
+ `/internal/enterprise_search/indices/${indexName}/crawler/crawl_requests/cancel`
+ );
+ actions.fetchCrawlerData();
+ } catch (e) {
+ flashAPIErrors(e);
+ }
+ },
+ }),
+ events: ({ actions, values }) => ({
+ afterMount: () => {
+ actions.fetchCrawlerData();
+ },
+ beforeUnmount: () => {
+ if (values.timeoutId) {
+ clearTimeout(values.timeoutId);
+ }
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/crawler_status_indicator.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/crawler_status_indicator.test.tsx
new file mode 100644
index 0000000000000..378f2def8acdb
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/crawler_status_indicator.test.tsx
@@ -0,0 +1,130 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockValues } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiButton } from '@elastic/eui';
+
+import { CrawlerDomain, CrawlerStatus } from '../../../../api/crawler/types';
+
+import { CrawlerStatusIndicator } from './crawler_status_indicator';
+import { StartCrawlContextMenu } from './start_crawl_context_menu';
+import { StopCrawlPopoverContextMenu } from './stop_crawl_popover_context_menu';
+
+const MOCK_VALUES = {
+ domains: [{}, {}] as CrawlerDomain[],
+ mostRecentCrawlRequestStatus: CrawlerStatus.Success,
+};
+
+describe('CrawlerStatusIndicator', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ describe('when there are no domains', () => {
+ it('is disabled', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ domains: [],
+ });
+
+ const wrapper = shallow( );
+ expect(wrapper.is(EuiButton)).toEqual(true);
+ expect(wrapper.prop('disabled')).toEqual(true);
+ });
+ });
+
+ [CrawlerStatus.Success, CrawlerStatus.Failed, CrawlerStatus.Canceled].forEach((status) => {
+ describe(`when the status is ready for retry: ${status}`, () => {
+ it('renders a CrawlerStatusIndicator with start crawl button', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ mostRecentCrawlRequestStatus: status,
+ });
+
+ const wrapper = shallow( );
+ expect(wrapper.is(StartCrawlContextMenu)).toEqual(true);
+ });
+ });
+ });
+
+ [CrawlerStatus.Pending, CrawlerStatus.Suspended].forEach((status) => {
+ describe(`when the status is ${status}`, () => {
+ it('renders a CrawlerStatusIndicator with a pending indicator', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ mostRecentCrawlRequestStatus: status,
+ });
+
+ const wrapper = shallow( );
+ expect(wrapper.is(EuiButton)).toEqual(true);
+ expect(wrapper.render().text()).toContain('Pending...');
+ expect(wrapper.prop('isLoading')).toEqual(true);
+ });
+ });
+ });
+
+ describe('when the status is Starting', () => {
+ it('renders an appropriate CrawlerStatusIndicator', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ mostRecentCrawlRequestStatus: CrawlerStatus.Starting,
+ });
+
+ const wrapper = shallow( );
+ expect(wrapper.is(EuiButton)).toEqual(true);
+ expect(wrapper.render().text()).toContain('Starting...');
+ expect(wrapper.prop('isLoading')).toEqual(true);
+ });
+ });
+
+ describe('when the status is Running', () => {
+ it('renders a stop crawl popover menu', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ mostRecentCrawlRequestStatus: CrawlerStatus.Running,
+ });
+
+ const wrapper = shallow( );
+ expect(wrapper.is(StopCrawlPopoverContextMenu)).toEqual(true);
+ });
+ });
+
+ [CrawlerStatus.Canceling, CrawlerStatus.Suspending].forEach((status) => {
+ describe(`when the status is ${status}`, () => {
+ it('renders a CrawlerStatusIndicator with a stopping indicator', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ mostRecentCrawlRequestStatus: status,
+ });
+
+ const wrapper = shallow( );
+ expect(wrapper.is(EuiButton)).toEqual(true);
+ expect(wrapper.render().text()).toContain('Stopping...');
+ expect(wrapper.prop('isLoading')).toEqual(true);
+ });
+ });
+ });
+
+ describe('when status is not a valid status', () => {
+ it('renders a CrawlerStatusIndicator with start crawl button', () => {
+ // this tests a codepath that should be impossible to reach, status should always be a CrawlerStatus
+ // but we use a switch statement and need to test the default case for this to receive 100% coverage
+ setMockValues({
+ ...MOCK_VALUES,
+ mostRecentCrawlRequestStatus: null,
+ });
+
+ const wrapper = shallow( );
+ expect(wrapper.is(StartCrawlContextMenu)).toEqual(true);
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/crawler_status_indicator.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/crawler_status_indicator.tsx
new file mode 100644
index 0000000000000..62648ec377774
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/crawler_status_indicator.tsx
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues } from 'kea';
+
+import { EuiButton } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { CrawlerStatus } from '../../../../api/crawler/types';
+import { CrawlerLogic } from '../crawler_logic';
+
+import { StartCrawlContextMenu } from './start_crawl_context_menu';
+import { StopCrawlPopoverContextMenu } from './stop_crawl_popover_context_menu';
+
+export const CrawlerStatusIndicator: React.FC = () => {
+ const { dataLoading, domains, mostRecentCrawlRequestStatus } = useValues(CrawlerLogic);
+
+ if (dataLoading || domains.length === 0) {
+ return (
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel',
+ {
+ defaultMessage: 'Crawl',
+ }
+ )}
+
+ );
+ }
+
+ switch (mostRecentCrawlRequestStatus) {
+ case CrawlerStatus.Pending:
+ case CrawlerStatus.Suspended:
+ return (
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusIndicator.pendingButtonLabel',
+ {
+ defaultMessage: 'Pending...',
+ }
+ )}
+
+ );
+ case CrawlerStatus.Starting:
+ return (
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startingButtonLabel',
+ {
+ defaultMessage: 'Starting...',
+ }
+ )}
+
+ );
+ case CrawlerStatus.Running:
+ return ;
+ case CrawlerStatus.Canceling:
+ case CrawlerStatus.Suspending:
+ return (
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusIndicator.stoppingButtonLabel',
+ {
+ defaultMessage: 'Stopping...',
+ }
+ )}
+
+ );
+ case CrawlerStatus.Success:
+ case CrawlerStatus.Failed:
+ case CrawlerStatus.Canceled:
+ default:
+ return ;
+ }
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/start_crawl_context_menu.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/start_crawl_context_menu.test.tsx
new file mode 100644
index 0000000000000..63ca02d8a16f0
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/start_crawl_context_menu.test.tsx
@@ -0,0 +1,85 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { ReactWrapper, shallow } from 'enzyme';
+
+import {
+ EuiButton,
+ EuiContextMenuItem,
+ EuiContextMenuPanel,
+ EuiPopover,
+ EuiResizeObserver,
+} from '@elastic/eui';
+
+import { mountWithIntl } from '../../../../../test_helpers';
+
+import { StartCrawlContextMenu } from './start_crawl_context_menu';
+
+const MOCK_ACTIONS = {
+ // CrawlerLogic
+ reApplyCrawlRules: jest.fn(),
+ startCrawl: jest.fn(),
+ // CrawlCustomSettingsFlyoutLogic
+ showFlyout: jest.fn(),
+};
+
+describe('StartCrawlContextMenu', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockActions(MOCK_ACTIONS);
+ });
+
+ it('is initially closed', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.is(EuiPopover)).toBe(true);
+ expect(wrapper.prop('isOpen')).toEqual(false);
+ });
+
+ describe('user actions', () => {
+ let wrapper: ReactWrapper;
+ let menuItems: ReactWrapper;
+
+ beforeEach(() => {
+ wrapper = mountWithIntl( );
+
+ wrapper.find(EuiButton).simulate('click');
+
+ menuItems = wrapper
+ .find(EuiContextMenuPanel)
+ .find(EuiResizeObserver)
+ .find(EuiContextMenuItem);
+ });
+
+ it('can be opened', () => {
+ expect(wrapper.find(EuiPopover).prop('isOpen')).toEqual(true);
+ expect(menuItems.length).toEqual(3);
+ });
+
+ it('can start crawls', () => {
+ menuItems.at(0).simulate('click');
+
+ expect(MOCK_ACTIONS.startCrawl).toHaveBeenCalled();
+ });
+
+ it('can open a modal to start a crawl with custom settings', () => {
+ menuItems.at(1).simulate('click');
+
+ expect(MOCK_ACTIONS.showFlyout).toHaveBeenCalled();
+ });
+
+ it('can reapply crawl rules', () => {
+ menuItems.at(2).simulate('click');
+
+ expect(MOCK_ACTIONS.reApplyCrawlRules).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/start_crawl_context_menu.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/start_crawl_context_menu.tsx
new file mode 100644
index 0000000000000..ba5b10cf1ad71
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/start_crawl_context_menu.tsx
@@ -0,0 +1,93 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import React, { useState } from 'react';
+
+import { useActions } from 'kea';
+
+import { EuiButton, EuiContextMenuItem, EuiContextMenuPanel, EuiPopover } from '@elastic/eui';
+import { i18n } from '@kbn/i18n';
+
+import { CrawlCustomSettingsFlyoutLogic } from '../crawl_custom_settings_flyout/crawl_custom_settings_flyout_logic';
+import { CrawlerLogic } from '../crawler_logic';
+
+export const StartCrawlContextMenu: React.FC = () => {
+ const { reApplyCrawlRules, startCrawl } = useActions(CrawlerLogic);
+ const { showFlyout: showCrawlCustomSettingsFlyout } = useActions(CrawlCustomSettingsFlyoutLogic);
+ const [isPopoverOpen, setPopover] = useState(false);
+
+ const togglePopover = () => setPopover(!isPopoverOpen);
+ const closePopover = () => setPopover(false);
+
+ return (
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusIndicator.retryCrawlButtonLabel',
+ {
+ defaultMessage: 'Crawl',
+ }
+ )}
+
+ }
+ isOpen={isPopoverOpen}
+ closePopover={closePopover}
+ panelPaddingSize="none"
+ anchorPosition="downLeft"
+ >
+ {
+ closePopover();
+ startCrawl();
+ }}
+ icon="play"
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlAllDomainsMenuLabel',
+ {
+ defaultMessage: 'Crawl all domains on this index',
+ }
+ )}
+ ,
+ {
+ closePopover();
+ showCrawlCustomSettingsFlyout();
+ }}
+ icon="gear"
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel',
+ {
+ defaultMessage: 'Crawl with custom settings',
+ }
+ )}
+ ,
+ {
+ closePopover();
+ reApplyCrawlRules();
+ }}
+ icon="refresh"
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.startCrawlContextMenu.reapplyCrawlRulesMenuLabel',
+ {
+ defaultMessage: 'Reapply crawl rules',
+ }
+ )}
+ ,
+ ]}
+ />
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/stop_crawl_popover_context_menu.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/stop_crawl_popover_context_menu.test.tsx
new file mode 100644
index 0000000000000..bf1165c4350df
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/stop_crawl_popover_context_menu.test.tsx
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { setMockActions } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import {
+ EuiButton,
+ EuiContextMenuItem,
+ EuiContextMenuPanel,
+ EuiPopover,
+ EuiResizeObserver,
+} from '@elastic/eui';
+
+import { mountWithIntl } from '../../../../../test_helpers';
+
+import { StopCrawlPopoverContextMenu } from './stop_crawl_popover_context_menu';
+
+const MOCK_ACTIONS = {
+ stopCrawl: jest.fn(),
+};
+
+describe('StopCrawlPopoverContextMenu', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockActions(MOCK_ACTIONS);
+ });
+ it('is initially closed', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.is(EuiPopover)).toBe(true);
+ expect(wrapper.prop('isOpen')).toEqual(false);
+ });
+
+ it('can be opened to stop crawls', () => {
+ const wrapper = mountWithIntl( );
+
+ wrapper.find(EuiButton).simulate('click');
+
+ expect(wrapper.find(EuiPopover).prop('isOpen')).toEqual(true);
+
+ const menuItem = wrapper
+ .find(EuiContextMenuPanel)
+ .find(EuiResizeObserver)
+ .find(EuiContextMenuItem);
+
+ expect(menuItem).toHaveLength(1);
+
+ menuItem.simulate('click');
+
+ expect(MOCK_ACTIONS.stopCrawl).toHaveBeenCalled();
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/stop_crawl_popover_context_menu.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/stop_crawl_popover_context_menu.tsx
new file mode 100644
index 0000000000000..40be28411479d
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/crawler_status_indicator/stop_crawl_popover_context_menu.tsx
@@ -0,0 +1,84 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useState } from 'react';
+
+import { useActions } from 'kea';
+
+import {
+ EuiButton,
+ EuiContextMenuItem,
+ EuiContextMenuPanel,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiLoadingSpinner,
+ EuiPopover,
+} from '@elastic/eui';
+import { i18n } from '@kbn/i18n';
+
+import { CrawlerLogic } from '../crawler_logic';
+
+export const StopCrawlPopoverContextMenu: React.FC = () => {
+ const [isPopoverOpen, setPopover] = useState(false);
+
+ const togglePopover = () => setPopover(!isPopoverOpen);
+
+ const closePopover = () => setPopover(false);
+
+ const { stopCrawl } = useActions(CrawlerLogic);
+
+ return (
+
+
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusIndicator.crawlingButtonLabel',
+ {
+ defaultMessage: 'Crawling...',
+ }
+ )}
+
+
+
+ }
+ isOpen={isPopoverOpen}
+ closePopover={closePopover}
+ panelPaddingSize="none"
+ anchorPosition="downLeft"
+ >
+ {
+ closePopover();
+ stopCrawl();
+ }}
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.crawlerStatusIndicator.cancelCrawlMenuItemLabel',
+ {
+ defaultMessage: 'Cancel Crawl',
+ }
+ )}
+ ,
+ ]}
+ />
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_flyout.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_flyout.test.tsx
new file mode 100644
index 0000000000000..28f0dd54605cf
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_flyout.test.tsx
@@ -0,0 +1,69 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { setMockActions, setMockValues } from '../../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiFlyout, EuiFlyoutBody } from '@elastic/eui';
+
+import { AddDomainFlyout } from './add_domain_flyout';
+import { AddDomainForm } from './add_domain_form';
+import { AddDomainFormErrors } from './add_domain_form_errors';
+import { AddDomainFormSubmitButton } from './add_domain_form_submit_button';
+
+const MOCK_ACTIONS = {
+ closeFlyout: jest.fn(),
+};
+
+describe('AddDomainFlyout', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockActions(MOCK_ACTIONS);
+ });
+
+ it('can be hidden', () => {
+ setMockValues({
+ isFlyoutVisible: false,
+ });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.isEmptyRender()).toBe(true);
+ });
+
+ describe('flyout', () => {
+ let wrapper: ShallowWrapper;
+
+ beforeEach(() => {
+ setMockValues({
+ isFlyoutVisible: true,
+ });
+
+ wrapper = shallow( );
+ });
+
+ it('displays form errors', () => {
+ expect(wrapper.find(EuiFlyoutBody).dive().find(AddDomainFormErrors)).toHaveLength(1);
+ });
+
+ it('contains a form to add domains', () => {
+ expect(wrapper.find(AddDomainForm)).toHaveLength(1);
+ });
+
+ it('contains a submit button', () => {
+ expect(wrapper.find(AddDomainFormSubmitButton)).toHaveLength(1);
+ });
+
+ it('hides the flyout on close', () => {
+ wrapper.find(EuiFlyout).simulate('close');
+
+ expect(MOCK_ACTIONS.closeFlyout).toHaveBeenCalled();
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_flyout.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_flyout.tsx
new file mode 100644
index 0000000000000..908f860099521
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_flyout.tsx
@@ -0,0 +1,85 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues, useActions } from 'kea';
+
+import {
+ EuiButtonEmpty,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiFlyout,
+ EuiFlyoutBody,
+ EuiFlyoutFooter,
+ EuiFlyoutHeader,
+ EuiPortal,
+ EuiSpacer,
+ EuiText,
+ EuiTitle,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { CANCEL_BUTTON_LABEL } from '../../../../../../shared/constants';
+
+import { AddDomainForm } from './add_domain_form';
+import { AddDomainFormErrors } from './add_domain_form_errors';
+import { AddDomainFormSubmitButton } from './add_domain_form_submit_button';
+import { AddDomainLogic } from './add_domain_logic';
+
+export const AddDomainFlyout: React.FC = () => {
+ const { isFlyoutVisible } = useValues(AddDomainLogic);
+ const { closeFlyout } = useActions(AddDomainLogic);
+
+ if (isFlyoutVisible) {
+ return (
+
+
+
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.addDomainFlyout.title', {
+ defaultMessage: 'Add a new domain',
+ })}
+
+
+
+
+
+
+ >
+ }
+ >
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.addDomainFlyout.description', {
+ defaultMessage:
+ 'You can add multiple domains to this index\'s web crawler. Add another domain here and modify the entry points and crawl rules from the "Manage" page.',
+ })}
+
+
+
+
+
+
+
+
+ {CANCEL_BUTTON_LABEL}
+
+
+
+
+
+
+
+
+ );
+ }
+ return null;
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form.test.tsx
new file mode 100644
index 0000000000000..e5de15c812b1b
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form.test.tsx
@@ -0,0 +1,142 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiButton, EuiFieldText, EuiForm } from '@elastic/eui';
+
+import { FormattedMessage } from '@kbn/i18n-react';
+
+import { rerender } from '../../../../../../test_helpers';
+
+import { AddDomainForm } from './add_domain_form';
+import { AddDomainValidation } from './add_domain_validation';
+
+const MOCK_VALUES = {
+ addDomainFormInputValue: 'https://',
+ entryPointValue: '/',
+ isValidationLoading: false,
+ hasValidationCompleted: false,
+};
+
+const MOCK_ACTIONS = {
+ setAddDomainFormInputValue: jest.fn(),
+ startDomainValidation: jest.fn(),
+};
+
+describe('AddDomainForm', () => {
+ let wrapper: ShallowWrapper;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ setMockActions(MOCK_ACTIONS);
+ setMockValues(MOCK_VALUES);
+ wrapper = shallow( );
+ });
+
+ it('renders', () => {
+ expect(wrapper.find(EuiForm)).toHaveLength(1);
+ });
+
+ it('contains a submit button', () => {
+ expect(wrapper.find(EuiButton).prop('type')).toEqual('submit');
+ });
+
+ it('validates domain on submit', () => {
+ wrapper.find(EuiForm).simulate('submit', { preventDefault: jest.fn() });
+
+ expect(MOCK_ACTIONS.startDomainValidation).toHaveBeenCalledTimes(1);
+ });
+
+ describe('url field', () => {
+ it('uses the value from the logic', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ addDomainFormInputValue: 'test value',
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(EuiFieldText).prop('value')).toEqual('test value');
+ });
+
+ it('sets the value in the logic on change', () => {
+ wrapper.find(EuiFieldText).simulate('change', { target: { value: 'test value' } });
+
+ expect(MOCK_ACTIONS.setAddDomainFormInputValue).toHaveBeenCalledWith('test value');
+ });
+ });
+
+ describe('validate domain button', () => {
+ it('is enabled when the input has a value', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ addDomainFormInputValue: 'https://elastic.co',
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(EuiButton).prop('disabled')).toEqual(false);
+ });
+
+ it('is disabled when the input value is empty', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ addDomainFormInputValue: '',
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(EuiButton).prop('disabled')).toEqual(true);
+ });
+ });
+
+ describe('entry point indicator', () => {
+ it('is hidden when the entry point is /', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ entryPointValue: '/',
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(FormattedMessage)).toHaveLength(0);
+ });
+
+ it('displays the entry point otherwise', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ entryPointValue: '/guide',
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(FormattedMessage)).toHaveLength(1);
+ });
+ });
+
+ describe('validation', () => {
+ it('is hidden by default', () => {
+ expect(wrapper.find(AddDomainValidation)).toHaveLength(0);
+ });
+
+ it('can be shown to the user', () => {
+ setMockValues({
+ ...MOCK_VALUES,
+ displayValidation: true,
+ });
+
+ rerender(wrapper);
+
+ expect(wrapper.find(AddDomainValidation)).toHaveLength(1);
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form.tsx
new file mode 100644
index 0000000000000..b0722ca50d2c7
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form.tsx
@@ -0,0 +1,103 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import {
+ EuiButton,
+ EuiCode,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiForm,
+ EuiFormRow,
+ EuiFieldText,
+ EuiSpacer,
+ EuiText,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+import { FormattedMessage } from '@kbn/i18n-react';
+
+import { AddDomainLogic } from './add_domain_logic';
+import { AddDomainValidation } from './add_domain_validation';
+
+export const AddDomainForm: React.FC = () => {
+ const { setAddDomainFormInputValue, startDomainValidation } = useActions(AddDomainLogic);
+
+ const { addDomainFormInputValue, displayValidation, entryPointValue } = useValues(AddDomainLogic);
+
+ return (
+ <>
+ {
+ event.preventDefault();
+ startDomainValidation();
+ }}
+ component="form"
+ >
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.addDomainForm.urlHelpText', {
+ defaultMessage: 'Domain URLs require a protocol and cannot contain any paths.',
+ })}
+
+ }
+ >
+
+
+ setAddDomainFormInputValue(e.target.value)}
+ fullWidth
+ />
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.addDomainForm.validateButtonLabel',
+ {
+ defaultMessage: 'Validate Domain',
+ }
+ )}
+
+
+
+
+
+ {entryPointValue !== '/' && (
+ <>
+
+
+
+
+ {entryPointValue},
+ }}
+ />
+
+
+
+ >
+ )}
+
+ {displayValidation && }
+
+ >
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_errors.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_errors.test.tsx
new file mode 100644
index 0000000000000..13be60a84c039
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_errors.test.tsx
@@ -0,0 +1,41 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import { setMockValues } from '../../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { AddDomainFormErrors } from './add_domain_form_errors';
+
+describe('AddDomainFormErrors', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('is empty when there are no errors', () => {
+ setMockValues({
+ errors: [],
+ });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.isEmptyRender()).toBe(true);
+ });
+
+ it('displays all the errors from the logic', () => {
+ setMockValues({
+ errors: ['first error', 'second error'],
+ });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.find('p')).toHaveLength(2);
+ expect(wrapper.find('p').first().text()).toContain('first error');
+ expect(wrapper.find('p').last().text()).toContain('second error');
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_errors.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_errors.tsx
new file mode 100644
index 0000000000000..e799161422234
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_errors.tsx
@@ -0,0 +1,37 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues } from 'kea';
+
+import { EuiCallOut } from '@elastic/eui';
+import { i18n } from '@kbn/i18n';
+
+import { AddDomainLogic } from './add_domain_logic';
+
+export const AddDomainFormErrors: React.FC = () => {
+ const { errors } = useValues(AddDomainLogic);
+
+ if (errors.length > 0) {
+ return (
+
+ {errors.map((message, index) => (
+ {message}
+ ))}
+
+ );
+ }
+
+ return null;
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_submit_button.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_submit_button.test.tsx
new file mode 100644
index 0000000000000..e8109891d0f2b
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_submit_button.test.tsx
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiButton } from '@elastic/eui';
+
+import { AddDomainFormSubmitButton } from './add_domain_form_submit_button';
+
+describe('AddDomainFormSubmitButton', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('is disabled when the domain has not been validated', () => {
+ setMockValues({
+ allowSubmit: false,
+ });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.prop('disabled')).toBe(true);
+ });
+
+ it('is enabled when the domain has been validated', () => {
+ setMockValues({
+ allowSubmit: true,
+ });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.prop('disabled')).toBe(false);
+ });
+
+ it('submits the domain on click', () => {
+ const submitNewDomain = jest.fn();
+
+ setMockActions({
+ submitNewDomain,
+ });
+ setMockValues({
+ allowSubmit: true,
+ });
+
+ const wrapper = shallow( );
+
+ wrapper.find(EuiButton).simulate('click');
+
+ expect(submitNewDomain).toHaveBeenCalled();
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_submit_button.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_submit_button.tsx
new file mode 100644
index 0000000000000..8ba831017df86
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_form_submit_button.tsx
@@ -0,0 +1,30 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import { EuiButton } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { AddDomainLogic } from './add_domain_logic';
+
+export const AddDomainFormSubmitButton: React.FC = () => {
+ const { submitNewDomain } = useActions(AddDomainLogic);
+
+ const { allowSubmit } = useValues(AddDomainLogic);
+
+ return (
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.addDomainForm.submitButtonLabel', {
+ defaultMessage: 'Add domain',
+ })}
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_logic.test.ts
new file mode 100644
index 0000000000000..c2933889aa4a8
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_logic.test.ts
@@ -0,0 +1,752 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ LogicMounter,
+ mockFlashMessageHelpers,
+ mockHttpValues,
+ mockKibanaValues,
+} from '../../../../../../__mocks__/kea_logic';
+import '../../../_mocks_/index_name_logic.mock';
+
+jest.mock('../../crawler_logic', () => ({
+ CrawlerLogic: {
+ actions: {
+ fetchCrawlerData: jest.fn(),
+ },
+ },
+}));
+
+jest.mock('./utils', () => ({
+ ...(jest.requireActual('./utils') as object),
+ getDomainWithProtocol: jest.fn().mockImplementation((domain) => domain),
+}));
+
+import { nextTick } from '@kbn/test-jest-helpers';
+
+import {
+ CRAWLER_DOMAIN,
+ CRAWLER_DOMAIN_FROM_SERVER,
+} from '../../../../../api/crawler/_mocks_/crawler_domains.mock';
+
+import { CrawlerDomain } from '../../../../../api/crawler/types';
+
+import { AddDomainLogic, AddDomainLogicValues } from './add_domain_logic';
+import { getDomainWithProtocol } from './utils';
+
// Expected state of AddDomainLogic immediately after a fresh mount; the
// 'has default values' test asserts deep equality against this object, so it
// must mirror every reducer/selector default exactly.
const DEFAULT_VALUES: AddDomainLogicValues = {
  addDomainFormInputValue: 'https://',
  entryPointValue: '/',
  canIgnoreValidationFailure: false,
  displayValidation: false,
  // Each validation step starts with an empty-string state (not yet run).
  domainValidationResult: {
    steps: {
      contentVerification: { state: '' },
      indexingRestrictions: { state: '' },
      initialValidation: { state: '' },
      networkConnectivity: { state: '' },
    },
  },
  allowSubmit: false,
  ignoreValidationFailure: false,
  isValidationLoading: false,
  hasBlockingFailure: false,
  hasValidationCompleted: false,
  errors: [],
  isFlyoutVisible: false,
};
+
+describe('AddDomainLogic', () => {
+ const { mount } = new LogicMounter(AddDomainLogic);
+ const { flashSuccessToast } = mockFlashMessageHelpers;
+ const { http } = mockHttpValues;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mount();
+ });
+
+ it('has default values', () => {
+ expect(AddDomainLogic.values).toEqual(DEFAULT_VALUES);
+ });
+
+ describe('actions', () => {
+ describe('clearDomainFormInputValue', () => {
+ beforeEach(() => {
+ mount({
+ addDomainFormInputValue: 'http://elastic.co',
+ entryPointValue: '/foo',
+ hasValidationCompleted: true,
+ errors: ['first error', 'second error'],
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'loading' },
+ indexingRestrictions: { state: 'loading' },
+ initialValidation: { state: 'loading' },
+ networkConnectivity: { state: 'loading' },
+ },
+ },
+ });
+
+ AddDomainLogic.actions.clearDomainFormInputValue();
+ });
+
+ it('should clear the input value', () => {
+ expect(AddDomainLogic.values.addDomainFormInputValue).toEqual('https://');
+ });
+
+ it('should clear the entry point value', () => {
+ expect(AddDomainLogic.values.entryPointValue).toEqual('/');
+ });
+
+ it('should reset validation completion', () => {
+ expect(AddDomainLogic.values.hasValidationCompleted).toEqual(false);
+ });
+
+ it('should clear errors', () => {
+ expect(AddDomainLogic.values.errors).toEqual([]);
+ });
+
+ it('should clear validation results', () => {
+ expect(AddDomainLogic.values.domainValidationResult).toEqual({
+ steps: {
+ contentVerification: { state: '' },
+ indexingRestrictions: { state: '' },
+ initialValidation: { state: '' },
+ networkConnectivity: { state: '' },
+ },
+ });
+ });
+ });
+
+ describe('onSubmitNewDomainError', () => {
+ it('should set errors', () => {
+ mount();
+
+ AddDomainLogic.actions.onSubmitNewDomainError(['first error', 'second error']);
+
+ expect(AddDomainLogic.values.errors).toEqual(['first error', 'second error']);
+ });
+ });
+
+ describe('setAddDomainFormInputValue', () => {
+ beforeEach(() => {
+ mount({
+ addDomainFormInputValue: 'https://elastic.co',
+ entryPointValue: '/customers',
+ hasValidationCompleted: true,
+ errors: ['first error', 'second error'],
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'loading' },
+ indexingRestrictions: { state: 'loading' },
+ initialValidation: { state: 'loading' },
+ networkConnectivity: { state: 'loading' },
+ },
+ },
+ });
+
+ AddDomainLogic.actions.setAddDomainFormInputValue('https://swiftype.com/site-search');
+ });
+
+ it('should set the input value', () => {
+ expect(AddDomainLogic.values.addDomainFormInputValue).toEqual(
+ 'https://swiftype.com/site-search'
+ );
+ });
+
+ it('should clear the entry point value', () => {
+ expect(AddDomainLogic.values.entryPointValue).toEqual('/');
+ });
+
+ it('should reset validation completion', () => {
+ expect(AddDomainLogic.values.hasValidationCompleted).toEqual(false);
+ });
+
+ it('should clear errors', () => {
+ expect(AddDomainLogic.values.errors).toEqual([]);
+ });
+
+ it('should clear validation results', () => {
+ expect(AddDomainLogic.values.domainValidationResult).toEqual({
+ steps: {
+ contentVerification: { state: '' },
+ indexingRestrictions: { state: '' },
+ initialValidation: { state: '' },
+ networkConnectivity: { state: '' },
+ },
+ });
+ });
+ });
+
+ describe('setDomainValidationResult', () => {
+ it('should update the validation result', () => {
+ AddDomainLogic.actions.setDomainValidationResult({
+ contentVerification: { state: 'invalid' },
+ });
+
+ expect(AddDomainLogic.values.domainValidationResult).toEqual({
+ steps: {
+ contentVerification: { state: 'invalid' },
+ indexingRestrictions: { state: '' },
+ initialValidation: { state: '' },
+ networkConnectivity: { state: '' },
+ },
+ });
+ });
+ });
+
+ describe('setIgnoreValidationFailure', () => {
+ beforeEach(() => {
+ mount({
+ addDomainFormInputValue: 'https://elastic.co',
+ entryPointValue: '/customers',
+ hasValidationCompleted: true,
+ errors: ['first error', 'second error'],
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'loading' },
+ indexingRestrictions: { state: 'loading' },
+ initialValidation: { state: 'loading' },
+ networkConnectivity: { state: 'loading' },
+ },
+ },
+ });
+
+ AddDomainLogic.actions.setIgnoreValidationFailure(true);
+ });
+
+ it('should set the input value', () => {
+ expect(AddDomainLogic.values.ignoreValidationFailure).toEqual(true);
+ });
+ });
+
+ describe('submitNewDomain', () => {
+ it('should clear errors', () => {
+ mount({
+ errors: ['first-error', 'second error'],
+ });
+
+ AddDomainLogic.actions.submitNewDomain();
+
+ expect(AddDomainLogic.values.errors).toEqual([]);
+ });
+ });
+
+ describe('validateDomainInitialVerification', () => {
+ beforeEach(() => {
+ mount({
+ addDomainFormInputValue: 'https://elastic.co',
+ entryPointValue: '/customers',
+ hasValidationCompleted: true,
+ errors: ['first error', 'second error'],
+ });
+
+ AddDomainLogic.actions.validateDomainInitialVerification(
+ 'https://swiftype.com',
+ '/site-search'
+ );
+ });
+
+ it('should set the input value', () => {
+ expect(AddDomainLogic.values.addDomainFormInputValue).toEqual('https://swiftype.com');
+ });
+
+ it('should set the entry point value', () => {
+ expect(AddDomainLogic.values.entryPointValue).toEqual('/site-search');
+ });
+
+ it('should clear errors', () => {
+ expect(AddDomainLogic.values.errors).toEqual([]);
+ });
+ });
+
+ describe('startDomainValidation', () => {
+ it('should set validation results to loading', () => {
+ mount({
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: '' },
+ indexingRestrictions: { state: '' },
+ initialValidation: { state: '' },
+ networkConnectivity: { state: '' },
+ },
+ },
+ });
+
+ AddDomainLogic.actions.startDomainValidation();
+
+ expect(AddDomainLogic.values.domainValidationResult).toEqual({
+ steps: {
+ contentVerification: { state: 'loading' },
+ indexingRestrictions: { state: 'loading' },
+ initialValidation: { state: 'loading' },
+ networkConnectivity: { state: 'loading' },
+ },
+ });
+ });
+ });
+ });
+
+ describe('listeners', () => {
+ describe('onSubmitNewDomainSuccess', () => {
+ it('should flash a success toast', () => {
+ const { navigateToUrl } = mockKibanaValues;
+
+ AddDomainLogic.actions.onSubmitNewDomainSuccess({ id: 'test-domain' } as CrawlerDomain);
+
+ expect(flashSuccessToast).toHaveBeenCalled();
+ expect(navigateToUrl).toHaveBeenCalledWith(
+ '/search_indices/index-name/crawler/domains/test-domain'
+ );
+ });
+ });
+
+ describe('submitNewDomain', () => {
+ it('calls the domains endpoint with a JSON formatted body', async () => {
+ mount({
+ addDomainFormInputValue: 'https://elastic.co',
+ entryPointValue: '/guide',
+ });
+ http.post.mockReturnValueOnce(Promise.resolve({}));
+
+ AddDomainLogic.actions.submitNewDomain();
+ await nextTick();
+
+ expect(http.post).toHaveBeenCalledWith(
+ '/internal/enterprise_search/indices/index-name/crawler/domains',
+ {
+ body: JSON.stringify({
+ entry_points: [{ value: '/guide' }],
+ name: 'https://elastic.co',
+ }),
+ }
+ );
+ });
+
+ describe('on success', () => {
+ it('calls success action', async () => {
+ jest.spyOn(AddDomainLogic.actions, 'onSubmitNewDomainSuccess');
+ http.post.mockReturnValueOnce(Promise.resolve(CRAWLER_DOMAIN_FROM_SERVER));
+
+ AddDomainLogic.actions.submitNewDomain();
+ await nextTick();
+
+ expect(AddDomainLogic.actions.onSubmitNewDomainSuccess).toHaveBeenCalledWith(
+ CRAWLER_DOMAIN
+ );
+ });
+ });
+
+ describe('on error', () => {
+ it('passes error messages to the error callback', async () => {
+ jest.spyOn(AddDomainLogic.actions, 'onSubmitNewDomainError');
+
+ http.post.mockReturnValueOnce(
+ Promise.reject({
+ body: {
+ attributes: {
+ errors: ['first error', 'second error'],
+ },
+ },
+ })
+ );
+
+ AddDomainLogic.actions.submitNewDomain();
+ await nextTick();
+
+ expect(AddDomainLogic.actions.onSubmitNewDomainError).toHaveBeenCalledWith([
+ 'first error',
+ 'second error',
+ ]);
+ });
+ });
+ });
+
+ describe('startDomainValidation', () => {
+ it('extracts the domain and entrypoint and passes them to the callback ', async () => {
+ mount({ addDomainFormInputValue: 'https://swiftype.com/site-search' });
+ jest.spyOn(AddDomainLogic.actions, 'validateDomainInitialVerification');
+
+ AddDomainLogic.actions.startDomainValidation();
+ await nextTick();
+
+ expect(AddDomainLogic.actions.validateDomainInitialVerification).toHaveBeenCalledWith(
+ 'https://swiftype.com',
+ '/site-search'
+ );
+ expect(getDomainWithProtocol).toHaveBeenCalledWith('https://swiftype.com');
+ });
+ });
+
+ describe('validateDomainInitialVerification', () => {
+ it('validates the url', async () => {
+ jest.spyOn(AddDomainLogic.actions, 'performDomainValidationStep');
+
+ AddDomainLogic.actions.validateDomainInitialVerification('https://elastic.co', '/');
+ await nextTick();
+
+ expect(AddDomainLogic.actions.performDomainValidationStep).toHaveBeenCalledWith(
+ 'initialValidation',
+ ['url']
+ );
+ });
+ });
+
+ describe('validateDomainContentVerification', () => {
+ it('validates the domain content', async () => {
+ jest.spyOn(AddDomainLogic.actions, 'performDomainValidationStep');
+
+ AddDomainLogic.actions.validateDomainContentVerification();
+ await nextTick();
+
+ expect(AddDomainLogic.actions.performDomainValidationStep).toHaveBeenCalledWith(
+ 'contentVerification',
+ ['url_request', 'url_content']
+ );
+ });
+ });
+
+ describe('validateDomainIndexingRestrictions', () => {
+ it("validates the domain's robots.txt", async () => {
+ jest.spyOn(AddDomainLogic.actions, 'performDomainValidationStep');
+
+ AddDomainLogic.actions.validateDomainIndexingRestrictions();
+ await nextTick();
+
+ expect(AddDomainLogic.actions.performDomainValidationStep).toHaveBeenCalledWith(
+ 'indexingRestrictions',
+ ['robots_txt']
+ );
+ });
+ });
+
+ describe('validateDomainNetworkConnectivity', () => {
+ it("validates the domain's dns", async () => {
+ jest.spyOn(AddDomainLogic.actions, 'performDomainValidationStep');
+
+ AddDomainLogic.actions.validateDomainNetworkConnectivity();
+ await nextTick();
+
+ expect(AddDomainLogic.actions.performDomainValidationStep).toHaveBeenCalledWith(
+ 'networkConnectivity',
+ ['dns', 'tcp']
+ );
+ });
+ });
+
+ describe('performDomainValidationStep', () => {
+ beforeEach(() => {
+ mount({
+ addDomainFormInputValue: 'https://elastic.co',
+ });
+ });
+
+ describe('on success', () => {
+ it('sets all remaining validation steps invalid on a blocking failure', async () => {
+ http.post.mockReturnValueOnce(
+ Promise.resolve({
+ valid: false,
+ results: [
+ {
+ name: '-',
+ result: 'failure',
+ comment: 'Something unexpected happened',
+ },
+ ],
+ })
+ );
+
+ jest.spyOn(AddDomainLogic.actions, 'setDomainValidationResult');
+
+ AddDomainLogic.actions.performDomainValidationStep('initialValidation', ['url']);
+ await nextTick();
+
+ expect(AddDomainLogic.actions.setDomainValidationResult).toHaveBeenCalledWith({
+ initialValidation: {
+ state: 'invalid',
+ blockingFailure: true,
+ message: 'Something unexpected happened',
+ },
+ networkConnectivity: {
+ state: 'invalid',
+ message:
+ 'Unable to establish a network connection because the "Initial validation" check failed.',
+ },
+ indexingRestrictions: {
+ state: 'invalid',
+ message:
+ 'Unable to determine indexing restrictions because the "Network connectivity" check failed.',
+ },
+ contentVerification: {
+ state: 'invalid',
+ message: 'Unable to verify content because the "Indexing restrictions" check failed.',
+ },
+ });
+ });
+
+ describe('when there are no blocking failures', () => {
+ beforeEach(() => {
+ http.post.mockReturnValue(
+ Promise.resolve({
+ valid: true,
+ results: [],
+ })
+ );
+ });
+
+ it('updates the validation result', async () => {
+ jest.spyOn(AddDomainLogic.actions, 'setDomainValidationResult');
+
+ AddDomainLogic.actions.performDomainValidationStep('initialValidation', ['url']);
+ await nextTick();
+
+ expect(AddDomainLogic.actions.setDomainValidationResult).toHaveBeenCalledWith({
+ initialValidation: {
+ state: 'valid',
+ },
+ });
+ });
+
+ describe('validation chain', () => {
+ beforeEach(() => {
+ http.post.mockReturnValue(
+ Promise.resolve({
+ valid: true,
+ results: [],
+ })
+ );
+ });
+
+ it('checks network connectivity after initial validation', async () => {
+ jest.spyOn(AddDomainLogic.actions, 'validateDomainNetworkConnectivity');
+
+ AddDomainLogic.actions.performDomainValidationStep('initialValidation', ['url']);
+ await nextTick();
+
+ expect(AddDomainLogic.actions.validateDomainNetworkConnectivity).toHaveBeenCalled();
+ });
+
+ it('checks indexing restrictions after network connectivity', async () => {
+ jest.spyOn(AddDomainLogic.actions, 'validateDomainIndexingRestrictions');
+
+ AddDomainLogic.actions.performDomainValidationStep('networkConnectivity', ['url']);
+ await nextTick();
+
+ expect(AddDomainLogic.actions.validateDomainIndexingRestrictions).toHaveBeenCalled();
+ });
+
+ it('checks content after indexing restrictions', async () => {
+ jest.spyOn(AddDomainLogic.actions, 'validateDomainContentVerification');
+
+ AddDomainLogic.actions.performDomainValidationStep('indexingRestrictions', ['url']);
+ await nextTick();
+
+ expect(AddDomainLogic.actions.validateDomainContentVerification).toHaveBeenCalled();
+ });
+ });
+ });
+ });
+
+ describe('on failure', () => {
+      it('sets all remaining validation steps as invalid', async () => {
+ http.post.mockReturnValueOnce(Promise.reject({}));
+
+ jest.spyOn(AddDomainLogic.actions, 'setDomainValidationResult');
+
+ AddDomainLogic.actions.performDomainValidationStep('initialValidation', ['url']);
+ await nextTick();
+
+ expect(AddDomainLogic.actions.setDomainValidationResult).toHaveBeenCalledWith({
+ initialValidation: {
+ state: 'invalid',
+ blockingFailure: true,
+ message: 'Unexpected error',
+ },
+ networkConnectivity: {
+ state: 'invalid',
+ message:
+ 'Unable to establish a network connection because the "Initial validation" check failed.',
+ },
+ indexingRestrictions: {
+ state: 'invalid',
+ message:
+ 'Unable to determine indexing restrictions because the "Network connectivity" check failed.',
+ },
+ contentVerification: {
+ state: 'invalid',
+ message: 'Unable to verify content because the "Indexing restrictions" check failed.',
+ },
+ });
+ });
+ });
+ });
+ });
+
+ describe('selectors', () => {
+ describe('isValidationLoading', () => {
+ it('is false by default', () => {
+ expect(AddDomainLogic.values.isValidationLoading).toEqual(false);
+ });
+
+ it('is true when any steps are loading', () => {
+ mount({
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'valid' },
+ indexingRestrictions: { state: 'valid' },
+ initialValidation: { state: 'valid' },
+ networkConnectivity: { state: 'loading' },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.isValidationLoading).toEqual(true);
+ });
+ });
+
+ describe('hasValidationCompleted', () => {
+ it('is false when steps are loading', () => {
+ mount({
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'loading' },
+ indexingRestrictions: { state: 'loading' },
+ initialValidation: { state: 'loading' },
+ networkConnectivity: { state: 'loading' },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.hasValidationCompleted).toEqual(false);
+ });
+
+      it('is true when all steps are valid or invalid', () => {
+ mount({
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'valid' },
+ indexingRestrictions: { state: 'valid' },
+ initialValidation: { state: 'invalid' },
+ networkConnectivity: { state: 'invalid' },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.hasValidationCompleted).toEqual(true);
+ });
+ });
+
+ describe('hasBlockingFailure', () => {
+ it('is true when any steps have blocking failures', () => {
+ mount({
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'valid' },
+ indexingRestrictions: { state: 'valid' },
+ initialValidation: { state: 'valid' },
+ networkConnectivity: { state: 'invalid', blockingFailure: true },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.hasBlockingFailure).toEqual(true);
+ });
+ });
+
+ describe('canIgnoreValidationFailure', () => {
+ it('is true when any steps have blocking failures', () => {
+ mount({
+ hasValidationCompleted: true,
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'invalid', blockingFailure: true },
+ indexingRestrictions: { state: 'valid' },
+ initialValidation: { state: 'valid' },
+ networkConnectivity: { state: 'valid' },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.canIgnoreValidationFailure).toEqual(true);
+ });
+
+ it('is false when validation has not completed', () => {
+ mount({
+ hasValidationCompleted: false,
+ });
+
+ expect(AddDomainLogic.values.canIgnoreValidationFailure).toEqual(false);
+ });
+ });
+
+ describe('allowSubmit', () => {
+ it('is true when a user has validated all steps and has no failures', () => {
+ mount({
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'valid' },
+ indexingRestrictions: { state: 'valid' },
+ initialValidation: { state: 'valid' },
+ networkConnectivity: { state: 'valid' },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.allowSubmit).toEqual(true);
+ });
+
+ it('is true when a user ignores validation failure', () => {
+ mount({
+ ignoreValidationFailure: true,
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'valid' },
+ indexingRestrictions: { state: 'valid' },
+ initialValidation: { state: 'invalid' },
+ networkConnectivity: { state: 'invalid' },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.allowSubmit).toEqual(true);
+ });
+ });
+
+ describe('displayValidation', () => {
+ it('is true when a user is loading validation', () => {
+ mount({
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'loading' },
+ indexingRestrictions: { state: 'loading' },
+ initialValidation: { state: 'loading' },
+ networkConnectivity: { state: 'loading' },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.displayValidation).toEqual(true);
+ });
+
+ it('is true when a user has completed validation', () => {
+ mount({
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'valid' },
+ indexingRestrictions: { state: 'valid' },
+ initialValidation: { state: 'valid' },
+ networkConnectivity: { state: 'invalid' },
+ },
+ },
+ });
+
+ expect(AddDomainLogic.values.displayValidation).toEqual(true);
+ });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_logic.ts
new file mode 100644
index 0000000000000..3ad7c67042026
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_logic.ts
@@ -0,0 +1,344 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { i18n } from '@kbn/i18n';
+
+import { generateEncodedPath } from '../../../../../../shared/encode_path_params';
+import { flashSuccessToast } from '../../../../../../shared/flash_messages';
+import { getErrorsFromHttpResponse } from '../../../../../../shared/flash_messages/handle_api_errors';
+import { HttpLogic } from '../../../../../../shared/http';
+import { KibanaLogic } from '../../../../../../shared/kibana';
+import { CrawlerDomain, CrawlerDomainFromServer } from '../../../../../api/crawler/types';
+import {
+ CrawlerDomainValidationResult,
+ CrawlerDomainValidationResultChange,
+ CrawlerDomainValidationResultFromServer,
+ CrawlerDomainValidationStepName,
+} from '../../../../../api/crawler/types';
+import {
+ crawlDomainValidationToResult,
+ crawlerDomainServerToClient,
+} from '../../../../../api/crawler/utils';
+import { SEARCH_INDEX_CRAWLER_DOMAIN_DETAIL_PATH } from '../../../../../routes';
+import { IndexNameLogic } from '../../../index_name_logic';
+import { CrawlerLogic } from '../../crawler_logic';
+
+import {
+ domainValidationFailureResultChange,
+ extractDomainAndEntryPointFromUrl,
+ getDomainWithProtocol,
+} from './utils';
+
+export interface AddDomainLogicValues {
+ addDomainFormInputValue: string;
+ allowSubmit: boolean;
+ canIgnoreValidationFailure: boolean;
+ displayValidation: boolean;
+ domainValidationResult: CrawlerDomainValidationResult;
+ entryPointValue: string;
+ errors: string[];
+ hasBlockingFailure: boolean;
+ hasValidationCompleted: boolean;
+ ignoreValidationFailure: boolean;
+ isFlyoutVisible: boolean;
+ isValidationLoading: boolean;
+}
+
+export interface AddDomainLogicActions {
+ clearDomainFormInputValue(): void;
+ closeFlyout(): void;
+ onSubmitNewDomainError(errors: string[]): { errors: string[] };
+ onSubmitNewDomainSuccess(domain: CrawlerDomain): { domain: CrawlerDomain };
+ openFlyout(): void;
+ performDomainValidationStep(
+ stepName: CrawlerDomainValidationStepName,
+ checks: string[]
+ ): {
+ checks: string[];
+ stepName: CrawlerDomainValidationStepName;
+ };
+ setAddDomainFormInputValue(newValue: string): string;
+ setDomainValidationResult(change: CrawlerDomainValidationResultChange): {
+ change: CrawlerDomainValidationResultChange;
+ };
+ setIgnoreValidationFailure(newValue: boolean): boolean;
+ startDomainValidation(): void;
+ submitNewDomain(): void;
+ validateDomainContentVerification(): void;
+ validateDomainIndexingRestrictions(): void;
+ validateDomainInitialVerification(
+ newValue: string,
+ newEntryPointValue: string
+ ): { newEntryPointValue: string; newValue: string };
+ validateDomainNetworkConnectivity(): void;
+}
+
+const DEFAULT_SELECTOR_VALUES = {
+ addDomainFormInputValue: 'https://',
+ allowSubmit: false,
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: '' },
+ indexingRestrictions: { state: '' },
+ initialValidation: { state: '' },
+ networkConnectivity: { state: '' },
+ },
+ } as CrawlerDomainValidationResult,
+ entryPointValue: '/',
+ ignoreValidationFailure: false,
+ isValidationLoading: false,
+};
+
+export const AddDomainLogic = kea<MakeLogicType<AddDomainLogicValues, AddDomainLogicActions>>({
+ path: ['enterprise_search', 'crawler', 'add_domain_logic'],
+ actions: () => ({
+ clearDomainFormInputValue: true,
+ closeFlyout: true,
+ initialValidation: true,
+ onSubmitNewDomainError: (errors) => ({ errors }),
+ onSubmitNewDomainSuccess: (domain) => ({ domain }),
+ openFlyout: true,
+ performDomainValidationStep: (stepName, checks) => ({ checks, stepName }),
+ setAddDomainFormInputValue: (newValue) => newValue,
+ setDomainValidationResult: (change: CrawlerDomainValidationResultChange) => ({ change }),
+ setIgnoreValidationFailure: (newValue) => newValue,
+ startDomainValidation: true,
+ submitNewDomain: true,
+ validateDomainContentVerification: true,
+ validateDomainIndexingRestrictions: true,
+ validateDomainInitialVerification: (newValue, newEntryPointValue) => ({
+ newEntryPointValue,
+ newValue,
+ }),
+ validateDomainNetworkConnectivity: true,
+ }),
+ reducers: () => ({
+ addDomainFormInputValue: [
+ DEFAULT_SELECTOR_VALUES.addDomainFormInputValue,
+ {
+ clearDomainFormInputValue: () => DEFAULT_SELECTOR_VALUES.addDomainFormInputValue,
+ setAddDomainFormInputValue: (_, newValue: string) => newValue,
+ validateDomainInitialVerification: (_, { newValue }: { newValue: string }) => newValue,
+ },
+ ],
+ domainValidationResult: [
+ DEFAULT_SELECTOR_VALUES.domainValidationResult,
+ {
+ clearDomainFormInputValue: () => DEFAULT_SELECTOR_VALUES.domainValidationResult,
+ setAddDomainFormInputValue: () => DEFAULT_SELECTOR_VALUES.domainValidationResult,
+ setDomainValidationResult: ({ steps }, { change }) => ({
+ steps: {
+ ...steps,
+ ...change,
+ },
+ }),
+ startDomainValidation: () => ({
+ steps: {
+ contentVerification: { state: 'loading' },
+ indexingRestrictions: { state: 'loading' },
+ initialValidation: { state: 'loading' },
+ networkConnectivity: { state: 'loading' },
+ },
+ }),
+ },
+ ],
+ entryPointValue: [
+ DEFAULT_SELECTOR_VALUES.entryPointValue,
+ {
+ clearDomainFormInputValue: () => DEFAULT_SELECTOR_VALUES.entryPointValue,
+ setAddDomainFormInputValue: () => DEFAULT_SELECTOR_VALUES.entryPointValue,
+ validateDomainInitialVerification: (_, { newEntryPointValue }) => newEntryPointValue,
+ },
+ ],
+ errors: [
+ [],
+ {
+ clearDomainFormInputValue: () => [],
+ onSubmitNewDomainError: (_, { errors }) => errors,
+ setAddDomainFormInputValue: () => [],
+ submitNewDomain: () => [],
+ validateDomainInitialVerification: () => [],
+ },
+ ],
+ ignoreValidationFailure: [
+ DEFAULT_SELECTOR_VALUES.ignoreValidationFailure,
+ {
+ clearDomainFormInputValue: () => DEFAULT_SELECTOR_VALUES.ignoreValidationFailure,
+ setAddDomainFormInputValue: () => DEFAULT_SELECTOR_VALUES.ignoreValidationFailure,
+ setIgnoreValidationFailure: (_, newValue: boolean) => newValue,
+ },
+ ],
+ isFlyoutVisible: [
+ false,
+ {
+ closeFlyout: () => false,
+ onSubmitNewDomainSuccess: () => false,
+ openFlyout: () => true,
+ },
+ ],
+ }),
+ selectors: ({ selectors }) => ({
+ allowSubmit: [
+ () => [
+ selectors.ignoreValidationFailure,
+ selectors.hasValidationCompleted,
+ selectors.hasBlockingFailure,
+ ],
+ (ignoreValidationFailure, hasValidationCompleted, hasBlockingFailure) => {
+ if (ignoreValidationFailure) {
+ return true;
+ }
+
+ return hasValidationCompleted && !hasBlockingFailure;
+ },
+ ],
+ canIgnoreValidationFailure: [
+ () => [selectors.hasValidationCompleted, selectors.domainValidationResult],
+ (hasValidationCompleted: boolean, domainValidationResult: CrawlerDomainValidationResult) => {
+ if (!hasValidationCompleted) {
+ return false;
+ }
+
+ return (
+ domainValidationResult.steps.indexingRestrictions.blockingFailure ||
+ domainValidationResult.steps.contentVerification.blockingFailure
+ );
+ },
+ ],
+ displayValidation: [
+ () => [selectors.isValidationLoading, selectors.hasValidationCompleted],
+ (isValidationLoading, hasValidationCompleted) =>
+ isValidationLoading || hasValidationCompleted,
+ ],
+ hasBlockingFailure: [
+ () => [selectors.domainValidationResult],
+ (domainValidationResult: CrawlerDomainValidationResult) =>
+ !!Object.values(domainValidationResult.steps).find((step) => step.blockingFailure),
+ ],
+ hasValidationCompleted: [
+ () => [selectors.domainValidationResult],
+ (domainValidationResult: CrawlerDomainValidationResult) =>
+ !Object.values(domainValidationResult.steps).find(
+ (step) => step.state === 'loading' || step.state === ''
+ ),
+ ],
+ isValidationLoading: [
+ () => [selectors.domainValidationResult],
+ (domainValidationResult: CrawlerDomainValidationResult) =>
+ !!Object.values(domainValidationResult.steps).find((step) => step.state === 'loading'),
+ ],
+ }),
+ listeners: ({ actions, values }) => ({
+ onSubmitNewDomainSuccess: ({ domain }) => {
+ const { indexName } = IndexNameLogic.values;
+ flashSuccessToast(
+ i18n.translate('xpack.enterpriseSearch.crawler.domainsTable.action.add.successMessage', {
+ defaultMessage: "Successfully added domain '{domainUrl}'",
+ values: {
+ domainUrl: domain.url,
+ },
+ })
+ );
+ KibanaLogic.values.navigateToUrl(
+ generateEncodedPath(SEARCH_INDEX_CRAWLER_DOMAIN_DETAIL_PATH, {
+ domainId: domain.id,
+ indexName,
+ })
+ );
+ CrawlerLogic.actions.fetchCrawlerData();
+ },
+ performDomainValidationStep: async ({ stepName, checks }) => {
+ const { http } = HttpLogic.values;
+ const failureResultChange = domainValidationFailureResultChange(stepName);
+
+ const route = '/internal/enterprise_search/crawler/validate_url';
+
+ try {
+        const data = await http.post<CrawlerDomainValidationResultFromServer>(route, {
+ body: JSON.stringify({ checks, url: values.addDomainFormInputValue.trim() }),
+ });
+ const result = crawlDomainValidationToResult(data);
+
+ if (result.blockingFailure) {
+ actions.setDomainValidationResult({ [stepName]: result, ...failureResultChange });
+ } else {
+ actions.setDomainValidationResult({ [stepName]: result });
+
+ // Trigger next step
+ switch (stepName) {
+ case 'initialValidation':
+ actions.validateDomainNetworkConnectivity();
+ break;
+ case 'networkConnectivity':
+ actions.validateDomainIndexingRestrictions();
+ break;
+ case 'indexingRestrictions':
+ actions.validateDomainContentVerification();
+ break;
+ }
+ }
+ } catch (e) {
+ actions.setDomainValidationResult({
+ [stepName]: {
+ blockingFailure: true,
+ message: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.addDomainForm.unexpectedValidationErrorMessage',
+ { defaultMessage: 'Unexpected error' }
+ ),
+ state: 'invalid',
+ },
+ ...failureResultChange,
+ });
+ }
+ },
+ startDomainValidation: async () => {
+ const { domain, entryPoint } = extractDomainAndEntryPointFromUrl(
+ values.addDomainFormInputValue.trim()
+ );
+ const domainWithProtocol = await getDomainWithProtocol(domain);
+ actions.validateDomainInitialVerification(domainWithProtocol, entryPoint);
+ },
+ submitNewDomain: async () => {
+ const { http } = HttpLogic.values;
+ const { indexName } = IndexNameLogic.values;
+
+ const requestBody = JSON.stringify({
+ entry_points: [{ value: values.entryPointValue }],
+ name: values.addDomainFormInputValue.trim(),
+ });
+
+ try {
+        const response = await http.post<CrawlerDomainFromServer>(
+ `/internal/enterprise_search/indices/${indexName}/crawler/domains`,
+ {
+ body: requestBody,
+ }
+ );
+ const domain = crawlerDomainServerToClient(response);
+ actions.onSubmitNewDomainSuccess(domain);
+ } catch (e) {
+ // we surface errors inside the form instead of in flash messages
+ const errorMessages = getErrorsFromHttpResponse(e);
+ actions.onSubmitNewDomainError(errorMessages);
+ }
+ },
+ validateDomainContentVerification: () => {
+ actions.performDomainValidationStep('contentVerification', ['url_request', 'url_content']);
+ },
+ validateDomainIndexingRestrictions: () => {
+ actions.performDomainValidationStep('indexingRestrictions', ['robots_txt']);
+ },
+ validateDomainInitialVerification: () => {
+ actions.performDomainValidationStep('initialValidation', ['url']);
+ },
+ validateDomainNetworkConnectivity: () => {
+ actions.performDomainValidationStep('networkConnectivity', ['dns', 'tcp']);
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_validation.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_validation.test.tsx
new file mode 100644
index 0000000000000..54a6dde52e42c
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_validation.test.tsx
@@ -0,0 +1,66 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockActions, setMockValues } from '../../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiCheckbox } from '@elastic/eui';
+
+import { AddDomainValidation } from './add_domain_validation';
+import { ValidationStepPanel } from './validation_step_panel';
+
+describe('AddDomainValidation', () => {
+ const actions = {
+ setIgnoreValidationFailure: jest.fn(),
+ };
+
+ it('contains four validation steps', () => {
+ setMockValues({
+ addDomainFormInputValue: 'https://elastic.co',
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'loading' },
+ indexingRestrictions: { state: 'loading' },
+ initialValidation: { state: 'loading' },
+ networkConnectivity: { state: 'loading' },
+ },
+ },
+ });
+
+    const wrapper = shallow(<AddDomainValidation />);
+
+ expect(wrapper.find(ValidationStepPanel)).toHaveLength(4);
+ });
+
+ it('can ignore validation failure', () => {
+ setMockValues({
+ canIgnoreValidationFailure: true,
+ ignoreValidationFailure: false,
+ addDomainFormInputValue: 'https://elastic.co',
+ domainValidationResult: {
+ steps: {
+ contentVerification: { state: 'invalid', blockingFailure: true },
+ indexingRestrictions: { state: 'valid' },
+ initialValidation: { state: 'valid' },
+ networkConnectivity: { state: 'valid' },
+ },
+ },
+ });
+ setMockActions(actions);
+
+    const wrapper = shallow(<AddDomainValidation />);
+ wrapper
+ .find(EuiCheckbox)
+ .first()
+ .simulate('change', { target: { checked: true } });
+
+ expect(actions.setIgnoreValidationFailure).toHaveBeenCalledWith(true);
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_validation.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_validation.tsx
new file mode 100644
index 0000000000000..9405d011adc25
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/add_domain_validation.tsx
@@ -0,0 +1,127 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import {
+ EuiButton,
+ EuiCheckbox,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiPanel,
+ EuiSpacer,
+ EuiText,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { AddDomainLogic } from './add_domain_logic';
+import { ValidationStepPanel } from './validation_step_panel';
+
+export const AddDomainValidation: React.FC = () => {
+ const {
+ addDomainFormInputValue,
+ canIgnoreValidationFailure,
+ domainValidationResult,
+ ignoreValidationFailure,
+ } = useValues(AddDomainLogic);
+ const { setIgnoreValidationFailure } = useActions(AddDomainLogic);
+
+ return (
+ <>
+
+
+
+
+
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.addDomainForm.testUrlButtonLabel', {
+ defaultMessage: 'Test URL in the browser',
+ })}
+
+ }
+ />
+
+
+
+
+
+
+
+ {canIgnoreValidationFailure && (
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationTitle',
+ {
+ defaultMessage: 'Ignore validation failures and continue',
+ }
+ )}
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationDescription',
+ {
+ defaultMessage:
+ 'The web crawler will be unable to index any content on this domain until the errors above are addressed.',
+ }
+ )}
+
+ >
+ }
+ checked={ignoreValidationFailure}
+ onChange={(e) => setIgnoreValidationFailure(e.target.checked)}
+ />
+
+
+ )}
+
+ >
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/utils.test.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/utils.test.ts
new file mode 100644
index 0000000000000..efc05eeb06ea7
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/utils.test.ts
@@ -0,0 +1,125 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockHttpValues } from '../../../../../../__mocks__/kea_logic';
+
+import {
+ domainValidationFailureResultChange,
+ domainValidationStateToPanelColor,
+ extractDomainAndEntryPointFromUrl,
+ getDomainWithProtocol,
+} from './utils';
+
+describe('extractDomainAndEntryPointFromUrl', () => {
+ it('extracts a provided entry point and domain', () => {
+ expect(extractDomainAndEntryPointFromUrl('https://elastic.co/guide')).toEqual({
+ domain: 'https://elastic.co',
+ entryPoint: '/guide',
+ });
+ });
+
+ it('provides a default entry point if there is only a domain', () => {
+ expect(extractDomainAndEntryPointFromUrl('https://elastic.co')).toEqual({
+ domain: 'https://elastic.co',
+ entryPoint: '/',
+ });
+ });
+});
+
+describe('getDomainWithProtocol', () => {
+ const { http } = mockHttpValues;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('passes through domain if it starts with https', async () => {
+ const result = await getDomainWithProtocol('https://elastic.co');
+
+ expect(result).toEqual('https://elastic.co');
+ expect(http.post).toHaveBeenCalledTimes(0);
+ });
+
+ it('passes through domain if it starts with http', async () => {
+ const result = await getDomainWithProtocol('http://elastic.co');
+
+ expect(result).toEqual('http://elastic.co');
+ expect(http.post).toHaveBeenCalledTimes(0);
+ });
+
+ it('returns domain with https protocol if the back-end validates https', async () => {
+ http.post.mockReturnValueOnce(Promise.resolve({ valid: true }));
+ const result = await getDomainWithProtocol('elastic.co');
+
+ expect(result).toEqual('https://elastic.co');
+ expect(http.post).toHaveBeenCalledTimes(1);
+ expect(http.post).toHaveBeenCalledWith('/internal/enterprise_search/crawler/validate_url', {
+ body: JSON.stringify({ url: 'https://elastic.co', checks: ['tcp', 'url_request'] }),
+ });
+ });
+
+ it('returns domain with http protocol if the back-end validates http', async () => {
+ http.post
+ .mockReturnValueOnce(Promise.resolve({ valid: false }))
+ .mockReturnValueOnce(Promise.resolve({ valid: true }));
+ const result = await getDomainWithProtocol('elastic.co');
+
+ expect(result).toEqual('http://elastic.co');
+ expect(http.post).toHaveBeenCalledTimes(2);
+ expect(http.post).toHaveBeenLastCalledWith('/internal/enterprise_search/crawler/validate_url', {
+ body: JSON.stringify({ url: 'http://elastic.co', checks: ['tcp', 'url_request'] }),
+ });
+ });
+
+ it('passes through domain if back-end throws error', async () => {
+ http.post.mockReturnValueOnce(Promise.reject());
+
+ const result = await getDomainWithProtocol('elastic.co');
+
+ expect(result).toEqual('elastic.co');
+ expect(http.post).toHaveBeenCalledTimes(1);
+ });
+
+ it('passes through domain if back-end fails to validate https and http', async () => {
+ http.post.mockReturnValueOnce(Promise.resolve({ valid: false }));
+ http.post.mockReturnValueOnce(Promise.resolve({ valid: false }));
+ const result = await getDomainWithProtocol('elastic.co');
+
+ expect(result).toEqual('elastic.co');
+ expect(http.post).toHaveBeenCalledTimes(2);
+ });
+});
+
+describe('domainValidationStateToPanelColor', () => {
+ it('returns expected values', () => {
+ expect(domainValidationStateToPanelColor('valid')).toEqual('success');
+ expect(domainValidationStateToPanelColor('invalid')).toEqual('danger');
+ expect(domainValidationStateToPanelColor('')).toEqual('subdued');
+ expect(domainValidationStateToPanelColor('loading')).toEqual('subdued');
+ });
+});
+
+describe('domainValidationFailureResultChange', () => {
+ it('returns the expected results', () => {
+ expect(domainValidationFailureResultChange('initialValidation')).toMatchObject({
+ networkConnectivity: expect.any(Object),
+ indexingRestrictions: expect.any(Object),
+ contentVerification: expect.any(Object),
+ });
+
+ expect(domainValidationFailureResultChange('networkConnectivity')).toMatchObject({
+ indexingRestrictions: expect.any(Object),
+ contentVerification: expect.any(Object),
+ });
+
+ expect(domainValidationFailureResultChange('indexingRestrictions')).toMatchObject({
+ contentVerification: expect.any(Object),
+ });
+
+ expect(domainValidationFailureResultChange('contentVerification')).toEqual({});
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/utils.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/utils.ts
new file mode 100644
index 0000000000000..ca6e2ece10d2c
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/utils.ts
@@ -0,0 +1,128 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { i18n } from '@kbn/i18n';
+
+import { HttpLogic } from '../../../../../../shared/http';
+import {
+ CrawlerDomainValidationResultFromServer,
+ CrawlerDomainValidationResultChange,
+ CrawlerDomainValidationStepName,
+ CrawlerDomainValidationStepState,
+} from '../../../../../api/crawler/types';
+
+export const extractDomainAndEntryPointFromUrl = (
+ url: string
+): { domain: string; entryPoint: string } => {
+ let domain = url;
+ let entryPoint = '/';
+
+ const pathSlashIndex = url.search(/[^\:\/]\//);
+ if (pathSlashIndex !== -1) {
+ domain = url.substring(0, pathSlashIndex + 1);
+ entryPoint = url.substring(pathSlashIndex + 1);
+ }
+
+ return { domain, entryPoint };
+};
+
+export const getDomainWithProtocol = async (domain: string) => {
+ const { http } = HttpLogic.values;
+
+ if (!domain.startsWith('https://') && !domain.startsWith('http://')) {
+ try {
+ const route = '/internal/enterprise_search/crawler/validate_url';
+ const checks = ['tcp', 'url_request'];
+
+ const httpsCheckData: CrawlerDomainValidationResultFromServer = await http.post(route, {
+ body: JSON.stringify({ url: `https://${domain}`, checks }),
+ });
+ if (httpsCheckData.valid) {
+ return `https://${domain}`;
+ }
+
+ const httpCheckData: CrawlerDomainValidationResultFromServer = await http.post(route, {
+ body: JSON.stringify({ url: `http://${domain}`, checks }),
+ });
+ if (httpCheckData.valid) {
+ return `http://${domain}`;
+ }
+ } catch (error) {
+ // Do nothing as later validation steps will catch errors
+ }
+ }
+
+ return domain;
+};
+
+export const domainValidationStateToPanelColor = (
+ state: CrawlerDomainValidationStepState
+): 'success' | 'warning' | 'danger' | 'subdued' => {
+ switch (state) {
+ case 'valid':
+ return 'success';
+ case 'warning':
+ return 'warning';
+ case 'invalid':
+ return 'danger';
+ default:
+ return 'subdued';
+ }
+};
+
+const allFailureResultChanges: CrawlerDomainValidationResultChange = {
+ networkConnectivity: {
+ state: 'invalid',
+ message: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.crawler.addDomainForm.networkConnectivityFailureMessage',
+ {
+ defaultMessage:
+ 'Unable to establish a network connection because the "Initial validation" check failed.',
+ }
+ ),
+ },
+ indexingRestrictions: {
+ state: 'invalid',
+ message: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.crawler.addDomainForm.indexingRestrictionsFailureMessage',
+ {
+ defaultMessage:
+ 'Unable to determine indexing restrictions because the "Network connectivity" check failed.',
+ }
+ ),
+ },
+ contentVerification: {
+ state: 'invalid',
+ message: i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.crawler.addDomainForm.contentVerificationFailureMessage',
+ {
+ defaultMessage:
+ 'Unable to verify content because the "Indexing restrictions" check failed.',
+ }
+ ),
+ },
+};
+
+export const domainValidationFailureResultChange = (
+ stepName: CrawlerDomainValidationStepName
+): CrawlerDomainValidationResultChange => {
+ switch (stepName) {
+ case 'initialValidation':
+ return allFailureResultChanges;
+ case 'networkConnectivity':
+ return {
+ indexingRestrictions: allFailureResultChanges.indexingRestrictions,
+ contentVerification: allFailureResultChanges.contentVerification,
+ };
+ case 'indexingRestrictions':
+ return {
+ contentVerification: allFailureResultChanges.contentVerification,
+ };
+ default:
+ return {};
+ }
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_state_icon.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_state_icon.test.tsx
new file mode 100644
index 0000000000000..8bb82f93e3ec4
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_state_icon.test.tsx
@@ -0,0 +1,40 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiIcon, EuiLoadingSpinner } from '@elastic/eui';
+
+import { ValidationStateIcon } from './validation_state_icon';
+
+describe('ValidationStateIcon', () => {
+ it('shows a success icon when valid', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.find(EuiIcon).prop('color')).toEqual('success');
+ });
+
+ it('shows a warning icon when warning', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.find(EuiIcon).prop('color')).toEqual('warning');
+ });
+
+ it('shows a danger icon when invalid', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.find(EuiIcon).prop('color')).toEqual('danger');
+ });
+
+ it('shows a loading spinner by default', () => {
+ const wrapper = shallow( );
+
+ expect(wrapper.find(EuiLoadingSpinner)).toHaveLength(1);
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_state_icon.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_state_icon.tsx
new file mode 100644
index 0000000000000..ea3425e1c842c
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_state_icon.tsx
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { EuiIcon, EuiLoadingSpinner } from '@elastic/eui';
+
+import { CrawlerDomainValidationStepState } from '../../../../../api/crawler/types';
+
+export const ValidationStateIcon: React.FC<{ state: CrawlerDomainValidationStepState }> = ({
+ state,
+}) => {
+ switch (state) {
+ case 'valid':
+ return ;
+ case 'warning':
+ return ;
+ case 'invalid':
+ return ;
+ default:
+ return ;
+ }
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_step_panel.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_step_panel.test.tsx
new file mode 100644
index 0000000000000..ace6b85210fc2
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_step_panel.test.tsx
@@ -0,0 +1,79 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiPanel } from '@elastic/eui';
+
+import { ValidationStateIcon } from './validation_state_icon';
+import { ValidationStepPanel } from './validation_step_panel';
+
+describe('ValidationStepPanel', () => {
+ describe('renders', () => {
+ const wrapper = shallow(
+
+ );
+
+ it('passed the correct color to the EuiPanel', () => {
+ expect(wrapper.find(EuiPanel).prop('color')).toEqual('success');
+ });
+
+ it('contains a validation state icon', () => {
+ expect(wrapper.find(ValidationStateIcon)).toHaveLength(1);
+ });
+
+ it('renders a label', () => {
+ expect(wrapper.find('h3').text()).toEqual('Initial validation');
+ });
+ });
+ describe('invalid messages and actions', () => {
+ const errorMessage = 'Error message';
+ const action =
;
+
+ it('displays the passed error message and action is invalid', () => {
+ const wrapper = shallow(
+
+ );
+ expect(wrapper.find('[data-test-subj="errorMessage"]').childAt(0).text()).toContain(
+ 'Error message'
+ );
+ expect(wrapper.find('[data-test-subj="action"]')).toHaveLength(1);
+ });
+
+ it('displays the passed error message and action when state is warning', () => {
+ const wrapper = shallow(
+
+ );
+ expect(wrapper.find('[data-test-subj="errorMessage"]').childAt(0).text()).toContain(
+ 'Error message'
+ );
+ expect(wrapper.find('[data-test-subj="action"]')).toHaveLength(1);
+ });
+
+ it('does not display the passed error message or action when state is loading', () => {
+ const wrapper = shallow(
+
+ );
+ expect(wrapper.find('[data-test-subj="errorMessage"]')).toHaveLength(0);
+ expect(wrapper.find('[data-test-subj="action"]')).toHaveLength(0);
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_step_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_step_panel.tsx
new file mode 100644
index 0000000000000..07e86b5f92d9e
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/add_domain/validation_step_panel.tsx
@@ -0,0 +1,65 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import {
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiMarkdownFormat,
+ EuiPanel,
+ EuiSpacer,
+ EuiTitle,
+} from '@elastic/eui';
+
+import { CrawlerDomainValidationStep } from '../../../../../api/crawler/types';
+
+import { domainValidationStateToPanelColor } from './utils';
+import { ValidationStateIcon } from './validation_state_icon';
+
+interface ValidationStepPanelProps {
+ action?: React.ReactNode;
+ label: string;
+ step: CrawlerDomainValidationStep;
+}
+
+export const ValidationStepPanel: React.FC = ({
+ step,
+ label,
+ action,
+}) => {
+ const showErrorMessage = step.state === 'invalid' || step.state === 'warning';
+
+ return (
+
+
+
+
+
+
+
+ {label}
+
+
+
+ {showErrorMessage && (
+ <>
+
+
+ {step.message || ''}
+
+ {action && (
+ <>
+
+ {action}
+ >
+ )}
+ >
+ )}
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/crawler_status_banner.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/crawler_status_banner.test.tsx
new file mode 100644
index 0000000000000..bf94f308e28d1
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/crawler_status_banner.test.tsx
@@ -0,0 +1,59 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { setMockValues } from '../../../../../__mocks__/kea_logic';
+
+import React from 'react';
+
+import { shallow } from 'enzyme';
+
+import { EuiCallOut } from '@elastic/eui';
+
+import { CrawlerStatus } from '../../../../api/crawler/types';
+
+import { CrawlerStatusBanner } from './crawler_status_banner';
+
+describe('CrawlerStatusBanner', () => {
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ [(CrawlerStatus.Starting, CrawlerStatus.Running, CrawlerStatus.Canceling)].forEach((status) => {
+ describe(`when the status is ${status}`, () => {
+ it('renders a callout', () => {
+ setMockValues({
+ mostRecentCrawlRequestStatus: status,
+ });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.find(EuiCallOut)).toHaveLength(1);
+ });
+ });
+ });
+
+ [
+ CrawlerStatus.Success,
+ CrawlerStatus.Failed,
+ CrawlerStatus.Canceled,
+ CrawlerStatus.Pending,
+ CrawlerStatus.Suspended,
+ CrawlerStatus.Suspending,
+ ].forEach((status) => {
+ describe(`when the status is ${status}`, () => {
+ it('does not render a banner/callout', () => {
+ setMockValues({
+ mostRecentCrawlRequestStatus: status,
+ });
+
+ const wrapper = shallow( );
+
+ expect(wrapper.isEmptyRender()).toBe(true);
+ });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/crawler_status_banner.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/crawler_status_banner.tsx
new file mode 100644
index 0000000000000..b7f029ff8579a
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/crawler_status_banner.tsx
@@ -0,0 +1,43 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues } from 'kea';
+
+import { EuiCallOut, EuiSpacer } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { CrawlerStatus } from '../../../../api/crawler/types';
+import { CrawlerLogic } from '../crawler_logic';
+
+export const CrawlerStatusBanner: React.FC = () => {
+ const { mostRecentCrawlRequestStatus } = useValues(CrawlerLogic);
+ if (
+ mostRecentCrawlRequestStatus === CrawlerStatus.Running ||
+ mostRecentCrawlRequestStatus === CrawlerStatus.Starting ||
+ mostRecentCrawlRequestStatus === CrawlerStatus.Canceling
+ ) {
+ return (
+ <>
+
+
+ >
+ );
+ }
+ return null;
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domain_management.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domain_management.tsx
new file mode 100644
index 0000000000000..0f6211e07945a
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domain_management.tsx
@@ -0,0 +1,42 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useValues } from 'kea';
+
+import { EuiSpacer } from '@elastic/eui';
+
+import { Loading } from '../../../../../shared/loading';
+
+import { DeleteCrawlerDomainApiLogic } from '../../../../api/crawler/delete_crawler_domain_api_logic';
+import { GetCrawlerDomainsApiLogic } from '../../../../api/crawler/get_crawler_domains_api_logic';
+
+import { AddDomainFlyout } from './add_domain/add_domain_flyout';
+import { CrawlerStatusBanner } from './crawler_status_banner';
+import { DomainManagementLogic } from './domain_management_logic';
+import { DomainsPanel } from './domains_panel';
+import { EmptyStatePanel } from './empty_state_panel';
+
+export const SearchIndexDomainManagement: React.FC = () => {
+ DeleteCrawlerDomainApiLogic.mount();
+ GetCrawlerDomainsApiLogic.mount();
+ const { domains, isLoading } = useValues(DomainManagementLogic);
+
+ if (isLoading) {
+ return ;
+ }
+
+ return (
+ <>
+
+
+ {domains.length > 0 ? : }
+
+ >
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domain_management_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domain_management_logic.ts
new file mode 100644
index 0000000000000..f9f2c4c44eaaa
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domain_management_logic.ts
@@ -0,0 +1,129 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+import { i18n } from '@kbn/i18n';
+
+import { Meta } from '../../../../../../../common/types';
+import { HttpError, Status } from '../../../../../../../common/types/api';
+import { DEFAULT_META } from '../../../../../shared/constants';
+import { flashAPIErrors, flashSuccessToast } from '../../../../../shared/flash_messages';
+import { updateMetaPageIndex } from '../../../../../shared/table_pagination';
+import { DeleteCrawlerDomainApiLogic } from '../../../../api/crawler/delete_crawler_domain_api_logic';
+import { GetCrawlerDomainsApiLogic } from '../../../../api/crawler/get_crawler_domains_api_logic';
+import { CrawlerDomain, CrawlerDomainsWithMeta } from '../../../../api/crawler/types';
+import { IndexNameLogic } from '../../index_name_logic';
+import { CrawlerLogic } from '../crawler_logic';
+
+interface DomainManagementValues {
+ deleteStatus: Status;
+ domains: CrawlerDomain[];
+ getData: CrawlerDomainsWithMeta | null;
+ getStatus: Status;
+ isLoading: boolean;
+ meta: Meta;
+}
+
+interface DomainManagementActions {
+ deleteApiError(error: HttpError): HttpError;
+ deleteDomain(domain: CrawlerDomain): { domain: CrawlerDomain };
+ deleteSuccess(): void;
+ getApiError(error: HttpError): HttpError;
+ getApiSuccess(data: CrawlerDomainsWithMeta): CrawlerDomainsWithMeta;
+ getDomains(meta: Meta): { meta: Meta };
+ onPaginate(newPageIndex: number): { newPageIndex: number };
+}
+
+export const DomainManagementLogic = kea<
+ MakeLogicType
+>({
+ connect: {
+ actions: [
+ GetCrawlerDomainsApiLogic,
+ ['apiError as getApiError', 'apiSuccess as getApiSuccess'],
+ DeleteCrawlerDomainApiLogic,
+ ['apiError as deleteApiError', 'apiSuccess as deleteApiSuccess'],
+ ],
+ values: [
+ GetCrawlerDomainsApiLogic,
+ ['status as getStatus', 'data as getData'],
+ DeleteCrawlerDomainApiLogic,
+ ['status as deleteStatus'],
+ ],
+ },
+ path: ['enterprise_search', 'domain_management'],
+ actions: {
+ deleteDomain: (domain) => ({ domain }),
+ getDomains: (meta) => ({ meta }),
+ onPaginate: (newPageIndex) => ({
+ newPageIndex,
+ }),
+ },
+ listeners: ({ values, actions }) => ({
+ deleteApiError: (error) => {
+ flashAPIErrors(error);
+ },
+ deleteApiSuccess: ({ domain }) => {
+ actions.getDomains(values.meta);
+ flashSuccessToast(
+ i18n.translate('xpack.enterpriseSearch.crawler.domainsTable.action.delete.successMessage', {
+ defaultMessage: "Successfully deleted domain '{domainUrl}'",
+ values: {
+ domainUrl: domain.url,
+ },
+ })
+ );
+ CrawlerLogic.actions.fetchCrawlerData();
+ },
+ deleteDomain: ({ domain }) => {
+ const { indexName } = IndexNameLogic.values;
+ DeleteCrawlerDomainApiLogic.actions.makeRequest({ domain, indexName });
+ },
+ getApiError: (error) => {
+ flashAPIErrors(error);
+ },
+ getDomains: ({ meta }) => {
+ const { indexName } = IndexNameLogic.values;
+ GetCrawlerDomainsApiLogic.actions.makeRequest({ indexName, meta });
+ },
+ onPaginate: ({ newPageIndex }) => {
+ actions.getDomains(updateMetaPageIndex(values.meta, newPageIndex));
+ },
+ }),
+ selectors: ({ selectors }) => ({
+ domains: [
+ () => [selectors.getData],
+ (getData: DomainManagementValues['getData']) => getData?.domains ?? [],
+ ],
+ meta: [
+ () => [selectors.getData],
+ (getData: DomainManagementValues['getData']) => getData?.meta ?? DEFAULT_META,
+ ],
+ isLoading: [
+ () => [selectors.getStatus, selectors.deleteStatus],
+ (
+ getStatus: DomainManagementValues['getStatus'],
+ deleteStatus: DomainManagementValues['deleteStatus']
+ ) =>
+ getStatus === Status.IDLE ||
+ getStatus === Status.LOADING ||
+ deleteStatus === Status.LOADING,
+ ],
+ }),
+ events: ({ actions, values }) => ({
+ afterMount: () => {
+ actions.getDomains(values.meta);
+ },
+ }),
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_panel.tsx
new file mode 100644
index 0000000000000..fdd56e055e1cd
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_panel.tsx
@@ -0,0 +1,57 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions } from 'kea';
+
+import {
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiTitle,
+ EuiSpacer,
+ EuiPanel,
+ EuiIcon,
+ EuiButton,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { AddDomainLogic } from './add_domain/add_domain_logic';
+import { DomainsTable } from './domains_table';
+
+export const DomainsPanel: React.FC = () => {
+ const { openFlyout } = useActions(AddDomainLogic);
+
+ return (
+
+
+
+
+
+
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.domainsTitle', {
+ defaultMessage: 'Domains',
+ })}
+
+
+
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.addDomainFlyout.openButtonLabel', {
+ defaultMessage: 'Add domain',
+ })}
+
+
+
+
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_table.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_table.test.tsx
new file mode 100644
index 0000000000000..b5d66f3078858
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_table.test.tsx
@@ -0,0 +1,184 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import {
+ setMockValues,
+ setMockActions,
+ mockKibanaValues,
+} from '../../../../../__mocks__/kea_logic';
+import '../../_mocks_/index_name_logic.mock';
+
+import React from 'react';
+
+import { shallow, ShallowWrapper } from 'enzyme';
+
+import { EuiBasicTable, EuiButtonIcon } from '@elastic/eui';
+
+import { mountWithIntl } from '@kbn/test-jest-helpers';
+
+import { DEFAULT_META } from '../../../../../shared/constants';
+import { CrawlerDomain } from '../../../../api/crawler/types';
+
+import { DomainsTable } from './domains_table';
+
+const domains: CrawlerDomain[] = [
+ {
+ id: '1234',
+ documentCount: 9999,
+ url: 'elastic.co',
+ crawlRules: [],
+ entryPoints: [],
+ sitemaps: [],
+ lastCrawl: '2020-01-01T00:00:00-12:00',
+ createdOn: '2020-01-01T00:00:00-12:00',
+ deduplicationEnabled: false,
+ deduplicationFields: ['title'],
+ availableDeduplicationFields: ['title', 'description'],
+ },
+ {
+ id: '4567',
+ documentCount: 0,
+ url: 'empty.site',
+ crawlRules: [],
+ entryPoints: [],
+ sitemaps: [],
+ createdOn: '1970-01-01T00:00:00-12:00',
+ deduplicationEnabled: false,
+ deduplicationFields: ['title'],
+ availableDeduplicationFields: ['title', 'description'],
+ },
+];
+
+const values = {
+ // IndexNameLogic
+ indexName: 'index-name',
+ // CrawlerDomainsLogic
+ domains,
+ meta: DEFAULT_META,
+ dataLoading: false,
+ // AppLogic
+ myRole: { canManageEngineCrawler: false },
+};
+
+const actions = {
+ // CrawlerDomainsLogic
+ deleteDomain: jest.fn(),
+ fetchCrawlerDomainsData: jest.fn(),
+ onPaginate: jest.fn(),
+};
+
+describe('DomainsTable', () => {
+ let wrapper: ShallowWrapper;
+ let tableContent: string;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ beforeAll(() => {
+ setMockValues(values);
+ setMockActions(actions);
+ wrapper = shallow( );
+ tableContent = mountWithIntl( )
+ .find(EuiBasicTable)
+ .text();
+ });
+
+ it('renders', () => {
+ expect(wrapper.find(EuiBasicTable)).toHaveLength(1);
+
+ expect(wrapper.find(EuiBasicTable).prop('pagination')).toEqual({
+ showPerPageOptions: false,
+ pageIndex: 0,
+ pageSize: 10,
+ totalItemCount: 0,
+ });
+
+ wrapper.find(EuiBasicTable).simulate('change', { page: { index: 2 } });
+ expect(actions.onPaginate).toHaveBeenCalledWith(3);
+ });
+
+ describe('columns', () => {
+ it('renders a url column', () => {
+ expect(tableContent).toContain('elastic.co');
+ });
+
+ it('renders a clickable domain url', () => {
+ const basicTable = wrapper.find(EuiBasicTable).dive();
+ const link = basicTable.find('[data-test-subj="CrawlerDomainURL"]').at(0);
+
+ expect(link.dive().text()).toContain('elastic.co');
+ expect(link.props()).toEqual(
+ expect.objectContaining({
+ to: '/search_indices/index-name/crawler/domains/1234',
+ })
+ );
+ });
+
+ it('renders a last crawled column', () => {
+ expect(tableContent).toContain('Last activity');
+ expect(tableContent).toContain('Jan 1, 2020');
+ });
+
+ it('renders a document count column', () => {
+ expect(tableContent).toContain('Documents');
+ expect(tableContent).toContain('9,999');
+ });
+
+ describe('actions column', () => {
+ const getTable = () => wrapper.find(EuiBasicTable).dive();
+ const getActions = () => getTable().find('ExpandedItemActions');
+ const getActionItems = () => getActions().first().dive().find('DefaultItemAction');
+
+ describe('when the user can manage/delete engines', () => {
+ const getManageAction = () => getActionItems().at(0).dive().find(EuiButtonIcon);
+ const getDeleteAction = () => getActionItems().at(1).dive().find(EuiButtonIcon);
+
+ beforeEach(() => {
+ setMockValues({
+ ...values,
+ // AppLogic
+ myRole: { canManageEngineCrawler: true },
+ });
+ wrapper = shallow( );
+ });
+
+ describe('manage action', () => {
+ it('sends the user to the engine overview on click', () => {
+ const { navigateToUrl } = mockKibanaValues;
+
+ getManageAction().simulate('click');
+
+ expect(navigateToUrl).toHaveBeenCalledWith(
+ '/search_indices/index-name/crawler/domains/1234'
+ );
+ });
+ });
+
+ describe('delete action', () => {
+ it('clicking the action and confirming deletes the domain', () => {
+ jest.spyOn(global, 'confirm').mockReturnValueOnce(true);
+
+ getDeleteAction().simulate('click');
+
+ expect(actions.deleteDomain).toHaveBeenCalledWith(
+ expect.objectContaining({ id: '1234' })
+ );
+ });
+
+ it('clicking the action and not confirming does not delete the engine', () => {
+ jest.spyOn(global, 'confirm').mockReturnValueOnce(false);
+
+ getDeleteAction().simulate('click');
+
+ expect(actions.deleteDomain).not.toHaveBeenCalled();
+ });
+ });
+ });
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_table.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_table.tsx
new file mode 100644
index 0000000000000..94c7c3ed32cf6
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/domains_table.tsx
@@ -0,0 +1,130 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import { EuiBasicTableColumn, EuiBasicTable } from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { FormattedNumber } from '@kbn/i18n-react';
+
+import { DELETE_BUTTON_LABEL, MANAGE_BUTTON_LABEL } from '../../../../../shared/constants';
+import { CustomFormattedTimestamp } from '../../../../../shared/custom_formatted_timestamp/custom_formatted_timestamp';
+import { generateEncodedPath } from '../../../../../shared/encode_path_params';
+
+import { KibanaLogic } from '../../../../../shared/kibana';
+import { EuiLinkTo } from '../../../../../shared/react_router_helpers';
+import { convertMetaToPagination, handlePageChange } from '../../../../../shared/table_pagination';
+import { CrawlerDomain } from '../../../../api/crawler/types';
+import { SEARCH_INDEX_CRAWLER_DOMAIN_DETAIL_PATH } from '../../../../routes';
+import { IndexNameLogic } from '../../index_name_logic';
+
+import { getDeleteDomainConfirmationMessage } from '../utils';
+
+import { DomainManagementLogic } from './domain_management_logic';
+
+export const DomainsTable: React.FC = () => {
+ const { indexName } = useValues(IndexNameLogic);
+ const { domains, meta, isLoading } = useValues(DomainManagementLogic);
+ const { deleteDomain, onPaginate } = useActions(DomainManagementLogic);
+
+ const columns: Array> = [
+ {
+ field: 'url',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.domainsTable.column.domainURL', {
+ defaultMessage: 'Domain URL',
+ }),
+ render: (_, domain: CrawlerDomain) => (
+
+ {domain.url}
+
+ ),
+ },
+ {
+ field: 'lastCrawl',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.domainsTable.column.lastActivity', {
+ defaultMessage: 'Last activity',
+ }),
+ render: (lastCrawl: CrawlerDomain['lastCrawl']) =>
+ lastCrawl ? : '',
+ },
+ {
+ field: 'documentCount',
+ name: i18n.translate('xpack.enterpriseSearch.crawler.domainsTable.column.documents', {
+ defaultMessage: 'Documents',
+ }),
+ render: (documentCount: CrawlerDomain['documentCount']) => (
+
+ ),
+ },
+ {
+ name: i18n.translate('xpack.enterpriseSearch.crawler.domainsTable.column.actions', {
+ defaultMessage: 'Actions',
+ }),
+ actions: [
+ {
+ name: MANAGE_BUTTON_LABEL,
+ description: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.domainsTable.action.manage.buttonLabel',
+ {
+ defaultMessage: 'Manage this domain',
+ }
+ ),
+ type: 'icon',
+ icon: 'eye',
+ onClick: (domain) => {
+ KibanaLogic.values.navigateToUrl(
+ generateEncodedPath(SEARCH_INDEX_CRAWLER_DOMAIN_DETAIL_PATH, {
+ domainId: domain.id,
+ indexName,
+ })
+ );
+ },
+ },
+ {
+ name: DELETE_BUTTON_LABEL,
+ description: i18n.translate(
+ 'xpack.enterpriseSearch.crawler.domainsTable.action.delete.buttonLabel',
+ {
+ defaultMessage: 'Delete this domain',
+ }
+ ),
+ type: 'icon',
+ icon: 'trash',
+ color: 'danger',
+ onClick: (domain) => {
+ if (window.confirm(getDeleteDomainConfirmationMessage(domain.url))) {
+ deleteDomain(domain);
+ }
+ },
+ },
+ ],
+ },
+ ];
+
+ return (
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/empty_state_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/empty_state_panel.tsx
new file mode 100644
index 0000000000000..40cd93f5b8345
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/domain_management/empty_state_panel.tsx
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { useActions, useValues } from 'kea';
+
+import {
+ EuiLink,
+ EuiSpacer,
+ EuiText,
+ EuiTitle,
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiButton,
+ EuiPanel,
+} from '@elastic/eui';
+
+import { i18n } from '@kbn/i18n';
+
+import { CrawlerLogic } from '../crawler_logic';
+
+import { AddDomainForm } from './add_domain/add_domain_form';
+import { AddDomainFormErrors } from './add_domain/add_domain_form_errors';
+import { AddDomainFormSubmitButton } from './add_domain/add_domain_form_submit_button';
+import { AddDomainLogic } from './add_domain/add_domain_logic';
+
+export const EmptyStatePanel: React.FC = () => {
+ const { openFlyout } = useActions(AddDomainLogic);
+ const { events } = useValues(CrawlerLogic);
+ return (
+
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.domainManagement.emptyState.title', {
+ defaultMessage: 'Add a domain to your index',
+ })}
+
+
+
+ {events.length > 0 ? (
+ <>
+
+
+ {i18n.translate('xpack.enterpriseSearch.crawler.domainManagement.emptyState', {
+ defaultMessage:
+ 'You don’t have any domains on this index. Add your first domain to start crawling and indexing documents.',
+ })}
+
+
+
+
+
+
+ {i18n.translate(
+ 'xpack.enterpriseSearch.crawler.domainManagement.emptyState.addDomainButtonLabel',
+ {
+ defaultMessage: 'Add your first domain',
+ }
+ )}
+
+
+
+ Learn more
+
+
+ >
+ ) : (
+ <>
+
+
+
+
+ >
+ )}
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/utils.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/utils.ts
new file mode 100644
index 0000000000000..7c8d42bf887ee
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/crawler/utils.ts
@@ -0,0 +1,21 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { i18n } from '@kbn/i18n';
+
+export const getDeleteDomainConfirmationMessage = (domainUrl: string) => {
+ return i18n.translate(
+ 'xpack.enterpriseSearch.crawler.action.deleteDomain.confirmationPopupMessage',
+ {
+ defaultMessage:
+ 'Are you sure you want to remove the domain "{domainUrl}" and all of its settings?',
+ values: {
+ domainUrl,
+ },
+ }
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/generate_api_key_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/generate_api_key_panel.tsx
new file mode 100644
index 0000000000000..c3148876608e6
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/generate_api_key_panel.tsx
@@ -0,0 +1,66 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import {
+ EuiFlexGroup,
+ EuiFlexItem,
+ EuiPanel,
+ EuiText,
+ EuiButtonIcon,
+ EuiButton,
+ EuiCodeBlock,
+} from '@elastic/eui';
+
+import { getEnterpriseSearchUrl } from '../../../shared/enterprise_search_url';
+import { DOCUMENTS_API_JSON_EXAMPLE } from '../new_index/constants';
+
+export const GenerateApiKeyPanel: React.FC = () => {
+ const searchIndexApiUrl = getEnterpriseSearchUrl('/api/ent/v1/search_indices/');
+ const apiKey = 'Create an API Key';
+
+ return (
+
+
+
+
+
+
+
+
+ Indexing by API
+
+
+
+
+
+
+
+
+ Generate an API key
+
+
+
+
+
+
+
+ {`\
+curl -X POST '${searchIndexApiUrl}${name}/document' \\
+-H 'Content-Type: application/json' \\
+-H 'Authorization: Bearer ${apiKey}' \\
+-d '${JSON.stringify(DOCUMENTS_API_JSON_EXAMPLE, null, 2)}'
+`}
+
+
+
+
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/index_name_logic.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/index_name_logic.ts
new file mode 100644
index 0000000000000..0c3339c78276e
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/index_name_logic.ts
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { kea, MakeLogicType } from 'kea';
+
+export interface IndexNameProps {
+ indexName: string;
+}
+
+export type IndexNameValues = IndexNameProps;
+
+export interface IndexNameActions {
+ setIndexName: (indexName: string) => { indexName: string };
+}
+
+export const IndexNameLogic = kea>(
+ {
+ path: ['enterprise_search', 'content', 'index_name'],
+ actions: {
+ setIndexName: (indexName) => ({ indexName }),
+ },
+ reducers: ({ props }) => ({
+ indexName: [
+ props.indexName,
+ {
+ setIndexName: (_, { indexName }) => indexName,
+ },
+ ],
+ }),
+ }
+);
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/overview.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/overview.tsx
index 7b4cc80d5af2e..989cd6570e3ea 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/overview.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/overview.tsx
@@ -9,32 +9,26 @@ import React from 'react';
import { useValues } from 'kea';
-import {
- EuiCodeBlock,
- EuiText,
- EuiFlexGroup,
- EuiButton,
- EuiButtonIcon,
- EuiFlexItem,
- EuiPanel,
-} from '@elastic/eui';
+import { EuiSpacer } from '@elastic/eui';
import { Status } from '../../../../../common/types/api';
-import { getEnterpriseSearchUrl } from '../../../shared/enterprise_search_url/external_url';
-
import { FetchIndexApiLogic } from '../../api/index/fetch_index_api_logic';
-import { DOCUMENTS_API_JSON_EXAMPLE } from '../new_index/constants';
+import { CrawlDetailsFlyout } from './crawler/crawl_details_flyout/crawl_details_flyout';
+import { CrawlRequestsPanel } from './crawler/crawl_requests_panel/crawl_requests_panel';
+import { GenerateApiKeyPanel } from './generate_api_key_panel';
import { TotalStats } from './total_stats';
export const SearchIndexOverview: React.FC = () => {
const { data, status } = useValues(FetchIndexApiLogic);
- const searchIndexApiUrl = getEnterpriseSearchUrl('/api/ent/v1/search_indices/');
- const apiKey = 'Create an API Key';
+ const isCrawler = typeof data?.crawler !== 'undefined';
+ const isConnector = typeof data?.connector !== 'undefined';
+ const isApi = !(isCrawler || isConnector);
return (
<>
+
{status === Status.SUCCESS && data && (
{
ingestionType={data.connector ? 'Connector' : data.crawler ? 'Crawler' : 'API'}
/>
)}
-
-
-
-
-
-
-
-
- Indexing by API
-
-
-
-
-
-
-
-
- Generate an API key
-
-
-
-
-
-
-
- {`\
-curl -X POST '${searchIndexApiUrl}${name}/document' \\
- -H 'Content-Type: application/json' \\
- -H 'Authorization: Bearer ${apiKey}' \\
- -d '${JSON.stringify(DOCUMENTS_API_JSON_EXAMPLE, null, 2)}'
-`}
-
-
-
-
-
-
+
+ {isApi && }
+ {isCrawler && (
+ <>
+
+
+ >
+ )}
>
);
};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/search_index.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/search_index.tsx
index 2963aa4b40520..0c0e2e2925be0 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/search_index.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/search_index.tsx
@@ -16,8 +16,7 @@ import { EuiTabbedContent, EuiTabbedContentTab } from '@elastic/eui';
import { i18n } from '@kbn/i18n';
import { Status } from '../../../../../common/types/api';
-
-import { generateEncodedPath } from '../../../app_search/utils/encode_path_params';
+import { generateEncodedPath } from '../../../shared/encode_path_params';
import { KibanaLogic } from '../../../shared/kibana';
import { FetchIndexApiLogic } from '../../api/index/fetch_index_api_logic';
import { SEARCH_INDEX_PATH, SEARCH_INDEX_TAB_PATH } from '../../routes';
@@ -27,9 +26,13 @@ import { baseBreadcrumbs } from '../search_indices';
import { ConnectorConfiguration } from './connector/connector_configuration';
import { ConnectorSchedulingComponent } from './connector/connector_scheduling';
+import { AutomaticCrawlScheduler } from './crawler/automatic_crawl_scheduler/automatic_crawl_scheduler';
+import { CrawlCustomSettingsFlyout } from './crawler/crawl_custom_settings_flyout/crawl_custom_settings_flyout';
+import { CrawlerStatusIndicator } from './crawler/crawler_status_indicator/crawler_status_indicator';
+import { SearchIndexDomainManagement } from './crawler/domain_management/domain_management';
import { SearchIndexDocuments } from './documents';
-import { SearchIndexDomainManagement } from './domain_management';
import { SearchIndexIndexMappings } from './index_mappings';
+import { IndexNameLogic } from './index_name_logic';
import { SearchIndexOverview } from './overview';
export enum SearchIndexTabId {
@@ -47,8 +50,8 @@ export enum SearchIndexTabId {
export const SearchIndex: React.FC = () => {
const { makeRequest, apiReset } = useActions(FetchIndexApiLogic);
const { data: indexData, status: indexApiStatus } = useValues(FetchIndexApiLogic);
- const { indexName, tabId = SearchIndexTabId.OVERVIEW } = useParams<{
- indexName: string;
+ const { indexName } = useValues(IndexNameLogic);
+ const { tabId = SearchIndexTabId.OVERVIEW } = useParams<{
tabId?: string;
}>();
@@ -106,6 +109,13 @@ export const SearchIndex: React.FC = () => {
defaultMessage: 'Manage Domains',
}),
},
+ {
+ content: ,
+ id: SearchIndexTabId.SCHEDULING,
+ name: i18n.translate('xpack.enterpriseSearch.content.searchIndex.schedulingTabLabel', {
+ defaultMessage: 'Scheduling',
+ }),
+ },
];
const tabs: EuiTabbedContentTab[] = [
@@ -127,15 +137,18 @@ export const SearchIndex: React.FC = () => {
)
);
};
-
return (
] : [],
+ }}
>
+ {indexData?.crawler && }
);
};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/search_index_router.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/search_index_router.tsx
new file mode 100644
index 0000000000000..c5acd80aaf6bc
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_index/search_index_router.tsx
@@ -0,0 +1,47 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React, { useEffect } from 'react';
+import { Route, Switch, useParams } from 'react-router-dom';
+
+import { useActions } from 'kea';
+
+import {
+ SEARCH_INDEX_CRAWLER_DOMAIN_DETAIL_PATH,
+ SEARCH_INDEX_PATH,
+ SEARCH_INDEX_TAB_PATH,
+} from '../../routes';
+
+import { CrawlerDomainDetail } from '../crawler_domain_detail/crawler_domain_detail';
+
+import { IndexNameLogic } from './index_name_logic';
+import { SearchIndex } from './search_index';
+
+export const SearchIndexRouter: React.FC = () => {
+ const { indexName } = useParams<{ indexName: string }>();
+
+ const indexNameLogic = IndexNameLogic({ indexName });
+ const { setIndexName } = useActions(indexNameLogic);
+
+ useEffect(() => {
+ setIndexName(indexName);
+ }, [indexName]);
+
+ return (
+
+
+
+
+
+
+
+
+
+
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/search_indices_router.test.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/search_indices_router.test.tsx
index 371f2b6f49cc2..047292927245e 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/search_indices_router.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/search_indices_router.test.tsx
@@ -8,17 +8,25 @@
import '../../../__mocks__/react_router';
import React from 'react';
-import { Route, Switch } from 'react-router-dom';
+import { Switch } from 'react-router-dom';
import { shallow } from 'enzyme';
+import { NewIndex } from '../new_index';
+import { SearchIndexRouter } from '../search_index/search_index_router';
+
+import { SearchIndices } from './search_indices';
+
import { SearchIndicesRouter } from '.';
describe('SearchIndicesRouter', () => {
it('renders Search index routes', () => {
const wrapper = shallow( );
- expect(wrapper.find(Switch)).toHaveLength(1);
- expect(wrapper.find(Route)).toHaveLength(4);
+ const routeSwitch = wrapper.find(Switch);
+
+ expect(routeSwitch.find(NewIndex)).toHaveLength(1);
+ expect(routeSwitch.find(SearchIndices)).toHaveLength(1);
+ expect(routeSwitch.find(SearchIndexRouter)).toHaveLength(1);
});
});
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/search_indices_router.tsx b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/search_indices_router.tsx
index 74e88333e8c47..0d7993f168fcd 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/search_indices_router.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/components/search_indices/search_indices_router.tsx
@@ -8,15 +8,10 @@
import React from 'react';
import { Route, Switch } from 'react-router-dom';
-import {
- SEARCH_INDICES_PATH,
- SEARCH_INDEX_PATH,
- SEARCH_INDEX_TAB_PATH,
- NEW_INDEX_PATH,
-} from '../../routes';
+import { SEARCH_INDICES_PATH, SEARCH_INDEX_PATH, NEW_INDEX_PATH } from '../../routes';
import { NewIndex } from '../new_index';
-import { SearchIndex } from '../search_index/search_index';
+import { SearchIndexRouter } from '../search_index/search_index_router';
import { SearchIndices } from './search_indices';
@@ -29,11 +24,8 @@ export const SearchIndicesRouter: React.FC = () => {
-
-
-
-
-
+
+
);
diff --git a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/routes.ts b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/routes.ts
index b429b4b531997..045920c8043fe 100644
--- a/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/routes.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/enterprise_search_content/routes.ts
@@ -19,3 +19,4 @@ export const NEW_DIRECT_UPLOAD_PATH = `${NEW_INDEX_PATH}/upload`;
export const SEARCH_INDEX_PATH = `${SEARCH_INDICES_PATH}/:indexName`;
export const SEARCH_INDEX_TAB_PATH = `${SEARCH_INDEX_PATH}/:tabId`;
+export const SEARCH_INDEX_CRAWLER_DOMAIN_DETAIL_PATH = `${SEARCH_INDEX_PATH}/crawler/domains/:domainId`;
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/accordion_list.scss b/x-pack/plugins/enterprise_search/public/applications/shared/accordion_list/accordion_list.scss
similarity index 100%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/accordion_list.scss
rename to x-pack/plugins/enterprise_search/public/applications/shared/accordion_list/accordion_list.scss
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/accordion_list.test.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/accordion_list/accordion_list.test.tsx
similarity index 97%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/accordion_list.test.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/accordion_list/accordion_list.test.tsx
index 2109160f5bb25..13fe9c8f0efd9 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/accordion_list.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/accordion_list/accordion_list.test.tsx
@@ -5,8 +5,6 @@
* 2.0.
*/
-import '../../../../__mocks__/engine_logic.mock';
-
import React from 'react';
import { shallow, ShallowWrapper } from 'enzyme';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/accordion_list.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/accordion_list/accordion_list.tsx
similarity index 100%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_details_flyout/accordion_list.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/accordion_list/accordion_list.tsx
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/custom_formatted_timestamp.test.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/custom_formatted_timestamp/custom_formatted_timestamp.test.tsx
similarity index 94%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/custom_formatted_timestamp.test.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/custom_formatted_timestamp/custom_formatted_timestamp.test.tsx
index 789b1b1bb6580..cef318d36da09 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/custom_formatted_timestamp.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/custom_formatted_timestamp/custom_formatted_timestamp.test.tsx
@@ -11,7 +11,7 @@ import { shallow } from 'enzyme';
import { FormattedRelative } from '@kbn/i18n-react';
-import { FormattedDateTime } from '../../../utils/formatted_date_time';
+import { FormattedDateTime } from '../formatted_date_time';
import { CustomFormattedTimestamp } from './custom_formatted_timestamp';
diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/custom_formatted_timestamp/custom_formatted_timestamp.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/custom_formatted_timestamp/custom_formatted_timestamp.tsx
new file mode 100644
index 0000000000000..7e65493ec7e08
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/custom_formatted_timestamp/custom_formatted_timestamp.tsx
@@ -0,0 +1,28 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { FormattedRelative } from '@kbn/i18n-react';
+
+import { FormattedDateTime } from '../formatted_date_time';
+
+interface CustomFormattedTimestampProps {
+ timestamp: string;
+}
+
+export const CustomFormattedTimestamp: React.FC = ({
+ timestamp,
+}) => {
+ const date = new Date(timestamp);
+ const isDateToday = date >= new Date(new Date(Date.now()).toDateString());
+ return isDateToday ? (
+
+ ) : (
+
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/data_panel.scss b/x-pack/plugins/enterprise_search/public/applications/shared/data_panel/data_panel.scss
similarity index 100%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/data_panel.scss
rename to x-pack/plugins/enterprise_search/public/applications/shared/data_panel/data_panel.scss
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/data_panel.test.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/data_panel/data_panel.test.tsx
similarity index 98%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/data_panel.test.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/data_panel/data_panel.test.tsx
index e17b9263bb49d..703e4d1328e7d 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/data_panel.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/data_panel/data_panel.test.tsx
@@ -11,7 +11,7 @@ import { shallow } from 'enzyme';
import { EuiIcon, EuiButton, EuiTitle, EuiFlexGroup, EuiSpacer } from '@elastic/eui';
-import { LoadingOverlay } from '../../../shared/loading';
+import { LoadingOverlay } from '../loading';
import { DataPanel } from './data_panel';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/data_panel.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/data_panel/data_panel.tsx
similarity index 86%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/data_panel.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/data_panel/data_panel.tsx
index d199d2a6d3edd..50a30a658e8be 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/data_panel/data_panel.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/data_panel/data_panel.tsx
@@ -22,9 +22,8 @@ import {
} from '@elastic/eui';
import { _EuiPanelDivlike } from '@elastic/eui/src/components/panel/panel';
-import { LoadingOverlay } from '../../../shared/loading';
-
import './data_panel.scss';
+import { LoadingOverlay } from '../loading';
type Props = Omit<_EuiPanelDivlike, 'title'> & {
title: React.ReactElement; // e.g., h2 tag
@@ -63,7 +62,7 @@ export const DataPanel: React.FC = ({
aria-busy={isLoading}
{...props}
>
-
+
{iconType && (
@@ -76,9 +75,12 @@ export const DataPanel: React.FC = ({
{subtitle && (
-
- {subtitle}
-
+ <>
+
+
+ {subtitle}
+
+ >
)}
{action && {action} }
diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/encode_path_params/index.test.ts b/x-pack/plugins/enterprise_search/public/applications/shared/encode_path_params/index.test.ts
new file mode 100644
index 0000000000000..9a2dcda0ce4dd
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/encode_path_params/index.test.ts
@@ -0,0 +1,49 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { mockUseParams } from '../../__mocks__/react_router';
+
+import { encodePathParams, generateEncodedPath, useDecodedParams } from '.';
+
+describe('encodePathParams', () => {
+ it('encodeURIComponent()s all object values', () => {
+ const params = {
+ someValue: 'hello world???',
+ anotherValue: 'test!@#$%^&*[]/|;:"<>~`',
+ };
+ expect(encodePathParams(params)).toEqual({
+ someValue: 'hello%20world%3F%3F%3F',
+ anotherValue: 'test!%40%23%24%25%5E%26*%5B%5D%2F%7C%3B%3A%22%3C%3E~%60',
+ });
+ });
+});
+
+describe('generateEncodedPath', () => {
+ it('generates a react router path with encoded path parameters', () => {
+ expect(
+ generateEncodedPath('/values/:someValue/:anotherValue/new', {
+ someValue: 'hello world???',
+ anotherValue: 'test!@#$%^&*[]/|;:"<>~`',
+ })
+ ).toEqual(
+ '/values/hello%20world%3F%3F%3F/test!%40%23%24%25%5E%26*%5B%5D%2F%7C%3B%3A%22%3C%3E~%60/new'
+ );
+ });
+});
+
+describe('useDecodedParams', () => {
+ it('decodeURIComponent()s all object values from useParams()', () => {
+ mockUseParams.mockReturnValue({
+ someValue: 'hello%20world%3F%3F%3F',
+ anotherValue: 'test!%40%23%24%25%5E%26*%5B%5D%2F%7C%3B%3A%22%3C%3E~%60',
+ });
+ expect(useDecodedParams()).toEqual({
+ someValue: 'hello world???',
+ anotherValue: 'test!@#$%^&*[]/|;:"<>~`',
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/encode_path_params/index.ts b/x-pack/plugins/enterprise_search/public/applications/shared/encode_path_params/index.ts
new file mode 100644
index 0000000000000..2f9d9bed30944
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/encode_path_params/index.ts
@@ -0,0 +1,35 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { generatePath, useParams } from 'react-router-dom';
+
+type PathParams = Record;
+
+export const encodePathParams = (pathParams: PathParams) => {
+ const encodedParams: PathParams = {};
+
+ Object.entries(pathParams).map(([key, value]) => {
+ encodedParams[key] = encodeURIComponent(value);
+ });
+
+ return encodedParams;
+};
+
+export const generateEncodedPath = (path: string, pathParams: PathParams) => {
+ return generatePath(path, encodePathParams(pathParams));
+};
+
+export const useDecodedParams = () => {
+ const decodedParams: PathParams = {};
+
+ const params = useParams();
+ Object.entries(params).map(([key, value]) => {
+ decodedParams[key] = decodeURIComponent(value as string);
+ });
+
+ return decodedParams;
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/utils/formatted_date_time/index.test.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/formatted_date_time/index.test.tsx
similarity index 94%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/utils/formatted_date_time/index.test.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/formatted_date_time/index.test.tsx
index d29c58bca81e2..f74125b1528c7 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/utils/formatted_date_time/index.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/formatted_date_time/index.test.tsx
@@ -7,7 +7,7 @@
import React from 'react';
-import { mountWithIntl } from '../../../test_helpers';
+import { mountWithIntl } from '../../test_helpers';
import { FormattedDateTime } from '.';
diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/formatted_date_time/index.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/formatted_date_time/index.tsx
new file mode 100644
index 0000000000000..74ee71185f920
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/formatted_date_time/index.tsx
@@ -0,0 +1,27 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import React from 'react';
+
+import { FormattedDate, FormattedTime } from '@kbn/i18n-react';
+
+interface Props {
+ date: Date;
+ hideTime?: boolean;
+}
+
+export const FormattedDateTime: React.FC = ({ date, hideTime = false }) => (
+ <>
+
+ {!hideTime && (
+ <>
+ {' '}
+
+ >
+ )}
+ >
+);
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_select_domains_modal/simplified_selectable.test.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/simplified_selectable/simplified_selectable.test.tsx
similarity index 98%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_select_domains_modal/simplified_selectable.test.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/simplified_selectable/simplified_selectable.test.tsx
index 7a564988f1859..d8d687f71396c 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_select_domains_modal/simplified_selectable.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/simplified_selectable/simplified_selectable.test.tsx
@@ -11,7 +11,7 @@ import { shallow, ShallowWrapper } from 'enzyme';
import { EuiSelectable, EuiSelectableList, EuiSelectableSearch } from '@elastic/eui';
-import { mountWithIntl } from '../../../../../test_helpers';
+import { mountWithIntl } from '../../test_helpers';
import { SimplifiedSelectable } from './simplified_selectable';
diff --git a/x-pack/plugins/enterprise_search/public/applications/shared/simplified_selectable/simplified_selectable.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/simplified_selectable/simplified_selectable.tsx
new file mode 100644
index 0000000000000..e13304b4a8f2f
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/simplified_selectable/simplified_selectable.tsx
@@ -0,0 +1,97 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+import React from 'react';
+
+import { EuiButtonEmpty, EuiFlexGroup, EuiFlexItem, EuiSelectable } from '@elastic/eui';
+import { EuiSelectableLIOption } from '@elastic/eui/src/components/selectable/selectable_option';
+import { i18n } from '@kbn/i18n';
+
+export interface Props {
+ emptyMessage?: string;
+ options: string[];
+ selectedOptions: string[];
+ onChange(selectedOptions: string[]): void;
+}
+
+export interface OptionMap {
+ [key: string]: boolean;
+}
+
+export const SimplifiedSelectable: React.FC = ({
+ emptyMessage,
+ options,
+ selectedOptions,
+ onChange,
+}) => {
+ const selectedOptionsMap: OptionMap = selectedOptions.reduce(
+ (acc, selectedOption) => ({
+ ...acc,
+ [selectedOption]: true,
+ }),
+ {}
+ );
+
+ const selectableOptions: Array> = options.map((option) => ({
+ label: option,
+ checked: selectedOptionsMap[option] ? 'on' : undefined,
+ }));
+
+ return (
+ <>
+
+
+ onChange(options)}
+ disabled={selectedOptions.length === options.length}
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.selectAllButtonLabel',
+ {
+ defaultMessage: 'Select all',
+ }
+ )}
+
+
+
+ onChange([])}
+ disabled={selectedOptions.length === 0}
+ >
+ {i18n.translate(
+ 'xpack.enterpriseSearch.appSearch.crawler.simplifiedSelectable.deselectAllButtonLabel',
+ {
+ defaultMessage: 'Deselect all',
+ }
+ )}
+
+
+
+ {
+ onChange(
+ newSelectableOptions.filter((option) => option.checked).map((option) => option.label)
+ );
+ }}
+ emptyMessage={emptyMessage}
+ >
+ {(list, search) => (
+ <>
+ {search}
+ {list}
+ >
+ )}
+
+ >
+ );
+};
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box.scss b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box.scss
similarity index 100%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box.scss
rename to x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box.scss
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box.test.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box.test.tsx
similarity index 95%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box.test.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box.test.tsx
index 831b6066b21ea..00370d2ac6b42 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box.test.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box.test.tsx
@@ -5,7 +5,7 @@
* 2.0.
*/
-import { setMockActions, setMockValues } from '../../../../../__mocks__/kea_logic';
+import { setMockActions, setMockValues } from '../../__mocks__/kea_logic';
jest.mock('@elastic/eui', () => ({
...(jest.requireActual('@elastic/eui') as object),
@@ -18,7 +18,7 @@ import { ShallowWrapper, shallow } from 'enzyme';
import { EuiComboBox, EuiFormRow } from '@elastic/eui';
-import { rerender } from '../../../../../test_helpers';
+import { rerender } from '../../test_helpers';
import { UrlComboBox } from './url_combo_box';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box.tsx b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box.tsx
similarity index 89%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box.tsx
rename to x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box.tsx
index 0bcb2afaf896e..56fd31435e922 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box.tsx
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box.tsx
@@ -47,12 +47,9 @@ export const UrlComboBox: React.FC = ({ label, selectedUrls, onChange })
isInvalid={isInvalid}
error={
isInvalid
- ? i18n.translate(
- 'xpack.enterpriseSearch.appSearch.crawler.urlComboBox.invalidUrlErrorMessage',
- {
- defaultMessage: 'Please enter a valid URL',
- }
- )
+ ? i18n.translate('xpack.enterpriseSearch.crawler.urlComboBox.invalidUrlErrorMessage', {
+ defaultMessage: 'Please enter a valid URL',
+ })
: undefined
}
>
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box_logic.test.ts b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box_logic.test.ts
similarity index 93%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box_logic.test.ts
rename to x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box_logic.test.ts
index c4b48bc01e363..6c92560e93633 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box_logic.test.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box_logic.test.ts
@@ -4,7 +4,7 @@
* 2.0; you may not use this file except in compliance with the Elastic License
* 2.0.
*/
-import { LogicMounter } from '../../../../../__mocks__/kea_logic';
+import { LogicMounter } from '../../__mocks__/kea_logic';
import { UrlComboBoxLogic } from './url_combo_box_logic';
diff --git a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box_logic.ts b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box_logic.ts
similarity index 90%
rename from x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box_logic.ts
rename to x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box_logic.ts
index 18c471bf1ba81..6dd3f81028ab4 100644
--- a/x-pack/plugins/enterprise_search/public/applications/app_search/components/crawler/components/crawl_custom_settings_flyout/url_combo_box_logic.ts
+++ b/x-pack/plugins/enterprise_search/public/applications/shared/url_combo_box/url_combo_box_logic.ts
@@ -17,7 +17,7 @@ export interface UrlComboBoxActions {
export const UrlComboBoxLogic = kea>({
key: (props) => props.id,
- path: (key: string) => ['enterprise_search', 'app_search', 'url_combo_box', key],
+ path: (key: string) => ['enterprise_search', 'url_combo_box', key],
actions: () => ({
setIsInvalid: (isInvalid) => ({ isInvalid }),
}),
diff --git a/x-pack/plugins/enterprise_search/server/lib/indices/fetch_index.ts b/x-pack/plugins/enterprise_search/server/lib/indices/fetch_index.ts
index 003e95930c384..7d2551c0d747c 100644
--- a/x-pack/plugins/enterprise_search/server/lib/indices/fetch_index.ts
+++ b/x-pack/plugins/enterprise_search/server/lib/indices/fetch_index.ts
@@ -9,6 +9,7 @@ import { IScopedClusterClient } from '@kbn/core/server';
import { CONNECTORS_INDEX } from '../..';
import { Connector } from '../../types/connector';
+import { Crawler } from '../../types/crawler';
import { mapIndexStats } from './fetch_indices';
@@ -21,17 +22,32 @@ export const fetchIndex = async (client: IScopedClusterClient, index: string) =>
}
const indexStats = indices[index];
const indexResult = mapIndexStats(indexData, indexStats, index);
+
const connectorResult = await client.asCurrentUser.search({
index: CONNECTORS_INDEX,
query: { term: { 'index_name.keyword': index } },
});
- const connector = connectorResult.hits.hits[0] ? connectorResult.hits.hits[0]._source : undefined;
+ const connector = connectorResult.hits.hits[0]?._source ?? undefined;
+
if (connector) {
return {
connector: { ...connector, id: connectorResult.hits.hits[0]._id },
index: indexResult,
};
- } else {
- return { index: indexResult };
}
+
+ const crawlerResult = await client.asCurrentUser.search({
+ index: '.ent-search-actastic-crawler2_configurations',
+ query: { term: { index_name: index } },
+ });
+ const crawler = crawlerResult.hits.hits[0]?._source ?? undefined;
+
+ if (crawler) {
+ return {
+ crawler,
+ index: indexResult,
+ };
+ }
+
+ return { index: indexResult };
};
diff --git a/x-pack/plugins/enterprise_search/server/plugin.ts b/x-pack/plugins/enterprise_search/server/plugin.ts
index f32f8fd45e63b..cfc3d8d196fd9 100644
--- a/x-pack/plugins/enterprise_search/server/plugin.ts
+++ b/x-pack/plugins/enterprise_search/server/plugin.ts
@@ -48,7 +48,7 @@ import { registerAppSearchRoutes } from './routes/app_search';
import { registerEnterpriseSearchRoutes } from './routes/enterprise_search';
import { registerConfigDataRoute } from './routes/enterprise_search/config_data';
import { registerConnectorRoutes } from './routes/enterprise_search/connectors';
-import { registerCrawlerRoutes } from './routes/enterprise_search/crawler';
+import { registerCrawlerRoutes } from './routes/enterprise_search/crawler/crawler';
import { registerCreateAPIKeyRoute } from './routes/enterprise_search/create_api_key';
import { registerTelemetryRoute } from './routes/enterprise_search/telemetry';
import { registerWorkplaceSearchRoutes } from './routes/workplace_search';
diff --git a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler.ts b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler.ts
deleted file mode 100644
index b74c8d1bc98ca..0000000000000
--- a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler.ts
+++ /dev/null
@@ -1,30 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License
- * 2.0; you may not use this file except in compliance with the Elastic License
- * 2.0.
- */
-
-import { schema } from '@kbn/config-schema';
-
-import { RouteDependencies } from '../../plugin';
-
-export function registerCrawlerRoutes({
- router,
- enterpriseSearchRequestHandler,
-}: RouteDependencies) {
- router.post(
- {
- path: '/internal/enterprise_search/crawler',
- validate: {
- body: schema.object({
- index_name: schema.string(),
- language: schema.string(),
- }),
- },
- },
- enterpriseSearchRequestHandler.createRequest({
- path: '/api/ent/v1/internal/indices/',
- })
- );
-}
diff --git a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler.test.ts b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler.test.ts
new file mode 100644
index 0000000000000..3891f4023feb0
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler.test.ts
@@ -0,0 +1,660 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { MockRouter, mockDependencies, mockRequestHandler } from '../../../__mocks__';
+
+import { registerCrawlerRoutes } from './crawler';
+
+describe('crawler routes', () => {
+ describe('GET /internal/enterprise_search/indices/{indexName}/crawler', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'get',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2',
+ });
+ });
+
+ it('validates correctly with name', () => {
+ const request = { params: { indexName: 'index-name' } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without name', () => {
+ const request = { params: {} };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('GET /internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/{crawlRequestId}', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'get',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/{crawlRequestId}',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests/:crawlRequestId',
+ });
+ });
+
+ it('validates correctly with name and id', () => {
+ const request = { params: { indexName: 'index-name', crawlRequestId: '12345' } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without name', () => {
+ const request = { params: { crawlRequestId: '12345' } };
+ mockRouter.shouldThrow(request);
+ });
+
+ it('fails validation without id', () => {
+ const request = { params: { indexName: 'index-name' } };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('POST /internal/enterprise_search/indices/{indexName}/crawler/crawl_requests', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'post',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests',
+ });
+ });
+
+ it('validates correctly with name', () => {
+ const request = { params: { indexName: 'index-name' } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly with domain urls', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { overrides: { domain_allowlist: ['https://www.elastic.co'] } },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly with max crawl depth', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { overrides: { max_crawl_depth: 10 } },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly with seed urls', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { overrides: { seed_urls: ['https://www.elastic.co/guide'] } },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly with sitemap urls', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { overrides: { sitemap_urls: ['https://www.elastic.co/sitemap1.xml'] } },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly when we set sitemap discovery', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { overrides: { sitemap_discovery_disabled: true } },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly with empty overrides', () => {
+ const request = { params: { indexName: 'index-name' }, body: { overrides: {} } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without name', () => {
+ const request = { params: {} };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('GET /internal/enterprise_search/indices/{indexName}/crawler/domains', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'get',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains',
+ });
+ });
+
+ it('validates correctly', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ query: {
+ 'page[current]': 5,
+ 'page[size]': 10,
+ },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without required params', () => {
+ const request = { params: {} };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('POST /internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/cancel', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'post',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/cancel',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests/active/cancel',
+ });
+ });
+
+ it('validates correctly with name', () => {
+ const request = { params: { indexName: 'index-name' } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without name', () => {
+ const request = { params: {} };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('POST /internal/enterprise_search/indices/{indexName}/crawler/domains', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'post',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains',
+ });
+ });
+
+ it('validates correctly with params and body', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { name: 'https://elastic.co/guide', entry_points: [{ value: '/guide' }] },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without a name param', () => {
+ const request = {
+ params: {},
+ body: { name: 'https://elastic.co/guide', entry_points: [{ value: '/guide' }] },
+ };
+ mockRouter.shouldThrow(request);
+ });
+
+ it('fails validation without a body', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: {},
+ };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('DELETE /internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'delete',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
+ });
+ });
+
+ it('validates correctly with name and id', () => {
+ const request = { params: { indexName: 'index-name', domainId: '1234' } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without name', () => {
+ const request = { params: { domainId: '1234' } };
+ mockRouter.shouldThrow(request);
+ });
+
+ it('fails validation without id', () => {
+ const request = { params: { indexName: 'index-name' } };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('PUT /internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'put',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
+ });
+ });
+
+ it('validates correctly with crawl rules', () => {
+ const request = {
+ params: { indexName: 'index-name', domainId: '1234' },
+ body: {
+ crawl_rules: [
+ {
+ order: 1,
+ id: '5678',
+ },
+ ],
+ },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly with deduplication enabled', () => {
+ const request = {
+ params: { indexName: 'index-name', domainId: '1234' },
+ body: {
+ deduplication_enabled: true,
+ },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly with deduplication fields', () => {
+ const request = {
+ params: { indexName: 'index-name', domainId: '1234' },
+ body: {
+ deduplication_fields: ['title', 'description'],
+ },
+ };
+ mockRouter.shouldValidate(request);
+ });
+ });
+
+ describe('GET /internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'get',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
+ });
+ });
+
+ it('validates correctly with name and id', () => {
+ const request = { params: { indexName: 'index-name', domainId: '1234' } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without name', () => {
+ const request = { params: { domainId: '1234' } };
+ mockRouter.shouldThrow(request);
+ });
+
+ it('fails validation without id', () => {
+ const request = { params: { indexName: 'index-name' } };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('POST /internal/enterprise_search/crawler/validate_url', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'post',
+ path: '/internal/enterprise_search/crawler/validate_url',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/crawler/validate_url',
+ });
+ });
+
+ it('validates correctly with body', () => {
+ const request = {
+ body: { url: 'elastic.co', checks: ['tcp', 'url_request'] },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without a body', () => {
+ const request = {
+ body: {},
+ };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('POST /internal/enterprise_search/indices/{indexName}/crawler/process_crawls', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'post',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/process_crawls',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/process_crawls',
+ });
+ });
+
+ it('validates correctly', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { domains: ['https://elastic.co', 'https://swiftype.com'] },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('validates correctly without body', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: {},
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without a name param', () => {
+ const request = {
+ params: {},
+ body: { domains: ['https://elastic.co', 'https://swiftype.com'] },
+ };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('GET /internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'get',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
+ });
+ });
+
+ it('validates correctly', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without a name param', () => {
+ const request = {
+ params: {},
+ };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('PUT /internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'put',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
+ });
+ });
+
+ it('validates correctly', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { unit: 'day', frequency: 7 },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without a name param', () => {
+ const request = {
+ params: {},
+ body: { unit: 'day', frequency: 7 },
+ };
+ mockRouter.shouldThrow(request);
+ });
+
+ it('fails validation without a unit property in body', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { frequency: 7 },
+ };
+ mockRouter.shouldThrow(request);
+ });
+
+ it('fails validation without a frequency property in body', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ body: { unit: 'day' },
+ };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('DELETE /internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'delete',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
+ });
+ });
+
+ it('validates correctly', () => {
+ const request = {
+ params: { indexName: 'index-name' },
+ };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without a name param', () => {
+ const request = {
+ params: {},
+ };
+ mockRouter.shouldThrow(request);
+ });
+ });
+
+ describe('GET /internal/enterprise_search/indices/{indexName}/crawler/domain_configs', () => {
+ let mockRouter: MockRouter;
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ mockRouter = new MockRouter({
+ method: 'get',
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domain_configs',
+ });
+
+ registerCrawlerRoutes({
+ ...mockDependencies,
+ router: mockRouter.router,
+ });
+ });
+
+ it('creates a request to enterprise search', () => {
+ expect(mockRequestHandler.createRequest).toHaveBeenCalledWith({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domain_configs',
+ });
+ });
+
+ it('validates correctly with name', () => {
+ const request = { params: { indexName: 'index-name' } };
+ mockRouter.shouldValidate(request);
+ });
+
+ it('fails validation without name', () => {
+ const request = { params: {} };
+ mockRouter.shouldThrow(request);
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler.ts b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler.ts
new file mode 100644
index 0000000000000..652f72318074e
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler.ts
@@ -0,0 +1,294 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { schema } from '@kbn/config-schema';
+
+import { RouteDependencies } from '../../../plugin';
+
+import { registerCrawlerCrawlRulesRoutes } from './crawler_crawl_rules';
+import { registerCrawlerEntryPointRoutes } from './crawler_entry_points';
+import { registerCrawlerSitemapRoutes } from './crawler_sitemaps';
+
+export function registerCrawlerRoutes(routeDependencies: RouteDependencies) {
+ const { router, enterpriseSearchRequestHandler } = routeDependencies;
+
+ router.post(
+ {
+ path: '/internal/enterprise_search/crawler',
+ validate: {
+ body: schema.object({
+ index_name: schema.string(),
+ language: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/',
+ })
+ );
+
+ router.post(
+ {
+ path: '/internal/enterprise_search/crawler/validate_url',
+ validate: {
+ body: schema.object({
+ url: schema.string(),
+ checks: schema.arrayOf(schema.string()),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/crawler/validate_url',
+ })
+ );
+
+ router.get(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2',
+ })
+ );
+
+ router.post(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ body: schema.object({
+ overrides: schema.maybe(
+ schema.object({
+ domain_allowlist: schema.maybe(schema.arrayOf(schema.string())),
+ max_crawl_depth: schema.maybe(schema.number()),
+ seed_urls: schema.maybe(schema.arrayOf(schema.string())),
+ sitemap_urls: schema.maybe(schema.arrayOf(schema.string())),
+ sitemap_discovery_disabled: schema.maybe(schema.boolean()),
+ })
+ ),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests',
+ })
+ );
+
+ router.post(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/cancel',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests/active/cancel',
+ })
+ );
+
+ router.get(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_requests/{crawlRequestId}',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ crawlRequestId: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_requests/:crawlRequestId',
+ })
+ );
+
+ router.get(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ query: schema.object({
+ 'page[current]': schema.number(),
+ 'page[size]': schema.number(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains',
+ })
+ );
+
+ router.post(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains',
+ validate: {
+ body: schema.object({
+ entry_points: schema.arrayOf(
+ schema.object({
+ value: schema.string(),
+ })
+ ),
+ name: schema.string(),
+ }),
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains',
+ })
+ );
+
+ router.get(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
+ validate: {
+ params: schema.object({
+ domainId: schema.string(),
+ indexName: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
+ })
+ );
+
+ router.put(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ }),
+ body: schema.object({
+ crawl_rules: schema.maybe(
+ schema.arrayOf(
+ schema.object({
+ order: schema.number(),
+ id: schema.string(),
+ })
+ )
+ ),
+ deduplication_enabled: schema.maybe(schema.boolean()),
+ deduplication_fields: schema.maybe(schema.arrayOf(schema.string())),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
+ })
+ );
+
+ router.delete(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}',
+ validate: {
+ params: schema.object({
+ domainId: schema.string(),
+ indexName: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId',
+ })
+ );
+
+ router.get(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domain_configs',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domain_configs',
+ })
+ );
+
+ router.post(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/process_crawls',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ body: schema.object({
+ domains: schema.maybe(schema.arrayOf(schema.string())),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/process_crawls',
+ })
+ );
+
+ router.get(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
+ })
+ );
+
+ router.put(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ body: schema.object({
+ unit: schema.string(),
+ frequency: schema.number(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
+ })
+ );
+
+ router.delete(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/crawl_schedule',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/crawl_schedule',
+ })
+ );
+
+ registerCrawlerCrawlRulesRoutes(routeDependencies);
+ registerCrawlerEntryPointRoutes(routeDependencies);
+ registerCrawlerSitemapRoutes(routeDependencies);
+}
diff --git a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_crawl_rules.ts b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_crawl_rules.ts
new file mode 100644
index 0000000000000..d449856e4b183
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_crawl_rules.ts
@@ -0,0 +1,82 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { schema } from '@kbn/config-schema';
+
+import { RouteDependencies } from '../../../plugin';
+
+/**
+ * Registers Kibana server routes for managing a crawler domain's crawl rules
+ * (create, update, delete). Each Kibana route validates its params/body with
+ * @kbn/config-schema and then proxies the request to the Enterprise Search
+ * internal API via enterpriseSearchRequestHandler.createRequest.
+ */
+export function registerCrawlerCrawlRulesRoutes({
+ router,
+ enterpriseSearchRequestHandler,
+}: RouteDependencies) {
+ // Create a new crawl rule on a domain.
+ router.post(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/crawl_rules',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ }),
+ body: schema.object({
+ pattern: schema.string(),
+ policy: schema.string(),
+ rule: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/crawl_rules',
+ params: {
+ // NOTE(review): presumably tells ent-search to respond with the updated
+ // index representation rather than the bare crawl rule — confirm upstream.
+ respond_with: 'index',
+ },
+ })
+ );
+
+ // Update an existing crawl rule (identified by crawlRuleId), including its order.
+ router.put(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/crawl_rules/{crawlRuleId}',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ crawlRuleId: schema.string(),
+ }),
+ body: schema.object({
+ order: schema.number(),
+ pattern: schema.string(),
+ policy: schema.string(),
+ rule: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/crawl_rules/:crawlRuleId',
+ params: {
+ respond_with: 'index',
+ },
+ })
+ );
+
+ // Delete a crawl rule from a domain.
+ router.delete(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/crawl_rules/{crawlRuleId}',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ crawlRuleId: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/crawl_rules/:crawlRuleId',
+ params: {
+ respond_with: 'index',
+ },
+ })
+ );
+}
diff --git a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_entry_points.ts b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_entry_points.ts
new file mode 100644
index 0000000000000..1cf7cc591494b
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_entry_points.ts
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { schema } from '@kbn/config-schema';
+
+import { RouteDependencies } from '../../../plugin';
+
+/**
+ * Registers Kibana server routes for managing a crawler domain's entry points
+ * (create, update, delete). Requests are validated with @kbn/config-schema
+ * and forwarded to the Enterprise Search internal API through
+ * enterpriseSearchRequestHandler.createRequest.
+ */
+export function registerCrawlerEntryPointRoutes({
+ router,
+ enterpriseSearchRequestHandler,
+}: RouteDependencies) {
+ // Create a new entry point URL on a domain.
+ router.post(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/entry_points',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ }),
+ body: schema.object({
+ value: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/entry_points',
+ params: {
+ // NOTE(review): presumably requests the updated index representation in
+ // the response rather than the bare entry point — confirm upstream.
+ respond_with: 'index',
+ },
+ })
+ );
+
+ // Update an existing entry point (identified by entryPointId).
+ router.put(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/entry_points/{entryPointId}',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ entryPointId: schema.string(),
+ }),
+ body: schema.object({
+ value: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/entry_points/:entryPointId',
+ params: {
+ respond_with: 'index',
+ },
+ })
+ );
+
+ // Delete an entry point from a domain.
+ router.delete(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/entry_points/{entryPointId}',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ entryPointId: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/entry_points/:entryPointId',
+ params: {
+ respond_with: 'index',
+ },
+ })
+ );
+}
diff --git a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_sitemaps.ts b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_sitemaps.ts
new file mode 100644
index 0000000000000..d299122d80a91
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/crawler/crawler_sitemaps.ts
@@ -0,0 +1,77 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { schema } from '@kbn/config-schema';
+
+import { RouteDependencies } from '../../../plugin';
+
+/**
+ * Registers Kibana server routes for managing a crawler domain's sitemaps
+ * (create, update, delete). Mirrors the crawl-rules and entry-points route
+ * files: schema validation in Kibana, then a proxied call to the Enterprise
+ * Search internal API via enterpriseSearchRequestHandler.createRequest.
+ */
+export function registerCrawlerSitemapRoutes({
+ router,
+ enterpriseSearchRequestHandler,
+}: RouteDependencies) {
+ // Create a new sitemap URL on a domain.
+ router.post(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/sitemaps',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ }),
+ body: schema.object({
+ url: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/sitemaps',
+ params: {
+ // NOTE(review): presumably requests the updated index representation in
+ // the response rather than the bare sitemap — confirm upstream.
+ respond_with: 'index',
+ },
+ })
+ );
+
+ // Update an existing sitemap (identified by sitemapId).
+ router.put(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/sitemaps/{sitemapId}',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ sitemapId: schema.string(),
+ }),
+ body: schema.object({
+ url: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/sitemaps/:sitemapId',
+ params: {
+ respond_with: 'index',
+ },
+ })
+ );
+
+ // Delete a sitemap from a domain.
+ router.delete(
+ {
+ path: '/internal/enterprise_search/indices/{indexName}/crawler/domains/{domainId}/sitemaps/{sitemapId}',
+ validate: {
+ params: schema.object({
+ indexName: schema.string(),
+ domainId: schema.string(),
+ sitemapId: schema.string(),
+ }),
+ },
+ },
+ enterpriseSearchRequestHandler.createRequest({
+ path: '/api/ent/v1/internal/indices/:indexName/crawler2/domains/:domainId/sitemaps/:sitemapId',
+ params: {
+ respond_with: 'index',
+ },
+ })
+ );
+}
diff --git a/x-pack/plugins/enterprise_search/server/types/crawler.ts b/x-pack/plugins/enterprise_search/server/types/crawler.ts
new file mode 100644
index 0000000000000..afe4078f1421f
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/types/crawler.ts
@@ -0,0 +1,11 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+// Minimal server-side representation of a crawler. Field names are
+// snake_case, mirroring the Enterprise Search API payload shape.
+export interface Crawler {
+  id: string;
+  index_name: string;
+}
diff --git a/x-pack/plugins/translations/translations/fr-FR.json b/x-pack/plugins/translations/translations/fr-FR.json
index 7513c33637460..d1a3658b7389f 100644
--- a/x-pack/plugins/translations/translations/fr-FR.json
+++ b/x-pack/plugins/translations/translations/fr-FR.json
@@ -10966,7 +10966,6 @@
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "Indexation avec des paramètres personnalisés",
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.crawlSelectDomainsMenuLabel": "Indexation de domaines sélectionnés",
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.startACrawlButtonLabel": "Démarrer une indexation",
- "xpack.enterpriseSearch.appSearch.crawler.urlComboBox.invalidUrlErrorMessage": "Veuillez entrer une URL valide",
"xpack.enterpriseSearch.appSearch.credentials.apiEndpoint": "Point de terminaison",
"xpack.enterpriseSearch.appSearch.credentials.apiKeys": "Clés d'API",
"xpack.enterpriseSearch.appSearch.credentials.copied": "Copié",
@@ -11670,6 +11669,21 @@
"xpack.enterpriseSearch.content.newIndex.pageTitle": "Nouvel index de recherche",
"xpack.enterpriseSearch.content.newIndex.selectSearchIndex.description": "Ajoutez votre contenu à Enterprise Search en créant un index de recherche.",
"xpack.enterpriseSearch.content.newIndex.selectSearchIndex.title": "Créer un index de recherche",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.chineseDropDownOptionLabel": "Chinois",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.danishDropDownOptionLabel": "Danois",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.dutchDropDownOptionLabel": "Néerlandais",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.englishDropDownOptionLabel": "Anglais",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.frenchDropDownOptionLabel": "Français",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.germanDropDownOptionLabel": "Allemand",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.italianDropDownOptionLabel": "Italien",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.japaneseDropDownOptionLabel": "Japonais",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.koreanDropDownOptionLabel": "Coréen",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.portugueseBrazilDropDownOptionLabel": "Portugais (Brésil)",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.portugueseDropDownOptionLabel": "Portugais",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.russianDropDownOptionLabel": "Russe",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.spanishDropDownOptionLabel": "Espagnol",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.thaiDropDownOptionLabel": "Thaï",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.universalDropDownOptionLabel": "Universel",
"xpack.enterpriseSearch.content.newIndex.types.api": "Point de terminaison d'API",
"xpack.enterpriseSearch.content.newIndex.types.connector": "Connecteur",
"xpack.enterpriseSearch.content.newIndex.types.crawler": "Robot d'indexation",
@@ -11688,6 +11702,144 @@
"xpack.enterpriseSearch.content.searchIndices.searchIndices.pageTitle": "Contenu",
"xpack.enterpriseSearch.content.searchIndices.searchIndices.stepsTitle": "Créer de belles expériences de recherche avec Enterprise Search",
"xpack.enterpriseSearch.content.searchIndices.searchIndices.tableTitle": "Rechercher dans les index",
+ "xpack.enterpriseSearch.crawler.action.deleteDomain.confirmationPopupMessage": "Voulez-vous vraiment supprimer le domaine \"{domainUrl}\" et tous ses paramètres ?",
+ "xpack.enterpriseSearch.crawler.action.deleteDomain.successMessage": "Le domaine \"{domainUrl}\" a été supprimé",
+ "xpack.enterpriseSearch.crawler.addDomainFlyout.openButtonLabel": "Ajouter un domaine",
+ "xpack.enterpriseSearch.crawler.addDomainFlyout.title": "Ajouter un nouveau domaine",
+ "xpack.enterpriseSearch.crawler.addDomainForm.contentVerificationLabel": "Vérification de contenu",
+ "xpack.enterpriseSearch.crawler.addDomainForm.entryPointLabel": "Le point d'entrée du robot d'indexation a été défini sur {entryPointValue}",
+ "xpack.enterpriseSearch.crawler.addDomainForm.errorsTitle": "Un problème est survenu. Veuillez corriger les erreurs et réessayer.",
+ "xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationDescription": "Le robot d'indexation ne pourra pas indexer le contenu de ce domaine tant que les erreurs ci-dessus n'auront pas été corrigées.",
+ "xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationTitle": "Ignorer les échecs de validation et continuer",
+ "xpack.enterpriseSearch.crawler.addDomainForm.indexingRestrictionsLabel": "Restrictions d'indexation",
+ "xpack.enterpriseSearch.crawler.addDomainForm.initialVaidationLabel": "Validation initiale",
+ "xpack.enterpriseSearch.crawler.addDomainForm.networkConnectivityLabel": "Connectivité réseau",
+ "xpack.enterpriseSearch.crawler.addDomainForm.submitButtonLabel": "Ajouter un domaine",
+ "xpack.enterpriseSearch.crawler.addDomainForm.testUrlButtonLabel": "Tester l'URL dans le navigateur",
+ "xpack.enterpriseSearch.crawler.addDomainForm.unexpectedValidationErrorMessage": "Erreur inattendue",
+ "xpack.enterpriseSearch.crawler.addDomainForm.urlHelpText": "Les URL de domaine requièrent un protocole et ne peuvent pas contenir de chemins.",
+ "xpack.enterpriseSearch.crawler.addDomainForm.urlLabel": "URL de domaine",
+ "xpack.enterpriseSearch.crawler.addDomainForm.validateButtonLabel": "Valider le domaine",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel": "Indexer automatiquement",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlUnitsPrefix": "Tou(te)s les",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.formDescription": "Ne vous inquiétez pas, nous lancerons une indexation à votre place. {readMoreMessage}.",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.readMoreLink": "En lire plus.",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleDescription": "La planification de l'indexation s'applique à tous les domaines de ce moteur.",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleFrequencyLabel": "Planifier la fréquence",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleUnitsLabel": "Planifier des unités de temps",
+ "xpack.enterpriseSearch.crawler.automaticCrawlScheduler.disableCrawlSchedule.successMessage": "L'indexation automatique a été désactivée.",
+ "xpack.enterpriseSearch.crawler.automaticCrawlScheduler.submitCrawlSchedule.successMessage": "Votre planification d'indexation automatique a été mise à jour.",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlCountOnDomains": "{crawlType} indexation sur {domainCount, plural, one {# domaine} other {# domaines}}",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlDepthLabel": "Profondeur maximale de l'indexation",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlTypeLabel": "Type d'indexation",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customEntryPointUrlsTextboxLabel": "URL de points d'entrée personnalisés",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customSitemapUrlsTextboxLabel": "URL des plans de site personnalisés",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.domainsAccordionButtonLabel": "Ajouter des domaines à votre indexation",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.emptyDomainsMessage": "Veuillez sélectionner un domaine.",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.entryPointsTabLabel": "Points d'entrée",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeaderDescription": "Configurer une indexation unique avec des paramètres personnalisés.",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeadTitle": "Configuration personnalisée de l'indexation",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.includeSitemapsCheckboxLabel": "Inclure les plans de site découverts dans {robotsDotTxt}",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldDescription": "Définir une profondeur d'exploration maximale pour indiquer le nombre de pages que le robot d'exploration doit parcourir. Définir la valeur sur un (1) pour limiter l'indexation aux seuls points d'entrée.",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldLabel": "Profondeur maximale de l'indexation",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.seedUrlsAccordionButtonLabel": "URL de base",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor": "sélectionné",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.sitemapsTabLabel": "Plans de site",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.startCrawlButtonLabel": "Appliquer et indexer maintenant",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.previewTabLabel": "Aperçu",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.rawJSONTabLabel": "Raw JSON",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.title": "Détails de la requête d'indexation",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.domainsTitle": "Domaines",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.seedUrlsTitle": "URL de base",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.sitemapUrlsTitle": "URL des plans de site",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.avgResponseTimeLabel": "Réponse moy.",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.clientErrorsLabel": "Erreurs 4xx",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.durationTooltipTitle": "Durée",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.logsDisabledMessage": "Activer les journaux du robot d'indexation dans les paramètres pour obtenir des statistiques d'indexation plus détaillées.",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltip": "URL visitées et extraites pendant l'indexation.",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltipTitle": "Pages visitées",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesVisitedTooltipTitle": "Pages",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.serverErrorsLabel": "Erreurs 5xx",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltip": "URL trouvées par le robot pendant l'indexation, y compris celles qui ne sont pas suivies en raison de la configuration de l'indexation.",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltipTitle": "URL vues",
+ "xpack.enterpriseSearch.crawler.crawlerStatusBanner.changesCalloutTitle": "Les modifications que vous effectuez maintenant ne prendront effet qu'au début de votre prochaine indexation.",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.cancelCrawlMenuItemLabel": "Annuler l'indexation",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.crawlingButtonLabel": "Indexation en cours…",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.pendingButtonLabel": "En attente…",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.retryCrawlButtonLabel": "Réessayer l'indexation",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.showSelectedFieldsButtonLabel": "Afficher uniquement les champs sélectionnés",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel": "Démarrer une indexation",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startingButtonLabel": "Démarrage en cours…",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.stoppingButtonLabel": "Arrêt en cours…",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceled": "Annulé",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceling": "Annulation",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.failed": "Échoué",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.pending": "En attente",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.running": "En cours d'exécution",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.skipped": "Ignoré",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.starting": "Démarrage",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.success": "Succès",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspended": "Suspendu",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspending": "Suspension",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.crawlType": "Type d'indexation",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.created": "Créé",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domains": "Domaines",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domainURL": "ID de requête",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.status": "Statut",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.body": "Vous n'avez encore démarré aucune indexation.",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.title": "Aucune demande d'indexation récente",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.beginsWithLabel": "Commence par",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.containsLabel": "Contient",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.endsWithLabel": "Se termine par",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.regexLabel": "Regex",
+ "xpack.enterpriseSearch.crawler.crawlRulesPolicies.allowLabel": "Autoriser",
+ "xpack.enterpriseSearch.crawler.crawlRulesPolicies.disallowLabel": "Interdire",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.addButtonLabel": "Ajouter une règle d'indexation",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.deleteSuccessToastMessage": "La règle d'indexation a été supprimée.",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.description": "Créez une règle d'indexation pour inclure ou exclure les pages dont l'URL correspond à la règle. Les règles sont exécutées dans l'ordre séquentiel, et chaque URL est évaluée en fonction de la première correspondance. {link}",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.descriptionLinkText": "En savoir plus sur les règles d'indexation",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTableHead": "Modèle de chemin",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTooltip": "Le modèle de chemin est une chaîne littérale, à l'exception du caractère astérisque (*), qui est un métacaractère pouvant correspondre à n'importe quel élément.",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.policyTableHead": "Politique",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.regexPathPatternTooltip": "Le modèle de chemin est une expression régulière compatible avec le moteur d'expression régulière du langage Ruby.",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.ruleTableHead": "Règle",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.title": "Règles d'indexation",
+ "xpack.enterpriseSearch.crawler.crawlTypeOptions.full": "Pleine",
+ "xpack.enterpriseSearch.crawler.crawlTypeOptions.partial": "Partielle",
+ "xpack.enterpriseSearch.crawler.crawlTypeOptions.reAppliedCrawlRules": "Règles d'indexation réappliquées",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.allFieldsLabel": "Tous les champs",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.description": "Le robot d'indexation n'indexe que les pages uniques. Choisissez les champs que le robot d'indexation doit utiliser lorsqu'il recherche les pages en double. Désélectionnez tous les champs de schéma pour autoriser les documents en double dans ce domaine. {documentationLink}.",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.learnMoreMessage": "En savoir plus sur le hachage de contenu",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.resetToDefaultsButtonLabel": "Réinitialiser aux valeurs par défaut",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.selectedFieldsLabel": "Champs sélectionnés",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.showAllFieldsButtonLabel": "Afficher tous les champs",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.title": "Traitement des documents en double",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.add.successMessage": "Le domaine {domainUrl} a été ajouté avec succès",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.delete.buttonLabel": "Supprimer ce domaine",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.manage.buttonLabel": "Gérer ce domaine",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.actions": "Actions",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.documents": "Documents",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.domainURL": "URL de domaine",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.lastActivity": "Dernière activité",
+ "xpack.enterpriseSearch.crawler.domainsTitle": "Domaines",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.addButtonLabel": "Ajouter un point d'entrée",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.description": "Inclure ici les URL les plus importantes pour votre site web. Les URL de point d'entrée seront les premières pages à être indexées et traitées pour servir de liens vers les autres pages.",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageDescription": "{link} pour spécifier un point d'entrée pour le robot d'indexation",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageLinkText": "Ajouter un point d'entrée",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageTitle": "Il n'existe aucun point d'entrée.",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.lastItemMessage": "Le robot d'indexation nécessite au moins un point d'entrée.",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.learnMoreLinkText": "Découvrez plus d'informations sur les points d'entrée.",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.title": "Points d'entrée",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.urlTableHead": "URL",
+ "xpack.enterpriseSearch.crawler.manageCrawlsPopover.reApplyCrawlRules.successMessage": "Les règles d'indexation sont en train d'être réappliquées en arrière-plan",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.addButtonLabel": "Ajouter un plan du site",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.deleteSuccessToastMessage": "Le plan du site a été supprimé.",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.description": "Spécifiez les URL du plan du site pour le robot d'indexation dans ce domaine.",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.emptyMessageTitle": "Il n'existe aucun plan de site.",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.title": "Plans de site",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.urlTableHead": "URL",
+ "xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "Indexation avec des paramètres personnalisés",
+ "xpack.enterpriseSearch.crawler.urlComboBox.invalidUrlErrorMessage": "Veuillez entrer une URL valide",
"xpack.enterpriseSearch.curations.settings.licenseUpgradeLink": "En savoir plus sur les mises à niveau incluses dans la licence",
"xpack.enterpriseSearch.curations.settings.start30DayTrialButtonLabel": "Démarrer un essai gratuit de 30 jours",
"xpack.enterpriseSearch.elasticsearch.nav.contentTitle": "Elasticsearch",
diff --git a/x-pack/plugins/translations/translations/ja-JP.json b/x-pack/plugins/translations/translations/ja-JP.json
index 6259805966410..9468f1398ddae 100644
--- a/x-pack/plugins/translations/translations/ja-JP.json
+++ b/x-pack/plugins/translations/translations/ja-JP.json
@@ -10957,7 +10957,6 @@
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "カスタム設定でクロール",
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.crawlSelectDomainsMenuLabel": "選択したドメインをクロール",
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.startACrawlButtonLabel": "クロールを開始",
- "xpack.enterpriseSearch.appSearch.crawler.urlComboBox.invalidUrlErrorMessage": "有効なURLを入力してください",
"xpack.enterpriseSearch.appSearch.credentials.apiEndpoint": "エンドポイント",
"xpack.enterpriseSearch.appSearch.credentials.apiKeys": "APIキー",
"xpack.enterpriseSearch.appSearch.credentials.copied": "コピー完了",
@@ -11661,6 +11660,21 @@
"xpack.enterpriseSearch.content.newIndex.pageTitle": "新しい検索インデックス",
"xpack.enterpriseSearch.content.newIndex.selectSearchIndex.description": "検索インデックスを作成し、コンテンツをエンタープライズ サーチに追加します。",
"xpack.enterpriseSearch.content.newIndex.selectSearchIndex.title": "検索インデックスの作成",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.chineseDropDownOptionLabel": "中国語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.danishDropDownOptionLabel": "デンマーク語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.dutchDropDownOptionLabel": "オランダ語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.englishDropDownOptionLabel": "英語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.frenchDropDownOptionLabel": "フランス語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.germanDropDownOptionLabel": "ドイツ語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.italianDropDownOptionLabel": "イタリア語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.japaneseDropDownOptionLabel": "日本語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.koreanDropDownOptionLabel": "韓国語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.portugueseBrazilDropDownOptionLabel": "ポルトガル語(ブラジル)",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.portugueseDropDownOptionLabel": "ポルトガル語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.russianDropDownOptionLabel": "ロシア語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.spanishDropDownOptionLabel": "スペイン語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.thaiDropDownOptionLabel": "タイ語",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.universalDropDownOptionLabel": "ユニバーサル",
"xpack.enterpriseSearch.content.newIndex.types.api": "APIエンドポイント",
"xpack.enterpriseSearch.content.newIndex.types.connector": "コネクター",
"xpack.enterpriseSearch.content.newIndex.types.crawler": "Webクローラー",
@@ -11679,6 +11693,144 @@
"xpack.enterpriseSearch.content.searchIndices.searchIndices.pageTitle": "コンテンツ",
"xpack.enterpriseSearch.content.searchIndices.searchIndices.stepsTitle": "エンタープライズ サーチで構築する優れた検索エクスペリエンス",
"xpack.enterpriseSearch.content.searchIndices.searchIndices.tableTitle": "インデックスの検索",
+ "xpack.enterpriseSearch.crawler.action.deleteDomain.confirmationPopupMessage": "ドメイン\"{domainUrl}\"とすべての設定を削除しますか?",
+ "xpack.enterpriseSearch.crawler.action.deleteDomain.successMessage": "ドメイン'{domainUrl}'が削除されました",
+ "xpack.enterpriseSearch.crawler.addDomainFlyout.openButtonLabel": "ドメインを追加",
+ "xpack.enterpriseSearch.crawler.addDomainFlyout.title": "新しいドメインを追加",
+ "xpack.enterpriseSearch.crawler.addDomainForm.contentVerificationLabel": "コンテンツ検証",
+ "xpack.enterpriseSearch.crawler.addDomainForm.entryPointLabel": "Webクローラーエントリポイントが{entryPointValue}として設定されました",
+    "xpack.enterpriseSearch.crawler.addDomainForm.errorsTitle": "何か問題が発生しました。エラーを解決して、再試行してください。",
+ "xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationDescription": "上記のエラーが解決されるまで、Webクローラーはこのドメインのコンテンツにインデックスを作成できません。",
+ "xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationTitle": "検証の失敗を無視して続行",
+ "xpack.enterpriseSearch.crawler.addDomainForm.indexingRestrictionsLabel": "インデックスの制約",
+ "xpack.enterpriseSearch.crawler.addDomainForm.initialVaidationLabel": "初期検証",
+ "xpack.enterpriseSearch.crawler.addDomainForm.networkConnectivityLabel": "ネットワーク接続",
+ "xpack.enterpriseSearch.crawler.addDomainForm.submitButtonLabel": "ドメインを追加",
+ "xpack.enterpriseSearch.crawler.addDomainForm.testUrlButtonLabel": "ブラウザーでURLをテスト",
+ "xpack.enterpriseSearch.crawler.addDomainForm.unexpectedValidationErrorMessage": "予期しないエラー",
+ "xpack.enterpriseSearch.crawler.addDomainForm.urlHelpText": "ドメインURLにはプロトコルが必要です。パスを含めることはできません。",
+ "xpack.enterpriseSearch.crawler.addDomainForm.urlLabel": "ドメインURL",
+ "xpack.enterpriseSearch.crawler.addDomainForm.validateButtonLabel": "ドメインを検証",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel": "自動的にクロール",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlUnitsPrefix": "毎",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.formDescription": "ご安心ください。クロールは自動的に開始されます。{readMoreMessage}。",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.readMoreLink": "詳細をお読みください。",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleDescription": "クロールスケジュールはこのエンジンのすべてのドメインに適用されます。",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleFrequencyLabel": "スケジュール頻度",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleUnitsLabel": "スケジュール時間単位",
+ "xpack.enterpriseSearch.crawler.automaticCrawlScheduler.disableCrawlSchedule.successMessage": "自動クローリングが無効にされました。",
+ "xpack.enterpriseSearch.crawler.automaticCrawlScheduler.submitCrawlSchedule.successMessage": "自動クローリングスケジュールが更新されました。",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlCountOnDomains": "{domainCount, plural, other {# 件のドメイン}}で{crawlType}クロール",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlDepthLabel": "最大クロール深度",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlTypeLabel": "クロールタイプ",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customEntryPointUrlsTextboxLabel": "カスタム入力ポイントURL",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customSitemapUrlsTextboxLabel": "カスタムサイトマップURL",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.domainsAccordionButtonLabel": "ドメインをクロールに追加",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.emptyDomainsMessage": "ドメインを選択してください。",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.entryPointsTabLabel": "エントリポイント",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeaderDescription": "カスタム設定でワンタイムクロールを設定します。",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeadTitle": "カスタムクロール構成",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.includeSitemapsCheckboxLabel": "{robotsDotTxt}で検出されたサイトマップを含める",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldDescription": "クローラーが走査するページの数を指定する最大クロール深度を設定します。クロールをエントリポイントのみに制限する場合は、値を1に設定します。",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldLabel": "最大クロール深度",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.seedUrlsAccordionButtonLabel": "シードURL",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor": "選択済み",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.sitemapsTabLabel": "サイトマップ",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.startCrawlButtonLabel": "今すぐ適用してクロール",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.previewTabLabel": "プレビュー",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.rawJSONTabLabel": "元のJSON",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.title": "クロールリクエスト詳細",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.domainsTitle": "ドメイン",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.seedUrlsTitle": "シードURL",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.sitemapUrlsTitle": "サイトマップURL",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.avgResponseTimeLabel": "平均応答",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.clientErrorsLabel": "4xxエラー",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.durationTooltipTitle": "期間",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.logsDisabledMessage": "詳細なクロール統計情報については、設定でWebクローラーログを有効にします。",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltip": "クロール中にアクセスされ抽出されたページ。",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltipTitle": "アクセスされたページ",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesVisitedTooltipTitle": "ページ",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.serverErrorsLabel": "5xxエラー",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltip": "クロール中にクローラーによって検出されたURL(クロール構成のため従われなかったURLを含む)。",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltipTitle": "検出されたURL",
+ "xpack.enterpriseSearch.crawler.crawlerStatusBanner.changesCalloutTitle": "行った変更は次回のクロールの開始まで適用されません。",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.cancelCrawlMenuItemLabel": "クロールをキャンセル",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.crawlingButtonLabel": "クロール中...",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.pendingButtonLabel": "保留中...",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.retryCrawlButtonLabel": "クロールを再試行",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.showSelectedFieldsButtonLabel": "選択したフィールドのみを表示",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel": "クロールを開始",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startingButtonLabel": "開始中...",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.stoppingButtonLabel": "停止中...",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceled": "キャンセル",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceling": "キャンセル中",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.failed": "失敗",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.pending": "保留中",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.running": "実行中",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.skipped": "スキップ",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.starting": "開始中",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.success": "成功",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspended": "一時停止",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspending": "一時停止中",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.crawlType": "クロールタイプ",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.created": "作成済み",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domains": "ドメイン",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domainURL": "リクエストID",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.status": "ステータス",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.body": "まだクロールを開始していません。",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.title": "最近のクロールリクエストがありません",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.beginsWithLabel": "で開始",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.containsLabel": "を含む",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.endsWithLabel": "で終了",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.regexLabel": "正規表現",
+ "xpack.enterpriseSearch.crawler.crawlRulesPolicies.allowLabel": "許可",
+ "xpack.enterpriseSearch.crawler.crawlRulesPolicies.disallowLabel": "禁止",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.addButtonLabel": "クロールルールを追加",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.deleteSuccessToastMessage": "クロールルールが削除されました。",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.description": "URLがルールと一致するページを含めるか除外するためのクロールルールを作成します。ルールは連続で実行されます。各URLは最初の一致に従って評価されます。{link}",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.descriptionLinkText": "クロールルールの詳細",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTableHead": "パスパターン",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTooltip": "パスパターンはアスタリスク(*)を除くリテラル文字列です。アスタリスクはいずれかと一致するメタ文字です。",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.policyTableHead": "ポリシー",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.regexPathPatternTooltip": "パスパターンは、Ruby言語正規表現エンジンと互換性がある正規表現です。",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.ruleTableHead": "ルール",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.title": "クロールルール",
+ "xpack.enterpriseSearch.crawler.crawlTypeOptions.full": "完全",
+ "xpack.enterpriseSearch.crawler.crawlTypeOptions.partial": "部分",
+ "xpack.enterpriseSearch.crawler.crawlTypeOptions.reAppliedCrawlRules": "再適用されたクロールルール",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.allFieldsLabel": "すべてのフィールド",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.description": "Webクローラーは一意のページにのみインデックスします。重複するページを検討するときにクローラーが使用するフィールドを選択します。すべてのスキーマフィールドを選択解除して、このドメインで重複するドキュメントを許可します。{documentationLink}。",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.learnMoreMessage": "コンテンツハッシュの詳細",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.resetToDefaultsButtonLabel": "デフォルトにリセット",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.selectedFieldsLabel": "スクリプトフィールド",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.showAllFieldsButtonLabel": "すべてのフィールドを表示",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.title": "ドキュメント処理を複製",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.add.successMessage": "ドメイン'{domainUrl}'が正常に追加されました",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.delete.buttonLabel": "このドメインを削除",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.manage.buttonLabel": "このドメインを管理",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.actions": "アクション",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.documents": "ドキュメント",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.domainURL": "ドメインURL",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.lastActivity": "前回のアクティビティ",
+ "xpack.enterpriseSearch.crawler.domainsTitle": "ドメイン",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.addButtonLabel": "エントリポイントを追加",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.description": "ここではWebサイトの最も重要なURLを含めます。エントリポイントURLは、他のページへのリンク目的で最初にインデックスおよび処理されるページです。",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageDescription": "クローラーのエントリポイントを指定するには、{link}してください",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageLinkText": "エントリポイントを追加",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageTitle": "既存のエントリポイントがありません。",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.lastItemMessage": "クローラーには1つ以上のエントリポイントが必要です。",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.learnMoreLinkText": "エントリポイントの詳細。",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.title": "エントリポイント",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.urlTableHead": "URL",
+ "xpack.enterpriseSearch.crawler.manageCrawlsPopover.reApplyCrawlRules.successMessage": "クロールルールはバックグラウンドで再適用されています",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.addButtonLabel": "サイトマップを追加",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.deleteSuccessToastMessage": "サイトマップが削除されました。",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.description": "このドメインのクローラーのサイトマップURLを指定します。",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.emptyMessageTitle": "既存のサイトマップがありません。",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.title": "サイトマップ",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.urlTableHead": "URL",
+ "xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "カスタム設定でクロール",
+ "xpack.enterpriseSearch.crawler.urlComboBox.invalidUrlErrorMessage": "有効なURLを入力してください",
"xpack.enterpriseSearch.curations.settings.licenseUpgradeLink": "ライセンスアップグレードの詳細",
"xpack.enterpriseSearch.curations.settings.start30DayTrialButtonLabel": "30 日間のトライアルの開始",
"xpack.enterpriseSearch.elasticsearch.nav.contentTitle": "Elasticsearch",
diff --git a/x-pack/plugins/translations/translations/zh-CN.json b/x-pack/plugins/translations/translations/zh-CN.json
index ddee6ff3528b6..48a6fbd79c2c6 100644
--- a/x-pack/plugins/translations/translations/zh-CN.json
+++ b/x-pack/plugins/translations/translations/zh-CN.json
@@ -10972,7 +10972,6 @@
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "使用定制设置执行爬网",
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.crawlSelectDomainsMenuLabel": "爬网选定域",
"xpack.enterpriseSearch.appSearch.crawler.startCrawlContextMenu.startACrawlButtonLabel": "开始爬网",
- "xpack.enterpriseSearch.appSearch.crawler.urlComboBox.invalidUrlErrorMessage": "请输入有效 URL",
"xpack.enterpriseSearch.appSearch.credentials.apiEndpoint": "终端",
"xpack.enterpriseSearch.appSearch.credentials.apiKeys": "API 密钥",
"xpack.enterpriseSearch.appSearch.credentials.copied": "已复制",
@@ -11676,6 +11675,21 @@
"xpack.enterpriseSearch.content.newIndex.pageTitle": "新搜索索引",
"xpack.enterpriseSearch.content.newIndex.selectSearchIndex.description": "通过创建搜索索引将内容添加到 Enterprise Search。",
"xpack.enterpriseSearch.content.newIndex.selectSearchIndex.title": "创建搜索索引",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.chineseDropDownOptionLabel": "中文",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.danishDropDownOptionLabel": "丹麦语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.dutchDropDownOptionLabel": "荷兰语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.englishDropDownOptionLabel": "英语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.frenchDropDownOptionLabel": "法语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.germanDropDownOptionLabel": "德语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.italianDropDownOptionLabel": "意大利语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.japaneseDropDownOptionLabel": "日语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.koreanDropDownOptionLabel": "朝鲜语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.portugueseBrazilDropDownOptionLabel": "葡萄牙语(巴西)",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.portugueseDropDownOptionLabel": "葡萄牙语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.russianDropDownOptionLabel": "俄语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.spanishDropDownOptionLabel": "西班牙语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.thaiDropDownOptionLabel": "泰语",
+ "xpack.enterpriseSearch.content.newIndex.supportedLanguages.universalDropDownOptionLabel": "通用",
"xpack.enterpriseSearch.content.newIndex.types.api": "API 终端",
"xpack.enterpriseSearch.content.newIndex.types.connector": "连接器",
"xpack.enterpriseSearch.content.newIndex.types.crawler": "网络爬虫",
@@ -11694,6 +11708,144 @@
"xpack.enterpriseSearch.content.searchIndices.searchIndices.pageTitle": "内容",
"xpack.enterpriseSearch.content.searchIndices.searchIndices.stepsTitle": "通过 Enterprise Search 构建出色的搜索体验",
"xpack.enterpriseSearch.content.searchIndices.searchIndices.tableTitle": "搜索索引",
+ "xpack.enterpriseSearch.crawler.action.deleteDomain.confirmationPopupMessage": "确定要移除域“{domainUrl}”和其所有设置?",
+ "xpack.enterpriseSearch.crawler.action.deleteDomain.successMessage": "域“{domainUrl}”已删除",
+ "xpack.enterpriseSearch.crawler.addDomainFlyout.openButtonLabel": "添加域",
+ "xpack.enterpriseSearch.crawler.addDomainFlyout.title": "添加新域",
+ "xpack.enterpriseSearch.crawler.addDomainForm.contentVerificationLabel": "内容验证",
+ "xpack.enterpriseSearch.crawler.addDomainForm.entryPointLabel": "网络爬虫入口点已设置为 {entryPointValue}",
+ "xpack.enterpriseSearch.crawler.addDomainForm.errorsTitle": "出问题了。请解决这些错误,然后重试。",
+ "xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationDescription": "在解决以上错误之前,网络爬虫将无法索引此域上的任何内容。",
+ "xpack.enterpriseSearch.crawler.addDomainForm.ignoreValidationTitle": "忽略验证失败并继续",
+ "xpack.enterpriseSearch.crawler.addDomainForm.indexingRestrictionsLabel": "索引限制",
+ "xpack.enterpriseSearch.crawler.addDomainForm.initialVaidationLabel": "初始验证",
+ "xpack.enterpriseSearch.crawler.addDomainForm.networkConnectivityLabel": "网络连接性",
+ "xpack.enterpriseSearch.crawler.addDomainForm.submitButtonLabel": "添加域",
+ "xpack.enterpriseSearch.crawler.addDomainForm.testUrlButtonLabel": "在浏览器中测试 URL",
+ "xpack.enterpriseSearch.crawler.addDomainForm.unexpectedValidationErrorMessage": "意外错误",
+ "xpack.enterpriseSearch.crawler.addDomainForm.urlHelpText": "域 URL 需要协议,且不能包含任何路径。",
+ "xpack.enterpriseSearch.crawler.addDomainForm.urlLabel": "域 URL",
+ "xpack.enterpriseSearch.crawler.addDomainForm.validateButtonLabel": "验证域",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlAutomaticallySwitchLabel": "自动爬网",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.crawlUnitsPrefix": "每",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.formDescription": "不用担心,我们将为您开始爬网。{readMoreMessage}。",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.readMoreLink": "阅读更多内容。",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleDescription": "爬网计划适用此引擎上的每个域。",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleFrequencyLabel": "计划频率",
+ "xpack.enterpriseSearch.crawler.automaticCrawlSchedule.scheduleUnitsLabel": "计划时间单位",
+ "xpack.enterpriseSearch.crawler.automaticCrawlScheduler.disableCrawlSchedule.successMessage": "自动爬网已禁用。",
+ "xpack.enterpriseSearch.crawler.automaticCrawlScheduler.submitCrawlSchedule.successMessage": "您的自动爬网计划已更新。",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlCountOnDomains": "在 {domainCount, plural, other {# 个域}}上进行 {crawlType} 爬网",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlDepthLabel": "最大爬网深度",
+ "xpack.enterpriseSearch.crawler.components.crawlDetailsSummary.crawlTypeLabel": "爬网类型",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customEntryPointUrlsTextboxLabel": "定制入口点 URL",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.customSitemapUrlsTextboxLabel": "定制站点地图 URL",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.domainsAccordionButtonLabel": "添加域到您的爬网",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.emptyDomainsMessage": "请选择域。",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.entryPointsTabLabel": "入口点",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeaderDescription": "使用定制设置设置一次性爬网。",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.flyoutHeadTitle": "定制爬网配置",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.includeSitemapsCheckboxLabel": "包括在 {robotsDotTxt} 中发现的站点地图",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldDescription": "设置最大爬网深度以指定网络爬虫应遍历的页面深度。将该值设置为一 (1) 可将爬网仅限定为入口点。",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.maxCrawlDepthFieldLabel": "最大爬网深度",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.seedUrlsAccordionButtonLabel": "种子 URL",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.selectedDescriptor": "已选定",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.sitemapsTabLabel": "站点地图",
+ "xpack.enterpriseSearch.crawler.crawlCustomSettingsFlyout.startCrawlButtonLabel": "立即应用并爬网",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.previewTabLabel": "预览",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.rawJSONTabLabel": "原始 JSON",
+ "xpack.enterpriseSearch.crawler.crawlDetailsFlyout.title": "爬网请求详情",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.domainsTitle": "域",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.seedUrlsTitle": "种子 URL",
+ "xpack.enterpriseSearch.crawler.crawlDetailsPreview.sitemapUrlsTitle": "站点地图 URL",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.avgResponseTimeLabel": "平均响应",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.clientErrorsLabel": "4xx 错误",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.durationTooltipTitle": "持续时间",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.logsDisabledMessage": "在设置中启用网络爬虫日志以获取更详细的爬网统计信息。",
+    "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltip": "在爬网期间访问并提取的页面。",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesTooltipTitle": "访问的页面",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.pagesVisitedTooltipTitle": "页面",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.serverErrorsLabel": "5xx 错误",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltip": "网络爬虫在爬网期间发现的 URL,包括那些由于爬网配置未跟踪的 URL。",
+ "xpack.enterpriseSearch.crawler.crawlDetailsSummary.urlsTooltipTitle": "看到的 URL",
+ "xpack.enterpriseSearch.crawler.crawlerStatusBanner.changesCalloutTitle": "所做的更改不会立即生效,直到下一次爬网开始。",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.cancelCrawlMenuItemLabel": "取消爬网",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.crawlingButtonLabel": "正在爬网.....",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.pendingButtonLabel": "待处理......",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.retryCrawlButtonLabel": "重试爬网",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.showSelectedFieldsButtonLabel": "仅显示选定字段",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startACrawlButtonLabel": "开始爬网",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.startingButtonLabel": "正在启动......",
+ "xpack.enterpriseSearch.crawler.crawlerStatusIndicator.stoppingButtonLabel": "正在停止......",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceled": "已取消",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.canceling": "正在取消",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.failed": "失败",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.pending": "待处理",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.running": "正在运行",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.skipped": "已跳过",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.starting": "正在启动",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.success": "成功",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspended": "已挂起",
+ "xpack.enterpriseSearch.crawler.crawlerStatusOptions.suspending": "正在挂起",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.crawlType": "爬网类型",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.created": "创建时间",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domains": "域",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.domainURL": "请求 ID",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.column.status": "状态",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.body": "您尚未开始任何爬网。",
+ "xpack.enterpriseSearch.crawler.crawlRequestsTable.emptyPrompt.title": "最近没有爬网请求",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.beginsWithLabel": "开始于",
+    "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.containsLabel": "包含",
+ "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.endsWithLabel": "结束于",
+    "xpack.enterpriseSearch.crawler.crawlRulesCrawlerRules.regexLabel": "正则表达式",
+ "xpack.enterpriseSearch.crawler.crawlRulesPolicies.allowLabel": "允许",
+ "xpack.enterpriseSearch.crawler.crawlRulesPolicies.disallowLabel": "不允许",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.addButtonLabel": "添加爬网规则",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.deleteSuccessToastMessage": "爬网规则已删除。",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.description": "创建爬网规则以包括或排除 URL 匹配规则的页面。规则按顺序运行,每个 URL 根据第一个匹配进行评估。{link}",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.descriptionLinkText": "详细了解爬网规则",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTableHead": "路径模式",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.pathPatternTooltip": "路径模式为文本字符串,但星号 (*) 字符除外,它是将匹配任何内容的元字符。",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.policyTableHead": "策略",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.regexPathPatternTooltip": "路径模式是与 Ruby 语言正则表达式引擎兼容的正则表达式。",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.ruleTableHead": "规则",
+ "xpack.enterpriseSearch.crawler.crawlRulesTable.title": "爬网规则",
+    "xpack.enterpriseSearch.crawler.crawlTypeOptions.full": "完整",
+ "xpack.enterpriseSearch.crawler.crawlTypeOptions.partial": "部分",
+ "xpack.enterpriseSearch.crawler.crawlTypeOptions.reAppliedCrawlRules": "已重新应用爬网规则",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.allFieldsLabel": "所有字段",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.description": "网络爬虫仅索引唯一的页面。选择网络爬虫在考虑哪些网页重复时应使用的字段。取消选择所有架构字段以在此域上允许重复的文档。{documentationLink}。",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.learnMoreMessage": "详细了解内容哈希",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.resetToDefaultsButtonLabel": "重置为默认值",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.selectedFieldsLabel": "选定字段",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.showAllFieldsButtonLabel": "显示所有字段",
+ "xpack.enterpriseSearch.crawler.deduplicationPanel.title": "重复文档处理",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.add.successMessage": "已成功添加域“{domainUrl}”",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.delete.buttonLabel": "删除此域",
+ "xpack.enterpriseSearch.crawler.domainsTable.action.manage.buttonLabel": "管理此域",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.actions": "操作",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.documents": "文档",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.domainURL": "域 URL",
+ "xpack.enterpriseSearch.crawler.domainsTable.column.lastActivity": "上次活动",
+ "xpack.enterpriseSearch.crawler.domainsTitle": "域",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.addButtonLabel": "添加入口点",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.description": "在此加入您的网站最重要的 URL。入口点 URL 将是要为其他页面的链接索引和处理的首批页面。",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageDescription": "{link}以指定网络爬虫的入口点",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageLinkText": "添加入口点",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.emptyMessageTitle": "当前没有入口点。",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.lastItemMessage": "网络爬虫需要至少一个入口点。",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.learnMoreLinkText": "详细了解入口点。",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.title": "入口点",
+ "xpack.enterpriseSearch.crawler.entryPointsTable.urlTableHead": "URL",
+ "xpack.enterpriseSearch.crawler.manageCrawlsPopover.reApplyCrawlRules.successMessage": "正在后台重新应用爬网规则",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.addButtonLabel": "添加站点地图",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.deleteSuccessToastMessage": "站点地图已删除。",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.description": "为此域上的网络爬虫指定站点地图。",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.emptyMessageTitle": "当前没有站点地图。",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.title": "站点地图",
+ "xpack.enterpriseSearch.crawler.sitemapsTable.urlTableHead": "URL",
+ "xpack.enterpriseSearch.crawler.startCrawlContextMenu.crawlCustomSettingsMenuLabel": "使用定制设置执行爬网",
+ "xpack.enterpriseSearch.crawler.urlComboBox.invalidUrlErrorMessage": "请输入有效 URL",
"xpack.enterpriseSearch.curations.settings.licenseUpgradeLink": "详细了解许可证升级",
"xpack.enterpriseSearch.curations.settings.start30DayTrialButtonLabel": "开始为期 30 天的试用",
"xpack.enterpriseSearch.elasticsearch.nav.contentTitle": "Elasticsearch",
From 1f813fc72547b95767f01da1d8c0c2d7cc3e19f8 Mon Sep 17 00:00:00 2001
From: Katerina Patticha
Date: Mon, 11 Jul 2022 23:14:34 +0200
Subject: [PATCH 03/61] [APM] Unskip API test (#136107)
---
.../tests/services/sorted_and_filtered_services.spec.ts | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/x-pack/test/apm_api_integration/tests/services/sorted_and_filtered_services.spec.ts b/x-pack/test/apm_api_integration/tests/services/sorted_and_filtered_services.spec.ts
index fe29e3e795684..b0e29debb62af 100644
--- a/x-pack/test/apm_api_integration/tests/services/sorted_and_filtered_services.spec.ts
+++ b/x-pack/test/apm_api_integration/tests/services/sorted_and_filtered_services.spec.ts
@@ -50,8 +50,7 @@ export default function ApiTest({ getService }: FtrProviderContext) {
type ServiceListItem = ValuesType>>;
- // FLAKY: https://github.com/elastic/kibana/issues/127939
- registry.when.skip(
+ registry.when(
'Sorted and filtered services',
{ config: 'trial', archives: ['apm_mappings_only_8.0.0'] },
() => {
From a2234a03e953dc8621dc6fdcfaef4734419aca0c Mon Sep 17 00:00:00 2001
From: James Rucker
Date: Mon, 11 Jul 2022 14:16:27 -0700
Subject: [PATCH 04/61] [Enterprise Search] Create API Index API (#135877)
* Getting started with an index create API
* Added default mappings and filter settings
* Added analysis settings, fixed type errors, added simple tests.
* [CI] Auto-commit changed files from 'node scripts/eslint --no-cache --fix'
* PascalCase it is.
* Bubble up the ability to use the default language.
* [CI] Auto-commit changed files from 'node scripts/precommit_hook.js --ref HEAD~1..HEAD --fix'
* Clean up types
* [CI] Auto-commit changed files from 'node scripts/precommit_hook.js --ref HEAD~1..HEAD --fix'
* Fix jest specs
Co-authored-by: kibanamachine <42973632+kibanamachine@users.noreply.github.com>
---
.../server/lib/indices/create_index.test.ts | 188 +++++++++++
.../server/lib/indices/create_index.ts | 76 +++++
.../server/lib/indices/text_analysis.test.ts | 263 ++++++++++++++++
.../server/lib/indices/text_analysis.ts | 295 ++++++++++++++++++
.../routes/enterprise_search/indices.ts | 29 ++
5 files changed, 851 insertions(+)
create mode 100644 x-pack/plugins/enterprise_search/server/lib/indices/create_index.test.ts
create mode 100644 x-pack/plugins/enterprise_search/server/lib/indices/create_index.ts
create mode 100644 x-pack/plugins/enterprise_search/server/lib/indices/text_analysis.test.ts
create mode 100644 x-pack/plugins/enterprise_search/server/lib/indices/text_analysis.ts
diff --git a/x-pack/plugins/enterprise_search/server/lib/indices/create_index.test.ts b/x-pack/plugins/enterprise_search/server/lib/indices/create_index.test.ts
new file mode 100644
index 0000000000000..877fbc1dcc12e
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/lib/indices/create_index.test.ts
@@ -0,0 +1,188 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { elasticsearchServiceMock } from '@kbn/core/server/mocks';
+
+import { createApiIndex } from './create_index';
+
+describe('createApiIndex lib function', () => {
+ const mockClient = elasticsearchServiceMock.createScopedClusterClient();
+
+ beforeEach(() => {
+ jest.clearAllMocks();
+ });
+
+ it('successfully creates an index', async () => {
+ await expect(createApiIndex(mockClient, 'index_name', 'en')).resolves.toEqual({
+ body: {},
+ headers: {
+ 'x-elastic-product': 'Elasticsearch',
+ },
+ meta: {},
+ statusCode: 200,
+ warnings: [],
+ });
+ expect(mockClient.asCurrentUser.indices.create).toHaveBeenCalledWith({
+ body: {
+ mappings: {
+ dynamic: true,
+ dynamic_templates: [
+ {
+ all_text_fields: {
+ mapping: {
+ analyzer: 'iq_text_base',
+ fields: {
+ delimiter: {
+ analyzer: 'iq_text_delimiter',
+ index_options: 'freqs',
+ type: 'text',
+ },
+ enum: {
+ ignore_above: 2048,
+ type: 'keyword',
+ },
+ joined: {
+ analyzer: 'i_text_bigram',
+ index_options: 'freqs',
+ search_analyzer: 'q_text_bigram',
+ type: 'text',
+ },
+ prefix: {
+ analyzer: 'i_prefix',
+ index_options: 'docs',
+ search_analyzer: 'q_prefix',
+ type: 'text',
+ },
+ stem: {
+ analyzer: 'iq_text_stem',
+ type: 'text',
+ },
+ },
+ },
+ match_mapping_type: 'string',
+ },
+ },
+ ],
+ },
+ settings: {
+ analysis: {
+ analyzer: {
+ i_prefix: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding', 'front_ngram'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ i_text_bigram: {
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'en-stem-filter',
+ 'bigram_joiner',
+ 'bigram_max_size',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ iq_text_base: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding', 'en-stop-words-filter'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ iq_text_delimiter: {
+ filter: [
+ 'delimiter',
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'en-stop-words-filter',
+ 'en-stem-filter',
+ ],
+ tokenizer: 'whitespace',
+ type: 'custom',
+ },
+ iq_text_stem: {
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'en-stop-words-filter',
+ 'en-stem-filter',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ q_prefix: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ q_text_bigram: {
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'en-stem-filter',
+ 'bigram_joiner_unigrams',
+ 'bigram_max_size',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ },
+ filter: {
+ bigram_joiner: {
+ max_shingle_size: 2,
+ output_unigrams: false,
+ token_separator: '',
+ type: 'shingle',
+ },
+ bigram_joiner_unigrams: {
+ max_shingle_size: 2,
+ output_unigrams: true,
+ token_separator: '',
+ type: 'shingle',
+ },
+ bigram_max_size: {
+ max: 16,
+ min: 0,
+ type: 'length',
+ },
+ delimiter: {
+ catenate_all: true,
+ catenate_numbers: true,
+ catenate_words: true,
+ generate_number_parts: true,
+ generate_word_parts: true,
+ preserve_original: false,
+ split_on_case_change: true,
+ split_on_numerics: true,
+ stem_english_possessive: true,
+ type: 'word_delimiter_graph',
+ },
+ 'en-stem-filter': {
+ name: 'light_english',
+ language: 'light_english',
+ type: 'stemmer',
+ },
+ 'en-stop-words-filter': {
+ stopwords: '_english_',
+ type: 'stop',
+ },
+ front_ngram: {
+ max_gram: 12,
+ min_gram: 1,
+ type: 'edge_ngram',
+ },
+ },
+ },
+ },
+ },
+ index: 'index_name',
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/server/lib/indices/create_index.ts b/x-pack/plugins/enterprise_search/server/lib/indices/create_index.ts
new file mode 100644
index 0000000000000..be6748f0a9bdf
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/lib/indices/create_index.ts
@@ -0,0 +1,76 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { MappingKeywordProperty, MappingTextProperty } from '@elastic/elasticsearch/lib/api/types';
+import { IScopedClusterClient } from '@kbn/core/server';
+
+import { textAnalysisSettings } from './text_analysis';
+
+const prefixMapping: MappingTextProperty = {
+ search_analyzer: 'q_prefix',
+ analyzer: 'i_prefix',
+ type: 'text',
+ index_options: 'docs',
+};
+
+const delimiterMapping: MappingTextProperty = {
+ analyzer: 'iq_text_delimiter',
+ type: 'text',
+ index_options: 'freqs',
+};
+
+const joinedMapping: MappingTextProperty = {
+ search_analyzer: 'q_text_bigram',
+ analyzer: 'i_text_bigram',
+ type: 'text',
+ index_options: 'freqs',
+};
+
+const enumMapping: MappingKeywordProperty = {
+ ignore_above: 2048,
+ type: 'keyword',
+};
+
+const stemMapping: MappingTextProperty = {
+ analyzer: 'iq_text_stem',
+ type: 'text',
+};
+
+const defaultMappings = {
+ dynamic: true,
+ dynamic_templates: [
+ {
+ all_text_fields: {
+ match_mapping_type: 'string',
+ mapping: {
+ analyzer: 'iq_text_base',
+ fields: {
+ prefix: prefixMapping,
+ delimiter: delimiterMapping,
+ joined: joinedMapping,
+ enum: enumMapping,
+ stem: stemMapping,
+ },
+ },
+ },
+ },
+ ],
+};
+
+export const createApiIndex = async (
+ client: IScopedClusterClient,
+ indexName: string,
+ language: string | undefined
+) => {
+ return await client.asCurrentUser.indices.create({
+ index: indexName,
+ body: {
+ mappings: defaultMappings,
+ settings: textAnalysisSettings(language),
+ },
+ });
+};
diff --git a/x-pack/plugins/enterprise_search/server/lib/indices/text_analysis.test.ts b/x-pack/plugins/enterprise_search/server/lib/indices/text_analysis.test.ts
new file mode 100644
index 0000000000000..ff9a1266bf3f7
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/lib/indices/text_analysis.test.ts
@@ -0,0 +1,263 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { textAnalysisSettings } from './text_analysis';
+
+describe('textAnalysisSettings lib function', () => {
+ it('supports a default language', async () => {
+ expect(textAnalysisSettings()).toEqual({
+ analysis: {
+ analyzer: {
+ i_prefix: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding', 'front_ngram'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ i_text_bigram: {
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'en-stem-filter',
+ 'bigram_joiner',
+ 'bigram_max_size',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ iq_text_base: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding', 'en-stop-words-filter'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ iq_text_delimiter: {
+ filter: [
+ 'delimiter',
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'en-stop-words-filter',
+ 'en-stem-filter',
+ ],
+ tokenizer: 'whitespace',
+ type: 'custom',
+ },
+ iq_text_stem: {
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'en-stop-words-filter',
+ 'en-stem-filter',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ q_prefix: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ q_text_bigram: {
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'en-stem-filter',
+ 'bigram_joiner_unigrams',
+ 'bigram_max_size',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ },
+ filter: {
+ bigram_joiner: {
+ max_shingle_size: 2,
+ output_unigrams: false,
+ token_separator: '',
+ type: 'shingle',
+ },
+ bigram_joiner_unigrams: {
+ max_shingle_size: 2,
+ output_unigrams: true,
+ token_separator: '',
+ type: 'shingle',
+ },
+ bigram_max_size: {
+ max: 16,
+ min: 0,
+ type: 'length',
+ },
+ delimiter: {
+ catenate_all: true,
+ catenate_numbers: true,
+ catenate_words: true,
+ generate_number_parts: true,
+ generate_word_parts: true,
+ preserve_original: false,
+ split_on_case_change: true,
+ split_on_numerics: true,
+ stem_english_possessive: true,
+ type: 'word_delimiter_graph',
+ },
+ 'en-stem-filter': {
+ name: 'light_english',
+ language: 'light_english',
+ type: 'stemmer',
+ },
+ 'en-stop-words-filter': {
+ stopwords: '_english_',
+ type: 'stop',
+ },
+ front_ngram: {
+ max_gram: 12,
+ min_gram: 1,
+ type: 'edge_ngram',
+ },
+ },
+ },
+ });
+ });
+
+ it('returns settings for another language', async () => {
+ expect(textAnalysisSettings('fr')).toEqual({
+ analysis: {
+ analyzer: {
+ i_prefix: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding', 'front_ngram'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ i_text_bigram: {
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'fr-stem-filter',
+ 'bigram_joiner',
+ 'bigram_max_size',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ iq_text_base: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding', 'fr-stop-words-filter'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ iq_text_delimiter: {
+ filter: [
+ 'fr-elision',
+ 'delimiter',
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'fr-stop-words-filter',
+ 'fr-stem-filter',
+ ],
+ tokenizer: 'whitespace',
+ type: 'custom',
+ },
+ iq_text_stem: {
+ filter: [
+ 'fr-elision',
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'fr-stop-words-filter',
+ 'fr-stem-filter',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ q_prefix: {
+ filter: ['cjk_width', 'lowercase', 'asciifolding'],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ q_text_bigram: {
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ 'fr-stem-filter',
+ 'bigram_joiner_unigrams',
+ 'bigram_max_size',
+ ],
+ tokenizer: 'standard',
+ type: 'custom',
+ },
+ },
+ filter: {
+ bigram_joiner: {
+ max_shingle_size: 2,
+ output_unigrams: false,
+ token_separator: '',
+ type: 'shingle',
+ },
+ bigram_joiner_unigrams: {
+ max_shingle_size: 2,
+ output_unigrams: true,
+ token_separator: '',
+ type: 'shingle',
+ },
+ bigram_max_size: {
+ max: 16,
+ min: 0,
+ type: 'length',
+ },
+ delimiter: {
+ catenate_all: true,
+ catenate_numbers: true,
+ catenate_words: true,
+ generate_number_parts: true,
+ generate_word_parts: true,
+ preserve_original: false,
+ split_on_case_change: true,
+ split_on_numerics: true,
+ stem_english_possessive: true,
+ type: 'word_delimiter_graph',
+ },
+ 'fr-elision': {
+ articles: [
+ 'l',
+ 'm',
+ 't',
+ 'qu',
+ 'n',
+ 's',
+ 'j',
+ 'd',
+ 'c',
+ 'jusqu',
+ 'quoiqu',
+ 'lorsqu',
+ 'puisqu',
+ ],
+ articles_case: true,
+ type: 'elision',
+ },
+ 'fr-stem-filter': {
+ name: 'light_french',
+ language: 'light_french',
+ type: 'stemmer',
+ },
+ 'fr-stop-words-filter': {
+ stopwords: '_french_',
+ type: 'stop',
+ },
+ front_ngram: {
+ max_gram: 12,
+ min_gram: 1,
+ type: 'edge_ngram',
+ },
+ },
+ },
+ });
+ });
+});
diff --git a/x-pack/plugins/enterprise_search/server/lib/indices/text_analysis.ts b/x-pack/plugins/enterprise_search/server/lib/indices/text_analysis.ts
new file mode 100644
index 0000000000000..87f0b2cfca4ed
--- /dev/null
+++ b/x-pack/plugins/enterprise_search/server/lib/indices/text_analysis.ts
@@ -0,0 +1,295 @@
+/*
+ * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
+ * or more contributor license agreements. Licensed under the Elastic License
+ * 2.0; you may not use this file except in compliance with the Elastic License
+ * 2.0.
+ */
+
+import { AnalysisTokenFilter } from '@elastic/elasticsearch/lib/api/types';
+
+interface LanguageDataEntry {
+ name: string;
+ stemmer: string;
+ stop_words: string;
+ custom_filter_definitions?: object;
+ prepended_filters?: string[];
+ postpended_filters?: string[];
+}
+
+const languageData: Record<string, LanguageDataEntry> = {
+ da: {
+ name: 'Danish',
+ stemmer: 'danish',
+ stop_words: '_danish_',
+ },
+ de: {
+ name: 'German',
+ stemmer: 'light_german',
+ stop_words: '_german_',
+ },
+ en: {
+ name: 'English',
+ stemmer: 'light_english',
+ stop_words: '_english_',
+ },
+ es: {
+ name: 'Spanish',
+ stemmer: 'light_spanish',
+ stop_words: '_spanish_',
+ },
+ fr: {
+ name: 'French',
+ stemmer: 'light_french',
+ stop_words: '_french_',
+ custom_filter_definitions: {
+ 'fr-elision': {
+ type: 'elision' as const,
+ articles: [
+ 'l',
+ 'm',
+ 't',
+ 'qu',
+ 'n',
+ 's',
+ 'j',
+ 'd',
+ 'c',
+ 'jusqu',
+ 'quoiqu',
+ 'lorsqu',
+ 'puisqu',
+ ],
+ articles_case: true,
+ },
+ },
+ prepended_filters: ['fr-elision'],
+ },
+ it: {
+ name: 'Italian',
+ stemmer: 'light_italian',
+ stop_words: '_italian_',
+ custom_filter_definitions: {
+ 'it-elision': {
+ type: 'elision' as const,
+ articles: [
+ 'c',
+ 'l',
+ 'all',
+ 'dall',
+ 'dell',
+ 'nell',
+ 'sull',
+ 'coll',
+ 'pell',
+ 'gl',
+ 'agl',
+ 'dagl',
+ 'degl',
+ 'negl',
+ 'sugl',
+ 'un',
+ 'm',
+ 't',
+ 's',
+ 'v',
+ 'd',
+ ],
+ articles_case: true,
+ },
+ },
+ prepended_filters: ['it-elision'],
+ },
+ ja: {
+ name: 'Japanese',
+ stemmer: 'light_english',
+ stop_words: '_english_',
+ postpended_filters: ['cjk_bigram'],
+ },
+ ko: {
+ name: 'Korean',
+ stemmer: 'light_english',
+ stop_words: '_english_',
+ postpended_filters: ['cjk_bigram'],
+ },
+ nl: {
+ name: 'Dutch',
+ stemmer: 'dutch',
+ stop_words: '_dutch_',
+ },
+ pt: {
+ name: 'Portuguese',
+ stemmer: 'light_portuguese',
+ stop_words: '_portuguese_',
+ },
+ 'pt-br': {
+ name: 'Portuguese (Brazil)',
+ stemmer: 'brazilian',
+ stop_words: '_brazilian_',
+ },
+ ru: {
+ name: 'Russian',
+ stemmer: 'russian',
+ stop_words: '_russian_',
+ },
+ th: {
+ name: 'Thai',
+ stemmer: 'light_english',
+ stop_words: '_thai_',
+ },
+ zh: {
+ name: 'Chinese',
+ stemmer: 'light_english',
+ stop_words: '_english_',
+ postpended_filters: ['cjk_bigram'],
+ },
+};
+
+const FRONT_NGRAM_MAX_GRAM = 12;
+
+const genericFilters: Record<string, AnalysisTokenFilter> = {
+ front_ngram: {
+ type: 'edge_ngram' as const,
+ min_gram: 1,
+ max_gram: FRONT_NGRAM_MAX_GRAM,
+ },
+ delimiter: {
+ type: 'word_delimiter_graph' as const,
+ generate_word_parts: true,
+ generate_number_parts: true,
+ catenate_words: true,
+ catenate_numbers: true,
+ catenate_all: true,
+ preserve_original: false,
+ split_on_case_change: true,
+ split_on_numerics: true,
+ stem_english_possessive: true,
+ },
+ bigram_joiner: {
+ type: 'shingle' as const,
+ token_separator: '',
+ max_shingle_size: 2,
+ output_unigrams: false,
+ },
+ bigram_joiner_unigrams: {
+ type: 'shingle' as const,
+ token_separator: '',
+ max_shingle_size: 2,
+ output_unigrams: true,
+ },
+ bigram_max_size: {
+ type: 'length' as const,
+ min: 0,
+ max: 16,
+ },
+};
+
+export const textAnalysisSettings = (language: string = 'en') => {
+ return {
+ analysis: {
+ analyzer: analyzerDefinitions(language),
+ filter: filterDefinitions(language),
+ },
+ };
+};
+
+const stemFilterName = (languageCode: string) => {
+ return `${languageCode}-stem-filter`;
+};
+
+const stopWordsFilterName = (languageCode: string) => {
+ return `${languageCode}-stop-words-filter`;
+};
+
+const analyzerDefinitions = (language: string) => {
+ const prependedFilters = languageData[language].prepended_filters || [];
+ const postpendedFilters = languageData[language].postpended_filters || [];
+
+ return {
+ i_prefix: {
+ type: 'custom' as const,
+ tokenizer: 'standard',
+ filter: ['cjk_width', 'lowercase', 'asciifolding', 'front_ngram'],
+ },
+ q_prefix: {
+ type: 'custom' as const,
+ tokenizer: 'standard',
+ filter: ['cjk_width', 'lowercase', 'asciifolding'],
+ },
+ iq_text_base: {
+ type: 'custom' as const,
+ tokenizer: 'standard',
+ filter: ['cjk_width', 'lowercase', 'asciifolding', stopWordsFilterName(language)],
+ },
+ iq_text_stem: {
+ type: 'custom' as const,
+ tokenizer: 'standard',
+ filter: [
+ ...prependedFilters,
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ stopWordsFilterName(language),
+ stemFilterName(language),
+ ...postpendedFilters,
+ ],
+ },
+ iq_text_delimiter: {
+ type: 'custom' as const,
+ tokenizer: 'whitespace',
+ filter: [
+ ...prependedFilters,
+ 'delimiter',
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ stopWordsFilterName(language),
+ stemFilterName(language),
+ ...postpendedFilters,
+ ],
+ },
+ i_text_bigram: {
+ type: 'custom' as const,
+ tokenizer: 'standard',
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ stemFilterName(language),
+ 'bigram_joiner',
+ 'bigram_max_size',
+ ],
+ },
+ q_text_bigram: {
+ type: 'custom' as const,
+ tokenizer: 'standard',
+ filter: [
+ 'cjk_width',
+ 'lowercase',
+ 'asciifolding',
+ stemFilterName(language),
+ 'bigram_joiner_unigrams',
+ 'bigram_max_size',
+ ],
+ },
+ };
+};
+
+const filterDefinitions = (language: string) => {
+ const stemmerName = languageData[language].stemmer;
+ const stopWordsName = languageData[language].stop_words;
+ const customFilterDefinitions = languageData[language].custom_filter_definitions || {};
+
+ return {
+ ...genericFilters,
+ [stemFilterName(language)]: {
+ type: 'stemmer' as const,
+ name: stemmerName,
+ language: stemmerName,
+ },
+ [stopWordsFilterName(language)]: {
+ type: 'stop' as const,
+ stopwords: stopWordsName,
+ },
+ ...customFilterDefinitions,
+ };
+};
diff --git a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/indices.ts b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/indices.ts
index 7636acdfa26ad..6a0fd3935628c 100644
--- a/x-pack/plugins/enterprise_search/server/routes/enterprise_search/indices.ts
+++ b/x-pack/plugins/enterprise_search/server/routes/enterprise_search/indices.ts
@@ -7,6 +7,7 @@
import { schema } from '@kbn/config-schema';
+import { createApiIndex } from '../../lib/indices/create_index';
import { fetchIndex } from '../../lib/indices/fetch_index';
import { fetchIndices } from '../../lib/indices/fetch_indices';
import { generateApiKey } from '../../lib/indices/generate_api_key';
@@ -124,4 +125,32 @@ export function registerIndexRoutes({ router }: RouteDependencies) {
}
}
);
+
+ router.post(
+ {
+ path: '/internal/enterprise_search/indices',
+ validate: {
+ body: schema.object({
+ indexName: schema.string(),
+ language: schema.maybe(schema.string()),
+ }),
+ },
+ },
+ async (context, request, response) => {
+ const { indexName, language } = request.body;
+ const { client } = (await context.core).elasticsearch;
+ try {
+ const createIndexResponse = await createApiIndex(client, indexName, language);
+ return response.ok({
+ body: createIndexResponse,
+ headers: { 'content-type': 'application/json' },
+ });
+ } catch (error) {
+ return response.customError({
+ body: 'Error fetching data from Enterprise Search',
+ statusCode: 502,
+ });
+ }
+ }
+ );
}
From e21e5069a724eb12f12b339ce758a2a4992b4941 Mon Sep 17 00:00:00 2001
From: Trevor Pierce <1Copenut@users.noreply.github.com>
Date: Mon, 11 Jul 2022 16:33:16 -0500
Subject: [PATCH 05/61] Bumping EUI to v60.1.2. (#135373)
* Bumping EUI to v60.1.0.
* Bumping to bugfixed v60.1.1.
* Updating snapshots for EUI Emotion changes.
* Updating a theme icon size declaration.
* Updating tests for Emotion and new EUI theme objects.
* Updating Lens drag and drop to account for Emotion wrapper.
* Updating icon size in EUI theme object for test.
* Updating Lens Editor Frame test to assert 1 click instead of 2.
* Updating two Jest tests for Emotion extra wrapper.
* Changing a Cypress selector for Emotion classnames.
* Update removed EuiIcon classes
-isLoaded was deprecated in favor of data-attrs - hook should still work
* Remove static EuiIcon classes from static SVG
- these classNames no longer output meaningful CSS and should not be used
* Fix theme JSON imports not to rely on any iconSizes keys
- (which will soon be deprecated by Emotion conversion)
- use the generic euiSizes instead, which will likely be the last to be deprecated
* Fix one more euiIcon-isLoaded className change
- we should be using the `data-is-loaded` attribute that it was switched to
* Bumping EUI to 60.1.2 patch release.
* Updating Storyshots for EUI 60.1.2.
* Removing two assertions from instances_table.spec to match localhost instance UI.
Co-authored-by: Constance Chen
Co-authored-by: Stratoula Kalafateli
Co-authored-by: Kibana Machine <42973632+kibanamachine@users.noreply.github.com>
---
package.json | 2 +-
.../exit_full_screen_button.test.tsx.snap | 170 +++++++++++++++++-
.../icon_button_group.test.tsx.snap | 158 +++++++++++++++-
.../__snapshots__/no_data_card.test.tsx.snap | 8 +-
src/dev/license_checker/config.ts | 2 +-
.../__snapshots__/data_view.test.tsx.snap | 8 +-
.../url/__snapshots__/url.test.tsx.snap | 6 +-
.../exit_full_screen_button.test.tsx.snap | 2 +-
.../not_found_errors.test.tsx.snap | 8 +-
.../service_overview/instances_table.spec.ts | 4 +-
.../shared/span_icon/icons/elasticsearch.svg | 2 +-
.../asset_manager.stories.storyshot | 8 +-
.../workpad_table.stories.storyshot | 4 +-
.../text_style_picker.stories.storyshot | 8 +-
.../tool_tip_shortcut.stories.storyshot | 14 +-
.../__snapshots__/edit_var.stories.storyshot | 2 +-
.../__snapshots__/settings.test.tsx.snap | 10 +-
.../__snapshots__/drag_drop.test.tsx.snap | 2 +-
.../lens/public/drag_drop/drag_drop.test.tsx | 34 ++--
.../config_panel/layer_panel.test.tsx | 1 +
.../editor_frame/editor_frame.test.tsx | 2 +-
.../datapanel.test.tsx | 2 +-
.../request_trial_extension.test.js.snap | 8 +-
.../revert_to_basic.test.js.snap | 6 +-
.../__snapshots__/start_trial.test.js.snap | 8 +-
.../analytics_list/_analytics_table.scss | 2 +-
.../__snapshots__/no_data.test.js.snap | 4 +-
.../__snapshots__/exporters.test.js.snap | 6 +-
.../__snapshots__/reason_found.test.js.snap | 6 +-
.../__snapshots__/summary_status.test.js.snap | 12 +-
.../security_solution/cypress/tasks/alerts.ts | 2 +-
.../authentications_host_table.test.tsx.snap | 4 +-
.../authentications_user_table.test.tsx.snap | 4 +-
.../__snapshots__/index.test.tsx.snap | 6 +-
.../components/card_compressed_header.tsx | 4 +-
.../back_to_external_app_button.tsx | 4 +-
.../field_renderers.test.tsx.snap | 2 +-
.../netflow/__snapshots__/index.test.tsx.snap | 10 +-
.../__snapshots__/index.test.tsx.snap | 2 +-
.../netflow_row_renderer.test.tsx.snap | 138 +++++++-------
.../__snapshots__/expanded_row.test.tsx.snap | 2 +-
.../monitor_status.bar.test.tsx.snap | 2 +-
yarn.lock | 8 +-
43 files changed, 509 insertions(+), 188 deletions(-)
diff --git a/package.json b/package.json
index d0a70bf1aee8e..b31198a809324 100644
--- a/package.json
+++ b/package.json
@@ -108,7 +108,7 @@
"@elastic/datemath": "5.0.3",
"@elastic/elasticsearch": "npm:@elastic/elasticsearch-canary@8.3.0-canary.1",
"@elastic/ems-client": "8.3.3",
- "@elastic/eui": "60.0.0",
+ "@elastic/eui": "60.1.2",
"@elastic/filesaver": "1.1.2",
"@elastic/node-crypto": "1.2.1",
"@elastic/numeral": "^2.5.1",
diff --git a/packages/shared-ux/button/exit_full_screen/src/__snapshots__/exit_full_screen_button.test.tsx.snap b/packages/shared-ux/button/exit_full_screen/src/__snapshots__/exit_full_screen_button.test.tsx.snap
index 793c17ae1b1e3..b16ba80b305ee 100644
--- a/packages/shared-ux/button/exit_full_screen/src/__snapshots__/exit_full_screen_button.test.tsx.snap
+++ b/packages/shared-ux/button/exit_full_screen/src/__snapshots__/exit_full_screen_button.test.tsx.snap
@@ -46,9 +46,92 @@ exports[` with kibana services is rendered 1`] = `
- In full screen mode, press ESC to exit.
+
+
+
+
+ ,
+ "ctr": 1,
+ "insertionPoint": undefined,
+ "isSpeedy": false,
+ "key": "css",
+ "nonce": undefined,
+ "prepend": undefined,
+ "tags": Array [
+ ,
+ ],
+ },
+ }
+ }
+ isStringTag={true}
+ serialized={
+ Object {
+ "map": undefined,
+ "name": "4rqdyn-euiScreenReaderOnly",
+ "next": undefined,
+ "styles": ";
+ // Take the element out of the layout
+ position: absolute;
+ // Keep it vertically inline
+ top: auto;
+ // Chrome requires a left value, and Selenium (used by Kibana's FTR) requires an off-screen position for its .getVisibleText() to not register SR-only text
+ left: -10000px;
+ // The element must have a size (for some screen readers)
+ width: 1px;
+ height: 1px;
+ // But reduce the visible size to nothing
+ clip: rect(0 0 0 0);
+ clip-path: inset(50%);
+ // And ensure no overflows occur
+ overflow: hidden;
+ // Chrome requires the negative margin to not cause overflows of parent containers
+ margin: -1px;
+;label:euiScreenReaderOnly;;;;",
+ "toString": [Function],
+ }
+ }
+ />
+
+ In full screen mode, press ESC to exit.
+
with manual services is rendered 1`] = `
- In full screen mode, press ESC to exit.
+
+
+
+ ,
+ "ctr": 1,
+ "insertionPoint": undefined,
+ "isSpeedy": false,
+ "key": "css",
+ "nonce": undefined,
+ "prepend": undefined,
+ "tags": Array [
+ ,
+ ],
+ },
+ }
+ }
+ isStringTag={true}
+ serialized={
+ Object {
+ "map": undefined,
+ "name": "4rqdyn-euiScreenReaderOnly",
+ "next": undefined,
+ "styles": ";
+ // Take the element out of the layout
+ position: absolute;
+ // Keep it vertically inline
+ top: auto;
+ // Chrome requires a left value, and Selenium (used by Kibana's FTR) requires an off-screen position for its .getVisibleText() to not register SR-only text
+ left: -10000px;
+ // The element must have a size (for some screen readers)
+ width: 1px;
+ height: 1px;
+ // But reduce the visible size to nothing
+ clip: rect(0 0 0 0);
+ clip-path: inset(50%);
+ // And ensure no overflows occur
+ overflow: hidden;
+ // Chrome requires the negative margin to not cause overflows of parent containers
+ margin: -1px;
+;label:euiScreenReaderOnly;;;;",
+ "toString": [Function],
+ }
+ }
+ />
+
+ In full screen mode, press ESC to exit.
+
is rendered 1`] = `
>
- Legend
+
+
+
+
+
+
+
+ ,
+ "ctr": 5,
+ "insertionPoint": undefined,
+ "isSpeedy": false,
+ "key": "css",
+ "nonce": undefined,
+ "prepend": undefined,
+ "tags": Array [
+ ,
+ ,
+ ,
+ ,
+ ,
+ ],
+ },
+ }
+ }
+ isStringTag={true}
+ serialized={
+ Object {
+ "map": undefined,
+ "name": "4rqdyn-euiScreenReaderOnly",
+ "next": undefined,
+ "styles": ";
+ // Take the element out of the layout
+ position: absolute;
+ // Keep it vertically inline
+ top: auto;
+ // Chrome requires a left value, and Selenium (used by Kibana's FTR) requires an off-screen position for its .getVisibleText() to not register SR-only text
+ left: -10000px;
+ // The element must have a size (for some screen readers)
+ width: 1px;
+ height: 1px;
+ // But reduce the visible size to nothing
+ clip: rect(0 0 0 0);
+ clip-path: inset(50%);
+ // And ensure no overflows occur
+ overflow: hidden;
+ // Chrome requires the negative margin to not cause overflows of parent containers
+ margin: -1px;
+;label:euiScreenReaderOnly;;;;",
+ "toString": [Function],
+ }
+ }
+ />
+
+ Legend
+
is rendered 1`] = `
Object {
"insert": [Function],
"inserted": Object {
+ "4rqdyn-euiScreenReaderOnly": true,
"iuv015-EuiButtonGroup": true,
},
"key": "css",
@@ -198,6 +336,13 @@ exports[` is rendered 1`] = `
"_insertTag": [Function],
"before": null,
"container":
+
is rendered 1`] = `
data-styled-version="5.1.0"
/>
,
- "ctr": 4,
+ "ctr": 5,
"insertionPoint": undefined,
"isSpeedy": false,
"key": "css",
"nonce": undefined,
"prepend": undefined,
"tags": Array [
+ ,