Commit d522f12

Merge remote-tracking branch 'upstream/master' into knn-filter-alias

mayya-sharipova committed Oct 21, 2021
2 parents 8cd73ff + 6cc0a41
Showing 139 changed files with 2,459 additions and 640 deletions.
49 changes: 49 additions & 0 deletions .ci/jobs.t/elastic+elasticsearch+pull-request+release-tests.yml
@@ -0,0 +1,49 @@
---
- job:
name: "elastic+elasticsearch+pull-request+release-tests"
display-name: "elastic / elasticsearch - pull request release-tests"
description: "Testing of Elasticsearch pull requests - release-tests"
workspace: "/dev/shm/elastic+elasticsearch+pull-request+release-tests"
scm:
- git:
refspec: "+refs/pull/${ghprbPullId}/*:refs/remotes/origin/pr/${ghprbPullId}/*"
branches:
- "${ghprbActualCommit}"
triggers:
- github-pull-request:
org-list:
- elastic
allow-whitelist-orgs-as-admins: true
trigger-phrase: '.*run\W+elasticsearch-ci/release-tests.*'
github-hooks: true
status-context: elasticsearch-ci/release-tests
cancel-builds-on-update: true
excluded-regions:
- ^docs/.*
white-list-labels:
- 'test-release'
black-list-target-branches:
- 7.15
- 6.8
builders:
- inject:
properties-file: '.ci/java-versions.properties'
properties-content: |
JAVA_HOME=$HOME/.java/$ES_BUILD_JAVA
RUNTIME_JAVA_HOME=$HOME/.java/$ES_RUNTIME_JAVA
JAVA8_HOME=$HOME/.java/java8
JAVA11_HOME=$HOME/.java/java11
JAVA15_HOME=$HOME/.java/openjdk15
- shell: |
#!/usr/local/bin/runbld --redirect-stderr
# Fetch beats artifacts
export ES_VERSION=$(grep 'elasticsearch' build-tools-internal/version.properties | awk '{print $3}')
export BEATS_DIR=$(pwd)/distribution/docker/build/artifacts/beats
mkdir -p ${BEATS_DIR}
curl -o "${BEATS_DIR}/metricbeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/metricbeat/metricbeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
curl -o "${BEATS_DIR}/filebeat-${ES_VERSION}-linux-x86_64.tar.gz" https://snapshots-no-kpi.elastic.co/downloads/beats/filebeat/filebeat-${ES_VERSION}-SNAPSHOT-linux-x86_64.tar.gz
$WORKSPACE/.ci/scripts/run-gradle.sh -Dbwc.checkout.align=true -Dbuild.snapshot=false \
-Dtests.jvm.argline=-Dbuild.snapshot=false -Dlicense.key=${WORKSPACE}/x-pack/license-tools/src/test/resources/public.key -Dbuild.id=deadbeef build
@@ -118,6 +118,10 @@ private Object[] getTargets(String projectPath) {
":client:sniffer",
":client:test",
":distribution:archives:integ-test-zip",
":distribution:bwc:bugfix",
":distribution:bwc:maintenance",
":distribution:bwc:minor",
":distribution:bwc:staged",
":distribution:docker",
":docs",
":example-plugins:custom-settings",
@@ -9,6 +9,7 @@
package org.elasticsearch.client;

import org.elasticsearch.Build;
import org.elasticsearch.ElasticsearchStatusException;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.license.DeleteLicenseRequest;
import org.elasticsearch.client.license.GetBasicStatusResponse;
@@ -23,6 +24,7 @@
import org.elasticsearch.client.license.StartTrialRequest;
import org.elasticsearch.client.license.StartTrialResponse;
import org.elasticsearch.common.Strings;
import org.elasticsearch.rest.RestStatus;
import org.elasticsearch.xcontent.XContentParser;
import org.elasticsearch.xcontent.json.JsonXContent;
import org.junit.After;
@@ -40,6 +42,7 @@
import static org.hamcrest.Matchers.empty;
import static org.hamcrest.Matchers.emptyOrNullString;
import static org.hamcrest.Matchers.not;
import static org.hamcrest.Matchers.stringContainsInOrder;

public class LicenseIT extends ESRestHighLevelClientTestCase {

@@ -91,6 +94,40 @@ public void testStartTrial() throws Exception {
}
}

public void testPutInvalidTrialLicense() throws Exception {
assumeTrue("Trial license is only valid when tested against snapshot/test builds",
Build.CURRENT.isSnapshot());

// use a hard-coded trial license valid for 20 years so that we can roll back from other licenses
final String signature = "xx"; // Truncated, so it is expected to fail validation
final String licenseDefinition = Strings.toString(jsonBuilder()
.startObject()
.field("licenses", List.of(
Map.of(
"uid", "96fc37c6-6fc9-43e2-a40d-73143850cd72",
"type", "trial",
// 2018-10-16 07:02:48 UTC
"issue_date_in_millis", "1539673368158",
// 2038-10-11 07:02:48 UTC, 20 yrs later
"expiry_date_in_millis", "2170393368158",
"max_nodes", "5",
"issued_to", "client_rest-high-level_integTestCluster",
"issuer", "elasticsearch",
"start_date_in_millis", "-1",
"signature", signature)))
.endObject());

final PutLicenseRequest request = new PutLicenseRequest();
request.setAcknowledge(true);
request.setLicenseDefinition(licenseDefinition);
ElasticsearchStatusException e = expectThrows(
ElasticsearchStatusException.class,
() -> highLevelClient().license().putLicense(request, RequestOptions.DEFAULT)
);
assertThat(e.status(), equalTo(RestStatus.BAD_REQUEST));
assertThat(e.getMessage(), stringContainsInOrder("malformed signature for license"));
}

public static void putTrialLicense() throws IOException {
assumeTrue("Trial license is only valid when tested against snapshot/test builds",
Build.CURRENT.isSnapshot());
10 changes: 0 additions & 10 deletions docs/Versions.asciidoc
@@ -54,16 +54,6 @@ endif::[]
:javadoc-license: {rest-high-level-client-javadoc}/org/elasticsearch/protocol/xpack/license
:javadoc-watcher: {rest-high-level-client-javadoc}/org/elasticsearch/protocol/xpack/watcher

///////
Permanently unreleased branches (master, n.X)
///////
ifeval::["{source_branch}"=="master"]
:permanently-unreleased-branch:
endif::[]
ifeval::["{source_branch}"=="{major-version}"]
:permanently-unreleased-branch:
endif::[]

///////
Shared attribute values are pulled from elastic/docs
///////
4 changes: 4 additions & 0 deletions docs/plugins/ingest-attachment.asciidoc
@@ -28,6 +28,7 @@ include::install_remove.asciidoc[]
| `indexed_chars_field` | no | `null` | Field name from which you can overwrite the number of chars being used for extraction. See `indexed_chars`.
| `properties` | no | all properties | Array of properties to select to be stored. Can be `content`, `title`, `name`, `author`, `keywords`, `date`, `content_type`, `content_length`, `language`
| `ignore_missing` | no | `false` | If `true` and `field` does not exist, the processor quietly exits without modifying the document
| `remove_binary` | no | `false` | If `true`, the binary `field` will be removed from the document
| `resource_name` | no | | Field containing the name of the resource to decode. If specified, the processor passes this resource name to the underlying Tika library to enable https://tika.apache.org/1.24.1/detection.html#Resource_Name_Based_Detection[Resource Name Based Detection].
|======

@@ -94,6 +95,9 @@ The document's `attachment` object contains extracted properties for the file:
----
// TESTRESPONSE[s/"_seq_no": \d+/"_seq_no" : $body._seq_no/ s/"_primary_term" : 1/"_primary_term" : $body._primary_term/]

NOTE: Keeping the binary as a field within the document can consume significant resources, so removing it
is highly recommended. Set `remove_binary` to `true` to remove the field automatically.
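
For example, a minimal pipeline sketch that extracts attachment data and then discards the binary (the pipeline id and the `data` source field here are illustrative, not part of this commit):

[source,console]
----
PUT _ingest/pipeline/attachment
{
  "description" : "Extract attachment information and discard the binary",
  "processors" : [
    {
      "attachment" : {
        "field" : "data",
        "remove_binary" : true
      }
    }
  ]
}
----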

To extract only certain `attachment` fields, specify the `properties` array:

[source,console]
@@ -8,7 +8,8 @@ experimental::[]

A multi-bucket aggregation that groups semi-structured text into buckets. Each `text` field is re-analyzed
using a custom analyzer. The resulting tokens are then categorized, creating buckets of similarly formatted
text values. This aggregation works best with machine-generated text like system logs. Only the first 100
analyzed tokens are used to categorize the text.

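For example, a minimal request sketch, assuming a hypothetical `log-messages` index whose `message` field holds raw log lines:

[source,console]
----
GET log-messages/_search
{
  "size": 0,
  "aggs": {
    "categories": {
      "categorize_text": {
        "field": "message",
        "max_unique_tokens": 20,
        "max_matched_tokens": 2
      }
    }
  }
}
----
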
NOTE: If you have considerable memory allocated to your JVM but are receiving circuit breaker exceptions from this
aggregation, you may be attempting to categorize text that is poorly formatted for categorization. Consider
@@ -27,11 +28,13 @@
The maximum number of unique tokens at any position up to `max_matched_tokens`.
Must be larger than 1. Smaller values use less memory and create fewer categories.
Larger values will use more memory and create narrower categories.
Max allowed value is `100`.

`max_matched_tokens`::
(Optional, integer, default: `5`)
The maximum number of token positions to match on before attempting to merge categories.
Larger values will use more memory and create narrower categories.
Max allowed value is `100`.

Example:
`max_matched_tokens` of 2 would disallow merging of the categories
@@ -90,7 +93,6 @@ include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=tokenizer]
(array of strings or objects)
include::{es-repo-dir}/ml/ml-shared.asciidoc[tag=filter]
=====
end::categorization-analyzer[]

`shard_size`::
(Optional, integer)
2 changes: 2 additions & 0 deletions docs/reference/commands/setup-passwords.asciidoc
@@ -3,6 +3,8 @@
[[setup-passwords]]
== elasticsearch-setup-passwords

deprecated[8.0, "The `elasticsearch-setup-passwords` tool is deprecated and will be removed in a future release. To manually reset the password for the `elastic` user, use the <<reset-elastic-password,`elasticsearch-reset-elastic-password` tool>>. To change passwords for other users, use either {kib} or the {es} change passwords API."]

The `elasticsearch-setup-passwords` command sets the passwords for the
<<built-in-users,built-in users>>.

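As a sketch of one replacement path, passwords for a specific user can be changed through the {es} change passwords API (the username and password here are illustrative):

[source,console]
----
POST /_security/user/jacknich/_password
{
  "password" : "new-strong-password"
}
----
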
14 changes: 13 additions & 1 deletion docs/reference/migration/migrate_8_0/api.asciidoc
@@ -83,10 +83,22 @@ Use {ref}/ml-apis.html#ml-api-datafeed-endpoint[{dfeeds}] instead.
[%collapsible]
====
*Details* +
The ability to update a `job_id` in a {dfeed} was deprecated in 7.3.0 and is
removed in 8.0.
*Impact* +
It is not possible to move {dfeeds} between {anomaly-jobs}.
====

.Create and delete snapshot repository APIs return a `409` status code instead of `500` when a repository is in use.
[%collapsible]
====
*Details* +
The {ref}/put-snapshot-repo-api.html[Create or update snapshot repository API] and the
{ref}/delete-snapshot-repo-api.html[Delete snapshot repository API] now return a `409`
status code instead of `500` when a request attempts to modify or delete a repository that is in use.
*Impact* +
Update client code that handles creation and deletion of repositories to reflect this change.
====
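
For example, deleting a repository that is still in use (the repository name here is illustrative) now reports the conflict explicitly:

[source,console]
----
DELETE _snapshot/my_repository
----

Client code that previously treated this case as a generic `500` server error should handle `409` as "repository in use".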
// end::notable-breaking-changes[]
6 changes: 3 additions & 3 deletions docs/reference/modules/indices/circuit_breaker.asciidoc
@@ -126,11 +126,11 @@ within a period of time.
See the "prefer-parameters" section of the <<modules-scripting-using,scripting>>
documentation for more information.

`script.context.$CONTEXT.max_compilations_rate`::
`script.max_compilations_rate`::
(<<dynamic-cluster-setting,Dynamic>>)
Limit for the number of unique dynamic scripts within a certain interval
that are allowed to be compiled for a given context. Defaults to `75/5m`,
meaning 75 every 5 minutes.
that are allowed to be compiled. Defaults to `150/5m`,
meaning 150 every 5 minutes.

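Because this is a <<dynamic-cluster-setting,dynamic>> setting, it can be adjusted at runtime; a minimal sketch (the `300/5m` value is illustrative):

[source,console]
----
PUT _cluster/settings
{
  "persistent": {
    "script.max_compilations_rate": "300/5m"
  }
}
----
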
[[regex-circuit-breaker]]
[discrete]
13 changes: 13 additions & 0 deletions docs/reference/modules/node.asciidoc
@@ -459,6 +459,19 @@ should be configured to locate the data directory outside the {es} home
directory, so that the home directory can be deleted without deleting your data!
The RPM and Debian distributions do this for you already.

// tag::modules-node-data-path-warning-tag[]
WARNING: Don't modify anything within the data directory or run processes that
might interfere with its contents. If something other than {es} modifies the
contents of the data directory, then {es} may fail, reporting corruption or
other data inconsistencies, or may appear to work correctly having silently
lost some of your data. Don't attempt to take filesystem backups of the data
directory; there is no supported way to restore such a backup. Instead, use
<<snapshot-restore>> to take backups safely. Don't run virus scanners on the
data directory. A virus scanner can prevent {es} from working correctly and may
modify the contents of the data directory. The data directory contains no
executables so a virus scan will only find false positives.
// end::modules-node-data-path-warning-tag[]
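
As a sketch of the recommended alternative, a shared-filesystem snapshot repository can be registered and used for backups (the repository name and location are illustrative, and the location must be permitted by the `path.repo` setting):

[source,console]
----
PUT _snapshot/my_backup
{
  "type": "fs",
  "settings": {
    "location": "/mnt/backups/my_backup"
  }
}

PUT _snapshot/my_backup/snapshot_1?wait_for_completion=true
----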

[discrete]
[[other-node-settings]]
=== Other node settings
12 changes: 4 additions & 8 deletions docs/reference/scripting/using.asciidoc
@@ -120,12 +120,8 @@ the `multiplier` parameter without {es} recompiling the script.
}
----

For most contexts, you can compile up to 75 scripts per 5 minutes by default.
For ingest contexts, the default script compilation rate is unlimited. You
can change these settings dynamically by setting
`script.context.$CONTEXT.max_compilations_rate`. For example, the following
setting limits script compilation to 100 scripts every 10 minutes for the
{painless}/painless-field-context.html[field context]:
You can compile up to 150 scripts per 5 minutes by default.
For ingest contexts, the default script compilation rate is unlimited.

[source,js]
----
@@ -406,8 +402,8 @@ small.

All scripts are cached by default so that they only need to be recompiled
when updates occur. By default, scripts do not have a time-based expiration.
You can change this behavior by using the `script.context.$CONTEXT.cache_expire` setting.
Use the `script.context.$CONTEXT.cache_max_size` setting to configure the size of the cache.
You can change this behavior by using the `script.cache.expire` setting.
Use the `script.cache.max_size` setting to configure the size of the cache.

NOTE: The size of scripts is limited to 65,535 bytes. Set the value of `script.max_size_in_bytes` to increase that soft limit. If your scripts are
really large, then consider using a
@@ -17,16 +17,14 @@ In production, we strongly recommend you set the `path.data` and `path.logs` in
`.msi`>> installations write data and log to locations outside of `$ES_HOME` by
default.

IMPORTANT: To avoid errors, only {es} should open files in the `path.data`
directory. Exclude the `path.data` directory from other services that may open
and lock its files, such as antivirus or backup programs.

Supported `path.data` and `path.logs` values vary by platform:

include::{es-repo-dir}/tab-widgets/code.asciidoc[]

include::{es-repo-dir}/tab-widgets/customize-data-log-path-widget.asciidoc[]

include::{es-repo-dir}/modules/node.asciidoc[tag=modules-node-data-path-warning-tag]

[discrete]
==== Multiple data paths
deprecated::[7.13.0]