From bbb105d7f166c3beb267000af59737212ddb874e Mon Sep 17 00:00:00 2001
From: Michal Pristas <michal.pristas@gmail.com>
Date: Wed, 17 Jun 2020 21:23:56 +0200
Subject: [PATCH] [Ingest Manager] New structure of agent configuration
 (#19128)

* phase 1

* phase 2

* phase 4

* updated configuration

* fixed compact form (depends on parser)

* configuration update

* fixed tests

* mod
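
In short: the top-level `datasources` list is gone. Each former input is
now a top-level entry in an `inputs` list, the datasource-level `namespace`
becomes `dataset.namespace` on the input, and each stream's `dataset` key
becomes `dataset.name`. A minimal before/after sketch of the mapping:

    # before
    datasources:
      - namespace: default
        use_output: default
        inputs:
          - type: system/metrics
            streams:
              - metricset: cpu
                dataset: system.cpu

    # after
    inputs:
      - type: system/metrics
        dataset.namespace: default
        use_output: default
        streams:
          - metricset: cpu
            dataset.name: system.cpu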
---
 .../_meta/config/common.p2.yml.tmpl           |  25 +-
 .../_meta/config/common.reference.p2.yml.tmpl |  25 +-
 .../config/elastic-agent.docker.yml.tmpl      |  25 +-
 x-pack/elastic-agent/_meta/elastic-agent.yml  |  25 +-
 .../docs/elastic-agent-configuration.asciidoc |  29 +-
 .../elastic-agent_configuration_example.yml   | 965 +++++++++---------
 x-pack/elastic-agent/elastic-agent.docker.yml |  25 +-
 .../elastic-agent/elastic-agent.reference.yml |  25 +-
 x-pack/elastic-agent/elastic-agent.yml        |  25 +-
 .../application/filters/constraints_filter.go |  24 +-
 .../agent/application/managed_mode_test.go    | 386 ++++---
 .../pkg/agent/program/program.go              |  12 +-
 .../pkg/agent/program/program_test.go         | 192 ++--
 .../pkg/agent/program/supported.go            |   2 +-
 .../testdata/constraints_config-filebeat.yml  |   1 -
 .../program/testdata/constraints_config.yml   |  26 +-
 .../agent/program/testdata/enabled_false.yml  |  16 +-
 .../program/testdata/enabled_output_false.yml |  14 +-
 .../testdata/enabled_output_true-filebeat.yml |   1 -
 .../program/testdata/enabled_output_true.yml  |  14 +-
 .../testdata/enabled_true-filebeat.yml        |   1 -
 .../agent/program/testdata/enabled_true.yml   |  16 +-
 .../testdata/single_config-filebeat.yml       |  21 +-
 .../agent/program/testdata/single_config.yml  |  72 +-
 .../pkg/agent/transpiler/merge_strategy.go    |   4 -
 .../pkg/agent/transpiler/rules.go             | 388 ++++---
 .../pkg/agent/transpiler/rules_test.go        | 216 ++--
 x-pack/elastic-agent/spec/filebeat.yml        |  19 +-
 x-pack/elastic-agent/spec/metricbeat.yml      |  11 +-
 29 files changed, 1311 insertions(+), 1294 deletions(-)

diff --git a/x-pack/elastic-agent/_meta/config/common.p2.yml.tmpl b/x-pack/elastic-agent/_meta/config/common.p2.yml.tmpl
index 10a78b5e0521..03dcb39c7308 100644
--- a/x-pack/elastic-agent/_meta/config/common.p2.yml.tmpl
+++ b/x-pack/elastic-agent/_meta/config/common.p2.yml.tmpl
@@ -8,20 +8,19 @@ outputs:
     username: elastic
     password: changeme
 
-datasources:
-  - namespace: default
+inputs:
+  - type: system/metrics
+    dataset.namespace: default
     use_output: default
-    inputs:
-      - type: system/metrics
-        streams:
-          - metricset: cpu
-            dataset: system.cpu
-          - metricset: memory
-            dataset: system.memory
-          - metricset: network
-            dataset: system.network
-          - metricset: filesystem
-            dataset: system.filesystem
+    streams:
+      - metricset: cpu
+        dataset.name: system.cpu
+      - metricset: memory
+        dataset.name: system.memory
+      - metricset: network
+        dataset.name: system.network
+      - metricset: filesystem
+        dataset.name: system.filesystem
 
 # settings.monitoring:
 #   # enabled turns on monitoring of running processes
diff --git a/x-pack/elastic-agent/_meta/config/common.reference.p2.yml.tmpl b/x-pack/elastic-agent/_meta/config/common.reference.p2.yml.tmpl
index 5086a5fa253b..6fd2e4f4e247 100644
--- a/x-pack/elastic-agent/_meta/config/common.reference.p2.yml.tmpl
+++ b/x-pack/elastic-agent/_meta/config/common.reference.p2.yml.tmpl
@@ -8,20 +8,19 @@ outputs:
     username: elastic
     password: changeme
 
-datasources:
-  - namespace: default
+inputs:
+  - type: system/metrics
+    dataset.namespace: default
     use_output: default
-    inputs:
-      - type: system/metrics
-        streams:
-          - metricset: cpu
-            dataset: system.cpu
-          - metricset: memory
-            dataset: system.memory
-          - metricset: network
-            dataset: system.network
-          - metricset: filesystem
-            dataset: system.filesystem
+    streams:
+      - metricset: cpu
+        dataset.name: system.cpu
+      - metricset: memory
+        dataset.name: system.memory
+      - metricset: network
+        dataset.name: system.network
+      - metricset: filesystem
+        dataset.name: system.filesystem
 
 # management:
 #   # Mode of management, the Elastic Agent support two modes of operation:
diff --git a/x-pack/elastic-agent/_meta/config/elastic-agent.docker.yml.tmpl b/x-pack/elastic-agent/_meta/config/elastic-agent.docker.yml.tmpl
index a4effcf24f8a..962484e11f20 100644
--- a/x-pack/elastic-agent/_meta/config/elastic-agent.docker.yml.tmpl
+++ b/x-pack/elastic-agent/_meta/config/elastic-agent.docker.yml.tmpl
@@ -8,20 +8,19 @@ outputs:
     username: '${ELASTICSEARCH_USERNAME:elastic}'
     password: '${ELASTICSEARCH_PASSWORD:changeme}'
 
-datasources:
-  - namespace: default
+inputs:
+  - type: system/metrics
+    dataset.namespace: default
     use_output: default
-    inputs:
-      - type: system/metrics
-        streams:
-          - metricset: cpu
-            dataset: system.cpu
-          - metricset: memory
-            dataset: system.memory
-          - metricset: network
-            dataset: system.network
-          - metricset: filesystem
-            dataset: system.filesystem
+    streams:
+      - metricset: cpu
+        dataset.name: system.cpu
+      - metricset: memory
+        dataset.name: system.memory
+      - metricset: network
+        dataset.name: system.network
+      - metricset: filesystem
+        dataset.name: system.filesystem
 
 # management:
 #   # Mode of management, the Elastic Agent support two modes of operation:
diff --git a/x-pack/elastic-agent/_meta/elastic-agent.yml b/x-pack/elastic-agent/_meta/elastic-agent.yml
index 15582908fe7b..b7db83d12bda 100644
--- a/x-pack/elastic-agent/_meta/elastic-agent.yml
+++ b/x-pack/elastic-agent/_meta/elastic-agent.yml
@@ -8,20 +8,19 @@ outputs:
     username: elastic
     password: changeme
 
-datasources:
-  - namespace: default
+inputs:
+  - type: system/metrics
+    dataset.namespace: default
     use_output: default
-    inputs:
-      - type: system/metrics
-        streams:
-          - metricset: cpu
-            dataset: system.cpu
-          - metricset: memory
-            dataset: system.memory
-          - metricset: network
-            dataset: system.network
-          - metricset: filesystem
-            dataset: system.filesystem
+    streams:
+      - metricset: cpu
+        dataset.name: system.cpu
+      - metricset: memory
+        dataset.name: system.memory
+      - metricset: network
+        dataset.name: system.network
+      - metricset: filesystem
+        dataset.name: system.filesystem
 
 # management:
 #   # Mode of management, the Elastic Agent support two modes of operation:
diff --git a/x-pack/elastic-agent/docs/elastic-agent-configuration.asciidoc b/x-pack/elastic-agent/docs/elastic-agent-configuration.asciidoc
index abb28cc154aa..daca829624ec 100644
--- a/x-pack/elastic-agent/docs/elastic-agent-configuration.asciidoc
+++ b/x-pack/elastic-agent/docs/elastic-agent-configuration.asciidoc
@@ -20,7 +20,7 @@ To alter this behavior, configure the output and other configuration settings:
 == Output settings
 
 Specify one or more outputs. Specifying multiple outputs allows you to pair
-each data source with a different output. 
+each data source with a different output.
 
 IMPORTANT: {agent} currently works with the {es} output only.
 
@@ -55,7 +55,7 @@ A default output configuration is required.
 [[elastic-agent-monitoring-configuration]]
 == {beats} monitoring settings
 
-{agent} monitors {beats} by default. To disable or change monitoring 
+{agent} monitors {beats} by default. To disable or change monitoring
 settings, set options under `settings.monitoring`:
 
 [source,yaml]
@@ -90,20 +90,19 @@ filesystem metrics, and sends them to the default output. For example:
 
 [source,yaml]
 -------------------------------------------------------------------------------------
-datasources:
-  - namespace: default
+inputs:
+  - type: system/metrics
+    dataset.namespace: default
     use_output: default
-    inputs:
-      - type: system/metrics
-        streams:
-          - metricset: cpu
-            dataset: system.cpu
-          - metricset: memory
-            dataset: system.memory
-          - metricset: network
-            dataset: system.network
-          - metricset: filesystem
-            dataset: system.filesystem
+    streams:
+      - metricset: cpu
+        dataset.name: system.cpu
+      - metricset: memory
+        dataset.name: system.memory
+      - metricset: network
+        dataset.name: system.network
+      - metricset: filesystem
+        dataset.name: system.filesystem
 -------------------------------------------------------------------------------------
 
 If `use_output` is not specified, the `default` output is used.
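
A sketch of how `use_output` pairs an input with a named output under the
new structure (the `long_term_storage` name follows the example config
below; the hosts are illustrative):

    outputs:
      default:
        type: elasticsearch
        hosts: ["http://localhost:9200"]
      long_term_storage:
        type: elasticsearch
        hosts: ["http://archive.example.com:9200"]

    inputs:
      - type: logs
        use_output: long_term_storage
        streams:
          - dataset.name: nginx.access
            paths: /var/log/nginx/access.log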
diff --git a/x-pack/elastic-agent/docs/elastic-agent_configuration_example.yml b/x-pack/elastic-agent/docs/elastic-agent_configuration_example.yml
index 774306f23bae..4c30013562a4 100644
--- a/x-pack/elastic-agent/docs/elastic-agent_configuration_example.yml
+++ b/x-pack/elastic-agent/docs/elastic-agent_configuration_example.yml
@@ -34,481 +34,496 @@ settings.monitoring:
 # Available log levels are: error, warning, info, debug
 #logging.level: trace
 
-datasources:
-  # use the nginx package
-  - id?: nginx-x1
-    enabled?: true # default to true
-    title?: "This is a nice title for human"
-    # Package this config group is coming from. On importing, we know where it belongs
-    # The package tells the UI which application to link to
-    package?:
-      name: epm/nginx
-      version: 1.7.0
-    namespace?: prod
+inputs:
+  - type: logs
+    name: epm/nginx
+    version: 1.7.0
+    dataset.namespace: prod
+    constraints?:
+      # Constraints format is not final
+      - os.platform: { in: "windows" }
+      - agent.version: { ">=": "8.0.0" }
+    use_output: long_term_storage
+    processors?:
+    streams:
+      - id?: {id}
+        enabled?: true # default to true
+        dataset.name: nginx.access
+        paths: /var/log/nginx/access.log
+      - id?: {id}
+        enabled?: true # default to true
+        dataset.name: nginx.error
+        paths: /var/log/nginx/error.log
+  - type: nginx/metrics
+    name: epm/nginx
+    version: 1.7.0
+    dataset.namespace: prod
     constraints?:
       # Constraints format is not final
       - os.platform: { in: "windows" }
       - agent.version: { ">=": "8.0.0" }
     use_output: long_term_storage
-    inputs:
-      - type: logs
-        processors?:
-        streams:
-          - id?: {id}
-            enabled?: true # default to true
-            dataset: nginx.acccess
-            paths: /var/log/nginx/access.log
-          - id?: {id}
-            enabled?: true # default to true
-            dataset: nginx.error
-            paths: /var/log/nginx/error.log
-      - type: nginx/metrics
-        streams:
-          - id?: {id}
-            enabled?: true # default to true
-            dataset: nginx.stub_status
-            metricset: stub_status
+    streams:
+      - id?: {id}
+        enabled?: true # default to true
+        dataset.name: nginx.stub_status
+        metricset: stub_status
 
-  #################################################################################################
-  # Custom Kafka datasource
-  - id: kafka-x1
+#################################################################################################
+# Custom Kafka datasource
+inputs:
+  - type: kafka
+    id: kafka-x1
     title: "Consume data from kafka"
-    namespace?: prod
+    dataset.namespace: prod
     use_output: long_term_storage
-    inputs:
-      - type: kafka
-        host: localhost:6566
-        streams:
-          - dataset: foo.dataset
-            topic: foo
-            processors:
-             - extract_bro_specifics
+    host: localhost:6566
+    streams:
+      - dataset.name: foo.dataset
+        topic: foo
+        processors:
+          - extract_bro_specifics
 
 
-  #################################################################################################
-  # System EPM package
-  - id?: system
+#################################################################################################
+# System EPM package
+inputs:
+  - type: system/metrics
+    id?: system
     title: Collect system information and metrics
     package:
       name: epm/system
       version: 1.7.0
-    inputs:
-      - type: system/metrics
-        streams:
-         - id?: {id}
-           enabled?: false # default true
-           metricset: cpu
-           dataset: system.cpu
-           metrics: ["percentages", "normalized_percentages"]
-           period: 10s
-         - metricset: memory
-           dataset: system.memory
-           period: 10s
-         - metricset: diskio
-           dataset: system.diskio
-           period: 10s
-         - metricset: load
-           dataset: system.load
-           period: 10s
-         - metricset: memory
-           dataset: system.memory
-           period: 10s
-         - metricset: process
-           dataset: system.process
-           processes: ["firefox*"]
-           include_top_n:
-              by_cpu: 5      # include top 5 processes by CPU
-              by_memory: 5   # include top 5 processes by memory
-           period: 10s
-         - metricset: process_summary
-           dataset: system.process_summary
-           period: 10s
-         - metricset: uptime
-           dataset: system.uptime
-           period: 15m
-         - metricset: socket_summary
-           dataset: system.socket_summary
-           period: 10s
-         - metricset: filesystem
-           dataset: system.filesystem
-           period: 10s
-         - metricset: raid
-           dataset: system.raid
-           period: 10s
-         - metricset: socket
-           dataset: system.socket
-           period: 10s
-         - metricset: service
-           dataset: system.service
-           period: 10s
-         - metricset: fsstat
-           dataset: system.fsstat
-           period: 10s
-         - metricset: foo
-           dataset: system.foo
-           period: 10s
+    streams:
+      - id?: {id}
+        enabled?: false # default true
+        metricset: cpu
+        dataset.name: system.cpu
+        metrics: ["percentages", "normalized_percentages"]
+        period: 10s
+      - metricset: memory
+        dataset.name: system.memory
+        period: 10s
+      - metricset: diskio
+        dataset.name: system.diskio
+        period: 10s
+      - metricset: load
+        dataset.name: system.load
+        period: 10s
+      - metricset: memory
+        dataset.name: system.memory
+        period: 10s
+      - metricset: process
+        dataset.name: system.process
+        processes: ["firefox*"]
+        include_top_n:
+          by_cpu: 5      # include top 5 processes by CPU
+          by_memory: 5   # include top 5 processes by memory
+        period: 10s
+      - metricset: process_summary
+        dataset.name: system.process_summary
+        period: 10s
+      - metricset: uptime
+        dataset.name: system.uptime
+        period: 15m
+      - metricset: socket_summary
+        dataset.name: system.socket_summary
+        period: 10s
+      - metricset: filesystem
+        dataset.name: system.filesystem
+        period: 10s
+      - metricset: raid
+        dataset.name: system.raid
+        period: 10s
+      - metricset: socket
+        dataset.name: system.socket
+        period: 10s
+      - metricset: service
+        dataset.name: system.service
+        period: 10s
+      - metricset: fsstat
+        dataset.name: system.fsstat
+        period: 10s
+      - metricset: foo
+        dataset.name: system.foo
+        period: 10s
 
 
-  #################################################################################################
-  # Elasticsearch package example
-  - id?: my-endpoint
-    title: Collect Elasticsearch information
+#################################################################################################
+# Elasticsearch package example
+inputs:
+    - type: log
+      id?: my-endpoint
+      title: Collect Elasticsearch information
+      package:
+        name: epm/elasticsearch
+        version: 1.7.0
+      streams:
+      - id?: {id}
+        enabled?: true # default to true
+        dataset.name: elasticsearch.audit
+        paths: [/var/log/elasticsearch/*_access.log, /var/log/elasticsearch/*_audit.log]
+      - id?: {id}
+        enabled?: true
+        dataset.name: elasticsearch.deprecation
+        paths: [/var/log/elasticsearch/*_deprecation.log]
+      - id?: {id}
+        enabled?: true
+        dataset.name: elasticsearch.gc
+        paths: [/var/log/elasticsearch/*_gc.log, /var/log/elasticsearch/*_gc.log.[0-9]*]
+      - id?: {id}
+        enabled?: true
+        dataset.name: elasticsearch.server
+        paths: [/var/log/elasticsearch/*.log]
+      - id?: {id}
+        enabled?: true
+        dataset.name: elasticsearch.slowlog
+        paths: [/var/log/elasticsearch/*_index_search_slowlog.log, /var/log/elasticsearch/*_index_indexing_slowlog.log]
+    - type: elasticsearch/metrics
+      id?: my-endpoint
+      title: Collect Elasticsearch information
+      package:
+        name: epm/elasticsearch
+        version: 1.7.0
+      hosts: ["http://localhost:9200"]
+      # api_key: xxxx
+      # username: elastic
+      # password: changeme
+      # ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
+      # ssl.ca_sha256: BxZ...
+      # ssl.certificate: ...
+      # ssl.key: ...
+      xpack.enabled: true
+      streams:
+      - id?: {id}
+        metricset: ccr
+        dataset.name: elasticsearch.ccr
+        period: 10s
+      - id?: {id}
+        metricset: cluster_stats
+        dataset.name: elasticsearch.cluster_stats
+        period: 10s
+      - id?: {id}
+        metricset: enrich
+        dataset.name: elasticsearch.enrich
+        period: 10s
+      - id?: {id}
+        metricset: index
+        dataset.name: elasticsearch.index
+        period: 10s
+      - id?: {id}
+        metricset: index_recovery
+        dataset.name: elasticsearch.index_recovery
+        active_only: true
+        period: 10s
+      - id?: {id}
+        metricset: ml_jobs
+        dataset.name: elasticsearch.ml_jobs
+        period: 10s
+      - id?: {id}
+        metricset: node_stats
+        dataset.name: elasticsearch.node_stats
+        period: 10s
+      - id?: {id}
+        metricset: shard
+        dataset.name: elasticsearch.shard
+        period: 10s
+
+#################################################################################################
+# AWS module
+inputs:
+  # Looking at the AWS modules, I believe each fileset needs to be in its own
+  # bucket?
+  - type: s3
+    id?: my-aws
+    title: Collect AWS
     package:
-      name: epm/elasticsearch
+      name: epm/aws
       version: 1.7.0
-    inputs:
-        - type: log
-          streams:
-          - id?: {id}
-            enabled?: true # default to true
-            dataset: elasticsearch.audit
-            paths: [/var/log/elasticsearch/*_access.log, /var/log/elasticsearch/*_audit.log]
-          - id?: {id}
-            enabled?: true
-            dataset: elasticsearch.deprecation
-            paths: [/var/log/elasticsearch/*_deprecation.log]
-          - id?: {id}
-            enabled?: true
-            dataset: elasticsearch.gc
-            paths: [/var/log/elasticsearch/*_gc.log, /var/log/elasticsearch/*_gc.log.[0-9]*]
-          - id?: {id}
-            enabled?: true
-            dataset: elasticsearch.server
-            paths: [/var/log/elasticsearch/*.log]
-          - id?: {id}
-            enabled?: true
-            dataset: elasticsearch.slowlog
-            paths: [/var/log/elasticsearch/*_index_search_slowlog.log, /var/log/elasticsearch/*_index_indexing_slowlog.log]
-        - type: elasticsearch/metrics
-          hosts: ["http://localhost:9200"]
-          hosts: ["http://localhost:9200"]
-          # api_key: xxxx
-          # username: elastic
-          # password: changeme
-          # ssl.certificate_authorities: ["/etc/pki/root/ca.pem"]
-          # ssl.ca_sha256: BxZ...
-          # ssl.certificate: ...
-          # ssl.key: ...
-          xpack.enabled: true
-          streams:
-          - id?: {id}
-            metricset: ccr
-            dataset: elasticseach.ccr
-            period: 10s
-          - id?: {id}
-            metricset: cluster_stats
-            dataset: elasticseach.cluster_stats
-            period: 10s
-          - id?: {id}
-            metricset: enrich
-            dataset: elasticseach.enrich
-            period: 10s
-          - id?: {id}
-            metricset: index
-            dataset: elasticseach.index
-            period: 10s
-          - id?: {id}
-            metricset: index_recovery
-            dataset: elasticseach.index_recovery
-            active_only: true
-            period: 10s
-          - id?: {id}
-            metricset: ml_jobs
-            dataset: elasticseach.ml_jobs
-            period: 10s
-          - id?: {id}
-            metricset: node_stats
-            dataset: elasticseach.node_stats
-            period: 10s
-          - id?: {id}
-            metricset: shard
-            dataset: elasticseach.shard
-            period: 10s
-
-  #################################################################################################
-  # AWS module
-  - id?: my-aws
+    credential_profile_name: fb-aws
+    #shared_credential_file: /etc/filebeat/aws_credentials
+    streams:
+    - id?: {id}
+      dataset.name: aws.s3
+      queue_url: https://sqs.myregion.amazonaws.com/123456/sqs-queue
+    - id?: {id}
+      dataset.name: aws.s3access
+      queue_url: https://sqs.myregion.amazonaws.com/123456/sqs-queue
+    - id?: {id}
+      dataset.name: aws.vpcflow
+      queue_url: https://sqs.myregion.amazonaws.com/123456/sqs-queue
+    - id?: {id}
+      dataset.name: aws.cloudtrail
+      queue_url: https://sqs.myregion.amazonaws.com/123456/sqs-queue
+  - type: aws/metrics
+    id?: my-aws
     title: Collect AWS
     package:
       name: epm/aws
       version: 1.7.0
-    inputs:
-        # Looking at the AWS modules, I believe each fileset need to be in their own
-        # buckets?
-        - type: s3
-          credential_profile_name: fb-aws
-          #shared_credential_file: /etc/filebeat/aws_credentials
-          streams:
-          - id?: {id}
-            dataset: aws.s3
-            queue_url: https://sqs.myregion.amazonaws.com/123456/sqs-queue
-          - id?: {id}
-            dataset: aws.s3access
-            queue_url: https://sqs.myregion.amazonaws.com/123456/sqs-queue
-          - id?: {id}
-            dataset: aws.vpcflow
-            queue_url: https://sqs.myregion.amazonaws.com/123456/sqs-queue
-          - id?: {id}
-            dataset: aws.cloudtrail
-            queue_url: https://sqs.myregion.amazonaws.com/123456/sqs-queue
-       - type: aws/metrics
-         access_key_id: '${AWS_ACCESS_KEY_ID:""}'
-         secret_access_key: '${AWS_SECRET_ACCESS_KEY:""}'
-         session_token: '${AWS_SESSION_TOKEN:""}'
-         #credential_profile_name: test-mb
-         #shared_credential_file: ...
-         streams:
-          - id?: {id}
-            metricset: usage
-            dataset: aws.usage
-            period: 5m
-          - id?: {id}
-            metricset: cloudwatch
-            dataset: aws.cloudwatch
-            period: 5m
-            name: ["CPUUtilization", "DiskWriteOps"]
-            tags.resource_type_filter: ec2:instance
-            #dimensions:
-            #  - name: InstanceId
-            #    value: i-0686946e22cf9494a
-            statistic: ["Average", "Maximum"]
-          - id?: {id}
-            metricset: ebs
-            dataset: aws.ebs
-            period: 5m
-          - id?: {id}
-            metricset: ec2
-            dataset: aws.ec2
-            period: 5m
-          - id?: {id}
-            metricset: elb
-            dataset: aws.elb
-            period: 5m
-          - id?: {id}
-            metricset: sns
-            dataset: aws.sns
-            period: 5m
-          - id?: {id}
-            metricset: sqs
-            dataset: aws.sqs
-            period: 5m
-          - id?: {id}
-            metricset: rds
-            dataset: aws.rds
-            period: 5m
-          - id?: {id}
-            metricset: billing
-            dataset: aws.billing
-            period: 12h
-          - id?: {id}
-            metricset: billing
-            dataset: aws.billing
-            period: 12h
-          - id?: {id}
-            metricset: s3_daily_storage
-            dataset: aws.s3_daily_storage
-            period: 24h
-          - id?: {id}
-            metricset: s3_request
-            dataset: aws.s3_request
-            period: 24h
+    access_key_id: '${AWS_ACCESS_KEY_ID:""}'
+    secret_access_key: '${AWS_SECRET_ACCESS_KEY:""}'
+    session_token: '${AWS_SESSION_TOKEN:""}'
+    #credential_profile_name: test-mb
+    #shared_credential_file: ...
+    streams:
+    - id?: {id}
+      metricset: usage
+      dataset.name: aws.usage
+      period: 5m
+    - id?: {id}
+      metricset: cloudwatch
+      dataset.name: aws.cloudwatch
+      period: 5m
+      name: ["CPUUtilization", "DiskWriteOps"]
+      tags.resource_type_filter: ec2:instance
+      #dimensions:
+      #  - name: InstanceId
+      #    value: i-0686946e22cf9494a
+      statistic: ["Average", "Maximum"]
+    - id?: {id}
+      metricset: ebs
+      dataset.name: aws.ebs
+      period: 5m
+    - id?: {id}
+      metricset: ec2
+      dataset.name: aws.ec2
+      period: 5m
+    - id?: {id}
+      metricset: elb
+      dataset.name: aws.elb
+      period: 5m
+    - id?: {id}
+      metricset: sns
+      dataset.name: aws.sns
+      period: 5m
+    - id?: {id}
+      metricset: sqs
+      dataset.name: aws.sqs
+      period: 5m
+    - id?: {id}
+      metricset: rds
+      dataset.name: aws.rds
+      period: 5m
+    - id?: {id}
+      metricset: billing
+      dataset.name: aws.billing
+      period: 12h
+    - id?: {id}
+      metricset: s3_daily_storage
+      dataset.name: aws.s3_daily_storage
+      period: 24h
+    - id?: {id}
+      metricset: s3_request
+      dataset.name: aws.s3_request
+      period: 24h
 
 
-  #################################################################################################
-  # Kubernetes
-  - id?: my-kubernetes
+#################################################################################################
+# Kubernetes
+inputs:
+  - type: kubernetes-node/metrics
+    id?: my-kubernetes
     title: Collect Kubernetes
     package:
       name: epm/kubernetes
       version: 1.7.0
-    inputs:
-      - type: kubernetes-node/metrics
-        hosts: ["localhost:10250"]
-        bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token
-        ssl.certificate_authorities:
-          - /var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt
-        # username: "user"
-        # password: "secret"
-        # If kube_config is not set, KUBECONFIG environment variable will be checked
-        # and if not present it will fall back to InCluster
-        # kube_config: ~/.kube/config
-        streams:
-          - id?: {id}
-            metricset: node
-            dataset: kubernetes.node
-            period: 10s
-          - id?: {id}
-            metricset: system
-            dataset: kubernetes.system
-            period: 10s
-          - id?: {id}
-            metricset: pod
-            dataset: kubernetes.pod
-            period: 10s
-          - id?: {id}
-            metricset: container
-            dataset: kubernetes.container
-            period: 10s
-          - id?: {id}
-            metricset: container
-            dataset: kubernetes.container
-            period: 10s
-          - id?: {id}
-            metricset: event
-            dataset: kubernetes.event
-            period: 10s
-      - type: kubernetes-state/metrics
-        hosts: ["kube-state-metrics:8080"]
-        streams:
-          - id?: {id}
-            metricset: state_node
-            dataset: kubernetes.node
-            period: 10s
-          - id?: {id}
-            metricset: state_deployment
-            dataset: kubernetes.deployment
-            period: 10s
-          - id?: {id}
-            metricset: state_replicaset
-            dataset: kubernetes.replicaset
-            period: 10s
-          - id?: {id}
-            metricset: state_statefulset
-            dataset: kubernetes.statefulset
-            period: 10s
-          - id?: {id}
-            metricset: state_pod
-            dataset: kubernetes.pod
-            period: 10s
-          - id?: {id}
-            metricset: state_container
-            dataset: kubernetes.container
-            period: 10s
-          - id?: {id}
-            metricset: state_container
-            dataset: kubernetes.container
-            period: 10s
-          - id?: {id}
-            metricset: state_cronjob
-            dataset: kubernetes.cronjob
-            period: 10s
-          - id?: {id}
-            metricset: state_resourcequota
-            dataset: kubernetes.resourcequota
-            period: 10s
-          - id?: {id}
-            metricset: state_service
-            dataset: kubernetes.service
-            period: 10s
-          - id?: {id}
-            metricset: state_persistentvolume
-            dataset: kubernetes.persistentvolume
-            period: 10s
-          - id?: {id}
-            metricset: state_persistentvolumeclaim
-            dataset: kubernetes.persistentvolumeclaim
-            period: 10s
+    hosts: ["localhost:10250"]
+    bearer_token_file: /var/run/secrets/kubernetes.io/serviceaccount/token
+    ssl.certificate_authorities:
+      - /var/run/secrets/kubernetes.io/serviceaccount/service-ca.crt
+    # username: "user"
+    # password: "secret"
+    # If kube_config is not set, KUBECONFIG environment variable will be checked
+    # and if not present it will fall back to InCluster
+    # kube_config: ~/.kube/config
+    streams:
+      - id?: {id}
+        metricset: node
+        dataset.name: kubernetes.node
+        period: 10s
+      - id?: {id}
+        metricset: system
+        dataset.name: kubernetes.system
+        period: 10s
+      - id?: {id}
+        metricset: pod
+        dataset.name: kubernetes.pod
+        period: 10s
+      - id?: {id}
+        metricset: container
+        dataset.name: kubernetes.container
+        period: 10s
+      - id?: {id}
+        metricset: event
+        dataset.name: kubernetes.event
+        period: 10s
+  - type: kubernetes-state/metrics
+    id?: my-kubernetes
+    title: Collect Kubernetes
+    package:
+      name: epm/kubernetes
+      version: 1.7.0
+    hosts: ["kube-state-metrics:8080"]
+    streams:
+      - id?: {id}
+        metricset: state_node
+        dataset.name: kubernetes.node
+        period: 10s
+      - id?: {id}
+        metricset: state_deployment
+        dataset.name: kubernetes.deployment
+        period: 10s
+      - id?: {id}
+        metricset: state_replicaset
+        dataset.name: kubernetes.replicaset
+        period: 10s
+      - id?: {id}
+        metricset: state_statefulset
+        dataset.name: kubernetes.statefulset
+        period: 10s
+      - id?: {id}
+        metricset: state_pod
+        dataset.name: kubernetes.pod
+        period: 10s
+      - id?: {id}
+        metricset: state_container
+        dataset.name: kubernetes.container
+        period: 10s
+      - id?: {id}
+        metricset: state_cronjob
+        dataset.name: kubernetes.cronjob
+        period: 10s
+      - id?: {id}
+        metricset: state_resourcequota
+        dataset.name: kubernetes.resourcequota
+        period: 10s
+      - id?: {id}
+        metricset: state_service
+        dataset.name: kubernetes.service
+        period: 10s
+      - id?: {id}
+        metricset: state_persistentvolume
+        dataset.name: kubernetes.persistentvolume
+        period: 10s
+      - id?: {id}
+        metricset: state_persistentvolumeclaim
+        dataset.name: kubernetes.persistentvolumeclaim
+        period: 10s
 
-  #################################################################################################
-  # Docker
-  - id?: my-docker
+#################################################################################################
+# Docker
+inputs:
+  - type: docker/metrics
+    id?: my-docker
     title: Collect docker
     package:
       name: epm/docker
       version: 1.7.0
-    inputs:
-      - type: docker/metrics
-        hosts: ["localhost:10250"]
-        #labels.dedot: false
+    hosts: ["localhost:10250"]
+    #labels.dedot: false
 
-        # To connect to Docker over TLS you must specify a client and CA certificate.
-        #ssl:
-          #certificate_authority: "/etc/pki/root/ca.pem"
-          #certificate:           "/etc/pki/client/cert.pem"
-          #key:                   "/etc/pki/client/cert.key"
-        streams:
-          - id?: {id}
-            metricset: container
-            dataset: docker.container
-            period: 10s
-          - id?: {id}
-            metricset: cpu
-            dataset: docker.cpu
-            period: 10s
-          - id?: {id}
-            metricset: diskio
-            dataset: docker.diskio
-            period: 10s
-          - id?: {id}
-            metricset: event
-            dataset: docker.event
-            period: 10s
-          - id?: {id}
-            metricset: healthcheck
-            dataset: docker.healthcheck
-            period: 10s
-          - id?: {id}
-            metricset: info
-            dataset: docker.info
-            period: 10s
-          - id?: {id}
-            metricset: memory
-            dataset: docker.memory
-            period: 10s
-          - id?: {id}
-            metricset: network
-            dataset: docker.network
-            period: 10s
+    # To connect to Docker over TLS you must specify a client and CA certificate.
+    #ssl:
+      #certificate_authority: "/etc/pki/root/ca.pem"
+      #certificate:           "/etc/pki/client/cert.pem"
+      #key:                   "/etc/pki/client/cert.key"
+    streams:
+      - id?: {id}
+        metricset: container
+        dataset.name: docker.container
+        period: 10s
+      - id?: {id}
+        metricset: cpu
+        dataset.name: docker.cpu
+        period: 10s
+      - id?: {id}
+        metricset: diskio
+        dataset.name: docker.diskio
+        period: 10s
+      - id?: {id}
+        metricset: event
+        dataset.name: docker.event
+        period: 10s
+      - id?: {id}
+        metricset: healthcheck
+        dataset.name: docker.healthcheck
+        period: 10s
+      - id?: {id}
+        metricset: info
+        dataset.name: docker.info
+        period: 10s
+      - id?: {id}
+        metricset: memory
+        dataset.name: docker.memory
+        period: 10s
+      - id?: {id}
+        metricset: network
+        dataset.name: docker.network
+        period: 10s
 
 #################################################################################################
 ### Suricata
 #
- - id?: suricata-x1
-   title: Suricata's data
-   namespace?: "abc"
-   package:
-     name: suricata
-     version: x.x.x
-   inputs:
-     - type: log
-       streams:
-         -  id?: {id}
-            type: "typeX"
-            dataset: suricata.logs
-            path: /var/log/surcata/eve.json
+inputs:
+  - type: log
+    id?: suricata-x1
+    title: Suricata's data
+    dataset.namespace: "abc"
+    package:
+      name: suricata
+      version: x.x.x
+    streams:
+      - id?: {id}
+        type: "typeX"
+        dataset.name: suricata.logs
+        path: /var/log/suricata/eve.json
 
 #################################################################################################
 ### suggestion 1
- - id?: myendpoint-x1
-   title: Endpoint configuration
-   namespace?: "canada"
-   package:
-     name: endpoint
-     version: xxx
-   inputs:
-     - type: endpoint # Reserved key word
-       streams:
-         - type: malware
-           detect: true
-           prevent: false
-           notify_user: false
-           threshold: recommended
-           platform: windows
+inputs:
+  - type: endpoint # Reserved key word
+    id?: myendpoint-x1
+    title: Endpoint configuration
+    dataset.namespace: "canada"
+    package:
+      name: endpoint
+      version: xxx
+    streams:
+      - type: malware
+        detect: true
+        prevent: false
+        notify_user: false
+        threshold: recommended
+        platform: windows
 
-        - type: eventing
-          api: true
-          clr: false
-          dll_and_driver_load: false
-          dns: true
-          file: false
-          platform: windows
+      - type: eventing
+        api: true
+        clr: false
+        dll_and_driver_load: false
+        dns: true
+        file: false
+        platform: windows
 
-       - type: malware
-         detect: true
-         prevent: false
-         notify_user: false
-         threshold: recommended
-         platform: mac
+      - type: malware
+        detect: true
+        prevent: false
+        notify_user: false
+        threshold: recommended
+        platform: mac
 
       - type: eventing
         api: true
@@ -518,12 +533,12 @@ datasources:
         file: false
         platform: mac
 
-       - type: malware
-         detect: true
-         prevent: false
-         notify_user: false
-         threshold: recommended
-         platform: linux
+      - type: malware
+        detect: true
+        prevent: false
+        notify_user: false
+        threshold: recommended
+        platform: linux
 
       - type: eventing
         api: true
@@ -535,40 +550,40 @@ datasources:
 
 #################################################################################################
 ### suggestion 2
- - id?: myendpoint-1
-   title: Endpoint configuration
-   namespace?: "canada"
-   package:
-     name: epm/endpoint # This establish the link with the package and will allow to link it to endpoint app.
-     version: xxx
-   inputs:
-     - type: endpoint # Reserved key word
-       windows:
-            eventing:
-                api: true
-                clr: false
-                dll_and_driver_load: false
-                dns: true
-                 ...
-                file: false
-            malware:
-                detect: true
-                prevent: false
-                notify_user: false
-                threshold: recommended
-        mac:
-            eventing:
-                file: true
-                network: false
-                process: false
-                ...
-            malware:
-               detect: true
-               prevent: false
-               notify_user: false
-               threshold: recommended
-         linux:
-            eventing:
-                file: true
-                network: false
-                process: false
+inputs:
+  - type: endpoint # Reserved key word
+    id?: myendpoint-1
+    title: Endpoint configuration
+    dataset.namespace: "canada"
+    package:
+      name: epm/endpoint # This establishes the link with the package and allows linking it to the Endpoint app.
+      version: xxx
+    windows:
+        eventing:
+            api: true
+            clr: false
+            dll_and_driver_load: false
+            dns: true
+            ...
+            file: false
+        malware:
+            detect: true
+            prevent: false
+            notify_user: false
+            threshold: recommended
+    mac:
+        eventing:
+            file: true
+            network: false
+            process: false
+            ...
+        malware:
+            detect: true
+            prevent: false
+            notify_user: false
+            threshold: recommended
+    linux:
+        eventing:
+            file: true
+            network: false
+            process: false
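
The per-input `constraints?` block (syntax not final, per the comments
above) gates an input on host and agent properties. A sketch combining the
two operators that appear in this example file:

    inputs:
      - type: system/metrics
        constraints?:
          - os.platform: { in: "windows" }
          - agent.version: { ">=": "8.0.0" }
        streams:
          - metricset: cpu
            dataset.name: system.cpu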
diff --git a/x-pack/elastic-agent/elastic-agent.docker.yml b/x-pack/elastic-agent/elastic-agent.docker.yml
index a4effcf24f8a..962484e11f20 100644
--- a/x-pack/elastic-agent/elastic-agent.docker.yml
+++ b/x-pack/elastic-agent/elastic-agent.docker.yml
@@ -8,20 +8,19 @@ outputs:
     username: '${ELASTICSEARCH_USERNAME:elastic}'
     password: '${ELASTICSEARCH_PASSWORD:changeme}'
 
-datasources:
-  - namespace: default
+inputs:
+  - type: system/metrics
+    dataset.namespace: default
     use_output: default
-    inputs:
-      - type: system/metrics
-        streams:
-          - metricset: cpu
-            dataset: system.cpu
-          - metricset: memory
-            dataset: system.memory
-          - metricset: network
-            dataset: system.network
-          - metricset: filesystem
-            dataset: system.filesystem
+    streams:
+      - metricset: cpu
+        dataset.name: system.cpu
+      - metricset: memory
+        dataset.name: system.memory
+      - metricset: network
+        dataset.name: system.network
+      - metricset: filesystem
+        dataset.name: system.filesystem
 
 # management:
 #   # Mode of management, the Elastic Agent support two modes of operation:
diff --git a/x-pack/elastic-agent/elastic-agent.reference.yml b/x-pack/elastic-agent/elastic-agent.reference.yml
index 98283027c62a..5530d0b455ba 100644
--- a/x-pack/elastic-agent/elastic-agent.reference.yml
+++ b/x-pack/elastic-agent/elastic-agent.reference.yml
@@ -14,20 +14,19 @@ outputs:
     username: elastic
     password: changeme
 
-datasources:
-  - namespace: default
+inputs:
+  - type: system/metrics
+    dataset.namespace: default
     use_output: default
-    inputs:
-      - type: system/metrics
-        streams:
-          - metricset: cpu
-            dataset: system.cpu
-          - metricset: memory
-            dataset: system.memory
-          - metricset: network
-            dataset: system.network
-          - metricset: filesystem
-            dataset: system.filesystem
+    streams:
+      - metricset: cpu
+        dataset.name: system.cpu
+      - metricset: memory
+        dataset.name: system.memory
+      - metricset: network
+        dataset.name: system.network
+      - metricset: filesystem
+        dataset.name: system.filesystem
 
 # management:
 #   # Mode of management, the Elastic Agent support two modes of operation:
diff --git a/x-pack/elastic-agent/elastic-agent.yml b/x-pack/elastic-agent/elastic-agent.yml
index f218468b44f9..a24a07a982dc 100644
--- a/x-pack/elastic-agent/elastic-agent.yml
+++ b/x-pack/elastic-agent/elastic-agent.yml
@@ -14,20 +14,19 @@ outputs:
     username: elastic
     password: changeme
 
-datasources:
-  - namespace: default
+inputs:
+  - type: system/metrics
+    dataset.namespace: default
     use_output: default
-    inputs:
-      - type: system/metrics
-        streams:
-          - metricset: cpu
-            dataset: system.cpu
-          - metricset: memory
-            dataset: system.memory
-          - metricset: network
-            dataset: system.network
-          - metricset: filesystem
-            dataset: system.filesystem
+    streams:
+      - metricset: cpu
+        dataset.name: system.cpu
+      - metricset: memory
+        dataset.name: system.memory
+      - metricset: network
+        dataset.name: system.network
+      - metricset: filesystem
+        dataset.name: system.filesystem
 
 # settings.monitoring:
 #   # enabled turns on monitoring of running processes
diff --git a/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter.go b/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter.go
index 9159199e7940..c891cefb056f 100644
--- a/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter.go
+++ b/x-pack/elastic-agent/pkg/agent/application/filters/constraints_filter.go
@@ -17,7 +17,7 @@ import (
 )
 
 const (
-	datasourcesKey          = "datasources"
+	inputsKey               = "inputs"
 	constraintsKey          = "constraints"
 	validateVersionFuncName = "validate_version"
 )
@@ -30,26 +30,26 @@ var (
 // ConstraintFilter filters ast based on included constraints.
 func ConstraintFilter(log *logger.Logger, ast *transpiler.AST) error {
-	// get datasources
+	// get inputs
-	dsNode, found := transpiler.Lookup(ast, datasourcesKey)
+	inputsNode, found := transpiler.Lookup(ast, inputsKey)
 	if !found {
 		return nil
 	}
 
-	dsListNode, ok := dsNode.Value().(*transpiler.List)
+	inputsListNode, ok := inputsNode.Value().(*transpiler.List)
 	if !ok {
 		return nil
 	}
 
-	dsList, ok := dsListNode.Value().([]transpiler.Node)
+	inputsList, ok := inputsListNode.Value().([]transpiler.Node)
 	if !ok {
 		return nil
 	}
 
-	// for each datasource
+	// for each input
 	i := 0
-	originalLen := len(dsList)
-	for i < len(dsList) {
-		constraintMatch, err := evaluateConstraints(log, dsList[i])
+	originalLen := len(inputsList)
+	for i < len(inputsList) {
+		constraintMatch, err := evaluateConstraints(log, inputsList[i])
 		if err != nil {
 			return err
 		}
@@ -58,20 +58,20 @@ func ConstraintFilter(log *logger.Logger, ast *transpiler.AST) error {
 			i++
 			continue
 		}
-		dsList = append(dsList[:i], dsList[i+1:]...)
+		inputsList = append(inputsList[:i], inputsList[i+1:]...)
 	}
 
-	if len(dsList) == originalLen {
+	if len(inputsList) == originalLen {
 		return nil
 	}
 
-	// Replace datasources with limited set
+	// Replace inputs with limited set
-	if err := transpiler.RemoveKey(datasourcesKey).Apply(ast); err != nil {
+	if err := transpiler.RemoveKey(inputsKey).Apply(ast); err != nil {
 		return err
 	}
 
-	newList := transpiler.NewList(dsList)
-	return transpiler.Insert(ast, newList, datasourcesKey)
+	newList := transpiler.NewList(inputsList)
+	return transpiler.Insert(ast, newList, inputsKey)
 }
 
 func evaluateConstraints(log *logger.Logger, datasourceNode transpiler.Node) (bool, error) {
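
With this change, ConstraintFilter walks the top-level `inputs` list rather
than `datasources`: inputs whose constraints do not hold are dropped, and
the pruned list is reinserted under the same key. Assuming a Linux agent,
a config like this (the `winlog` input type is hypothetical):

    inputs:
      - type: system/metrics            # no constraints, always kept
        streams:
          - metricset: cpu
            dataset.name: system.cpu
      - type: winlog                    # hypothetical example input
        constraints?:
          - os.platform: { in: "windows" }  # fails on Linux
        streams:
          - dataset.name: windows.security

would be rewritten with only the `system/metrics` input remaining.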
diff --git a/x-pack/elastic-agent/pkg/agent/application/managed_mode_test.go b/x-pack/elastic-agent/pkg/agent/application/managed_mode_test.go
index 18000d34a93b..b5303d0dc605 100644
--- a/x-pack/elastic-agent/pkg/agent/application/managed_mode_test.go
+++ b/x-pack/elastic-agent/pkg/agent/application/managed_mode_test.go
@@ -88,204 +88,190 @@ func (m *mockStreamStore) Close() error {
 
 const fleetResponse = `
 {
-  "action": "checkin",
-  "success": true,
-  "actions": [
-    {
-      "agent_id": "17e93530-7f42-11ea-9330-71e968b29fa4",
-      "type": "CONFIG_CHANGE",
-      "data": {
-        "config": {
-          "id": "86561d50-7f3b-11ea-9fab-3db3bdb4efa4",
-          "outputs": {
-            "default": {
-              "type": "elasticsearch",
-              "hosts": [
-                "http://localhost:9200"
-              ],
-              "api_key": "pNr6fnEBupQ3-5oEEkWJ:FzhrQOzZSG-Vpsq9CGk4oA"
-            }
-          },
-          "datasources": [
-            {
-              "id": "system-1",
-              "enabled": true,
-              "use_output": "default",
-              "inputs": [
-                {
-                  "type": "system/metrics",
-                  "enabled": true,
-                  "streams": [
-                    {
-                      "id": "system/metrics-system.core",
-                      "enabled": true,
-                      "dataset": "system.core",
-                      "period": "10s",
-                      "metrics": [
-                        "percentages"
-                      ]
-                    },
-                    {
-                      "id": "system/metrics-system.cpu",
-                      "enabled": true,
-                      "dataset": "system.cpu",
-                      "period": "10s",
-                      "metrics": [
-                        "percentages",
-                        "normalized_percentages"
-                      ]
-                    },
-                    {
-                      "id": "system/metrics-system.diskio",
-                      "enabled": true,
-                      "dataset": "system.diskio",
-                      "period": "10s",
-                      "include_devices": []
-                    },
-                    {
-                      "id": "system/metrics-system.entropy",
-                      "enabled": true,
-                      "dataset": "system.entropy",
-                      "period": "10s",
-                      "include_devices": []
-                    },
-                    {
-                      "id": "system/metrics-system.filesystem",
-                      "enabled": true,
-                      "dataset": "system.filesystem",
-                      "period": "1m",
-                      "ignore_types": []
-                    },
-                    {
-                      "id": "system/metrics-system.fsstat",
-                      "enabled": true,
-                      "dataset": "system.fsstat",
-                      "period": "1m",
-                      "ignore_types": []
-                    },
-                    {
-                      "id": "system/metrics-system.load",
-                      "enabled": true,
-                      "dataset": "system.load",
-                      "period": "10s"
-                    },
-                    {
-                      "id": "system/metrics-system.memory",
-                      "enabled": true,
-                      "dataset": "system.memory",
-                      "period": "10s"
-                    },
-                    {
-                      "id": "system/metrics-system.network",
-                      "enabled": true,
-                      "dataset": "system.network",
-                      "period": "10s"
-                    },
-                    {
-                      "id": "system/metrics-system.network_summary",
-                      "enabled": true,
-                      "dataset": "system.network_summary",
-                      "period": "10s"
-                    },
-                    {
-                      "id": "system/metrics-system.process",
-                      "enabled": true,
-                      "dataset": "system.process",
-                      "period": "10s",
-                      "processes": [
-                        ".*"
-                      ],
-                      "include_top_n.enabled": true,
-                      "include_top_n.by_cpu": 5,
-                      "include_top_n.by_memory": 5,
-                      "cmdline.cache.enabled": true,
-                      "cgroups.enabled": true,
-                      "env.whitelist": [],
-                      "include_cpu_ticks": false
-                    },
-                    {
-                      "id": "system/metrics-system.process_summary",
-                      "enabled": true,
-                      "dataset": "system.process_summary",
-                      "period": "10s"
-                    },
-                    {
-                      "id": "system/metrics-system.raid",
-                      "enabled": true,
-                      "dataset": "system.raid",
-                      "period": "10s",
-                      "mount_point": "/"
-                    },
-                    {
-                      "id": "system/metrics-system.service",
-                      "enabled": true,
-                      "dataset": "system.service",
-                      "period": "10s",
-                      "state_filter": []
-                    },
-                    {
-                      "id": "system/metrics-system.socket_summary",
-                      "enabled": true,
-                      "dataset": "system.socket_summary",
-                      "period": "10s"
-                    },
-                    {
-                      "id": "system/metrics-system.uptime",
-                      "enabled": true,
-                      "dataset": "system.uptime",
-                      "period": "15m"
-                    },
-                    {
-                      "id": "system/metrics-system.users",
-                      "enabled": true,
-                      "dataset": "system.users",
-                      "period": "10s"
-                    }
-                  ]
-                },
-                {
-                  "type": "logs",
-                  "enabled": true,
-                  "streams": [
-                    {
-                      "id": "logs-system.auth",
-                      "enabled": true,
-                      "dataset": "system.auth",
-                      "paths": [
-                        "/var/log/auth.log*",
-                        "/var/log/secure*"
-                      ]
-                    },
-                    {
-                      "id": "logs-system.syslog",
-                      "enabled": true,
-                      "dataset": "system.syslog",
-                      "paths": [
-                        "/var/log/messages*",
-                        "/var/log/syslog*"
-                      ]
-                    }
-                  ]
-                }
-              ],
-              "package": {
-                "name": "system",
-                "version": "0.9.0"
-              }
-            }
-          ],
-          "revision": 3,
-          "settings.monitoring": {
-            "use_output": "default",
-            "enabled": true,
-            "logs": true,
-            "metrics": true
-          }
-        }
-      },
-      "id": "1c7e26a0-7f42-11ea-9330-71e968b29fa4",
-      "created_at": "2020-04-15T17:54:11.081Z"
-    }
-  ]
-}	
+	"action": "checkin",
+	"success": true,
+	"actions": [{
+		"agent_id": "17e93530-7f42-11ea-9330-71e968b29fa4",
+		"type": "CONFIG_CHANGE",
+		"data": {
+			"config": {
+				"id": "86561d50-7f3b-11ea-9fab-3db3bdb4efa4",
+				"outputs": {
+					"default": {
+						"type": "elasticsearch",
+						"hosts": [
+							"http://localhost:9200"
+						],
+						"api_key": "pNr6fnEBupQ3-5oEEkWJ:FzhrQOzZSG-Vpsq9CGk4oA"
+					}
+				},
+
+				"inputs": [{
+						"type": "system/metrics",
+						"enabled": true,
+						"streams": [{
+								"id": "system/metrics-system.core",
+								"enabled": true,
+								"dataset.name": "system.core",
+								"period": "10s",
+								"metrics": [
+									"percentages"
+								]
+							},
+							{
+								"id": "system/metrics-system.cpu",
+								"enabled": true,
+								"dataset.name": "system.cpu",
+								"period": "10s",
+								"metrics": [
+									"percentages",
+									"normalized_percentages"
+								]
+							},
+							{
+								"id": "system/metrics-system.diskio",
+								"enabled": true,
+								"dataset.name": "system.diskio",
+								"period": "10s",
+								"include_devices": []
+							},
+							{
+								"id": "system/metrics-system.entropy",
+								"enabled": true,
+								"dataset.name": "system.entropy",
+								"period": "10s",
+								"include_devices": []
+							},
+							{
+								"id": "system/metrics-system.filesystem",
+								"enabled": true,
+								"dataset.name": "system.filesystem",
+								"period": "1m",
+								"ignore_types": []
+							},
+							{
+								"id": "system/metrics-system.fsstat",
+								"enabled": true,
+								"dataset.name": "system.fsstat",
+								"period": "1m",
+								"ignore_types": []
+							},
+							{
+								"id": "system/metrics-system.load",
+								"enabled": true,
+								"dataset.name": "system.load",
+								"period": "10s"
+							},
+							{
+								"id": "system/metrics-system.memory",
+								"enabled": true,
+								"dataset.name": "system.memory",
+								"period": "10s"
+							},
+							{
+								"id": "system/metrics-system.network",
+								"enabled": true,
+								"dataset.name": "system.network",
+								"period": "10s"
+							},
+							{
+								"id": "system/metrics-system.network_summary",
+								"enabled": true,
+								"dataset.name": "system.network_summary",
+								"period": "10s"
+							},
+							{
+								"id": "system/metrics-system.process",
+								"enabled": true,
+								"dataset.name": "system.process",
+								"period": "10s",
+								"processes": [
+									".*"
+								],
+								"include_top_n.enabled": true,
+								"include_top_n.by_cpu": 5,
+								"include_top_n.by_memory": 5,
+								"cmdline.cache.enabled": true,
+								"cgroups.enabled": true,
+								"env.whitelist": [],
+								"include_cpu_ticks": false
+							},
+							{
+								"id": "system/metrics-system.process_summary",
+								"enabled": true,
+								"dataset.name": "system.process_summary",
+								"period": "10s"
+							},
+							{
+								"id": "system/metrics-system.raid",
+								"enabled": true,
+								"dataset.name": "system.raid",
+								"period": "10s",
+								"mount_point": "/"
+							},
+							{
+								"id": "system/metrics-system.service",
+								"enabled": true,
+								"dataset.name": "system.service",
+								"period": "10s",
+								"state_filter": []
+							},
+							{
+								"id": "system/metrics-system.socket_summary",
+								"enabled": true,
+								"dataset.name": "system.socket_summary",
+								"period": "10s"
+							},
+							{
+								"id": "system/metrics-system.uptime",
+								"enabled": true,
+								"dataset.name": "system.uptime",
+								"period": "15m"
+							},
+							{
+								"id": "system/metrics-system.users",
+								"enabled": true,
+								"dataset.name": "system.users",
+								"period": "10s"
+							}
+						]
+					},
+					{
+						"type": "logs",
+						"enabled": true,
+						"streams": [{
+								"id": "logs-system.auth",
+								"enabled": true,
+								"dataset.name": "system.auth",
+								"paths": [
+									"/var/log/auth.log*",
+									"/var/log/secure*"
+								]
+							},
+							{
+								"id": "logs-system.syslog",
+								"enabled": true,
+								"dataset.name": "system.syslog",
+								"paths": [
+									"/var/log/messages*",
+									"/var/log/syslog*"
+								]
+							}
+						]
+					}
+				],
+
+				"revision": 3,
+				"settings.monitoring": {
+					"use_output": "default",
+					"enabled": true,
+					"logs": true,
+					"metrics": true
+				}
+			}
+		},
+		"id": "1c7e26a0-7f42-11ea-9330-71e968b29fa4",
+		"created_at": "2020-04-15T17:54:11.081Z"
+	}]
+}
 	`
diff --git a/x-pack/elastic-agent/pkg/agent/program/program.go b/x-pack/elastic-agent/pkg/agent/program/program.go
index f9fd50983d74..17f796aa75f3 100644
--- a/x-pack/elastic-agent/pkg/agent/program/program.go
+++ b/x-pack/elastic-agent/pkg/agent/program/program.go
@@ -115,7 +115,7 @@ func groupByOutputs(single *transpiler.AST) (map[string]*transpiler.AST, error)
 	const (
 		outputsKey = "outputs"
 		outputKey  = "output"
-		streamsKey = "datasources"
+		inputsKey  = "inputs"
 		typeKey    = "type"
 	)
 
@@ -168,12 +168,12 @@ func groupByOutputs(single *transpiler.AST) (map[string]*transpiler.AST, error)
 		clone := cloneMap(normMap)
 		delete(clone, outputsKey)
 		clone[outputKey] = map[string]interface{}{n: v}
-		clone[streamsKey] = make([]map[string]interface{}, 0)
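+		// each output group starts out with an empty inputs list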
+		clone[inputsKey] = make([]map[string]interface{}, 0)
 
 		grouped[k] = clone
 	}
 
-	s, ok := normMap[streamsKey]
+	s, ok := normMap[inputsKey]
 	if !ok {
 		s = make([]interface{}, 0)
 	}
@@ -199,17 +199,17 @@ func groupByOutputs(single *transpiler.AST) (map[string]*transpiler.AST, error)
 			return nil, fmt.Errorf("unknown configuration output with name %s", targetName)
 		}
 
-		streams := config[streamsKey].([]map[string]interface{})
+		streams := config[inputsKey].([]map[string]interface{})
 		streams = append(streams, stream)
 
-		config[streamsKey] = streams
+		config[inputsKey] = streams
 		grouped[targetName] = config
 	}
 
 	transpiled := make(map[string]*transpiler.AST)
 
 	for name, group := range grouped {
-		if len(group[streamsKey].([]map[string]interface{})) == 0 {
+		if len(group[inputsKey].([]map[string]interface{})) == 0 {
 			continue
 		}
 
diff --git a/x-pack/elastic-agent/pkg/agent/program/program_test.go b/x-pack/elastic-agent/pkg/agent/program/program_test.go
index d1b5c0917183..b9171b43d6a6 100644
--- a/x-pack/elastic-agent/pkg/agent/program/program_test.go
+++ b/x-pack/elastic-agent/pkg/agent/program/program_test.go
@@ -41,25 +41,19 @@ func TestGroupBy(t *testing.T) {
 				},
 			},
 
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":       "log",
 					"use_output": "special",
+					"streams":    map[string]interface{}{"paths": "/var/log/hello.log"},
 				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type": "system/metrics",
-					},
+				{
+					"type":       "system/metrics",
 					"use_output": "special",
 				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
-					},
+				{
+					"type":       "log",
+					"streams":    map[string]interface{}{"paths": "/var/log/infosec.log"},
 					"use_output": "infosec1",
 				},
 			},
@@ -80,18 +74,14 @@ func TestGroupBy(t *testing.T) {
 					"password": "mypassword",
 				},
 			},
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":       "log",
+					"streams":    map[string]interface{}{"paths": "/var/log/hello.log"},
 					"use_output": "special",
 				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type": "system/metrics",
-					},
+				{
+					"type":       "system/metrics",
 					"use_output": "special",
 				},
 			},
@@ -105,12 +95,10 @@ func TestGroupBy(t *testing.T) {
 					"password": "anotherpassword",
 				},
 			},
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":       "log",
+					"streams":    map[string]interface{}{"paths": "/var/log/infosec.log"},
 					"use_output": "infosec1",
 				},
 			},
@@ -148,25 +136,19 @@ func TestGroupBy(t *testing.T) {
 				},
 			},
 
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":       "log",
+					"streams":    map[string]interface{}{"paths": "/var/log/hello.log"},
 					"use_output": "special",
 				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type": "system/metrics",
-					},
+				{
+					"type":       "system/metrics",
 					"use_output": "special",
 				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
-					},
+				{
+					"type":       "log",
+					"streams":    map[string]interface{}{"paths": "/var/log/infosec.log"},
 					"use_output": "donotexist",
 				},
 			},
@@ -195,23 +177,18 @@ func TestGroupBy(t *testing.T) {
 					"password": "anotherpassword",
 				},
 			},
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
-					},
-				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type": "system/metrics",
-					},
-				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":    "log",
+					"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
+				},
+
+				{
+					"type": "system/metrics",
+				},
+				{
+					"type":    "log",
+					"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
 				},
 			},
 		}
@@ -231,23 +208,19 @@ func TestGroupBy(t *testing.T) {
 					"password": "mypassword",
 				},
 			},
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
-					},
-				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type": "system/metrics",
-					},
-				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":    "log",
+					"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
+				},
+
+				{
+					"type": "system/metrics",
+				},
+
+				{
+					"type":    "log",
+					"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
 				},
 			},
 		})
@@ -277,23 +250,19 @@ func TestGroupBy(t *testing.T) {
 					"password": "anotherpassword",
 				},
 			},
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
-					},
-				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type": "system/metrics",
-					},
-				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":    "log",
+					"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
+				},
+
+				{
+					"type": "system/metrics",
+				},
+
+				{
+					"type":       "log",
+					"streams":    map[string]interface{}{"paths": "/var/log/infosec.log"},
 					"use_output": "infosec1",
 				},
 			},
@@ -314,17 +283,14 @@ func TestGroupBy(t *testing.T) {
 					"password": "mypassword",
 				},
 			},
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
-					},
-				},
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type": "system/metrics",
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":    "log",
+					"streams": map[string]interface{}{"paths": "/var/log/hello.log"},
+				},
+
+				{
+					"type": "system/metrics",
 				},
 			},
 		})
@@ -337,12 +303,10 @@ func TestGroupBy(t *testing.T) {
 					"password": "anotherpassword",
 				},
 			},
-			"datasources": []map[string]interface{}{
-				map[string]interface{}{
-					"inputs": map[string]interface{}{
-						"type":    "log",
-						"streams": map[string]interface{}{"paths": "/var/log/infosec.log"},
-					},
+			"inputs": []map[string]interface{}{
+				{
+					"type":       "log",
+					"streams":    map[string]interface{}{"paths": "/var/log/infosec.log"},
 					"use_output": "infosec1",
 				},
 			},
diff --git a/x-pack/elastic-agent/pkg/agent/program/supported.go b/x-pack/elastic-agent/pkg/agent/program/supported.go
index 40db706c87ad..9789a6c0f33d 100644
--- a/x-pack/elastic-agent/pkg/agent/program/supported.go
+++ b/x-pack/elastic-agent/pkg/agent/program/supported.go
@@ -19,7 +19,7 @@ func init() {
 	// Packed Files
 	// spec/filebeat.yml
 	// spec/metricbeat.yml
-	unpacked := packer.MustUnpack("eJzsV0tzqzrWnX8/I+Ov+oKI001XnYEhl5cdcowTSWiGJAewJewKGAxd/d+7BNjGzjm3b78mXT1IkcLSfq691+IvD+Vhw375yMWGbpLqD60UD39+oNKpyNs+jaQoCQpEjFeLGOi719ySCTqJWMKMzw8Zk7x7zS3q57rj503qF6HgHmyWUpR0PRNUOjl14e47Ihn1QtGfuT9bRIJiq4xxJJYSHmMUlAStTCKdkoH3fGnP8+X78KTIOcaIC4rgkduzioJIfMdpxVxnm7S6pC4U3PZL3/araK2eQRWjWUYArAiaaVP73At0sr45W1LAiwTNiqU8CS5h+R1FIi5g4QttwQpYEvzyZOdaSqR4xEaoMQkz+rZPN4a2WK6tA5UHERvRR4JmO4LTJzufp75taRtsidfcUvY7O91XviuOiYRb7pgd9wIRI/2DeUEdA9gxYLav6T717XlKwewjBuaRyNMhNlZP6h4DsOWOmZEiEmw8xz3RqH5R1yxYs1exiI0XtTEKNQxOB6ZiwqrOL+eYmhhH+9fc2sU4yhgwdSZDwZrBXn/2bZ8maNZwHHXnPGh3fceMqCXIqUZ/VYzn97YPtLB07oXnHM92Wo5OgnX7hfLl21oaA7PZQDOj7umDu+YHdUXHn8+/X31PfKaX31zziEFYU0nKBIXaa24dKTCbwdfwR3C2JdjSeqwWY9/wS4+pBK36J0GzLJYnQXr8BA2T5pbgsKNG0Kn+38WqUd0sExxq514NsQiNIO3J96yWglAwI6xZ8fLP5nGgRShYEX3EEkpqBMJO/6He/qz+B+7C6jW37t/f9HfAqZ6xcx88K+NuOmDQg9o59utc9HlUfh/brKOuo5G3mx6e4+rnYdofipyGubC9qaV6r/DcDbienvdtS83nkdtmv4+G2WKL6V3fhaCfeSPUiCuOfb5udKB3Pggmgharmrtho2KLjflP7MAde96nHEXNXSzKd01cc5sA2KqZoiD8JNi/s3OqSWu2BEUHppsddU1D5fWaW8O75mvuSyOcMXCqSY8Z0fV1GDCg8tYIDj64dEqOYHeuHQOwJCjUqOF/6VUC4Oy2/haI0Um/6Z+Kczy/XN/WbLm2dOLNL7Ogana9F9UxqC47abm+zUvZ2uBw0ictZd3L5X8qoUbkqebX83vuRU1ShPXEf/3SxXqM4S7Bq+tM4oPOJBww4WZ6fD2/pa6pE5fUHM12rNhd7hBg1gScBPNgzgyYX/OoMiKrbMA/OVAvEkyYd3WyOqJ40Ihqtv2yJz8J3j357oVrxvlRHAhv9l7PQau+n1tqWLNzD0kR1D/bv2TCySMXlXwyO9f+BxdMjHgZeftaS46jhk/qqPDBVE7yfcFBJuh2n65dp1sP+2Uf4IpO/B/P8WGsHXw7lv6vWca0WUYR7BQnk3VaUANqCoNB26QBgGWMQy1BYUeQ08YgLZb2vGBS9fOlWPax8c8Ykc94zUrf5j1nctfpEpsd7PTbt4f/HySL3FSfOfuBaHlDUGNSbEeRsqVIEacuuBccYjCKGRz0iwW1F/LvCI50Zs8O1NWOFzJ41mWMTt2dUDif1QjSm37R/T2xI/WMSqcgSFfL5kiRuSNv+uMSW1kMyoooX3j1W2Lnah9HLUd3wsg1CwLEkbSzsl9oz/qOoEAnbcDtwqrZIEpUIUs6LHS13Gsu4Uff0MmiZq6jJc97FXNNvH65H4ltquFV5HbcIL30L0vU0lQtCV49KbFFQaRTF3ZLuarVwlJEsyxERe3ZLsHhmWD/HWKqIjhqFZD+o4JqxNL/RNV/taiqfBc+ci/IFN76OjlmL5Iu9bwXP8Yl38VtjHfvvai+9uzlZmFjYyQE3TQSHO2xEQgC4ONFcLmV2LztByyuxuUu35985/G4aM0L/oP5bxL/vyoWLnvnp4LB6/dRn5sSchfb4x65w/sXnKq60yLsPwCxwQ/czT6YhAXBWXODg3MPvGjG3Pdp/1s1qzhPf3l/PvX74Xv++LlYf63RYEf5SJ98O5oK0cr3AjFyyNS2ZNKsfiRaufsnxSvDjlxdSLWKUSUwcFomndkPcXzmIiMU/CauHiuXmMmlZmNsyAQEmr24mMYx9mly70roTMKKGkRg0GNpmtePRcrvuhNlipDJKED62T2fc0lLgfYFS2OtbjhY3WX9B7ku7oUPnXK44uRxfqZ1OnPNjVDbnuc1FNSFW+6a7RRrN3Z/pxCyC74n6PFpEDeqh3q3sOEfEbjJp+f0Htu6bi7Wcxnklh/j8FXlxwzF8+/7AJg69yyd2724EdR1Ou6KLQMwYzLcB22jYrgKInteJMCRCfi1/7//IDIUt6TFYrX/9vDX//tbAAAA///srwFQ")
+	unpacked := packer.MustUnpack("eJzsmM+To7gVx+/5M+aaVAJi3bWkag+GXgTYTY/pbknohiQbsCXsNPgHpPK/pwTGxu7Z2ZnJJqccumzT+vH03ve99xH//FTtlvxvq0Iu2TKt/9oo+envn5jyavq6zWIlK4pDmZDFLAHm5rlwVIpPMlEoF9NdzpVonwuHBYXpBcUxC8pICh8d50pW7GUimfIKBtHmM6Y58yPZjbkfW8aSEadKSCznCu0THFYUL2yqvIqDt2LuTov5W//JsLdPsJAMo71wJzUDsfxMsppDb502pmIQSeEGVeAGdfyiP8M6wZOcAlRTPDHG6ws/NOnLzdiKAVGmeFLO1UkKharPOJZJicpAGjNeooqSpwe3MDKq5E/EigyuUM5et9nSMmbzF2fH1E4mVrxK8WRDSfbgFtMscB1jSRz5XDh6/dbNtnUA5T5VaC08uxV+KBNsrrgfHhKAWg7s5jnbZoE7zRiYrBJg76k67RJr8aDncYAa4dk5LWPJz+OEL486XgzaJT9utS11QqbD/seExNvnwtmx0jGF/zTTcwI/NBlE7bDX/MVZM8uZEOBVzLMNZtpVSiJj+H8/Jz48F85gc9uvP/0NWycHZk1nl7muI5cwktxfPAReVXArbij2at50vt/RLm7hRTO9RpyGgUhyKzrw8qkgiztbrfhAwGnHrcXD2JaUxFLHZexTfvzRc1x8XlBMD1y9zbQGeKclUw7+oWV40HumeHIUJG6H2LP2+uxy5j5GcunHTYIjoz9DvOrs7jT2pbhFd/Y6OwFR/fEct3v2ejNz/rg9x93JBcz6c/nIGPxy1XfnhzrobJu0DHoGfd2O4zjY1en6xnfYO3KImhvN6Odal22vz/F4rQmhdC7bXV3pc4TPxnMDiECXu1ZkUCj33XlhvGN3e1BCJSsXBwGjo7YtudHeeB204Y/bTOD4eGeL3vtAob1OAWqeC2fDQPROSXC3zulAG7uhON5x024ZtC19rufC6Z8dP559bkUTDk4H2m4zbsl25Ic68EPJsA0ost8p2dzqGKDJc+GYHKAVs5Ch4/Ud8zYJiXMOfmzOXE1yhlGrayu95l1DsXkQCq268eMYwFouX7cZhZ6RkEgKd9LlR68/umN+LLm0QYJP5lVnTkt1P7HiA19/yJ3OxsCPJxy+nbWr+wiSvB30aGS3vuw1cv2NNnx6sX2kCSMb4jz2DYWyHa3Vx+x4md8+Xb/vKYmVjv1lPMwN4Tv/4MC+7g9/bqM1ygWOd+JqR74sUUNxr8fUR8V1fFQxC22osk2m4mZ59XvJgF0yiI4Ch63AweUcKfBUCn6dnftSJSBqiHXnJz+SDKK1gHbzoQ8MccOTPFEnSc92CuVVAt/0B4OX6BzP6MD9QT+O4squP/aXrsaOeKGrefq3QUnwMf7lqPZovUBUUOxdtQttU/iOOfLjOyWy1Weii52VQqRrQ5CQKNR1kTeZXJr1dX93MtRRsXrZZGGRZC/Qa18165DomOBIzlxxECQ+CrIo5+60pPiUcyveJVYkExKuU5dXgSv6PG94pW0LQZ1TVedhc8xCrRcr0vrfhs1m9ukvPV6pZf1e8C8A1itGBldyfQaqNcO64ZhS+OEuAWfwImFXPHFzAZWWktjk+kDQ2F8C92iqBJ/aO6gZxhoUm8eumP8emCkzZ8orKTZ1Qd0zbG/oq/nTnDh5Aqqa6r3I4mtgdl2fxI3AdxAH7ZICuafNpOqK9qO5oTg0aRMKt3QOvAeoVkCvYn3T0g2sKzpa4ONmxKFnpI9bbfOB+l0D21PXbpckMig29ktsVsGlSDmG9iUliwcNhgzEHQDN1eKgi7IWzbyUNXMnm5REvU/d4I8Av5qSuElx9N+Fv7OWznDRaF0w/PYtSfoRMH4PGgYfkP/D5B8Bk5TkBlee1kTnJwKkzteLPwedfwEkb228ey7gz2PQGxXgcDUUeGKJnYD5iitUUpIfhz3PQDCCPKfROiPFT+9zcK5R1tPmq438P23+8HJh+00A6C5yix++9Gi/7zk45QKilQDSSD0Nd0Iu/emNDoYYdPm2GMc/1Dm+nD3ai889rPx5XlS7jz7q/7o9HrdZOAYb9+OFom/OtGHAuIUg9wr5ujYLfJLXJmnmKUCrhIRNcg94Z41c6sQVDEdaGWzWl/OnsW01s6gkoIOFsR3nOI3mTb8DFu+g47sA80fmDFD6PwRiQSJJwAVeZkO9+ZaLI7vhg6/W9aHHdNA0V7eXu3Nshx55A4uXGgNQRXFkMCsc58fNut8GY9uZALlk620HWC/9pXYbkprdnKd/gdPlIyHGLnATFfya59wY+fklK4eLTw9YqEpIZOheSrHXJCAr550NVygLXPGeYPqevHTfu5cuAnpt6vKdm/3yy6d//enfAQAA///5GGLX")
 	SupportedMap = make(map[string]bool)
 
 	for f, v := range unpacked {
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config-filebeat.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config-filebeat.yml
index d4a38b09c6b7..c0e520115ecf 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config-filebeat.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config-filebeat.yml
@@ -4,7 +4,6 @@ filebeat:
     paths:
       - /var/log/hello1.log
       - /var/log/hello2.log
-    dataset: generic
     index: logs-generic-default
     processors:
       - add_fields:
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config.yml
index 8699e1984388..b445fb80e160 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/constraints_config.yml
@@ -15,23 +15,19 @@ outputs:
     hosts: ["monitoring:9200"]
     ca_sha256: "7lHLiyp4J8m9kw38SJ7SURJP4bXRZv/BNxyyXkCcE/M="
 
-datasources:
-  - use_output: default
-    inputs:
-    - type: logs
-      streams:
-      - paths:
-        - /var/log/hello1.log
-        - /var/log/hello2.log
-  - namespace: testing
-    use_output: default
+inputs:
+  - type: logs
+    streams:
+    - paths:
+      - /var/log/hello1.log
+      - /var/log/hello2.log
+  - type: apache/metrics
     constraints:
       - "validate_version(%{[agent.version]}, '1.0.0 - 7.0.0')"
-    inputs:
-      - type: apache/metrics
-        streams:
-          - enabled: true
-            metricset: info
+    dataset.namespace: testing
+    streams:
+      - enabled: true
+        metricset: info
 
 settings.monitoring:
   use_output: monitoring
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_false.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_false.yml
index c7c11a9017de..34b7388e1e18 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_false.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_false.yml
@@ -1,12 +1,10 @@
-datasources:
-  - use_output: default
-    inputs:
-      - type: event/file
-        streams:
-          - enabled: false
-            paths:
-              - var/log/hello1.log
-              - var/log/hello2.log
+inputs:
+  - type: event/file
+    streams:
+      - enabled: false
+        paths:
+          - var/log/hello1.log
+          - var/log/hello2.log
 management:
   host: "localhost"
 config:
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_false.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_false.yml
index 1bb3a9896f95..f0b57a018970 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_false.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_false.yml
@@ -1,11 +1,9 @@
-datasources:
-  - use_output: default
-    inputs:
-      - type: event/file
-        streams:
-          - paths:
-              - /var/log/hello1.log
-              - /var/log/hello2.log
+inputs:
+  - type: event/file
+    streams:
+      - paths:
+          - /var/log/hello1.log
+          - /var/log/hello2.log
 management:
   host: "localhost"
 config:
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_true-filebeat.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_true-filebeat.yml
index bc79c306334d..0b8bcd471323 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_true-filebeat.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_true-filebeat.yml
@@ -4,7 +4,6 @@ filebeat:
     paths:
       - /var/log/hello1.log
       - /var/log/hello2.log
-    dataset: generic
     index: logs-generic-default
     processors:
       - add_fields:
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_true.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_true.yml
index a89d2cf0b7b6..9601388c536d 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_true.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_output_true.yml
@@ -1,11 +1,9 @@
-datasources:
-  - use_output: default
-    inputs:
-      - type: event/file
-        streams:
-          - paths:
-              - /var/log/hello1.log
-              - /var/log/hello2.log
+inputs:
+  - type: event/file
+    streams:
+      - paths:
+          - /var/log/hello1.log
+          - /var/log/hello2.log
 management:
   host: "localhost"
 config:
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_true-filebeat.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_true-filebeat.yml
index 9ad59d63f6fd..d655f5dd5e1d 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_true-filebeat.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_true-filebeat.yml
@@ -5,7 +5,6 @@ filebeat:
     paths:
       - /var/log/hello1.log
       - /var/log/hello2.log
-    dataset: generic
     index: logs-generic-default
     processors:
       - add_fields:
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_true.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_true.yml
index 91caed69c762..6afc7f37ab15 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_true.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/enabled_true.yml
@@ -3,15 +3,13 @@ fleet:
     kibana_url:		https://kibana.mydomain.com:5601
     ca_hash:		7HIpactkIAq2Y49orFOOQKurWxmmSFZhBCoQYcRhJ3Y=
     checkin_interval:	5m
-datasources:
-  - use_output: default
-    inputs:
-      - type: event/file
-        streams:
-          - enabled: true
-            paths:
-              - /var/log/hello1.log
-              - /var/log/hello2.log
+inputs:
+  - type: event/file
+    streams:
+      - enabled: true
+        paths:
+          - /var/log/hello1.log
+          - /var/log/hello2.log
 management:
   host: "localhost"
 config:
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/single_config-filebeat.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/single_config-filebeat.yml
index 59346260ca2b..7d60c10fb023 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/single_config-filebeat.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/single_config-filebeat.yml
@@ -5,7 +5,6 @@ filebeat:
       - /var/log/hello1.log
       - /var/log/hello2.log
     index: logs-generic-default
-    dataset: generic
     vars:
       var: value
     processors:
@@ -21,6 +20,26 @@ filebeat:
             type: logs
             dataset: generic
             namespace: default
+  - type: log
+    paths:
+      - /var/log/hello3.log
+      - /var/log/hello4.log
+    index: testtype-generic-default
+    vars:
+      var: value
+    processors:
+      - add_fields:
+          target: "dataset"
+          fields:
+            type: testtype
+            name: generic
+            namespace: default
+      - add_fields:
+          target: "stream"
+          fields:
+            type: testtype
+            dataset: generic
+            namespace: default
 output:
   elasticsearch:
     hosts:
diff --git a/x-pack/elastic-agent/pkg/agent/program/testdata/single_config.yml b/x-pack/elastic-agent/pkg/agent/program/testdata/single_config.yml
index d84687cf98ba..95d78138aa2f 100644
--- a/x-pack/elastic-agent/pkg/agent/program/testdata/single_config.yml
+++ b/x-pack/elastic-agent/pkg/agent/program/testdata/single_config.yml
@@ -19,37 +19,49 @@ outputs:
     hosts: ["monitoring:9200"]
     ca_sha256: "7lHLiyp4J8m9kw38SJ7SURJP4bXRZv/BNxyyXkCcE/M="
 
-datasources:
-  - use_output: default
-    inputs:
-    - type: docker/metrics
-      streams:
-        - metricset: status
-          dataset: docker.status
-        - metricset: info
-          dataset: ""
-      hosts: ["http://127.0.0.1:8080"]
-    - type: logs
-      streams:
-        - paths:
-          - /var/log/hello1.log
-          - /var/log/hello2.log
-      vars:
-        var: value
-  - namespace: testing
+inputs:
+  - type: docker/metrics
     use_output: default
-    inputs:
-      - type: apache/metrics
-        processors:
-          - add_fields:
-              fields:
-                should_be: first
-        streams:
-          - enabled: true
-            metricset: info
-            hosts: ["http://apache.remote"]
-        hosts: ["http://apache.local"]
-        id: apache-metrics-id
+    streams:
+      - metricset: status
+        dataset:
+          name: docker.status
+      - metricset: info
+        dataset:
+          name: ""
+    hosts: ["http://127.0.0.1:8080"]
+  - type: logs
+    use_output: default
+    streams:
+      - paths:
+        - /var/log/hello1.log
+        - /var/log/hello2.log
+    vars:
+      var: value
+  - type: logs
+    dataset:
+      type: testtype
+    use_output: default
+    streams:
+      - paths:
+        - /var/log/hello3.log
+        - /var/log/hello4.log
+    vars:
+      var: value
+  - type: apache/metrics
+    dataset:
+      namespace: testing
+    use_output: default
+    processors:
+      - add_fields:
+          fields:
+            should_be: first
+    streams:
+      - enabled: true
+        metricset: info
+        hosts: ["http://apache.remote"]
+    hosts: ["http://apache.local"]
+    id: apache-metrics-id
 
 settings.monitoring:
   use_output: monitoring
diff --git a/x-pack/elastic-agent/pkg/agent/transpiler/merge_strategy.go b/x-pack/elastic-agent/pkg/agent/transpiler/merge_strategy.go
index da96b5c11d27..a20e44936d21 100644
--- a/x-pack/elastic-agent/pkg/agent/transpiler/merge_strategy.go
+++ b/x-pack/elastic-agent/pkg/agent/transpiler/merge_strategy.go
@@ -4,8 +4,6 @@
 
 package transpiler
 
-import "fmt"
-
 type injector interface {
 	Inject(target []Node, source interface{}) []Node
 	InjectItem(target []Node, source Node) []Node
@@ -82,12 +80,10 @@ func (replaceInjector) InjectCollection(target []Node, source []Node) []Node {
 
 func inject(i injector, target []Node, source interface{}) []Node {
 	if sourceCollection, ok := source.([]Node); ok {
-		fmt.Printf(">>[%T] list of nodes %T %d\n", i, source, len(sourceCollection))
 		return i.InjectCollection(target, sourceCollection)
 	}
 
 	if node, ok := source.(Node); ok {
-		fmt.Printf(">> one of nodes %T\n", source)
 		return i.InjectItem(target, node)
 	}
 
diff --git a/x-pack/elastic-agent/pkg/agent/transpiler/rules.go b/x-pack/elastic-agent/pkg/agent/transpiler/rules.go
index 2e3fb60b6fab..3fbeb396db22 100644
--- a/x-pack/elastic-agent/pkg/agent/transpiler/rules.go
+++ b/x-pack/elastic-agent/pkg/agent/transpiler/rules.go
@@ -355,22 +355,22 @@ type FixStreamRule struct {
 
 // Apply stream fixes.
 func (r *FixStreamRule) Apply(ast *AST) error {
-	const defaultNamespace = "default"
 	const defaultDataset = "generic"
+	const defaultNamespace = "default"
 
-	datasourcesNode, found := Lookup(ast, "datasources")
+	inputsNode, found := Lookup(ast, "inputs")
 	if !found {
 		return nil
 	}
 
-	datasourcesList, ok := datasourcesNode.Value().(*List)
+	inputsList, ok := inputsNode.Value().(*List)
 	if !ok {
 		return nil
 	}
 
-	for _, datasourceNode := range datasourcesList.value {
-		nsNode, found := datasourceNode.Find("namespace")
-		if found {
+	for _, inputNode := range inputsList.value {
+		// compact form: default an empty "dataset.namespace" value
+		if nsNode, found := inputNode.Find("dataset.namespace"); found {
 			nsKey, ok := nsNode.(*Key)
 			if ok {
 				if newNamespace := nsKey.value.String(); newNamespace == "" {
@@ -378,55 +378,90 @@ func (r *FixStreamRule) Apply(ast *AST) error {
 				}
 			}
 		} else {
-			datasourceMap, ok := datasourceNode.(*Dict)
-			if !ok {
-				continue
+			dsNode, found := inputNode.Find("dataset")
+			if found {
+				// expanded form: a "dataset" map that may carry a namespace
+				datasetMap, ok := dsNode.Value().(*Dict)
+				if ok {
+					nsNode, found := datasetMap.Find("namespace")
+					if found {
+						nsKey, ok := nsNode.(*Key)
+						if ok {
+							if newNamespace := nsKey.value.String(); newNamespace == "" {
+								nsKey.value = &StrVal{value: defaultNamespace}
+							}
+						}
+					} else {
+						inputMap, ok := inputNode.(*Dict)
+						if ok {
+							inputMap.value = append(inputMap.value, &Key{
+								name:  "dataset.namespace",
+								value: &StrVal{value: defaultNamespace},
+							})
+						}
+					}
+				}
+			} else {
+				inputMap, ok := inputNode.(*Dict)
+				if ok {
+					inputMap.value = append(inputMap.value, &Key{
+						name:  "dataset.namespace",
+						value: &StrVal{value: defaultNamespace},
+					})
+				}
 			}
-			datasourceMap.value = append(datasourceMap.value, &Key{
-				name:  "namespace",
-				value: &StrVal{value: defaultNamespace},
-			})
 		}
 
-		// get input
-		inputNode, found := datasourceNode.Find("inputs")
-		if !found {
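+		// walk the input's streams and default any missing or empty dataset.name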
+		streamsNode, ok := inputNode.Find("streams")
+		if !ok {
 			continue
 		}
 
-		inputsList, ok := inputNode.Value().(*List)
+		streamsList, ok := streamsNode.Value().(*List)
 		if !ok {
 			continue
 		}
 
-		for _, inputNode := range inputsList.value {
-			streamsNode, ok := inputNode.Find("streams")
-			if !ok {
-				continue
-			}
-
-			streamsList, ok := streamsNode.Value().(*List)
+		for _, streamNode := range streamsList.value {
+			streamMap, ok := streamNode.(*Dict)
 			if !ok {
 				continue
 			}
 
-			for _, streamNode := range streamsList.value {
-				streamMap, ok := streamNode.(*Dict)
-				if !ok {
-					continue
+			// compact form: default an empty "dataset.name" value
+			if dsNameNode, found := streamMap.Find("dataset.name"); found {
+				dsKey, ok := dsNameNode.(*Key)
+				if ok {
+					if newDataset := dsKey.value.String(); newDataset == "" {
+						dsKey.value = &StrVal{value: defaultDataset}
+					}
 				}
+			} else {
-				dsNode, found := streamNode.Find("dataset")
+				datasetNode, found := streamMap.Find("dataset")
 				if found {
-					dsKey, ok := dsNode.(*Key)
-					if ok {
-						if newDataset := dsKey.value.String(); newDataset == "" {
-							dsKey.value = &StrVal{value: defaultDataset}
+					datasetMap, ok := datasetNode.Value().(*Dict)
+					if !ok {
+						continue
+					}
+
+					dsNameNode, found := datasetMap.Find("name")
+					if found {
+						dsKey, ok := dsNameNode.(*Key)
+						if ok {
+							if newDataset := dsKey.value.String(); newDataset == "" {
+								dsKey.value = &StrVal{value: defaultDataset}
+							}
 						}
+					} else {
+						streamMap.value = append(streamMap.value, &Key{
+							name:  "dataset.name",
+							value: &StrVal{value: defaultDataset},
+						})
 					}
 				} else {
 					streamMap.value = append(streamMap.value, &Key{
-						name:  "dataset",
+						name:  "dataset.name",
 						value: &StrVal{value: defaultDataset},
 					})
 				}
@@ -453,77 +488,41 @@ type InjectIndexRule struct {
 
 // Apply injects index into input.
 func (r *InjectIndexRule) Apply(ast *AST) error {
-	const defaultNamespace = "default"
-	const defaultDataset = "generic"
-
-	datasourcesNode, found := Lookup(ast, "datasources")
+	inputsNode, found := Lookup(ast, "inputs")
 	if !found {
 		return nil
 	}
 
-	datasourcesList, ok := datasourcesNode.Value().(*List)
+	inputsList, ok := inputsNode.Value().(*List)
 	if !ok {
 		return nil
 	}
 
-	for _, datasourceNode := range datasourcesList.value {
-		namespace := defaultNamespace
-		nsNode, found := datasourceNode.Find("namespace")
-		if found {
-			nsKey, ok := nsNode.(*Key)
-			if ok {
-				if newNamespace := nsKey.value.String(); newNamespace != "" {
-					namespace = newNamespace
-				}
-			}
-		}
+	for _, inputNode := range inputsList.value {
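+		// resolve the namespace and dataset type once per input; the index of
+		// every stream below is built from them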
+		namespace := datasetNamespaceFromInputNode(inputNode)
+		datasetType := datasetTypeFromInputNode(inputNode, r.Type)
 
-		// get input
-		inputNode, found := datasourceNode.Find("inputs")
-		if !found {
+		streamsNode, ok := inputNode.Find("streams")
+		if !ok {
 			continue
 		}
 
-		inputsList, ok := inputNode.Value().(*List)
+		streamsList, ok := streamsNode.Value().(*List)
 		if !ok {
 			continue
 		}
 
-		for _, inputNode := range inputsList.value {
-			streamsNode, ok := inputNode.Find("streams")
+		for _, streamNode := range streamsList.value {
+			streamMap, ok := streamNode.(*Dict)
 			if !ok {
 				continue
 			}
 
-			streamsList, ok := streamsNode.Value().(*List)
-			if !ok {
-				continue
-			}
-
-			for _, streamNode := range streamsList.value {
-				streamMap, ok := streamNode.(*Dict)
-				if !ok {
-					continue
-				}
-
-				dataset := defaultDataset
-
-				dsNode, found := streamNode.Find("dataset")
-				if found {
-					dsKey, ok := dsNode.(*Key)
-					if ok {
-						if newDataset := dsKey.value.String(); newDataset != "" {
-							dataset = newDataset
-						}
-					}
-
-				}
-
-				streamMap.value = append(streamMap.value, &Key{
-					name:  "index",
-					value: &StrVal{value: fmt.Sprintf("%s-%s-%s", r.Type, dataset, namespace)},
-				})
-			}
+			dataset := datasetNameFromStreamNode(streamNode)
+			streamMap.value = append(streamMap.value, &Key{
+				name:  "index",
+				value: &StrVal{value: fmt.Sprintf("%s-%s-%s", datasetType, dataset, namespace)},
+			})
 		}
 	}
 
@@ -546,112 +545,78 @@ type InjectStreamProcessorRule struct {
 
 // Apply injects processor into input.
 func (r *InjectStreamProcessorRule) Apply(ast *AST) error {
-	const defaultNamespace = "default"
-	const defaultDataset = "generic"
-
-	datasourcesNode, found := Lookup(ast, "datasources")
+	inputsNode, found := Lookup(ast, "inputs")
 	if !found {
 		return nil
 	}
 
-	datasourcesList, ok := datasourcesNode.Value().(*List)
+	inputsList, ok := inputsNode.Value().(*List)
 	if !ok {
 		return nil
 	}
 
-	for _, datasourceNode := range datasourcesList.value {
-		namespace := defaultNamespace
-		nsNode, found := datasourceNode.Find("namespace")
-		if found {
-			nsKey, ok := nsNode.(*Key)
-			if ok {
-				if newNamespace := nsKey.value.String(); newNamespace != "" {
-					namespace = newNamespace
-				}
-			}
-		}
+	for _, inputNode := range inputsList.value {
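+		// resolve the namespace and dataset type once per input; they are
+		// stamped into each stream's add_fields processors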
+		namespace := datasetNamespaceFromInputNode(inputNode)
+		datasetType := datasetTypeFromInputNode(inputNode, r.Type)
 
-		// get input
-		inputNode, found := datasourceNode.Find("inputs")
-		if !found {
+		streamsNode, ok := inputNode.Find("streams")
+		if !ok {
 			continue
 		}
 
-		inputsList, ok := inputNode.Value().(*List)
+		streamsList, ok := streamsNode.Value().(*List)
 		if !ok {
 			continue
 		}
 
-		for _, inputNode := range inputsList.value {
-			streamsNode, ok := inputNode.Find("streams")
-			if !ok {
-				continue
-			}
-
-			streamsList, ok := streamsNode.Value().(*List)
+		for _, streamNode := range streamsList.value {
+			streamMap, ok := streamNode.(*Dict)
 			if !ok {
 				continue
 			}
 
-			for _, streamNode := range streamsList.value {
-				streamMap, ok := streamNode.(*Dict)
-				if !ok {
-					continue
-				}
-
-				dataset := defaultDataset
-
-				dsNode, found := streamNode.Find("dataset")
-				if found {
-					dsKey, ok := dsNode.(*Key)
-					if ok {
-						if newDataset := dsKey.value.String(); newDataset != "" {
-							dataset = newDataset
-						}
-					}
-				}
-
-				// get processors node
-				processorsNode, found := streamNode.Find("processors")
-				if !found {
-					processorsNode = &Key{
-						name:  "processors",
-						value: &List{value: make([]Node, 0)},
-					}
+			dataset := datasetNameFromStreamNode(streamNode)
 
-					streamMap.value = append(streamMap.value, processorsNode)
+			// get processors node
+			processorsNode, found := streamNode.Find("processors")
+			if !found {
+				processorsNode = &Key{
+					name:  "processors",
+					value: &List{value: make([]Node, 0)},
 				}
 
-				processorsList, ok := processorsNode.Value().(*List)
-				if !ok {
-					return errors.New("InjectStreamProcessorRule: processors is not a list")
-				}
+				streamMap.value = append(streamMap.value, processorsNode)
+			}
 
-				processorMap := &Dict{value: make([]Node, 0)}
-				processorMap.value = append(processorMap.value, &Key{name: "target", value: &StrVal{value: "dataset"}})
-				processorMap.value = append(processorMap.value, &Key{name: "fields", value: &Dict{value: []Node{
-					&Key{name: "type", value: &StrVal{value: r.Type}},
-					&Key{name: "namespace", value: &StrVal{value: namespace}},
-					&Key{name: "name", value: &StrVal{value: dataset}},
-				}}})
-
-				addFieldsMap := &Dict{value: []Node{&Key{"add_fields", processorMap}}}
-				processorsList.value = mergeStrategy(r.OnConflict).InjectItem(processorsList.value, addFieldsMap)
-
-				// add this for backwards compatibility remove later
-				streamProcessorMap := &Dict{value: make([]Node, 0)}
-				streamProcessorMap.value = append(streamProcessorMap.value, &Key{name: "target", value: &StrVal{value: "stream"}})
-				streamProcessorMap.value = append(streamProcessorMap.value, &Key{name: "fields", value: &Dict{value: []Node{
-					&Key{name: "type", value: &StrVal{value: r.Type}},
-					&Key{name: "namespace", value: &StrVal{value: namespace}},
-					&Key{name: "dataset", value: &StrVal{value: dataset}},
-				}}})
-
-				streamAddFieldsMap := &Dict{value: []Node{&Key{"add_fields", streamProcessorMap}}}
-
-				processorsList.value = mergeStrategy(r.OnConflict).InjectItem(processorsList.value, streamAddFieldsMap)
-				// end of backward compatibility section
+			processorsList, ok := processorsNode.Value().(*List)
+			if !ok {
+				return errors.New("InjectStreamProcessorRule: processors is not a list")
 			}
+
+			processorMap := &Dict{value: make([]Node, 0)}
+			processorMap.value = append(processorMap.value, &Key{name: "target", value: &StrVal{value: "dataset"}})
+			processorMap.value = append(processorMap.value, &Key{name: "fields", value: &Dict{value: []Node{
+				&Key{name: "type", value: &StrVal{value: datasetType}},
+				&Key{name: "namespace", value: &StrVal{value: namespace}},
+				&Key{name: "name", value: &StrVal{value: dataset}},
+			}}})
+
+			addFieldsMap := &Dict{value: []Node{&Key{"add_fields", processorMap}}}
+			processorsList.value = mergeStrategy(r.OnConflict).InjectItem(processorsList.value, addFieldsMap)
+
+			// added for backwards compatibility; remove later
+			streamProcessorMap := &Dict{value: make([]Node, 0)}
+			streamProcessorMap.value = append(streamProcessorMap.value, &Key{name: "target", value: &StrVal{value: "stream"}})
+			streamProcessorMap.value = append(streamProcessorMap.value, &Key{name: "fields", value: &Dict{value: []Node{
+				&Key{name: "type", value: &StrVal{value: datasetType}},
+				&Key{name: "namespace", value: &StrVal{value: namespace}},
+				&Key{name: "dataset", value: &StrVal{value: dataset}},
+			}}})
+
+			streamAddFieldsMap := &Dict{value: []Node{&Key{"add_fields", streamProcessorMap}}}
+
+			processorsList.value = mergeStrategy(r.OnConflict).InjectItem(processorsList.value, streamAddFieldsMap)
+			// end of backwards compatibility section
 		}
 	}
 
@@ -1212,3 +1177,94 @@ func keys(m map[string]interface{}) []string {
 	}
 	return keys
 }
+
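+// datasetNamespaceFromInputNode returns the dataset namespace configured on an
+// input node, preferring the compact "dataset.namespace" key over the expanded
+// "dataset" map, and falling back to "default".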
+func datasetNamespaceFromInputNode(inputNode Node) string {
+	const defaultNamespace = "default"
+
+	if namespaceNode, found := inputNode.Find("dataset.namespace"); found {
+		nsKey, ok := namespaceNode.(*Key)
+		if ok {
+			if newNamespace := nsKey.value.String(); newNamespace != "" {
+				return newNamespace
+			}
+		}
+	}
+
+	dsNode, found := inputNode.Find("dataset")
+	if found {
+		dsMapNode, ok := dsNode.Value().(*Dict)
+		if ok {
+			nsNode, found := dsMapNode.Find("namespace")
+			if found {
+				nsKey, ok := nsNode.(*Key)
+				if ok {
+					if newNamespace := nsKey.value.String(); newNamespace != "" {
+						return newNamespace
+					}
+				}
+			}
+		}
+	}
+
+	return defaultNamespace
+}
+
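+// datasetTypeFromInputNode returns the dataset type configured on an input
+// node, preferring the compact "dataset.type" key over the expanded "dataset"
+// map, and falling back to the provided default type.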
+func datasetTypeFromInputNode(inputNode Node, defaultType string) string {
+	if dsTypeNode, found := inputNode.Find("dataset.type"); found {
+		dsTypeKey, ok := dsTypeNode.(*Key)
+		if ok {
+			if newDatasetType := dsTypeKey.value.String(); newDatasetType != "" {
+				return newDatasetType
+			}
+		}
+	}
+
+	dsNode, found := inputNode.Find("dataset")
+	if found {
+		dsMapNode, ok := dsNode.Value().(*Dict)
+		if ok {
+			typeNode, found := dsMapNode.Find("type")
+			if found {
+				typeKey, ok := typeNode.(*Key)
+				if ok {
+					if newDatasetType := typeKey.value.String(); newDatasetType != "" {
+						return newDatasetType
+					}
+				}
+			}
+		}
+	}
+
+	return defaultType
+}
+
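+// datasetNameFromStreamNode returns the dataset name configured on a stream
+// node, preferring the compact "dataset.name" key over the expanded "dataset"
+// map, and falling back to "generic".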
+func datasetNameFromStreamNode(streamNode Node) string {
+	const defaultDataset = "generic"
+
+	if dsNameNode, found := streamNode.Find("dataset.name"); found {
+		dsNameKey, ok := dsNameNode.(*Key)
+		if ok {
+			if newDatasetName := dsNameKey.value.String(); newDatasetName != "" {
+				return newDatasetName
+			}
+		}
+	}
+
+	dsNode, found := streamNode.Find("dataset")
+	if found {
+		dsMapNode, ok := dsNode.Value().(*Dict)
+		if ok {
+			dsNameNode, found := dsMapNode.Find("name")
+			if found {
+				dsKey, ok := dsNameNode.(*Key)
+				if ok {
+					if newDataset := dsKey.value.String(); newDataset != "" {
+						return newDataset
+					}
+				}
+			}
+		}
+	}
+
+	return defaultDataset
+}
diff --git a/x-pack/elastic-agent/pkg/agent/transpiler/rules_test.go b/x-pack/elastic-agent/pkg/agent/transpiler/rules_test.go
index 6da73a3dd05a..438b8c28efbd 100644
--- a/x-pack/elastic-agent/pkg/agent/transpiler/rules_test.go
+++ b/x-pack/elastic-agent/pkg/agent/transpiler/rules_test.go
@@ -24,76 +24,66 @@ func TestRules(t *testing.T) {
 	}{
 		"fix streams": {
 			givenYAML: `
-datasources:
+inputs:
   - name: All default
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/error.log
+    type: file
+    streams:
+      - paths: /var/log/mysql/error.log
   - name: Specified namespace
-    namespace: nsns
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
+    type: file
+    dataset.namespace: nsns
+    streams:
+      - paths: /var/log/mysql/error.log
   - name: Specified dataset
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: dsds
+    type: file
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: dsds
   - name: All specified
-    namespace: nsns
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: dsds
+    type: file
+    dataset.namespace: nsns
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: dsds
   - name: All specified with empty strings
-    namespace: ""
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: ""
+    type: file
+    dataset.namespace: ""
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: ""
 `,
 			expectedYAML: `
-datasources:
+inputs:
   - name: All default
-    namespace: default
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/error.log
-          dataset: generic
+    type: file
+    dataset.namespace: default
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: generic
   - name: Specified namespace
-    namespace: nsns
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: generic
+    type: file
+    dataset.namespace: nsns
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: generic
   - name: Specified dataset
-    namespace: default
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: dsds
+    type: file
+    dataset.namespace: default
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: dsds
   - name: All specified
-    namespace: nsns
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: dsds
+    type: file
+    dataset.namespace: nsns
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: dsds
   - name: All specified with empty strings
-    namespace: default
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: generic
+    type: file
+    dataset.namespace: default
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: generic
 `,
 			rule: &RuleList{
 				Rules: []Rule{
@@ -104,77 +94,69 @@ datasources:
 
 		"inject index": {
 			givenYAML: `
-datasources:
+inputs:
   - name: All default
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/error.log
+    type: file
+    streams:
+      - paths: /var/log/mysql/error.log
   - name: Specified namespace
-    namespace: nsns
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
+    type: file
+    dataset.namespace: nsns
+    streams:
+      - paths: /var/log/mysql/error.log
+
   - name: Specified dataset
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: dsds
+    type: file
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: dsds
   - name: All specified
-    namespace: nsns
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: dsds
+    type: file
+    dataset.namespace: nsns
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: dsds
   - name: All specified with empty strings
-    namespace: ""
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: ""
+    type: file
+    dataset.namespace: ""
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: ""
 `,
 			expectedYAML: `
-datasources:
+inputs:
   - name: All default
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/error.log
-          index: mytype-generic-default
+    type: file
+    streams:
+      - paths: /var/log/mysql/error.log
+        index: mytype-generic-default
   - name: Specified namespace
-    namespace: nsns
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          index: mytype-generic-nsns
+    type: file
+    dataset.namespace: nsns
+    streams:
+      - paths: /var/log/mysql/error.log
+        index: mytype-generic-nsns
+
   - name: Specified dataset
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: dsds
-          index: mytype-dsds-default
+    type: file
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: dsds
+        index: mytype-dsds-default
   - name: All specified
-    namespace: nsns
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: dsds
-          index: mytype-dsds-nsns
+    type: file
+    dataset.namespace: nsns
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: dsds
+        index: mytype-dsds-nsns
   - name: All specified with empty strings
-    namespace: ""
-    inputs:
-    - type: file
-      streams:
-        - paths: /var/log/mysql/access.log
-          dataset: ""
-          index: mytype-generic-default
+    type: file
+    dataset.namespace: ""
+    streams:
+      - paths: /var/log/mysql/error.log
+        dataset.name: ""
+        index: mytype-generic-default
 `,
 			rule: &RuleList{
 				Rules: []Rule{
diff --git a/x-pack/elastic-agent/spec/filebeat.yml b/x-pack/elastic-agent/spec/filebeat.yml
index ba725993e799..02178ccbb66f 100644
--- a/x-pack/elastic-agent/spec/filebeat.yml
+++ b/x-pack/elastic-agent/spec/filebeat.yml
@@ -10,13 +10,8 @@ rules:
     on_conflict: insert_after
     type: logs
 
-- extract_list_items:
-    path: datasources
-    item: inputs
-    to: inputsstreams
-
 - map:
-    path: inputsstreams
+    path: inputs
     rules:
     - copy_all_to_list:
         to: streams
@@ -27,6 +22,9 @@ rules:
         to: streams
         on_conflict: insert_before
 
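+# stage the inputs under a scratch key consumed by the extract_list_items rule below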
+- rename:
+    from: inputs
+    to: inputsstreams
 
 - extract_list_items:
     path: inputsstreams
@@ -47,7 +45,14 @@ rules:
           log/docker: docker
           log/redis_slowlog: redis
           log/syslog: syslog
-
+    - remove_key:
+        key: use_output
+    - remove_key:
+        key: dataset
+    - remove_key:
+        key: dataset.namespace
+    - remove_key:
+        key: dataset.name
 
 - filter_values:
     selector: inputs
diff --git a/x-pack/elastic-agent/spec/metricbeat.yml b/x-pack/elastic-agent/spec/metricbeat.yml
index b15bd890b48e..14552838b75f 100644
--- a/x-pack/elastic-agent/spec/metricbeat.yml
+++ b/x-pack/elastic-agent/spec/metricbeat.yml
@@ -14,9 +14,8 @@ rules:
     on_conflict: insert_after
     type: metrics
 
-- extract_list_items:
-    path: datasources
-    item: inputs
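+# as in filebeat.yml, stage the inputs under a scratch key for the per-stream extraction below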
+- rename:
+    from: inputs
     to: inputsstreams
 
 - map:
@@ -66,6 +65,12 @@ rules:
         key: enabled
     - remove_key:
         key: dataset
+    - remove_key:
+        key: dataset.name
+    - remove_key:
+        key: dataset.namespace
+    - remove_key:
+        key: use_output
 
 - copy:
     from: inputs