diff --git a/data-prepper-plugins/common/README.md b/data-prepper-plugins/common/README.md
index 86da8ea331..9be29adec5 100644
--- a/data-prepper-plugins/common/README.md
+++ b/data-prepper-plugins/common/README.md
@@ -8,9 +8,33 @@ A prepper plugin to generate new string records with upper or lower case convers
## `file` (source)
-A source plugin to read input data from the specified file path.
-
-- path (String): absolute input data file path
+A source plugin to read input data from the specified file path. The file source creates a new Record for each line of data in the file.
+
+* `path` (String): absolute input data file path. This attribute is required.
+
+* `format` (String): The format of each line of the file. Valid options are `json` or `plain`. Default is `plain`.
+
+ * `plain`: Reads plaintext data from files. Internally, a plain text line from a file will be given a key of `message` as shown below.
+ ```
+ Example log line in file
+ ```
+ becomes
+ ```
+ { "message": "Example log line in file" }
+ ```
+
+  * `json`: Reads data that is in the form of a JSON string from a file. If the JSON string cannot be parsed, the file source treats it as a plain text line.
+ Expects json lines as follows:
+ ```
+ { "key1": "val1" }
+ { "key2": "val2" }
+ { "key3": "val3" }
+ ```
+
+
+* `record_type` (String): The Event type that will be stored in the metadata of the Event. Default is `string`.
Temporarily, `record_type` can either be `event` or `string`. If you would like to use the file source for log analytics use cases like grok,
+ change this to `event`.
## `file` (sink)
diff --git a/data-prepper-plugins/common/build.gradle b/data-prepper-plugins/common/build.gradle
index 22e498359c..7d07857328 100644
--- a/data-prepper-plugins/common/build.gradle
+++ b/data-prepper-plugins/common/build.gradle
@@ -23,8 +23,10 @@ dependencies {
implementation "org.bouncycastle:bcprov-jdk15on:1.69"
implementation "org.bouncycastle:bcpkix-jdk15on:1.69"
implementation 'org.reflections:reflections:0.10.2'
+ testImplementation project(':data-prepper-plugins:blocking-buffer')
testImplementation 'commons-io:commons-io:2.11.0'
testImplementation "org.hamcrest:hamcrest:2.2"
+ testImplementation "org.mockito:mockito-inline:${versionMap.mockito}"
}
jacocoTestCoverageVerification {
diff --git a/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/FileSource.java b/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/FileSource.java
deleted file mode 100644
index a2b75aed09..0000000000
--- a/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/FileSource.java
+++ /dev/null
@@ -1,91 +0,0 @@
-/*
- * SPDX-License-Identifier: Apache-2.0
- *
- * The OpenSearch Contributors require contributions made to
- * this file be licensed under the Apache-2.0 license or a
- * compatible open source license.
- *
- * Modifications Copyright OpenSearch Contributors. See
- * GitHub history for details.
- */
-
-package com.amazon.dataprepper.plugins.source;
-
-import com.amazon.dataprepper.model.annotations.DataPrepperPlugin;
-import com.amazon.dataprepper.model.buffer.Buffer;
-import com.amazon.dataprepper.model.configuration.PluginSetting;
-import com.amazon.dataprepper.model.record.Record;
-import com.amazon.dataprepper.model.source.Source;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
-import java.io.BufferedReader;
-import java.io.IOException;
-import java.nio.charset.StandardCharsets;
-import java.nio.file.Files;
-import java.nio.file.Paths;
-import java.util.concurrent.TimeoutException;
-
-import static com.google.common.base.Preconditions.checkNotNull;
-import static java.lang.String.format;
-
-@DataPrepperPlugin(name = "file", pluginType = Source.class)
-public class FileSource implements Source<Record<String>> {
- private static final Logger LOG = LoggerFactory.getLogger(FileSource.class);
- private static final String ATTRIBUTE_PATH = "path";
- private static final String ATTRIBUTE_TIMEOUT = "write_timeout";
- private static final int WRITE_TIMEOUT = 5_000;
-
- private final String filePathToRead;
- private final int writeTimeout;
- private final String pipelineName;
- private boolean isStopRequested;
-
-
- /**
- * Mandatory constructor for Data Prepper Component - This constructor is used by Data Prepper
- * runtime engine to construct an instance of {@link FileSource} using an instance of {@link PluginSetting} which
- * has access to pluginSetting metadata from pipeline
- * pluginSetting file.
- *
- * @param pluginSetting instance with metadata information from pipeline pluginSetting file.
- */
- public FileSource(final PluginSetting pluginSetting) {
- this((String) checkNotNull(pluginSetting, "PluginSetting cannot be null")
- .getAttributeFromSettings(ATTRIBUTE_PATH),
- pluginSetting.getIntegerOrDefault(ATTRIBUTE_TIMEOUT, WRITE_TIMEOUT),
- pluginSetting.getPipelineName());
- }
-
- public FileSource(final String filePath, final int writeTimeout, final String pipelineName) {
- if (filePath == null || filePath.isEmpty()) {
- throw new RuntimeException(format("Pipeline [%s] - path is a required attribute for file source",
- pipelineName));
- }
- this.filePathToRead = filePath;
- this.writeTimeout = writeTimeout;
- this.pipelineName = checkNotNull(pipelineName, "Pipeline name cannot be null");
- isStopRequested = false;
- }
-
-
- @Override
- public void start(final Buffer<Record<String>> buffer) {
- checkNotNull(buffer, format("Pipeline [%s] - buffer cannot be null for file source to start", pipelineName));
- try (BufferedReader reader = Files.newBufferedReader(Paths.get(filePathToRead), StandardCharsets.UTF_8)) {
- String line;
- while ((line = reader.readLine()) != null && !isStopRequested) {
- buffer.write(new Record<>(line), writeTimeout);
- }
- } catch (IOException | TimeoutException ex) {
- LOG.error("Pipeline [{}] - Error processing the input file [{}]", pipelineName, filePathToRead, ex);
- throw new RuntimeException(format("Pipeline [%s] - Error processing the input file %s", pipelineName,
- filePathToRead), ex);
- }
- }
-
- @Override
- public void stop() {
- isStopRequested = true;
- }
-}
\ No newline at end of file
diff --git a/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/file/FileFormat.java b/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/file/FileFormat.java
new file mode 100644
index 0000000000..a969ea7a31
--- /dev/null
+++ b/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/file/FileFormat.java
@@ -0,0 +1,33 @@
+package com.amazon.dataprepper.plugins.source.file;
+
+import java.util.Arrays;
+import java.util.Map;
+import java.util.function.Function;
+import java.util.stream.Collectors;
+
+/**
+ * An enum representing the file formats supported in Data Prepper's file source.
+ * @since 1.2
+ */
+public enum FileFormat {
+
+ PLAIN("plain"),
+ JSON("json");
+
+ private static final Map<String, FileFormat> NAMES_MAP = Arrays.stream(FileFormat.values())
+ .collect(Collectors.toMap(FileFormat::toString, Function.identity()));
+
+ private final String name;
+
+ FileFormat(final String name) {
+ this.name = name;
+ }
+
+ public String toString() {
+ return this.name;
+ }
+
+ public static FileFormat getByName(final String name) {
+ return NAMES_MAP.get(name.toLowerCase());
+ }
+}
\ No newline at end of file
diff --git a/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/file/FileSource.java b/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/file/FileSource.java
new file mode 100644
index 0000000000..0023069451
--- /dev/null
+++ b/data-prepper-plugins/common/src/main/java/com/amazon/dataprepper/plugins/source/file/FileSource.java
@@ -0,0 +1,121 @@
+/*
+ * SPDX-License-Identifier: Apache-2.0
+ *
+ * The OpenSearch Contributors require contributions made to
+ * this file be licensed under the Apache-2.0 license or a
+ * compatible open source license.
+ *
+ * Modifications Copyright OpenSearch Contributors. See
+ * GitHub history for details.
+ */
+
+package com.amazon.dataprepper.plugins.source.file;
+
+import com.amazon.dataprepper.metrics.PluginMetrics;
+import com.amazon.dataprepper.model.annotations.DataPrepperPlugin;
+import com.amazon.dataprepper.model.annotations.DataPrepperPluginConstructor;
+import com.amazon.dataprepper.model.buffer.Buffer;
+import com.amazon.dataprepper.model.event.JacksonEvent;
+import com.amazon.dataprepper.model.plugin.PluginFactory;
+import com.amazon.dataprepper.model.record.Record;
+import com.amazon.dataprepper.model.source.Source;
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.core.type.TypeReference;
+import com.fasterxml.jackson.databind.ObjectMapper;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+import java.io.BufferedReader;
+import java.io.IOException;
+import java.nio.charset.StandardCharsets;
+import java.nio.file.Files;
+import java.nio.file.Paths;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.concurrent.TimeoutException;
+
+import static com.google.common.base.Preconditions.checkNotNull;
+import static java.lang.String.format;
+
+@DataPrepperPlugin(name = "file", pluginType = Source.class, pluginConfigurationType = FileSourceConfig.class)
+public class FileSource implements Source<Record<Object>> {
+
+ static final String MESSAGE_KEY = "message";
+ private static final Logger LOG = LoggerFactory.getLogger(FileSource.class);
+ private static final TypeReference