diff --git a/avroparquet/src/test/java/docs/javadsl/AvroParquetSinkTest.java b/avroparquet/src/test/java/docs/javadsl/AvroParquetSinkTest.java
index 5aed60b12..a6b7b4da9 100644
--- a/avroparquet/src/test/java/docs/javadsl/AvroParquetSinkTest.java
+++ b/avroparquet/src/test/java/docs/javadsl/AvroParquetSinkTest.java
@@ -76,7 +76,7 @@ public void setup() {
     records.add(new GenericRecordBuilder(schema).set("id", "3").set("body", "body13").build());
   }
 
-  @SuppressWarnings("msg=deprecated")
+  @SuppressWarnings("deprecation")
   @Test
   public void createNewParquetFile()
       throws InterruptedException, IOException, TimeoutException, ExecutionException {
diff --git a/avroparquet/src/test/java/docs/javadsl/Examples.java b/avroparquet/src/test/java/docs/javadsl/Examples.java
index 10974163c..a1a884f5e 100644
--- a/avroparquet/src/test/java/docs/javadsl/Examples.java
+++ b/avroparquet/src/test/java/docs/javadsl/Examples.java
@@ -58,7 +58,7 @@ public class Examples {
   Source source = AvroParquetSource.create(reader);
   // #init-source
 
-  @SuppressWarnings("msg=deprecated")
+  @SuppressWarnings("deprecation")
   public Examples() throws IOException {
 
     // #init-flow
diff --git a/avroparquet/src/test/scala/docs/scaladsl/AbstractAvroParquetBase.scala b/avroparquet/src/test/scala/docs/scaladsl/AbstractAvroParquetBase.scala
index 970e07ad5..9875e9eb3 100644
--- a/avroparquet/src/test/scala/docs/scaladsl/AbstractAvroParquetBase.scala
+++ b/avroparquet/src/test/scala/docs/scaladsl/AbstractAvroParquetBase.scala
@@ -47,7 +47,7 @@ trait AbstractAvroParquetBase {
   val conf: Configuration = new Configuration()
   conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, true)
 
-  @SuppressWarnings(Array("msg=deprecated"))
+  @SuppressWarnings(Array("deprecation"))
   def parquetWriter[T <: GenericRecord](file: String, conf: Configuration, schema: Schema): ParquetWriter[T] =
     AvroParquetWriter.builder[T](new Path(file)).withConf(conf).withSchema(schema).build()
 
@@ -81,7 +81,7 @@ trait AbstractAvroParquetBase {
     // #prepare-source
   }
 
-  @SuppressWarnings(Array("msg=deprecated"))
+  @SuppressWarnings(Array("deprecation"))
   def sinkDocumentation(): Unit = {
     // #prepare-sink
     import com.sksamuel.avro4s.Record
@@ -89,31 +89,30 @@ trait AbstractAvroParquetBase {
     import org.apache.hadoop.fs.Path
     import org.apache.parquet.avro.AvroReadSupport
 
-    val file: String = "./sample/path/test.parquet"
-    val conf: Configuration = new Configuration()
+    val file = "./sample/path/test.parquet"
+    val conf = new Configuration()
     conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, true)
-    val writer: ParquetWriter[Record] =
+    val writer =
       AvroParquetWriter.builder[Record](new Path(file)).withConf(conf).withSchema(schema).build()
     // #prepare-sink
     if (writer != null) { // forces val usage
     }
   }
 
-  @SuppressWarnings(Array("msg=deprecated"))
+  @SuppressWarnings(Array("deprecation"))
   def initWriterDocumentation(): Unit = {
     // #init-writer
     import org.apache.avro.generic.GenericRecord
     import org.apache.hadoop.fs.Path
     import org.apache.parquet.avro.AvroParquetReader
-    import org.apache.parquet.hadoop.ParquetReader
     import org.apache.parquet.hadoop.util.HadoopInputFile
-    val file: String = "./sample/path/test.parquet"
-    val writer: ParquetWriter[GenericRecord] =
+    val file = "./sample/path/test.parquet"
+    val writer =
       AvroParquetWriter.builder[GenericRecord](new Path(file)).withConf(conf).withSchema(schema).build()
     // #init-writer
     // #init-reader
-    val reader: ParquetReader[GenericRecord] =
+    val reader =
       AvroParquetReader.builder[GenericRecord](HadoopInputFile.fromPath(new Path(file), conf)).withConf(conf).build()
     // #init-reader
     if (writer != null && reader != null) { // forces val usage