
Commit 33b6e24
try to suppress deprecation warnings
pjfanning committed Oct 7, 2023
1 parent 404373c commit 33b6e24
Showing 3 changed files with 11 additions and 12 deletions.
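
The fix swaps the string "msg=deprecated", which looks like scalac's -Wconf/@nowarn filter syntax rather than a key that @SuppressWarnings understands, for the plain "deprecation" token that javac recognizes. A minimal, hypothetical Java sketch of the annotation in its new form (the class and the Date example are illustrative only, not from this repository):

import java.util.Date;

public class SuppressDeprecationExample {

  // "deprecation" is the token javac understands; unrecognized tokens such as
  // "msg=deprecated" are typically ignored rather than acted on.
  @SuppressWarnings("deprecation")
  static int deprecatedCall() {
    // Date.getHours() is deprecated; without the annotation, javac -Xlint:deprecation warns here.
    return new Date().getHours();
  }

  public static void main(String[] args) {
    System.out.println(deprecatedCall());
  }
}
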
@@ -76,7 +76,7 @@ public void setup() {
records.add(new GenericRecordBuilder(schema).set("id", "3").set("body", "body13").build());
}

@SuppressWarnings("msg=deprecated")
@SuppressWarnings("deprecation")
@Test
public void createNewParquetFile()
throws InterruptedException, IOException, TimeoutException, ExecutionException {
avroparquet/src/test/java/docs/javadsl/Examples.java (2 changes: 1 addition & 1 deletion)
@@ -58,7 +58,7 @@ public class Examples {
Source<GenericRecord, NotUsed> source = AvroParquetSource.create(reader);
// #init-source

@SuppressWarnings("msg=deprecated")
@SuppressWarnings("deprecation")
public Examples() throws IOException {

// #init-flow
@@ -47,7 +47,7 @@ trait AbstractAvroParquetBase {
val conf: Configuration = new Configuration()
conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, true)

- @SuppressWarnings(Array("msg=deprecated"))
+ @SuppressWarnings(Array("deprecation"))
def parquetWriter[T <: GenericRecord](file: String, conf: Configuration, schema: Schema): ParquetWriter[T] =
AvroParquetWriter.builder[T](new Path(file)).withConf(conf).withSchema(schema).build()

@@ -81,39 +81,38 @@ trait AbstractAvroParquetBase {
// #prepare-source
}

- @SuppressWarnings(Array("msg=deprecated"))
+ @SuppressWarnings(Array("deprecation"))
def sinkDocumentation(): Unit = {
// #prepare-sink
import com.sksamuel.avro4s.Record
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.fs.Path
import org.apache.parquet.avro.AvroReadSupport

- val file: String = "./sample/path/test.parquet"
- val conf: Configuration = new Configuration()
+ val file = "./sample/path/test.parquet"
+ val conf = new Configuration()
conf.setBoolean(AvroReadSupport.AVRO_COMPATIBILITY, true)
- val writer: ParquetWriter[Record] =
+ val writer =
AvroParquetWriter.builder[Record](new Path(file)).withConf(conf).withSchema(schema).build()
// #prepare-sink
if (writer != null) { // forces val usage
}
}

- @SuppressWarnings(Array("msg=deprecated"))
+ @SuppressWarnings(Array("deprecation"))
def initWriterDocumentation(): Unit = {
// #init-writer
import org.apache.avro.generic.GenericRecord
import org.apache.hadoop.fs.Path
import org.apache.parquet.avro.AvroParquetReader
import org.apache.parquet.hadoop.ParquetReader
import org.apache.parquet.hadoop.util.HadoopInputFile

- val file: String = "./sample/path/test.parquet"
- val writer: ParquetWriter[GenericRecord] =
+ val file = "./sample/path/test.parquet"
+ val writer =
AvroParquetWriter.builder[GenericRecord](new Path(file)).withConf(conf).withSchema(schema).build()
// #init-writer
// #init-reader
- val reader: ParquetReader[GenericRecord] =
+ val reader =
AvroParquetReader.builder[GenericRecord](HadoopInputFile.fromPath(new Path(file), conf)).withConf(conf).build()
// #init-reader
if (writer != null && reader != null) { // forces val usage
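
The warnings being silenced above presumably come from the Path-based AvroParquetWriter.builder overload used in these snippets. For reference, a sketch, not part of this commit, of the OutputFile/InputFile-based builder overloads that avoid the deprecated variants; it assumes parquet-hadoop's HadoopOutputFile and HadoopInputFile are available, and the class and method names are illustrative:

import java.io.IOException;

import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.parquet.avro.AvroParquetReader;
import org.apache.parquet.avro.AvroParquetWriter;
import org.apache.parquet.hadoop.ParquetReader;
import org.apache.parquet.hadoop.ParquetWriter;
import org.apache.parquet.hadoop.util.HadoopInputFile;
import org.apache.parquet.hadoop.util.HadoopOutputFile;

public class NonDeprecatedBuilders {

  // Writer built from an OutputFile instead of the deprecated builder(Path) overload.
  static ParquetWriter<GenericRecord> writer(String file, Configuration conf, Schema schema) throws IOException {
    return AvroParquetWriter.<GenericRecord>builder(HadoopOutputFile.fromPath(new Path(file), conf))
        .withConf(conf)
        .withSchema(schema)
        .build();
  }

  // Reader built from an InputFile, the same pattern the #init-reader snippet already uses.
  static ParquetReader<GenericRecord> reader(String file, Configuration conf) throws IOException {
    return AvroParquetReader.<GenericRecord>builder(HadoopInputFile.fromPath(new Path(file), conf))
        .withConf(conf)
        .build();
  }
}
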
