From 27b684f01757685f1df8870e5fea2c70e1fc9bf1 Mon Sep 17 00:00:00 2001
From: Luis Bianchin
Date: Mon, 15 Apr 2019 16:07:40 +0200
Subject: [PATCH] Better logs

---
 .../java/com/spotify/dbeam/avro/AvroWriter.java |  9 +++------
 .../java/com/spotify/dbeam/avro/JdbcAvroIO.java | 14 +++++++-------
 2 files changed, 10 insertions(+), 13 deletions(-)

diff --git a/dbeam-core/src/main/java/com/spotify/dbeam/avro/AvroWriter.java b/dbeam-core/src/main/java/com/spotify/dbeam/avro/AvroWriter.java
index 285b1bf8..fff647ae 100644
--- a/dbeam-core/src/main/java/com/spotify/dbeam/avro/AvroWriter.java
+++ b/dbeam-core/src/main/java/com/spotify/dbeam/avro/AvroWriter.java
@@ -36,20 +36,18 @@ public class AvroWriter implements Runnable {
   private static final Logger LOGGER = LoggerFactory.getLogger(AvroWriter.class);
 
   private final DataFileWriter dataFileWriter;
-  private final JdbcAvroMetering metering;
   private final BlockingQueue queue;
 
   public AvroWriter(
       DataFileWriter dataFileWriter,
-      JdbcAvroMetering metering,
       BlockingQueue queue) {
     this.dataFileWriter = dataFileWriter;
-    this.metering = metering;
     this.queue = queue;
   }
 
   @Override
   public void run() {
+    LOGGER.debug("AvroWriter started");
     try {
       while (true) {
         final ByteBuffer datum = queue.take();
         }
       }
     } catch (InterruptedException ex) {
-      System.out.println("CONSUMER INTERRUPTED");
+      LOGGER.warn("AvroWriter interrupted");
     } catch (IOException e) {
-      e.printStackTrace();
+      LOGGER.error("Error on AvroWriter", e);
     }
-
   }
 }
diff --git a/dbeam-core/src/main/java/com/spotify/dbeam/avro/JdbcAvroIO.java b/dbeam-core/src/main/java/com/spotify/dbeam/avro/JdbcAvroIO.java
index 4bf5400f..b28c7877 100644
--- a/dbeam-core/src/main/java/com/spotify/dbeam/avro/JdbcAvroIO.java
+++ b/dbeam-core/src/main/java/com/spotify/dbeam/avro/JdbcAvroIO.java
@@ -139,7 +139,7 @@ public FileBasedSink.Writer createWriter() {
   }
 
   private static class JdbcAvroWriter extends FileBasedSink.Writer {
-    private final Logger logger = LoggerFactory.getLogger(JdbcAvroWriter.class);
+    private static final Logger LOGGER = LoggerFactory.getLogger(JdbcAvroWriter.class);
     private final int syncInterval = DataFileConstants.DEFAULT_SYNC_INTERVAL * 16; // 1 MB
     private final DynamicAvroDestinations dynamicDestinations;
     private final JdbcAvroArgs jdbcAvroArgs;
@@ -166,7 +166,7 @@ public Void getDestination() {
     @SuppressWarnings("deprecation") // uses internal test functionality.
     @Override
     protected void prepareWrite(WritableByteChannel channel) throws Exception {
-      logger.info("jdbcavroio : Preparing write...");
+      LOGGER.debug("jdbcavroio : Preparing write...");
       connection = jdbcAvroArgs.jdbcConnectionConfiguration().createConnection();
       Void destination = getDestination();
       Schema schema = dynamicDestinations.getSchema(destination);
@@ -176,7 +176,7 @@ protected void prepareWrite(WritableByteChannel channel) throws Exception {
       dataFileWriter.setMeta("created_by", this.getClass().getCanonicalName());
       this.countingOutputStream = new CountingOutputStream(Channels.newOutputStream(channel));
       dataFileWriter.create(schema, this.countingOutputStream);
-      logger.info("jdbcavroio : Write prepared");
+      LOGGER.debug("jdbcavroio : Write prepared");
     }
 
     private ResultSet executeQuery(String query) throws Exception {
@@ -199,7 +199,7 @@ private ResultSet executeQuery(String query) throws Exception {
       }
 
       long startTime = System.nanoTime();
-      logger.info(
+      LOGGER.info(
           "jdbcavroio : Executing query with fetchSize={} (this might take a few minutes) ...",
           statement.getFetchSize());
       ResultSet resultSet = statement.executeQuery();
@@ -213,7 +213,7 @@ public void write(String query) throws Exception {
       checkArgument(dataFileWriter != null, "Avro DataFileWriter was not properly created");
-      logger.info("jdbcavroio : Starting write...");
+      LOGGER.debug("jdbcavroio : Starting write...");
       final ExecutorService executorService = Executors.newSingleThreadExecutor();
       try (ResultSet resultSet = executeQuery(query)) {
         final Future future = executorService.submit(new AvroWriter(dataFileWriter, queue));
@@ -241,14 +241,14 @@ private void convertAllResultSet(ResultSet resultSet, JdbcAvroRecordConverter co
 
     @Override
     protected void finishWrite() throws Exception {
-      logger.info("jdbcavroio : Closing connection, flushing writer...");
+      LOGGER.debug("jdbcavroio : Closing connection, flushing writer...");
       if (connection != null) {
         connection.close();
       }
       if (dataFileWriter != null) {
         dataFileWriter.close();
       }
-      logger.info("jdbcavroio : Write finished");
+      LOGGER.info("jdbcavroio : Write finished");
     }
   }
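
Note for reviewers: below is a minimal, self-contained sketch of the queue handoff this patch touches, with a plain Runnable consumer standing in for AvroWriter and System.out standing in for the Avro DataFileWriter. The zero-capacity-ByteBuffer end-of-stream marker and all names here (QueueHandoffSketch, END_OF_STREAM) are illustrative assumptions, not part of this patch.

import java.nio.ByteBuffer;
import java.util.concurrent.BlockingQueue;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Executors;
import java.util.concurrent.Future;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.TimeUnit;

public class QueueHandoffSketch {

  // Hypothetical end-of-stream marker; assumed for illustration, not taken from this patch.
  private static final ByteBuffer END_OF_STREAM = ByteBuffer.allocate(0);

  public static void main(String[] args) throws Exception {
    final BlockingQueue<ByteBuffer> queue = new LinkedBlockingQueue<>(64);
    final ExecutorService executorService = Executors.newSingleThreadExecutor();

    // Consumer: drains the queue until it sees the zero-capacity marker,
    // mirroring the take() loop and the logging added to AvroWriter#run above.
    final Future<?> future = executorService.submit(() -> {
      try {
        while (true) {
          final ByteBuffer datum = queue.take();
          if (datum.capacity() == 0) {
            break; // end of stream
          }
          System.out.println("consumed " + datum.remaining() + " bytes");
        }
      } catch (InterruptedException ex) {
        Thread.currentThread().interrupt();
      }
    });

    // Producer: in JdbcAvroIO#write the result-set conversion would put
    // encoded records here; plain byte buffers stand in for them.
    for (int i = 0; i < 3; i++) {
      queue.put(ByteBuffer.wrap(new byte[] {1, 2, 3}));
    }
    queue.put(END_OF_STREAM); // signal the consumer to stop

    future.get(10, TimeUnit.SECONDS);
    executorService.shutdown();
  }
}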