Commit ffc31da (1 parent: d5f5ba7). Showing 3 changed files with 128 additions and 17 deletions.
dbeam-core/src/main/java/com/spotify/dbeam/avro/JdbcAvroRecordConverter.java: 120 additions, 0 deletions
@@ -0,0 +1,120 @@
/*-
 * -\-\-
 * DBeam Core
 * --
 * Copyright (C) 2016 - 2019 Spotify AB
 * --
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 * -/-/-
 */

package com.spotify.dbeam.avro;

import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.sql.ResultSet;
import java.sql.ResultSetMetaData;
import java.sql.SQLException;

import org.apache.avro.io.BinaryEncoder;
import org.apache.avro.io.EncoderFactory;

public class JdbcAvroRecordConverter {
  private final JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings;
  private final int columnCount;
  private final ResultSet resultSet;
  private final EncoderFactory encoderFactory = EncoderFactory.get();

  public JdbcAvroRecordConverter(
      JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings, int columnCount,
      ResultSet resultSet) {
    this.mappings = mappings;
    this.columnCount = columnCount;
    this.resultSet = resultSet;
  }

  public static JdbcAvroRecordConverter create(ResultSet resultSet)
      throws SQLException {
    return new JdbcAvroRecordConverter(
        computeAllMappings(resultSet),
        resultSet.getMetaData().getColumnCount(),
        resultSet);
  }

  @SuppressWarnings("unchecked")
  static JdbcAvroRecord.SqlFunction<ResultSet, Object>[] computeAllMappings(ResultSet resultSet)
      throws SQLException {
    final ResultSetMetaData meta = resultSet.getMetaData();
    final int columnCount = meta.getColumnCount();

    final JdbcAvroRecord.SqlFunction<ResultSet, Object>[] mappings =
        new JdbcAvroRecord.SqlFunction[columnCount + 1];

    for (int i = 1; i <= columnCount; i++) {
      mappings[i] = JdbcAvroRecord.computeMapping(meta, i);
    }
    return mappings;
  }

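  // Reused across calls: EncoderFactory.directBinaryEncoder() reinitializes this encoder
  // for each new output stream instead of allocating a fresh one per row.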
  private BinaryEncoder binaryEncoder = null;

  public static class MyByteArrayOutputStream extends ByteArrayOutputStream {

    MyByteArrayOutputStream(int size) {
      super(size);
    }

    // provide access to the internal buffer, avoiding a copy
    byte[] getBuffer() {
      return buf;
    }
  }

  /**
   * Read data from a single row of the result set and encode it into an Avro record as a byte
   * array. Reading and encoding directly has the benefit of requiring fewer copies of bytes
   * between objects.
   */
  public ByteBuffer convertResultSetIntoAvroBytes()
      throws SQLException, IOException {
    final MyByteArrayOutputStream out = new MyByteArrayOutputStream(columnCount * 64);
    binaryEncoder = encoderFactory.directBinaryEncoder(out, binaryEncoder);
    for (int i = 1; i <= columnCount; i++) {
      final Object value = mappings[i].apply(resultSet);
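      // Avro encodes a union as the branch index followed by the value. The writes below
      // assume each field's schema is a nullable union with null at index 0 and the
      // concrete type at index 1.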
      if (value == null || resultSet.wasNull()) {
        binaryEncoder.writeIndex(0);
        binaryEncoder.writeNull();
      } else {
        binaryEncoder.writeIndex(1);
        if (value instanceof String) {
          binaryEncoder.writeString((String) value);
        } else if (value instanceof Long) {
          binaryEncoder.writeLong((Long) value);
        } else if (value instanceof Integer) {
          binaryEncoder.writeInt((Integer) value);
        } else if (value instanceof Boolean) {
          binaryEncoder.writeBoolean((Boolean) value);
        } else if (value instanceof ByteBuffer) {
          binaryEncoder.writeBytes((ByteBuffer) value);
        } else if (value instanceof Double) {
          binaryEncoder.writeDouble((Double) value);
        } else if (value instanceof Float) {
          binaryEncoder.writeFloat((Float) value);
        }
      }
    }
    binaryEncoder.flush();
    // Wrap only the bytes actually written; the internal buffer may be larger than out.size().
    return ByteBuffer.wrap(out.getBuffer(), 0, out.size());
  }

}
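
Below is a minimal usage sketch for the converter above: create one instance per ResultSet, then call convertResultSetIntoAvroBytes() once per row after each next(). The JDBC URL, the query, and the consumeAvroBytes sink are hypothetical placeholders; DBeam's own pipeline wiring, which presumably hands these bytes to an Avro writer, is not reproduced here.

import java.nio.ByteBuffer;
import java.sql.Connection;
import java.sql.DriverManager;
import java.sql.ResultSet;
import java.sql.Statement;

import com.spotify.dbeam.avro.JdbcAvroRecordConverter;

public class ConverterUsageSketch {

  public static void main(String[] args) throws Exception {
    // Hypothetical connection details and query; DBeam derives these from its own options.
    try (Connection connection =
             DriverManager.getConnection("jdbc:postgresql://localhost/example_db");
         Statement statement = connection.createStatement();
         ResultSet resultSet = statement.executeQuery("SELECT * FROM example_table")) {

      // One converter per ResultSet: column mappings and the column count are computed once.
      final JdbcAvroRecordConverter converter = JdbcAvroRecordConverter.create(resultSet);

      while (resultSet.next()) {
        // Each call encodes the current row directly into Avro binary form.
        final ByteBuffer avroBytes = converter.convertResultSetIntoAvroBytes();
        consumeAvroBytes(avroBytes);
      }
    }
  }

  // Hypothetical sink; a real pipeline would hand the bytes to an Avro writer,
  // for example DataFileWriter.appendEncoded(ByteBuffer).
  private static void consumeAvroBytes(ByteBuffer avroBytes) {
    System.out.println("Encoded row of " + avroBytes.remaining() + " bytes");
  }
}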