Added test for DSWriteConfigSetup.performInitialSetup
Aryex committed May 27, 2022
1 parent a1b331b commit 0935965
Showing 2 changed files with 79 additions and 1 deletion.
@@ -13,8 +13,27 @@

package com.vertica.spark.common

import com.vertica.spark.config.{AWSOptions, FileStoreConfig, GCSOptions}
import com.vertica.spark.config.{AWSOptions, BasicJdbcAuth, DistributedFilesystemWriteConfig, FileStoreConfig, GCSOptions, JDBCConfig, JDBCTLSConfig, TableName, ValidFilePermissions}
import com.vertica.spark.datasource.core.Require
import org.apache.spark.sql.types.StructType

object TestObjects {
  val fileStoreConfig: FileStoreConfig = FileStoreConfig("hdfs://example-hdfs:8020/tmp/", "test", false, AWSOptions(None, None, None, None, None, None, None), GCSOptions(None, None, None))
  val tablename: TableName = TableName("testtable", None)
  val jdbcConfig: JDBCConfig = JDBCConfig(
    "1.1.1.1", 1234, "test", BasicJdbcAuth("test", "test"), JDBCTLSConfig(tlsMode = Require, None, None, None, None))
  val writeConfig: DistributedFilesystemWriteConfig = DistributedFilesystemWriteConfig(
    jdbcConfig = jdbcConfig,
    fileStoreConfig = fileStoreConfig,
    tablename = tablename,
    schema = new StructType(),
    targetTableSql = None,
    strlen = 1024,
    copyColumnList = None,
    sessionId = "id",
    failedRowPercentTolerance = 0.0f,
    filePermissions = ValidFilePermissions("777").getOrElse(throw new Exception("File perm error")),
    createExternalTable = None,
    saveJobStatusTable = false
  )
}
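
A side note on reusing this fixture (not part of the commit): individual suites can derive per-test variants from the shared config, assuming DistributedFilesystemWriteConfig is a case class and therefore provides copy(); the object and value names below are hypothetical.

// Hypothetical sketch: per-test variants of the shared write config fixture.
import com.vertica.spark.common.TestObjects

object WriteConfigVariants {
  // Same fixture, larger string length (assumes copy() exists on the config case class).
  val withLongerStrings = TestObjects.writeConfig.copy(strlen = 2048)
  // Same fixture, with the job status table enabled.
  val withStatusTable = TestObjects.writeConfig.copy(saveJobStatusTable = true)
}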
@@ -14,13 +14,15 @@
package com.vertica.spark.datasource.core

import cats.data.Validated.{Invalid, Valid}
import com.vertica.spark.common.TestObjects
import org.scalatest.BeforeAndAfterAll
import org.scalatest.flatspec.AnyFlatSpec
import com.vertica.spark.config._
import com.vertica.spark.datasource.core.factory.VerticaPipeFactoryInterface
import org.scalamock.scalatest.MockFactory
import com.vertica.spark.util.error._
import com.vertica.spark.datasource.v2.DummyReadPipe
import com.vertica.spark.util.error.ErrorHandling.ConnectorResult
import org.apache.spark.sql.SparkSession
import org.apache.spark.sql.types._

@@ -31,6 +33,7 @@ class DSConfigSetupTest extends AnyFlatSpec with BeforeAndAfterAll with MockFact
  override def afterAll(): Unit = {
  }

  val writeConfig: DistributedFilesystemWriteConfig = TestObjects.writeConfig

  // Parses config expecting success
  // The test will fail if an error is returned
@@ -693,4 +696,60 @@ class DSConfigSetupTest extends AnyFlatSpec with BeforeAndAfterAll with MockFact
        spark.close()
    }
  }

  /**
   * Dummy class used to instantiate a mock object that implements both the write pipe and base pipe interfaces.
   * The method bodies are left as ??? because ScalaMock overrides every method on the generated mock, so they are never executed.
   */
  class TestWritePipe extends VerticaPipeWriteInterface with VerticaPipeInterface {
    /**
     * Initial setup for the whole write operation. Called by driver.
     */
    override def doPreWriteSteps(): ConnectorResult[Unit] = ???

    /**
     * Initial setup for the write of an individual partition. Called by executor.
     *
     * @param uniqueId Unique identifier for the partition being written
     */
    override def startPartitionWrite(uniqueId: String): ConnectorResult[Unit] = ???

    /**
     * Write a block of data to the underlying source. Called by executor.
     */
    override def writeData(data: DataBlock): ConnectorResult[Unit] = ???

    /**
     * Ends the write, doing any necessary cleanup. Called by executor once writing of the given partition is done.
     */
    override def endPartitionWrite(): ConnectorResult[Unit] = ???

    /**
     * Commits the data being written. Called by the driver once all executors have succeeded writing.
     */
    override def commit(): ConnectorResult[Unit] = ???

    /**
     * Retrieve any metadata for a table needed to inform the configuration of the operation.
     *
     * Can include the schema and things like node information / segmentation -- should have a caching mechanism.
     */
    override def getMetadata: ConnectorResult[VerticaMetadata] = ???

    /**
     * Returns the default number of rows to read/write from this pipe at a time.
     */
    override def getDataBlockSize: ConnectorResult[Long] = ???
  }

it should "perform initial setup" in {
val pipeFactoryMock = mock[VerticaPipeFactoryInterface]
val writePipeMock = mock[TestWritePipe]
(pipeFactoryMock.getWritePipe _).expects(writeConfig, true).returning(writePipeMock)
(writePipeMock.doPreWriteSteps _).expects().returning(Right())
new DSWriteConfigSetup(Some(new StructType), pipeFactoryMock).performInitialSetup(writeConfig) match {
case Left(value) => fail("Expected to succeed: " + value.getFullContext)
case Right(result) =>
assert(result == None)
}
}
}
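
The same mock wiring can also exercise the failure path. Below is a hypothetical companion test (not part of this commit): it assumes ConnectorError can be mocked with ScalaMock, that no methods are invoked on the error during setup, and that performInitialSetup surfaces a Left when doPreWriteSteps fails; it would live inside the same DSConfigSetupTest suite, which already imports com.vertica.spark.util.error._.

it should "fail initial setup when pre-write steps fail" in {
  val pipeFactoryMock = mock[VerticaPipeFactoryInterface]
  val writePipeMock = mock[TestWritePipe]
  // Hypothetical: any ConnectorError would do here; a mock avoids naming a concrete error class.
  val error = mock[ConnectorError]
  (pipeFactoryMock.getWritePipe _).expects(writeConfig, true).returning(writePipeMock)
  (writePipeMock.doPreWriteSteps _).expects().returning(Left(error))
  new DSWriteConfigSetup(Some(new StructType), pipeFactoryMock).performInitialSetup(writeConfig) match {
    case Left(_) => succeed
    case Right(_) => fail("Expected initial setup to fail")
  }
}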
