forked from apache/carbondata
-
Notifications
You must be signed in to change notification settings - Fork 4
Commit
This commit does not belong to any branch on this repository, and may belong to a fork outside of the repository.
Showing 7 changed files with 307 additions and 4 deletions.
There are no files selected for viewing
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
96 changes: 96 additions & 0 deletions
96
.../src/test/scala/org/apache/carbondata/spark/testsuite/addsegment/AddSegmentTestCase.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,96 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one or more | ||
* contributor license agreements. See the NOTICE file distributed with | ||
* this work for additional information regarding copyright ownership. | ||
* The ASF licenses this file to You under the Apache License, Version 2.0 | ||
* (the "License"); you may not use this file except in compliance with | ||
* the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
package org.apache.carbondata.spark.testsuite.addsegment | ||
|
||
import java.io.File | ||
import java.nio.file.{Files, Paths} | ||
|
||
import org.apache.spark.sql.catalyst.InternalRow | ||
import org.apache.spark.sql.execution.strategy.CarbonDataSourceScan | ||
import org.apache.spark.sql.test.util.QueryTest | ||
import org.apache.spark.sql.{CarbonEnv, DataFrame, Row} | ||
import org.scalatest.BeforeAndAfterAll | ||
|
||
import org.apache.carbondata.core.constants.CarbonCommonConstants | ||
import org.apache.carbondata.core.datastore.filesystem.CarbonFile | ||
import org.apache.carbondata.core.datastore.impl.FileFactory | ||
import org.apache.carbondata.core.util.CarbonProperties | ||
import org.apache.carbondata.core.util.path.CarbonTablePath | ||
import org.apache.carbondata.spark.rdd.CarbonScanRDD | ||
|
||
class AddSegmentTestCase extends QueryTest with BeforeAndAfterAll {

  /**
   * Drops any leftover test table and pins the timestamp/date parse formats
   * to "dd-MM-yyyy" so that data.csv loads deterministically.
   */
  override def beforeAll {
    dropTable
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_TIMESTAMP_FORMAT, "dd-MM-yyyy")
    CarbonProperties.getInstance()
      .addProperty(CarbonCommonConstants.CARBON_DATE_FORMAT, "dd-MM-yyyy")
  }

  test("Test add segment ") {
    sql(
      """
        | CREATE TABLE addsegment1 (empname String, designation String, doj Timestamp,
        | workgroupcategory int, workgroupcategoryname String, deptno int, deptname String,
        | projectcode int, projectjoindate Timestamp, projectenddate Date,attendance int,
        | utilization int,salary int, empno int)
        | STORED BY 'org.apache.carbondata.format'
      """.stripMargin)

    // Two identical loads -> segments 0 and 1, 10 rows each (data.csv has 10 rows,
    // inferred from the Row(10)/Row(20) expectations below).
    sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE addsegment1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
    sql(s"""LOAD DATA local inpath '$resourcesPath/data.csv' INTO TABLE addsegment1 OPTIONS('DELIMITER'= ',', 'QUOTECHAR'= '"')""")
    sql("select count(*) from addsegment1").show()

    // Copy segment 1's folder outside the table path, then delete and clean
    // that segment so only segment 0 remains in the table.
    val table = CarbonEnv.getCarbonTable(None, "addsegment1") (sqlContext.sparkSession)
    val path = CarbonTablePath.getSegmentPath(table.getTablePath, "1")
    val newPath = storeLocation + "/" + "addsegtest"
    move(path, newPath)
    sql("delete from table addsegment1 where segment.id in (1)")
    sql("clean files for table addsegment1")
    checkAnswer(sql("select count(*) from addsegment1"), Seq(Row(10)))

    // Re-attach the copied folder as an external segment; the row count
    // should double back to 20.
    sql(s"add segment on table addsegment1 with path '$newPath' options('format'='carbon')").show()
    checkAnswer(sql("select count(*) from addsegment1"), Seq(Row(20)))
    sql("select * from addsegment1").show()
    FileFactory.deleteAllFilesOfDir(new File(newPath))
  }

  /**
   * Copies every file directly under `oldLoc` into `newLoc`, creating the
   * destination directory first. Despite the name, the source is left intact
   * (Files.copy, not move) — the test deletes the copy itself when done.
   *
   * NOTE(review): copies only the top level; assumes segment folders have no
   * subdirectories — confirm if segment layout changes.
   */
  def move(oldLoc: String, newLoc: String): Unit = {
    val sourceFolder = FileFactory.getCarbonFile(oldLoc)
    FileFactory.mkdirs(newLoc, FileFactory.getConfiguration)
    for (file <- sourceFolder.listFiles) {
      Files.copy(Paths.get(file.getParentFile.getPath, file.getName), Paths.get(newLoc, file.getName))
    }
  }

  /** Removes the test table after the suite finishes. */
  override def afterAll = {
    dropTable
  }

  /** Drops the test table if it exists; safe to call repeatedly. */
  def dropTable = {
    sql("drop table if exists addsegment1")
  }
}
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
94 changes: 94 additions & 0 deletions
94
...c/main/scala/org/apache/spark/sql/execution/command/management/CarbonAddLoadCommand.scala
This file contains bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
Original file line number | Diff line number | Diff line change |
---|---|---|
@@ -0,0 +1,94 @@ | ||
/* | ||
* Licensed to the Apache Software Foundation (ASF) under one or more | ||
* contributor license agreements. See the NOTICE file distributed with | ||
* this work for additional information regarding copyright ownership. | ||
* The ASF licenses this file to You under the Apache License, Version 2.0 | ||
* (the "License"); you may not use this file except in compliance with | ||
* the License. You may obtain a copy of the License at | ||
* | ||
* http://www.apache.org/licenses/LICENSE-2.0 | ||
* | ||
* Unless required by applicable law or agreed to in writing, software | ||
* distributed under the License is distributed on an "AS IS" BASIS, | ||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | ||
* See the License for the specific language governing permissions and | ||
* limitations under the License. | ||
*/ | ||
|
||
package org.apache.spark.sql.execution.command.management | ||
|
||
import java.util | ||
|
||
import scala.collection.JavaConverters._ | ||
|
||
import org.apache.spark.sql.execution.command.{Checker, MetadataCommand} | ||
import org.apache.spark.sql.{AnalysisException, CarbonEnv, Row, SparkSession} | ||
|
||
import org.apache.carbondata.common.exceptions.sql.MalformedCarbonCommandException | ||
import org.apache.carbondata.core.datamap.Segment | ||
import org.apache.carbondata.core.exception.ConcurrentOperationException | ||
import org.apache.carbondata.core.metadata.SegmentFileStore | ||
import org.apache.carbondata.core.statusmanager.{SegmentStatus, SegmentStatusManager} | ||
import org.apache.carbondata.core.util.path.CarbonTablePath | ||
import org.apache.carbondata.processing.loading.model.{CarbonDataLoadSchema, CarbonLoadModel} | ||
import org.apache.carbondata.processing.util.CarbonLoaderUtil | ||
|
||
|
||
/** | ||
* User can add external data folder as a segment to a transactional table. | ||
* In case of external carbon data folder user no need to specify the format in options. But for | ||
* other formats like parquet user must specify the format=parquet in options. | ||
*/ | ||
/**
 * User can add external data folder as a segment to a transactional table.
 * In case of external carbon data folder user no need to specify the format in options. But for
 * other formats like parquet user must specify the format=parquet in options.
 *
 * @param databaseNameOp optional database name; current database is used when absent
 * @param tableName      target transactional table
 * @param segmentPath    path of the external folder to register as a segment
 * @param options        optional segment options (e.g. 'format'='parquet')
 */
case class CarbonAddLoadCommand(
    databaseNameOp: Option[String],
    tableName: String,
    segmentPath: String,
    options: Option[Map[String, String]])
  extends MetadataCommand {

  override def processMetadata(sparkSession: SparkSession): Seq[Row] = {
    Checker.validateTableExists(databaseNameOp, tableName, sparkSession)
    val carbonTable = CarbonEnv.getCarbonTable(databaseNameOp, tableName)(sparkSession)
    setAuditTable(carbonTable)
    // Attaching an external segment is only meaningful for transactional tables.
    if (!carbonTable.getTableInfo.isTransactionalTable) {
      throw new MalformedCarbonCommandException("Unsupported operation on non transactional table")
    }

    // If insert overwrite is in progress, do not allow add segment.
    // BUG FIX: report the blocked operation as "add segment" — the original
    // said "delete segment", a copy-paste from the delete-segment command.
    if (SegmentStatusManager.isOverwriteInProgressInTable(carbonTable)) {
      throw new ConcurrentOperationException(carbonTable, "insert overwrite", "add segment")
    }

    // Minimal load model for this table; readAndUpdateLoadProgressInTableMeta
    // records an in-progress entry in table status (and presumably allocates
    // the segment id read below — TODO confirm against CarbonLoaderUtil).
    val model = new CarbonLoadModel
    model.setCarbonTransactionalTable(true)
    model.setCarbonDataLoadSchema(new CarbonDataLoadSchema(carbonTable))
    model.setDatabaseName(carbonTable.getDatabaseName)
    model.setTableName(carbonTable.getTableName)
    CarbonLoaderUtil.readAndUpdateLoadProgressInTableMeta(model, false)

    // Describe the external folder as a segment. nanoTime in the segment file
    // name keeps it unique across repeated adds for the same segment id.
    val segment = new Segment(
      model.getSegmentId,
      SegmentFileStore.genSegmentFileName(
        model.getSegmentId,
        System.nanoTime().toString) + CarbonTablePath.SEGMENT_EXT,
      segmentPath,
      options.map(o => new util.HashMap[String, String](o.asJava)).getOrElse(new util.HashMap()))

    if (SegmentFileStore.writeSegmentFile(carbonTable, segment)) {
      // Segment file written: flip the table-status entry to SUCCESS.
      SegmentFileStore.updateSegmentFile(
        carbonTable,
        model.getSegmentId,
        segment.getSegmentFileName,
        carbonTable.getCarbonTableIdentifier.getTableId,
        new SegmentFileStore(carbonTable.getTablePath, segment.getSegmentFileName),
        SegmentStatus.SUCCESS)
    } else {
      throw new AnalysisException("Adding segment with path failed.")
    }
    Seq.empty
  }

  override protected def opName: String = "ADD SEGMENT WITH PATH"
}
Oops, something went wrong.