Commit 163bda0

Fixed my bad
HeartSaVioR committed Jan 9, 2020
1 parent 29ba0f2 commit 163bda0
Showing 3 changed files with 9 additions and 11 deletions.

@@ -91,10 +91,10 @@ private[spark] object EventFilter extends Logging {

           event.foreach { e =>
             val results = filters.flatMap(_.acceptFn().lift.apply(e))
-            if (results.isEmpty || !results.contains(false)) {
-              onAccepted(line, e)
-            } else {
+            if (results.nonEmpty && results.forall(_ == false)) {
               onRejected(line, e)
+            } else {
+              onAccepted(line, e)
             }
           }
         } catch {
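
For reference, the replacement condition keeps an event unless every filter that expressed an opinion rejected it, whereas the old condition dropped an event as soon as any single filter returned false. Below is a minimal standalone sketch of the new rule in plain Scala, with no Spark types; the object and method names are illustrative only, not part of the commit.

object AcceptRuleSketch {
  // "results" stands for the answers of the filters that matched the event (after flatMap).
  def shouldKeep(results: Seq[Boolean]): Boolean =
    !(results.nonEmpty && results.forall(_ == false))

  def main(args: Array[String]): Unit = {
    println(shouldKeep(Seq.empty))         // true:  no filter had an opinion, keep the event
    println(shouldKeep(Seq(true, false)))  // true:  at least one filter accepts, keep ("filter in")
    println(shouldKeep(Seq(false, false))) // false: every filter rejects, drop ("filter out")
  }
}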

@@ -46,7 +46,7 @@ import org.apache.spark.util.Utils
  * represents approximate rate of filtered-out events. Score is being calculated via applying
  * heuristic; task events tend to take most size in event log.
  */
-private[spark] class EventLogFileCompactor(
+class EventLogFileCompactor(
     sparkConf: SparkConf,
     hadoopConf: Configuration,
     fs: FileSystem) extends Logging {
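
The scaladoc above describes the compaction score as an approximate rate of filtered-out events, computed by a heuristic in which task events carry most of the weight. The actual formula is not part of this diff; purely as an illustration of a rate-style score gated by a threshold, a sketch follows (the names and the 0.7 cutoff are assumptions, not values taken from Spark).

object CompactionScoreSketch {
  // Fraction of events that the filters would drop; 0.0 when the log is empty.
  def score(totalEvents: Long, filteredOutEvents: Long): Double =
    if (totalEvents == 0) 0.0 else filteredOutEvents.toDouble / totalEvents

  // Compact only when enough of the log would be removed to justify rewriting it.
  def worthCompacting(totalEvents: Long, filteredOutEvents: Long, minScore: Double = 0.7): Boolean =
    score(totalEvents, filteredOutEvents) >= minScore

  def main(args: Array[String]): Unit = {
    println(worthCompacting(totalEvents = 1000, filteredOutEvents = 850)) // true: most events would go
    println(worthCompacting(totalEvents = 1000, filteredOutEvents = 100)) // false: low score, skip
  }
}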

@@ -200,11 +200,9 @@ private[spark] class EventLogFileCompactor(
  * @param compactIndex The index of compact file if the compaction is successful.
  *                     Otherwise it will be None.
  */
-private[spark] case class CompactionResult(
-    code: CompactionResultCode.Value,
-    compactIndex: Option[Long])
+case class CompactionResult(code: CompactionResultCode.Value, compactIndex: Option[Long])
 
-private[spark] object CompactionResultCode extends Enumeration {
+object CompactionResultCode extends Enumeration {
   val SUCCESS, NOT_ENOUGH_FILES, LOW_SCORE_FOR_COMPACTION = Value
 }
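
The one-line case class above pairs a result code with the optional index of the produced compact file. The sketch below shows how a caller might branch on the three codes; the two type definitions are copied from the diff, while CompactionResultSketch and describe are illustrative names that are not part of the commit.

object CompactionResultSketch {
  object CompactionResultCode extends Enumeration {
    val SUCCESS, NOT_ENOUGH_FILES, LOW_SCORE_FOR_COMPACTION = Value
  }

  case class CompactionResult(code: CompactionResultCode.Value, compactIndex: Option[Long])

  // Hypothetical helper: how a caller might react to each result code.
  def describe(result: CompactionResult): String = result.code match {
    case CompactionResultCode.SUCCESS =>
      s"compacted, compact file index: ${result.compactIndex.getOrElse(-1L)}"
    case CompactionResultCode.NOT_ENOUGH_FILES =>
      "skipped: not enough event log files to compact"
    case CompactionResultCode.LOW_SCORE_FOR_COMPACTION =>
      "skipped: too few events would be filtered out to be worth compacting"
  }

  def main(args: Array[String]): Unit = {
    println(describe(CompactionResult(CompactionResultCode.SUCCESS, Some(3L))))
    println(describe(CompactionResult(CompactionResultCode.LOW_SCORE_FOR_COMPACTION, None)))
  }
}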

@@ -239,9 +239,9 @@ class EventLogFileCompactorSuite extends SparkFunSuite {
       // filterApplicationEnd: Some(true) & Some(true) => filter in
       expectedLines += writeEventToWriter(writer, SparkListenerApplicationEnd(0))
 
-      // filterBlockManagerAdded: Some(true) & Some(false) => filter out
-      writeEventToWriter(writer, SparkListenerBlockManagerAdded(0, BlockManagerId("1", "host1", 1),
-        10))
+      // filterBlockManagerAdded: Some(true) & Some(false) => filter in
+      expectedLines += writeEventToWriter(writer, SparkListenerBlockManagerAdded(
+        0, BlockManagerId("1", "host1", 1), 10))
 
       // filterApplicationStart: Some(false) & Some(false) => filter out
       writeEventToWriter(writer, SparkListenerApplicationStart("app", None, 0, "user", None))
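
The three comments above encode the intended semantics after the fix: an event stays ("filter in") unless every filter answer is Some(false). A tiny standalone check of those three cases against that rule, in plain Scala without the suite's fixtures; the names here are illustrative only.

object FilterTruthTableCheck {
  // Keep an event unless at least one filter answered and all answers were false.
  def filteredIn(answers: Seq[Option[Boolean]]): Boolean = {
    val results = answers.flatten
    !(results.nonEmpty && results.forall(_ == false))
  }

  def main(args: Array[String]): Unit = {
    assert(filteredIn(Seq(Some(true), Some(true))))    // ApplicationEnd: filter in
    assert(filteredIn(Seq(Some(true), Some(false))))   // BlockManagerAdded: filter in
    assert(!filteredIn(Seq(Some(false), Some(false)))) // ApplicationStart: filter out
    println("All three cases match the comments in the test.")
  }
}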
