
Commit 866b856

scalastyle and a fix
GitOrigin-RevId: 7b93f511509cdb9973fc1351296727fe2779198d
1 parent cbf258e · commit 866b856


2 files changed: +3 -6 lines


core/src/main/scala/org/apache/spark/sql/delta/commands/OptimizeTableCommand.scala

+1 -3
@@ -315,9 +315,7 @@ class OptimizeExecutor(
           bin.size > 1 || // bin has more than one file or
             (bin.size == 1 && bin(0).deletionVector != null) || // single file in the bin has a DV or
             isMultiDimClustering // multi-clustering
-        }
-
-        bins.map(b => (partition, b))
+        }.map(b => (partition, b))
     }
   }
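Note on the "fix" half of this commit: the removed lines suggest the result of the filter block was being discarded, so the trailing `bins.map(b => (partition, b))` paired the partition with every bin, filtered or not; chaining `.map` directly onto the filter makes the selection actually take effect. A minimal, self-contained sketch of the fixed logic, using stand-in names (`FileEntry`, `Bin`, `pickBins`) rather than the real OptimizeExecutor types:

// Sketch only: FileEntry/Bin/pickBins are hypothetical stand-ins for the
// OptimizeExecutor internals touched by this hunk.
object BinSelectionSketch {
  final case class FileEntry(path: String, deletionVector: Option[String])
  type Bin = Seq[FileEntry]

  def pickBins(
      partition: Map[String, String],
      bins: Seq[Bin],
      isMultiDimClustering: Boolean): Seq[(Map[String, String], Bin)] = {
    bins.filter { bin =>
      bin.size > 1 ||                                        // bin has more than one file, or
        (bin.size == 1 && bin(0).deletionVector.nonEmpty) || // its single file carries a DV, or
        isMultiDimClustering                                 // multi-dimensional clustering is enabled
    }.map(b => (partition, b))                               // only qualifying bins are paired with the partition
  }

  def main(args: Array[String]): Unit = {
    val partition = Map("date" -> "2024-01-01")
    val bins: Seq[Bin] = Seq(
      Seq(FileEntry("a.parquet", None)),                     // single file, no DV  -> dropped
      Seq(FileEntry("b.parquet", Some("dv"))),               // single file with DV -> kept
      Seq(FileEntry("c.parquet", None), FileEntry("d.parquet", None)) // two files -> kept
    )
    // Before the fix, the filter result was unused and all three bins would be returned.
    assert(pickBins(partition, bins, isMultiDimClustering = false).size == 2)
  }
}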

core/src/test/scala/org/apache/spark/sql/delta/DeltaGenerateSymlinkManifestSuite.scala

+2 -3
@@ -19,8 +19,6 @@ package org.apache.spark.sql.delta
 import java.io.File
 import java.net.URI
 
-import org.apache.spark.SparkThrowable
-
 // scalastyle:off import.ordering.noEmptyLine
 import org.apache.spark.sql.delta.DeltaOperations.Delete
 import org.apache.spark.sql.delta.commands.DeltaGenerateCommand
@@ -33,10 +31,12 @@ import org.apache.hadoop.fs._
 import org.apache.hadoop.fs.permission.FsPermission
 import org.apache.hadoop.util.Progressable
 
+import org.apache.spark.SparkThrowable
 import org.apache.spark.sql._
 import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.functions._
 import org.apache.spark.sql.test.SharedSparkSession
+// scalastyle:on import.ordering.noEmptyLine
 
 class DeltaGenerateSymlinkManifestSuite
   extends DeltaGenerateSymlinkManifestSuiteBase
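Aside on the "scalastyle" half: the two hunks above move `import org.apache.spark.SparkThrowable` down into the `org.apache.spark` import group and add a matching `// scalastyle:on import.ordering.noEmptyLine` so the suppression no longer runs to the end of the file. As a rough, hypothetical illustration (the exact group order is assumed from Spark's scalastyle conventions, not taken from this commit), the import-ordering checker wants groups separated by single blank lines, roughly JDK, Scala, third-party, then org.apache.spark:

// Hypothetical file; only the layout of the imports matters here.
import java.io.File

import scala.collection.mutable.ArrayBuffer

import org.apache.hadoop.fs.Path

import org.apache.spark.SparkThrowable
import org.apache.spark.sql.SparkSession

object ImportOrderingSketch {
  // The body exists only so this sketch compiles and the imports are used.
  def touchedClasses(): Seq[String] =
    ArrayBuffer(
      classOf[File].getName,
      classOf[Path].getName,
      classOf[SparkThrowable].getName,
      classOf[SparkSession].getName).toSeq
}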
@@ -601,7 +601,6 @@ trait DeltaGenerateSymlinkManifestSuiteBase extends QueryTest
         subClass = ExistingDeletionVectorsWithIncrementalManifestGeneration) {
         setEnabledIncrementalManifest(tablePath, enabled = true)
       }
-
       // Run optimize to delete the DVs and rewrite the data files
       withSQLConf(DeltaSQLConf.DELTA_OPTIMIZE_MAX_DELETED_ROWS_RATIO.key -> "0.00001") {
         spark.sql(s"OPTIMIZE delta.`$tablePath`")
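For context, the unchanged lines around this last hunk show the test forcing OPTIMIZE to rewrite files that carry deletion vectors by dropping DELTA_OPTIMIZE_MAX_DELETED_ROWS_RATIO to a tiny value. A hedged sketch of the same pattern outside the test harness; the literal config key, the Delta session settings, and the table path are assumptions (the suite goes through DeltaSQLConf and withSQLConf instead):

import org.apache.spark.sql.SparkSession

object OptimizePurgeDvSketch {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local[*]")
      // Standard Delta-enabling settings; verify against the Delta version in use.
      .config("spark.sql.extensions", "io.delta.sql.DeltaSparkSessionExtension")
      .config("spark.sql.catalog.spark_catalog",
        "org.apache.spark.sql.delta.catalog.DeltaCatalog")
      .getOrCreate()

    val tablePath = "/tmp/delta/events" // hypothetical table path
    // Assumed literal for DeltaSQLConf.DELTA_OPTIMIZE_MAX_DELETED_ROWS_RATIO.
    val ratioKey = "spark.databricks.delta.optimize.maxDeletedRowsRatio"

    // Poor man's withSQLConf: set the conf, run OPTIMIZE, then restore it, so
    // files with even a tiny fraction of deleted rows are rewritten and their
    // deletion vectors purged.
    spark.conf.set(ratioKey, "0.00001")
    try spark.sql(s"OPTIMIZE delta.`$tablePath`")
    finally spark.conf.unset(ratioKey)
  }
}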
