
Commit 809ee5b

Build: Enforce one import per line in Scalastyle (#2199)
1 parent 35f91b0 commit 809ee5b

15 files changed: +90 -29 lines

project/scalastyle_config.xml (+1)

@@ -134,4 +134,5 @@
       <parameter name="tokens">COMMA</parameter>
     </parameters>
   </check>
+  <check level="error" class="org.scalastyle.scalariform.BlockImportChecker" enabled="true" />
 </scalastyle>
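
With BlockImportChecker enabled at the error level, Scalastyle fails the build on brace-delimited block imports, which is what forces the one-import-per-line rewrites in the files below. A minimal sketch of what the rule rejects and accepts (illustrative snippet, not part of this commit):

// Rejected by BlockImportChecker: a block import with braces.
// import org.apache.spark.sql.catalyst.expressions.{Alias, Cast}

// Accepted: each class imported on its own line.
import org.apache.spark.sql.catalyst.expressions.Alias
import org.apache.spark.sql.catalyst.expressions.Cast

A side benefit, visible throughout this diff, is that adding or removing a single import now touches exactly one line, which keeps future diffs and merge conflicts small.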

spark3-extensions/src/main/scala/org/apache/iceberg/spark/extensions/IcebergSparkSessionExtensions.scala (+9 -2)

@@ -20,8 +20,15 @@
 package org.apache.iceberg.spark.extensions

 import org.apache.spark.sql.SparkSessionExtensions
-import org.apache.spark.sql.catalyst.analysis.{AlignMergeIntoTable, DeleteFromTablePredicateCheck, MergeIntoTablePredicateCheck, ProcedureArgumentCoercion, ResolveProcedures}
-import org.apache.spark.sql.catalyst.optimizer.{OptimizeConditionsInRowLevelOperations, PullupCorrelatedPredicatesInRowLevelOperations, RewriteDelete, RewriteMergeInto}
+import org.apache.spark.sql.catalyst.analysis.AlignMergeIntoTable
+import org.apache.spark.sql.catalyst.analysis.DeleteFromTablePredicateCheck
+import org.apache.spark.sql.catalyst.analysis.MergeIntoTablePredicateCheck
+import org.apache.spark.sql.catalyst.analysis.ProcedureArgumentCoercion
+import org.apache.spark.sql.catalyst.analysis.ResolveProcedures
+import org.apache.spark.sql.catalyst.optimizer.OptimizeConditionsInRowLevelOperations
+import org.apache.spark.sql.catalyst.optimizer.PullupCorrelatedPredicatesInRowLevelOperations
+import org.apache.spark.sql.catalyst.optimizer.RewriteDelete
+import org.apache.spark.sql.catalyst.optimizer.RewriteMergeInto
 import org.apache.spark.sql.catalyst.parser.extensions.IcebergSparkSqlExtensionsParser
 import org.apache.spark.sql.execution.datasources.v2.ExtendedDataSourceV2Strategy

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/AlignMergeIntoTable.scala (+6 -1)

@@ -20,7 +20,12 @@
 package org.apache.spark.sql.catalyst.analysis

 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.plans.logical.{Assignment, DeleteAction, InsertAction, LogicalPlan, MergeIntoTable, UpdateAction}
+import org.apache.spark.sql.catalyst.plans.logical.Assignment
+import org.apache.spark.sql.catalyst.plans.logical.DeleteAction
+import org.apache.spark.sql.catalyst.plans.logical.InsertAction
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.plans.logical.MergeIntoTable
+import org.apache.spark.sql.catalyst.plans.logical.UpdateAction
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.utils.PlanUtils.isIcebergRelation
 import org.apache.spark.sql.internal.SQLConf

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/AssignmentAlignmentSupport.scala (+15 -3)

@@ -20,10 +20,22 @@
 package org.apache.spark.sql.catalyst.analysis

 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.expressions.{Alias, AnsiCast, AttributeReference, Cast, CreateNamedStruct, Expression, ExtractValue, GetStructField, Literal, NamedExpression}
-import org.apache.spark.sql.catalyst.plans.logical.{Assignment, LogicalPlan}
+import org.apache.spark.sql.catalyst.expressions.Alias
+import org.apache.spark.sql.catalyst.expressions.AnsiCast
+import org.apache.spark.sql.catalyst.expressions.AttributeReference
+import org.apache.spark.sql.catalyst.expressions.Cast
+import org.apache.spark.sql.catalyst.expressions.CreateNamedStruct
+import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.expressions.ExtractValue
+import org.apache.spark.sql.catalyst.expressions.GetStructField
+import org.apache.spark.sql.catalyst.expressions.Literal
+import org.apache.spark.sql.catalyst.expressions.NamedExpression
+import org.apache.spark.sql.catalyst.plans.logical.Assignment
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.internal.SQLConf.StoreAssignmentPolicy
-import org.apache.spark.sql.types.{DataType, StructField, StructType}
+import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.StructField
+import org.apache.spark.sql.types.StructType
 import scala.collection.mutable

 trait AssignmentAlignmentSupport extends CastSupport {

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/DeleteFromTablePredicateCheck.scala (+5 -2)

@@ -20,8 +20,11 @@
 package org.apache.spark.sql.catalyst.analysis

 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.expressions.{Expression, InSubquery, Not}
-import org.apache.spark.sql.catalyst.plans.logical.{DeleteFromTable, LogicalPlan}
+import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.expressions.InSubquery
+import org.apache.spark.sql.catalyst.expressions.Not
+import org.apache.spark.sql.catalyst.plans.logical.DeleteFromTable
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.utils.PlanUtils.isIcebergRelation

 object DeleteFromTablePredicateCheck extends (LogicalPlan => Unit) {

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/ProcedureArgumentCoercion.scala (+2 -1)

@@ -21,7 +21,8 @@ package org.apache.spark.sql.catalyst.analysis

 import org.apache.spark.sql.AnalysisException
 import org.apache.spark.sql.catalyst.expressions.Cast
-import org.apache.spark.sql.catalyst.plans.logical.{Call, LogicalPlan}
+import org.apache.spark.sql.catalyst.plans.logical.Call
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule

 object ProcedureArgumentCoercion extends Rule[LogicalPlan] {

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/analysis/ResolveProcedures.scala (+15 -5)

@@ -20,12 +20,22 @@
 package org.apache.spark.sql.catalyst.analysis

 import java.util.Locale
-import org.apache.spark.sql.{AnalysisException, SparkSession}
-import org.apache.spark.sql.catalyst.expressions.{Expression, Literal}
-import org.apache.spark.sql.catalyst.plans.logical.{Call, CallArgument, CallStatement, LogicalPlan, NamedArgument, PositionalArgument}
+import org.apache.spark.sql.AnalysisException
+import org.apache.spark.sql.SparkSession
+import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.expressions.Literal
+import org.apache.spark.sql.catalyst.plans.logical.Call
+import org.apache.spark.sql.catalyst.plans.logical.CallArgument
+import org.apache.spark.sql.catalyst.plans.logical.CallStatement
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.catalyst.plans.logical.NamedArgument
+import org.apache.spark.sql.catalyst.plans.logical.PositionalArgument
 import org.apache.spark.sql.catalyst.rules.Rule
-import org.apache.spark.sql.connector.catalog.{CatalogManager, CatalogPlugin, LookupCatalog}
-import org.apache.spark.sql.connector.iceberg.catalog.{ProcedureCatalog, ProcedureParameter}
+import org.apache.spark.sql.connector.catalog.CatalogManager
+import org.apache.spark.sql.connector.catalog.CatalogPlugin
+import org.apache.spark.sql.connector.catalog.LookupCatalog
+import org.apache.spark.sql.connector.iceberg.catalog.ProcedureCatalog
+import org.apache.spark.sql.connector.iceberg.catalog.ProcedureParameter
 import scala.collection.Seq

 case class ResolveProcedures(spark: SparkSession) extends Rule[LogicalPlan] with LookupCatalog {

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/optimizer/OptimizeConditionsInRowLevelOperations.scala (+7 -2)

@@ -20,8 +20,13 @@
 package org.apache.spark.sql.catalyst.optimizer

 import org.apache.spark.sql.SparkSession
-import org.apache.spark.sql.catalyst.expressions.{Expression, Literal, SubqueryExpression}
-import org.apache.spark.sql.catalyst.plans.logical.{DeleteFromTable, Filter, LocalRelation, LogicalPlan}
+import org.apache.spark.sql.catalyst.expressions.Expression
+import org.apache.spark.sql.catalyst.expressions.Literal
+import org.apache.spark.sql.catalyst.expressions.SubqueryExpression
+import org.apache.spark.sql.catalyst.plans.logical.DeleteFromTable
+import org.apache.spark.sql.catalyst.plans.logical.Filter
+import org.apache.spark.sql.catalyst.plans.logical.LocalRelation
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.utils.PlanUtils.isIcebergRelation
 import org.apache.spark.sql.execution.datasources.v2.DataSourceV2ScanRelation

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/optimizer/PullupCorrelatedPredicatesInRowLevelOperations.scala (+3 -1)

@@ -20,7 +20,9 @@
 package org.apache.spark.sql.catalyst.optimizer

 import org.apache.spark.sql.catalyst.expressions.SubqueryExpression
-import org.apache.spark.sql.catalyst.plans.logical.{DeleteFromTable, Filter, LogicalPlan}
+import org.apache.spark.sql.catalyst.plans.logical.DeleteFromTable
+import org.apache.spark.sql.catalyst.plans.logical.Filter
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.catalyst.utils.PlanUtils.isIcebergRelation

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/parser/extensions/IcebergSparkSqlExtensionsParser.scala (+12 -5)

@@ -25,14 +25,21 @@ import org.antlr.v4.runtime.atn.PredictionMode
 import org.antlr.v4.runtime.misc.ParseCancellationException
 import org.antlr.v4.runtime.tree.TerminalNodeImpl
 import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
+import org.apache.spark.sql.catalyst.FunctionIdentifier
+import org.apache.spark.sql.catalyst.TableIdentifier
 import org.apache.spark.sql.catalyst.expressions.Expression
-import org.apache.spark.sql.catalyst.parser.{ParseErrorListener, ParseException, ParserInterface, UpperCaseCharStream}
-import org.apache.spark.sql.catalyst.parser.extensions.IcebergSqlExtensionsParser._
+import org.apache.spark.sql.catalyst.parser.ParseErrorListener
+import org.apache.spark.sql.catalyst.parser.ParseException
+import org.apache.spark.sql.catalyst.parser.ParserInterface
+import org.apache.spark.sql.catalyst.parser.UpperCaseCharStream
+import org.apache.spark.sql.catalyst.parser.extensions.IcebergSqlExtensionsParser.NonReservedContext
+import org.apache.spark.sql.catalyst.parser.extensions.IcebergSqlExtensionsParser.QuotedIdentifierContext
 import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
 import org.apache.spark.sql.catalyst.trees.Origin
-import org.apache.spark.sql.internal.{SQLConf, VariableSubstitution}
-import org.apache.spark.sql.types.{DataType, StructType}
+import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.internal.VariableSubstitution
+import org.apache.spark.sql.types.DataType
+import org.apache.spark.sql.types.StructType

 class IcebergSparkSqlExtensionsParser(delegate: ParserInterface) extends ParserInterface {
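
Beyond splitting block imports, this file also drops the wildcard import IcebergSqlExtensionsParser._ in favor of explicit imports of the nested types the parser needs, NonReservedContext and QuotedIdentifierContext, so every referenced name is now visible in the import list.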

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/Call.scala (+2 -1)

@@ -19,7 +19,8 @@

 package org.apache.spark.sql.catalyst.plans.logical

-import org.apache.spark.sql.catalyst.expressions.{Attribute, Expression}
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.expressions.Expression
 import org.apache.spark.sql.catalyst.util.truncatedString
 import org.apache.spark.sql.connector.iceberg.catalog.Procedure
 import scala.collection.Seq

spark3-extensions/src/main/scala/org/apache/spark/sql/catalyst/plans/logical/DynamicFileFilter.scala (+2 -1)

@@ -19,7 +19,8 @@

 package org.apache.spark.sql.catalyst.plans.logical

-import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeSet}
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.expressions.AttributeSet
 import org.apache.spark.sql.catalyst.util.truncatedString
 import org.apache.spark.sql.catalyst.utils.SetAccumulator
 import org.apache.spark.sql.connector.iceberg.read.SupportsFileFilter

spark3-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/DynamicFileFilterExec.scala (+6 -3)

@@ -19,17 +19,20 @@

 package org.apache.spark.sql.execution.datasources.v2

-import collection.JavaConverters._
 import org.apache.spark.SparkException
 import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
-import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeSet, SortOrder}
+import org.apache.spark.sql.catalyst.expressions.Attribute
+import org.apache.spark.sql.catalyst.expressions.AttributeSet
+import org.apache.spark.sql.catalyst.expressions.SortOrder
 import org.apache.spark.sql.catalyst.plans.physical
 import org.apache.spark.sql.catalyst.util.truncatedString
 import org.apache.spark.sql.catalyst.utils.SetAccumulator
 import org.apache.spark.sql.connector.iceberg.read.SupportsFileFilter
-import org.apache.spark.sql.execution.{BinaryExecNode, SparkPlan}
+import org.apache.spark.sql.execution.BinaryExecNode
+import org.apache.spark.sql.execution.SparkPlan
 import org.apache.spark.sql.vectorized.ColumnarBatch
+import scala.collection.JavaConverters._

 abstract class DynamicFileFilterExecBase(
     scanExec: SparkPlan,
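
This hunk is slightly more than a mechanical split: the relative import collection.JavaConverters._ becomes the fully qualified scala.collection.JavaConverters._ and moves to the end of the list, keeping the imports ordered by their fully qualified names.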

spark3-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/ExtendedBatchScanExec.scala (+3 -1)

@@ -23,7 +23,9 @@ import org.apache.spark.rdd.RDD
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.AttributeReference
 import org.apache.spark.sql.catalyst.plans.QueryPlan
-import org.apache.spark.sql.connector.read.{InputPartition, PartitionReaderFactory, Scan}
+import org.apache.spark.sql.connector.read.InputPartition
+import org.apache.spark.sql.connector.read.PartitionReaderFactory
+import org.apache.spark.sql.connector.read.Scan

 // The only reason we need this class and cannot reuse BatchScanExec is because
 // BatchScanExec caches input partitions and we cannot apply file filtering before execution

spark3-extensions/src/main/scala/org/apache/spark/sql/execution/datasources/v2/SetWriteDistributionAndOrderingExec.scala (+2 -1)

@@ -27,8 +27,9 @@ import org.apache.iceberg.expressions.Term
 import org.apache.iceberg.spark.source.SparkTable
 import org.apache.spark.sql.catalyst.InternalRow
 import org.apache.spark.sql.catalyst.expressions.Attribute
-import org.apache.spark.sql.connector.catalog.{Identifier, TableCatalog}
 import org.apache.spark.sql.connector.catalog.CatalogV2Implicits
+import org.apache.spark.sql.connector.catalog.Identifier
+import org.apache.spark.sql.connector.catalog.TableCatalog

 case class SetWriteDistributionAndOrderingExec(
     catalog: TableCatalog,
