Since: https://github.com/apache/incubator-gluten/pull/8127. + *
Since: https://github.com/apache/gluten/pull/8127.
*/
public void finalize() throws Throwable {
release();
diff --git a/gluten-core/src/main/java/org/apache/gluten/memory/memtarget/DynamicOffHeapSizingMemoryTarget.java b/gluten-core/src/main/java/org/apache/gluten/memory/memtarget/DynamicOffHeapSizingMemoryTarget.java
index 3124f78fb79a..396a80b100c3 100644
--- a/gluten-core/src/main/java/org/apache/gluten/memory/memtarget/DynamicOffHeapSizingMemoryTarget.java
+++ b/gluten-core/src/main/java/org/apache/gluten/memory/memtarget/DynamicOffHeapSizingMemoryTarget.java
@@ -36,7 +36,7 @@
/**
* The memory target used by dynamic off-heap sizing. Since
- * https://github.com/apache/incubator-gluten/issues/5439.
+ * https://github.com/apache/gluten/issues/5439.
*/
@Experimental
public class DynamicOffHeapSizingMemoryTarget implements MemoryTarget, KnownNameAndStats {
@@ -143,7 +143,7 @@ public long borrow(long size) {
}
// Only JVM shrinking can reclaim space from the total JVM memory.
- // See https://github.com/apache/incubator-gluten/issues/9276.
+ // See https://github.com/apache/gluten/issues/9276.
long totalHeapMemory = Runtime.getRuntime().totalMemory();
long freeHeapMemory = Runtime.getRuntime().freeMemory();
long usedOffHeapMemory = USED_OFF_HEAP_BYTES.get();
diff --git a/gluten-core/src/main/java/org/apache/gluten/memory/memtarget/MemoryTargets.java b/gluten-core/src/main/java/org/apache/gluten/memory/memtarget/MemoryTargets.java
index b5138bd4c6d8..4a01ff94e4d3 100644
--- a/gluten-core/src/main/java/org/apache/gluten/memory/memtarget/MemoryTargets.java
+++ b/gluten-core/src/main/java/org/apache/gluten/memory/memtarget/MemoryTargets.java
@@ -87,11 +87,11 @@ public static TreeMemoryTarget newConsumer(
// We don't need to retry on OOM in the case one single task occupies the whole executor.
return consumer;
}
- // Since https://github.com/apache/incubator-gluten/pull/8132.
+ // Since https://github.com/apache/gluten/pull/8132.
// Retry of spilling is needed in multi-slot and legacy mode (formerly named as share mode)
// because the maxMemoryPerTask defined by vanilla Spark's ExecutionMemoryPool is dynamic.
//
- // See the original issue https://github.com/apache/incubator-gluten/issues/8128.
+ // See the original issue https://github.com/apache/gluten/issues/8128.
return new RetryOnOomMemoryTarget(
consumer,
() -> {
diff --git a/gluten-core/src/main/scala/org/apache/gluten/config/GlutenCoreConfig.scala b/gluten-core/src/main/scala/org/apache/gluten/config/GlutenCoreConfig.scala
index d362060ded98..0caf90c659a0 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/config/GlutenCoreConfig.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/config/GlutenCoreConfig.scala
@@ -186,7 +186,7 @@ object GlutenCoreConfig extends ConfigRegistry {
.intConf
.createWithDefaultString("-1")
- // Since https://github.com/apache/incubator-gluten/issues/5439.
+ // Since https://github.com/apache/gluten/issues/5439.
val DYNAMIC_OFFHEAP_SIZING_ENABLED =
buildStaticConf("spark.gluten.memory.dynamic.offHeap.sizing.enabled")
.experimental()
@@ -204,7 +204,7 @@ object GlutenCoreConfig extends ConfigRegistry {
.booleanConf
.createWithDefault(false)
- // Since https://github.com/apache/incubator-gluten/issues/5439.
+ // Since https://github.com/apache/gluten/issues/5439.
val DYNAMIC_OFFHEAP_SIZING_MEMORY_FRACTION =
buildStaticConf("spark.gluten.memory.dynamic.offHeap.sizing.memory.fraction")
.experimental()
diff --git a/gluten-core/src/main/scala/org/apache/gluten/extension/GlutenColumnarRule.scala b/gluten-core/src/main/scala/org/apache/gluten/extension/GlutenColumnarRule.scala
index da0fa12bb059..15a6fa0c5a15 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/extension/GlutenColumnarRule.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/extension/GlutenColumnarRule.scala
@@ -95,7 +95,7 @@ case class GlutenColumnarRule(
case _ =>
throw new IllegalStateException(
"This should not happen. Please leave an issue at" +
- " https://github.com/apache/incubator-gluten.")
+ " https://github.com/apache/gluten.")
}
val vanillaPlan = Transitions.insert(originalPlan, outputsColumnar)
val applier = applierBuilder.apply(session)
diff --git a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/enumerated/planner/metadata/Schema.scala b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/enumerated/planner/metadata/Schema.scala
index 1e8ff1ae59db..b17c5fe62d6d 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/enumerated/planner/metadata/Schema.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/enumerated/planner/metadata/Schema.scala
@@ -56,7 +56,7 @@ object Schema {
if (one != other) {
// We apply loose restriction on schema. Since Gluten still have some customized
// logics causing schema of an operator to change after being transformed.
- // For example: https://github.com/apache/incubator-gluten/pull/5171
+ // For example: https://github.com/apache/gluten/pull/5171
logWarning(s"Warning: Schema mismatch: one: $one, other: $other")
}
}
diff --git a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/heuristic/RewriteSparkPlanRulesManager.scala b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/heuristic/RewriteSparkPlanRulesManager.scala
index 24fa7a6fc974..5bbb5d6e8447 100644
--- a/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/heuristic/RewriteSparkPlanRulesManager.scala
+++ b/gluten-core/src/main/scala/org/apache/gluten/extension/columnar/heuristic/RewriteSparkPlanRulesManager.scala
@@ -75,7 +75,7 @@ class RewriteSparkPlanRulesManager private (
} catch {
case e: Exception =>
// TODO: Remove this catch block
- // See https://github.com/apache/incubator-gluten/issues/7766
+ // See https://github.com/apache/gluten/issues/7766
(origin, Option(e.getMessage))
}
}
diff --git a/gluten-core/src/main/scala/org/apache/spark/sql/execution/adaptive/GlutenCost.scala b/gluten-core/src/main/scala/org/apache/spark/sql/execution/adaptive/GlutenCost.scala
index df0b6fafe900..dbdbb3c5d046 100644
--- a/gluten-core/src/main/scala/org/apache/spark/sql/execution/adaptive/GlutenCost.scala
+++ b/gluten-core/src/main/scala/org/apache/spark/sql/execution/adaptive/GlutenCost.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.adaptive
import org.apache.spark.sql.errors.QueryExecutionErrors
import org.apache.spark.sql.execution.SparkPlan
-/** Since https://github.com/apache/incubator-gluten/pull/6143. */
+/** Since https://github.com/apache/gluten/pull/6143. */
class GlutenCost(val eval: CostEvaluator, val plan: SparkPlan) extends Cost {
override def compare(that: Cost): Int = that match {
case that: GlutenCost if plan eq that.plan =>
diff --git a/gluten-core/src/main/scala/org/apache/spark/sql/execution/adaptive/GlutenCostEvaluator.scala b/gluten-core/src/main/scala/org/apache/spark/sql/execution/adaptive/GlutenCostEvaluator.scala
index f53a80639af0..6c5300aa0dac 100644
--- a/gluten-core/src/main/scala/org/apache/spark/sql/execution/adaptive/GlutenCostEvaluator.scala
+++ b/gluten-core/src/main/scala/org/apache/spark/sql/execution/adaptive/GlutenCostEvaluator.scala
@@ -25,7 +25,7 @@ import org.apache.spark.util.{SparkVersionUtil, Utils}
/**
* This [[CostEvaluator]] is to force use the new physical plan when cost is equal.
*
- * Since https://github.com/apache/incubator-gluten/pull/6143.
+ * Since https://github.com/apache/gluten/pull/6143.
*/
case class GlutenCostEvaluator() extends CostEvaluator with SQLConfHelper {
diff --git a/gluten-flink/docs/Flink.md b/gluten-flink/docs/Flink.md
index 572df629c17b..1c800fb42db9 100644
--- a/gluten-flink/docs/Flink.md
+++ b/gluten-flink/docs/Flink.md
@@ -57,7 +57,7 @@ mvn clean install -DskipTests -Dgpg.skip -Dspotless.skip=true
## config maven, like proxy in ~/.m2/settings.xml
## fetch gluten code
-git clone https://github.com/apache/incubator-gluten.git
+git clone https://github.com/apache/gluten.git
```
# Build Gluten Flink with Velox Backend
@@ -126,7 +126,7 @@ bin/flink run examples/table/StreamSQLExample.jar
Then you can get the result in `log/flink-*-taskexecutor-*.out`.
And you can see an operator named `gluten-cal` from the web frontend of your flink job.
-**Notice: current this example will cause npe until [issue-10315](https://github.com/apache/incubator-gluten/issues/10315) get resolved.**
+**Notice: currently this example will cause an NPE until [issue-10315](https://github.com/apache/gluten/issues/10315) gets resolved.**
#### All operators executed by native
Another example supports all operators executed by native.
diff --git a/gluten-hudi/src/main/scala/org/apache/gluten/execution/OffloadHudiScan.scala b/gluten-hudi/src/main/scala/org/apache/gluten/execution/OffloadHudiScan.scala
index f2cc24ceafaf..60bf12a51b77 100644
--- a/gluten-hudi/src/main/scala/org/apache/gluten/execution/OffloadHudiScan.scala
+++ b/gluten-hudi/src/main/scala/org/apache/gluten/execution/OffloadHudiScan.scala
@@ -20,7 +20,7 @@ import org.apache.gluten.extension.columnar.offload.OffloadSingleNode
import org.apache.spark.sql.execution.SparkPlan
-/** Since https://github.com/apache/incubator-gluten/pull/6049. */
+/** Since https://github.com/apache/gluten/pull/6049. */
case class OffloadHudiScan() extends OffloadSingleNode {
override def offload(plan: SparkPlan): SparkPlan = {
plan match {
diff --git a/gluten-iceberg/src/main/scala/org/apache/gluten/execution/IcebergScanTransformer.scala b/gluten-iceberg/src/main/scala/org/apache/gluten/execution/IcebergScanTransformer.scala
index 483f5c147f9e..e4151c823c83 100644
--- a/gluten-iceberg/src/main/scala/org/apache/gluten/execution/IcebergScanTransformer.scala
+++ b/gluten-iceberg/src/main/scala/org/apache/gluten/execution/IcebergScanTransformer.scala
@@ -155,7 +155,7 @@ case class IcebergScanTransformer(
return ValidationResult.failed("Delete file format puffin is not supported")
}
}
- // https://github.com/apache/incubator-gluten/issues/11135
+ // https://github.com/apache/gluten/issues/11135
if (metadata.propertyAsBoolean(TableProperties.SPARK_WRITE_ACCEPT_ANY_SCHEMA, false)) {
return ValidationResult.failed("Not support read the file with accept any schema")
}
diff --git a/gluten-ras/common/src/main/scala/org/apache/gluten/ras/Ras.scala b/gluten-ras/common/src/main/scala/org/apache/gluten/ras/Ras.scala
index ad02e0b5f8c1..b222b7180cd0 100644
--- a/gluten-ras/common/src/main/scala/org/apache/gluten/ras/Ras.scala
+++ b/gluten-ras/common/src/main/scala/org/apache/gluten/ras/Ras.scala
@@ -22,7 +22,7 @@ import org.apache.gluten.ras.rule.{EnforcerRuleFactory, RasRule}
/**
* Entrypoint of RAS (relational algebra selector) 's search engine. See the basic introduction of
- * RAS: https://github.com/apache/incubator-gluten/issues/5057.
+ * RAS: https://github.com/apache/gluten/issues/5057.
*/
trait Optimization[T <: AnyRef] {
def newPlanner(plan: T, constraintSet: PropertySet[T]): RasPlanner[T]
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/backendsapi/SubstraitBackend.scala b/gluten-substrait/src/main/scala/org/apache/gluten/backendsapi/SubstraitBackend.scala
index f8fc12228b74..94d2738b6196 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/backendsapi/SubstraitBackend.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/backendsapi/SubstraitBackend.scala
@@ -93,7 +93,7 @@ trait SubstraitBackend extends Backend with Logging {
object SubstraitBackend extends Logging {
- /** Since https://github.com/apache/incubator-gluten/pull/2247. */
+ /** Since https://github.com/apache/gluten/pull/2247. */
private def postBuildInfoEvent(sc: SparkContext): Unit = {
// export gluten version to property to spark
System.setProperty("gluten.version", GlutenBuildInfo.VERSION)
@@ -135,7 +135,7 @@ object SubstraitBackend extends Logging {
// Disable vanilla columnar readers, to prevent columnar-to-columnar conversions.
// FIXME: Do we still need this trick since
- // https://github.com/apache/incubator-gluten/pull/1931 was merged?
+ // https://github.com/apache/gluten/pull/1931 was merged?
if (!conf.get(GlutenConfig.VANILLA_VECTORIZED_READERS_ENABLED)) {
// FIXME Hongze 22/12/06
// BatchScan.scala in shim was not always loaded by class loader.
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/config/GlutenConfig.scala b/gluten-substrait/src/main/scala/org/apache/gluten/config/GlutenConfig.scala
index 63e8794bb500..be818981b48e 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/config/GlutenConfig.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/config/GlutenConfig.scala
@@ -170,7 +170,7 @@ class GlutenConfig(conf: SQLConf) extends GlutenCoreConfig(conf) {
.equals("org.apache.spark.shuffle.sort.ColumnarShuffleManager")
// Whether to use CelebornShuffleManager.
- // TODO: Deprecate the API: https://github.com/apache/incubator-gluten/issues/10107.
+ // TODO: Deprecate the API: https://github.com/apache/gluten/issues/10107.
def isUseCelebornShuffleManager: Boolean =
conf
.getConfString("spark.shuffle.manager", "sort")
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/execution/ValidatablePlan.scala b/gluten-substrait/src/main/scala/org/apache/gluten/execution/ValidatablePlan.scala
index 1df804ebfd59..3a36d5b94267 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/execution/ValidatablePlan.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/execution/ValidatablePlan.scala
@@ -28,7 +28,7 @@ import org.apache.spark.sql.catalyst.analysis.UnresolvedException
/**
* Base interface for a Gluten query plan that is also open to validation calls.
*
- * Since https://github.com/apache/incubator-gluten/pull/2185.
+ * Since https://github.com/apache/gluten/pull/2185.
*/
trait ValidatablePlan extends GlutenPlan with LogLevelUtil {
protected def glutenConf: GlutenConfig = GlutenConfig.get
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/execution/WholeStageTransformer.scala b/gluten-substrait/src/main/scala/org/apache/gluten/execution/WholeStageTransformer.scala
index acef5d798ea0..f89b73acf58d 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/execution/WholeStageTransformer.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/execution/WholeStageTransformer.scala
@@ -86,7 +86,7 @@ trait TransformSupport extends ValidatablePlan {
*/
def columnarInputRDDs: Seq[RDD[ColumnarBatch]]
- // Since https://github.com/apache/incubator-gluten/pull/2185.
+ // Since https://github.com/apache/gluten/pull/2185.
protected def doNativeValidation(context: SubstraitContext, node: RelNode): ValidationResult = {
if (node != null && enableNativeValidation) {
val planNode = PlanBuilder.makePlan(context, Lists.newArrayList(node))
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/expression/ScalarSubqueryTransformer.scala b/gluten-substrait/src/main/scala/org/apache/gluten/expression/ScalarSubqueryTransformer.scala
index a1c6e9b71524..41d9cecbcb90 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/expression/ScalarSubqueryTransformer.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/expression/ScalarSubqueryTransformer.scala
@@ -32,7 +32,7 @@ case class ScalarSubqueryTransformer(substraitExprName: String, query: ScalarSub
if (TransformerState.underValidationState) {
return ExpressionBuilder.makeLiteral(null, query.dataType, true)
}
- // After https://github.com/apache/incubator-gluten/pull/5862, we do not need to execute
+ // After https://github.com/apache/gluten/pull/5862, we do not need to execute
// subquery manually so the exception behavior is same with vanilla Spark.
// Note that, this code change is just for simplify. The subquery has already been materialized
// before doing transform.
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/LoggedRule.scala b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/LoggedRule.scala
index 8aa68c4ad13e..ada8cf7e7d50 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/LoggedRule.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/LoggedRule.scala
@@ -25,7 +25,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.catalyst.util.sideBySide
import org.apache.spark.sql.execution.SparkPlan
-/** Since https://github.com/apache/incubator-gluten/pull/7606. */
+/** Since https://github.com/apache/gluten/pull/7606. */
class LoggedRule(delegate: Rule[SparkPlan]) extends Rule[SparkPlan] with Logging with LogLevelUtil {
override val ruleName: String = delegate.ruleName
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/MiscColumnarRules.scala b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/MiscColumnarRules.scala
index cb248b7add11..ca63bab6fbd2 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/MiscColumnarRules.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/MiscColumnarRules.scala
@@ -47,7 +47,7 @@ object MiscColumnarRules {
override def apply(plan: SparkPlan): SparkPlan = {
val out = plan.transformWithSubqueries {
case p =>
- // Since https://github.com/apache/incubator-gluten/pull/1851.
+ // Since https://github.com/apache/gluten/pull/1851.
//
// When AQE is on, the AQE sub-query cache should already be filled with
// row-based SubqueryBroadcastExec for reusing. Thus we are doing the same
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/PruneNestedColumnsInHiveTableScan.scala b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/PruneNestedColumnsInHiveTableScan.scala
index b8972b6d0e2c..ec24f1048763 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/PruneNestedColumnsInHiveTableScan.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/PruneNestedColumnsInHiveTableScan.scala
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.SparkPlan
import org.apache.spark.sql.hive.HiveTableScanNestedColumnPruning
-// Since https://github.com/apache/incubator-gluten/pull/7268.
+// Since https://github.com/apache/gluten/pull/7268.
// Used only by CH backend as of now.
object PruneNestedColumnsInHiveTableScan extends Rule[SparkPlan] {
override def apply(plan: SparkPlan): SparkPlan = plan.transformUp {
diff --git a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/enumerated/RasOffload.scala b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/enumerated/RasOffload.scala
index af1fe35f7c18..6c4e56a19df1 100644
--- a/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/enumerated/RasOffload.scala
+++ b/gluten-substrait/src/main/scala/org/apache/gluten/extension/columnar/enumerated/RasOffload.scala
@@ -110,7 +110,7 @@ object RasOffload {
} catch {
case e: Exception =>
// TODO: Remove this catch block
- // See https://github.com/apache/incubator-gluten/issues/7766
+ // See https://github.com/apache/gluten/issues/7766
logWarning(
s"Exception thrown during rewriting the plan ${node.nodeName}. Skip offloading it",
e)
diff --git a/gluten-substrait/src/main/scala/org/apache/spark/sql/execution/ColumnarWriteFilesExec.scala b/gluten-substrait/src/main/scala/org/apache/spark/sql/execution/ColumnarWriteFilesExec.scala
index 998f4f86b50b..5e5d269cc333 100644
--- a/gluten-substrait/src/main/scala/org/apache/spark/sql/execution/ColumnarWriteFilesExec.scala
+++ b/gluten-substrait/src/main/scala/org/apache/spark/sql/execution/ColumnarWriteFilesExec.scala
@@ -67,7 +67,7 @@ abstract class ColumnarWriteFilesExec protected (
* processing or columnar processing. It's true because Spark only calls `doExecuteWrite` of the
* object.
*
- * Since https://github.com/apache/incubator-gluten/pull/6745.
+ * Since https://github.com/apache/gluten/pull/6745.
*/
override def batchType(): Convention.BatchType = BackendsApiManager.getSettings.primaryBatchType
override def rowType0(): RowType = {
diff --git a/gluten-substrait/src/main/scala/org/apache/spark/util/SparkPlanRules.scala b/gluten-substrait/src/main/scala/org/apache/spark/util/SparkPlanRules.scala
index bbaee81a5987..2e27ae0b758c 100644
--- a/gluten-substrait/src/main/scala/org/apache/spark/util/SparkPlanRules.scala
+++ b/gluten-substrait/src/main/scala/org/apache/spark/util/SparkPlanRules.scala
@@ -22,7 +22,7 @@ import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.execution.SparkPlan
object SparkPlanRules extends Logging {
- // Since https://github.com/apache/incubator-gluten/pull/1523
+ // Since https://github.com/apache/gluten/pull/1523
def extendedColumnarRule(ruleNamesStr: String): SparkSession => Rule[SparkPlan] =
(session: SparkSession) => {
val ruleNames = ruleNamesStr.split(",").filter(_.nonEmpty)
diff --git a/gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala b/gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
index dc44facd7ef9..0f9cae024309 100644
--- a/gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
+++ b/gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
@@ -729,7 +729,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.exclude("DATE_FROM_UNIX_DATE")
.exclude("UNIX_SECONDS")
.exclude("TIMESTAMP_SECONDS") // refer to https://github.com/ClickHouse/ClickHouse/issues/69280
- .exclude("TIMESTAMP_MICROS") // refer to https://github.com/apache/incubator-gluten/issues/7127
+ .exclude("TIMESTAMP_MICROS") // refer to https://github.com/apache/gluten/issues/7127
.exclude("SPARK-33498: GetTimestamp,UnixTimestamp,ToUnixTimestamp with parseError")
.exclude("SPARK-34739,SPARK-35889: add a year-month interval to a timestamp")
.exclude("SPARK-34761,SPARK-35889: add a day-time interval to a timestamp")
@@ -880,7 +880,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.exclude("REPEAT")
.exclude("ParseUrl")
.exclude("SPARK-33468: ParseUrl in ANSI mode should fail if input string is not a valid url")
- .exclude("FORMAT") // refer https://github.com/apache/incubator-gluten/issues/6765
+ .exclude("FORMAT") // refer https://github.com/apache/gluten/issues/6765
.exclude(
"soundex unit test"
) // CH and spark returns different results when input non-ASCII characters
diff --git a/gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala b/gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
index 48315d2697ea..001516de624d 100644
--- a/gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
+++ b/gluten-ut/spark32/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
@@ -357,9 +357,9 @@ class VeloxTestSettings extends BackendTestSettings {
// Not useful and time consuming.
.exclude("SPARK-33084: Add jar support Ivy URI in SQL")
.exclude("SPARK-33084: Add jar support Ivy URI in SQL -- jar contains udf class")
- // https://github.com/apache/incubator-gluten/pull/9145.
+ // https://github.com/apache/gluten/pull/9145.
.exclude("SPARK-17515: CollectLimit.execute() should perform per-partition limits")
- // https://github.com/apache/incubator-gluten/pull/9145.
+ // https://github.com/apache/gluten/pull/9145.
.exclude("SPARK-19650: An action on a Command should not trigger a Spark job")
enableSuite[GlutenDatasetAggregatorSuite]
enableSuite[GlutenDatasetOptimizationSuite]
@@ -369,9 +369,9 @@ class VeloxTestSettings extends BackendTestSettings {
.exclude("dropDuplicates: columns with same column name")
.exclude("groupBy.as")
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.exclude("$.store.book")
- // https://github.com/apache/incubator-gluten/issues/10948
+ // https://github.com/apache/gluten/issues/10948
.exclude("$['key with spaces']")
.exclude("$")
.exclude("$.store.book[0]")
@@ -415,7 +415,7 @@ class VeloxTestSettings extends BackendTestSettings {
.exclude("File source v2: support passing data filters to FileScan without partitionFilters")
// DISABLED: GLUTEN-4893 Vanilla UT checks scan operator by exactly matching the class type
.exclude("File source v2: support partition pruning")
- // https://github.com/apache/incubator-gluten/pull/9145.
+ // https://github.com/apache/gluten/pull/9145.
.excludeGlutenTest("SPARK-25237 compute correct input metrics in FileScanRDD")
enableSuite[GlutenEnsureRequirementsSuite]
// Rewrite to change the shuffle partitions for optimizing repartition
@@ -640,7 +640,7 @@ class VeloxTestSettings extends BackendTestSettings {
.exclude("SPARK-17091: Convert IN predicate to Parquet filter push-down")
.exclude("Support Parquet column index")
.exclude("SPARK-34562: Bloom filter push down")
- // https://github.com/apache/incubator-gluten/issues/7174
+ // https://github.com/apache/gluten/issues/7174
.excludeGlutenTest("Filter applied on merged Parquet schema with new column should work")
enableSuite[GlutenParquetInteroperabilitySuite]
.exclude("parquet timestamp conversion")
diff --git a/gluten-ut/spark32/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala b/gluten-ut/spark32/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
index 1f343e1bbffc..7d5ca0c23676 100644
--- a/gluten-ut/spark32/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
+++ b/gluten-ut/spark32/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
@@ -126,7 +126,7 @@ class GlutenMathExpressionsSuite extends MathExpressionsSuite with GlutenTestsTr
checkEvaluation(Round(12345.67890123456789, 6), 12345.678901)
checkEvaluation(Round(44, -1), 40)
checkEvaluation(Round(78, 1), 78)
- // Enable the test after fixing https://github.com/apache/incubator-gluten/issues/6827
+ // Enable the test after fixing https://github.com/apache/gluten/issues/6827
// checkEvaluation(Round(0.5549999999999999, 2), 0.55)
checkEvaluation(BRound(2.5, 0), 2.0)
checkEvaluation(BRound(3.5, 0), 4.0)
diff --git a/gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala b/gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
index c6bb67748e9b..2ba48f9e546c 100644
--- a/gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
+++ b/gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
@@ -399,7 +399,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
enableSuite[GlutenJsonExpressionsSuite]
.exclude(
"$.store.basket[0][*].b"
- ) // issue: https://github.com/apache/incubator-gluten/issues/8529
+ ) // issue: https://github.com/apache/gluten/issues/8529
.exclude("from_json - invalid data")
.exclude("from_json - input=object, schema=array, output=array of single row")
.exclude("from_json - input=empty object, schema=array, output=array of single row with null")
@@ -749,7 +749,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.exclude("DATE_FROM_UNIX_DATE")
.exclude("UNIX_SECONDS")
.exclude("TIMESTAMP_SECONDS") // refer to https://github.com/ClickHouse/ClickHouse/issues/69280
- .exclude("TIMESTAMP_MICROS") // refer to https://github.com/apache/incubator-gluten/issues/7127
+ .exclude("TIMESTAMP_MICROS") // refer to https://github.com/apache/gluten/issues/7127
.exclude("SPARK-33498: GetTimestamp,UnixTimestamp,ToUnixTimestamp with parseError")
.exclude("SPARK-34739,SPARK-35889: add a year-month interval to a timestamp")
.exclude("SPARK-34761,SPARK-35889: add a day-time interval to a timestamp")
@@ -797,9 +797,9 @@ class ClickHouseTestSettings extends BackendTestSettings {
.exclude("default")
.exclude("SPARK-37967: Literal.create support ObjectType")
enableSuite[GlutenMathExpressionsSuite]
- .exclude("unhex") // https://github.com/apache/incubator-gluten/issues/7232
- .exclude("round/bround/floor/ceil") // https://github.com/apache/incubator-gluten/issues/7233
- .exclude("atan2") // https://github.com/apache/incubator-gluten/issues/7233
+ .exclude("unhex") // https://github.com/apache/gluten/issues/7232
+ .exclude("round/bround/floor/ceil") // https://github.com/apache/gluten/issues/7233
+ .exclude("atan2") // https://github.com/apache/gluten/issues/7233
enableSuite[GlutenMiscExpressionsSuite]
enableSuite[GlutenNondeterministicSuite]
.exclude("MonotonicallyIncreasingID")
@@ -856,7 +856,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.exclude("REPEAT")
.exclude("ParseUrl")
.exclude("SPARK-33468: ParseUrl in ANSI mode should fail if input string is not a valid url")
- .exclude("FORMAT") // refer https://github.com/apache/incubator-gluten/issues/6765
+ .exclude("FORMAT") // refer https://github.com/apache/gluten/issues/6765
.exclude(
"soundex unit test"
) // CH and spark returns different results when input non-ASCII characters
diff --git a/gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala b/gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
index 4a850690d336..9c51dee8a6f3 100644
--- a/gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
+++ b/gluten-ut/spark33/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
@@ -153,9 +153,9 @@ class VeloxTestSettings extends BackendTestSettings {
enableSuite[GlutenGeneratorExpressionSuite]
enableSuite[GlutenIntervalExpressionsSuite]
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.exclude("$.store.book")
- // https://github.com/apache/incubator-gluten/issues/10948
+ // https://github.com/apache/gluten/issues/10948
.exclude("$['key with spaces']")
.exclude("$")
.exclude("$.store.book[0]")
@@ -803,7 +803,7 @@ class VeloxTestSettings extends BackendTestSettings {
.exclude("File source v2: support passing data filters to FileScan without partitionFilters")
// DISABLED: GLUTEN-4893 Vanilla UT checks scan operator by exactly matching the class type
.exclude("File source v2: support partition pruning")
- // https://github.com/apache/incubator-gluten/pull/9145.
+ // https://github.com/apache/gluten/pull/9145.
.excludeGlutenTest("SPARK-25237 compute correct input metrics in FileScanRDD")
enableSuite[GlutenFileScanSuite]
enableSuite[GlutenGeneratorFunctionSuite]
@@ -845,9 +845,9 @@ class VeloxTestSettings extends BackendTestSettings {
// Not useful and time consuming.
.exclude("SPARK-33084: Add jar support Ivy URI in SQL")
.exclude("SPARK-33084: Add jar support Ivy URI in SQL -- jar contains udf class")
- // https://github.com/apache/incubator-gluten/pull/9145.
+ // https://github.com/apache/gluten/pull/9145.
.exclude("SPARK-17515: CollectLimit.execute() should perform per-partition limits")
- // https://github.com/apache/incubator-gluten/pull/9145.
+ // https://github.com/apache/gluten/pull/9145.
.exclude("SPARK-19650: An action on a Command should not trigger a Spark job")
enableSuite[GlutenSQLQueryTestSuite]
enableSuite[GlutenStatisticsCollectionSuite]
diff --git a/gluten-ut/spark33/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala b/gluten-ut/spark33/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
index e4c59095eea0..a256b80ef25b 100644
--- a/gluten-ut/spark33/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
+++ b/gluten-ut/spark33/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
@@ -285,7 +285,7 @@ class GlutenMathExpressionsSuite extends MathExpressionsSuite with GlutenTestsTr
checkEvaluation(Round(1.12345678901234567, 8), 1.12345679)
checkEvaluation(Round(-0.98765432109876543, 5), -0.98765)
checkEvaluation(Round(12345.67890123456789, 6), 12345.678901)
- // Enable the test after fixing https://github.com/apache/incubator-gluten/issues/6827
+ // Enable the test after fixing https://github.com/apache/gluten/issues/6827
// checkEvaluation(Round(0.5549999999999999, 2), 0.55)
checkEvaluation(Round(-35, -1), -40)
checkEvaluation(Round(44, -1), 40)
diff --git a/gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala b/gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
index 71b621081746..a2f6499e27b6 100644
--- a/gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
+++ b/gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
@@ -647,7 +647,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.exclude("DATE_FROM_UNIX_DATE")
.exclude("UNIX_SECONDS")
.exclude("TIMESTAMP_SECONDS") // refer to https://github.com/ClickHouse/ClickHouse/issues/69280
- .exclude("TIMESTAMP_MICROS") // refer to https://github.com/apache/incubator-gluten/issues/7127
+ .exclude("TIMESTAMP_MICROS") // refer to https://github.com/apache/gluten/issues/7127
.exclude("SPARK-33498: GetTimestamp,UnixTimestamp,ToUnixTimestamp with parseError")
.exclude("SPARK-34739,SPARK-35889: add a year-month interval to a timestamp")
.exclude("SPARK-34761,SPARK-35889: add a day-time interval to a timestamp")
@@ -791,7 +791,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.exclude("REPEAT")
.exclude("ParseUrl")
.exclude("SPARK-33468: ParseUrl in ANSI mode should fail if input string is not a valid url")
- .exclude("FORMAT") // refer https://github.com/apache/incubator-gluten/issues/6765
+ .exclude("FORMAT") // refer https://github.com/apache/gluten/issues/6765
.exclude(
"soundex unit test"
) // CH and spark returns different results when input non-ASCII characters
diff --git a/gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala b/gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
index 87d085ec510d..9ff2ce221ea6 100644
--- a/gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
+++ b/gluten-ut/spark34/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
@@ -151,9 +151,9 @@ class VeloxTestSettings extends BackendTestSettings {
enableSuite[GlutenHigherOrderFunctionsSuite]
enableSuite[GlutenIntervalExpressionsSuite]
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.exclude("$.store.book")
- // https://github.com/apache/incubator-gluten/issues/10948
+ // https://github.com/apache/gluten/issues/10948
.exclude("$['key with spaces']")
.exclude("$")
.exclude("$.store.book[0]")
diff --git a/gluten-ut/spark34/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala b/gluten-ut/spark34/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
index 826176334c1c..c755a475ed5a 100644
--- a/gluten-ut/spark34/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
+++ b/gluten-ut/spark34/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
@@ -250,7 +250,7 @@ class GlutenMathExpressionsSuite extends MathExpressionsSuite with GlutenTestsTr
checkEvaluation(BRound(-3.5, 0), -4.0)
checkEvaluation(BRound(-0.35, 1), -0.4)
checkEvaluation(BRound(-35, -1), -40)
- // Enable the test after fixing https://github.com/apache/incubator-gluten/issues/6827
+ // Enable the test after fixing https://github.com/apache/gluten/issues/6827
// checkEvaluation(Round(0.5549999999999999, 2), 0.55)
checkEvaluation(BRound(BigDecimal("45.00"), -1), BigDecimal(40))
checkEvaluation(checkDataTypeAndCast(RoundFloor(Literal(2.5), Literal(0))), Decimal(2))
diff --git a/gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala b/gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
index 29d7534e8fae..30ee897ab42c 100644
--- a/gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
+++ b/gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
@@ -1046,7 +1046,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.excludeCH(
"SPARK-45882: BroadcastHashJoinExec propagate partitioning should respect CoalescedHashPartitioning")
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.includeCH("$.store.book")
.includeCH("$")
.includeCH("$.store.book[0]")
diff --git a/gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala b/gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
index 1207121da708..121de63a9433 100644
--- a/gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
+++ b/gluten-ut/spark35/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
@@ -151,9 +151,9 @@ class VeloxTestSettings extends BackendTestSettings {
enableSuite[GlutenHigherOrderFunctionsSuite]
enableSuite[GlutenIntervalExpressionsSuite]
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.exclude("$.store.book")
- // https://github.com/apache/incubator-gluten/issues/10948
+ // https://github.com/apache/gluten/issues/10948
.exclude("$['key with spaces']")
.exclude("$")
.exclude("$.store.book[0]")
diff --git a/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala b/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
index d49bbd3555eb..b4459df4209b 100644
--- a/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
+++ b/gluten-ut/spark35/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
@@ -253,7 +253,7 @@ class GlutenMathExpressionsSuite extends MathExpressionsSuite with GlutenTestsTr
checkEvaluation(Round(1.12345678901234567, 8), 1.12345679)
checkEvaluation(Round(-0.98765432109876543, 5), -0.98765)
checkEvaluation(Round(12345.67890123456789, 6), 12345.678901)
- // Enable the test after fixing https://github.com/apache/incubator-gluten/issues/6827
+ // Enable the test after fixing https://github.com/apache/gluten/issues/6827
// checkEvaluation(Round(0.5549999999999999, 2), 0.55)
checkEvaluation(BRound(BigDecimal("45.00"), -1), BigDecimal(40))
checkEvaluation(checkDataTypeAndCast(RoundFloor(Literal(2.5), Literal(0))), Decimal(2))
diff --git a/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala b/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
index ec99089c324e..5a04389186b2 100644
--- a/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
+++ b/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
@@ -1022,7 +1022,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.excludeCH(
"SPARK-45882: BroadcastHashJoinExec propagate partitioning should respect CoalescedHashPartitioning")
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.includeCH("$.store.book")
.includeCH("$")
.includeCH("$.store.book[0]")
diff --git a/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala b/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
index 4f7c67daaad6..0f18950da398 100644
--- a/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
+++ b/gluten-ut/spark40/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
@@ -161,9 +161,9 @@ class VeloxTestSettings extends BackendTestSettings {
enableSuite[GlutenHigherOrderFunctionsSuite]
enableSuite[GlutenIntervalExpressionsSuite]
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/10948
+ // https://github.com/apache/gluten/issues/10948
.exclude("$['key with spaces']")
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.exclude("$.store.book")
.exclude("$")
.exclude("$.store.book[0]")
@@ -623,7 +623,7 @@ class VeloxTestSettings extends BackendTestSettings {
// error message mismatch is accepted
.exclude("schema mismatch failure error message for parquet reader")
.exclude("schema mismatch failure error message for parquet vectorized reader")
- // https://github.com/apache/incubator-gluten/issues/11220
+ // https://github.com/apache/gluten/issues/11220
.excludeByPrefix("SPARK-40819")
.excludeByPrefix("SPARK-46056") // TODO: fix in Spark-4.0
.exclude("CANNOT_MERGE_SCHEMAS: Failed merging schemas")
@@ -943,7 +943,7 @@ class VeloxTestSettings extends BackendTestSettings {
.exclude("SPARK-41048: Improve output partitioning and ordering with AQE cache")
// Rewrite this test since it checks the physical operator which is changed in Gluten
.exclude("SPARK-27439: Explain result should match collected result after view change")
- // https://github.com/apache/incubator-gluten/issues/11570
+ // https://github.com/apache/gluten/issues/11570
.exclude("getRows: binary")
enableSuite[GlutenDataFrameTimeWindowingSuite]
enableSuite[GlutenDataFrameTungstenSuite]
diff --git a/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala b/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
index d49bbd3555eb..b4459df4209b 100644
--- a/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
+++ b/gluten-ut/spark40/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
@@ -253,7 +253,7 @@ class GlutenMathExpressionsSuite extends MathExpressionsSuite with GlutenTestsTr
checkEvaluation(Round(1.12345678901234567, 8), 1.12345679)
checkEvaluation(Round(-0.98765432109876543, 5), -0.98765)
checkEvaluation(Round(12345.67890123456789, 6), 12345.678901)
- // Enable the test after fixing https://github.com/apache/incubator-gluten/issues/6827
+ // Enable the test after fixing https://github.com/apache/gluten/issues/6827
// checkEvaluation(Round(0.5549999999999999, 2), 0.55)
checkEvaluation(BRound(BigDecimal("45.00"), -1), BigDecimal(40))
checkEvaluation(checkDataTypeAndCast(RoundFloor(Literal(2.5), Literal(0))), Decimal(2))
diff --git a/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala b/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
index ec99089c324e..5a04389186b2 100644
--- a/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
+++ b/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/clickhouse/ClickHouseTestSettings.scala
@@ -1022,7 +1022,7 @@ class ClickHouseTestSettings extends BackendTestSettings {
.excludeCH(
"SPARK-45882: BroadcastHashJoinExec propagate partitioning should respect CoalescedHashPartitioning")
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.includeCH("$.store.book")
.includeCH("$")
.includeCH("$.store.book[0]")
diff --git a/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala b/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
index 0dadfa1d0bd8..e8516ce742f2 100644
--- a/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
+++ b/gluten-ut/spark41/src/test/scala/org/apache/gluten/utils/velox/VeloxTestSettings.scala
@@ -169,9 +169,9 @@ class VeloxTestSettings extends BackendTestSettings {
enableSuite[GlutenHigherOrderFunctionsSuite]
enableSuite[GlutenIntervalExpressionsSuite]
enableSuite[GlutenJsonExpressionsSuite]
- // https://github.com/apache/incubator-gluten/issues/10948
+ // https://github.com/apache/gluten/issues/10948
.exclude("$['key with spaces']")
- // https://github.com/apache/incubator-gluten/issues/8102
+ // https://github.com/apache/gluten/issues/8102
.exclude("$.store.book")
.exclude("$")
.exclude("$.store.book[0]")
@@ -407,7 +407,7 @@ class VeloxTestSettings extends BackendTestSettings {
enableSuite[GlutenV2SessionCatalogTableSuite]
enableSuite[GlutenCSVv1Suite]
enableSuite[GlutenCSVv2Suite]
- // https://github.com/apache/incubator-gluten/issues/11505
+ // https://github.com/apache/gluten/issues/11505
enableSuite[GlutenCSVLegacyTimeParserSuite]
.exclude("Write timestamps correctly in ISO8601 format by default")
.exclude("csv with variant")
@@ -584,7 +584,7 @@ class VeloxTestSettings extends BackendTestSettings {
// error message mismatch is accepted
.exclude("schema mismatch failure error message for parquet reader")
.exclude("schema mismatch failure error message for parquet vectorized reader")
- // https://github.com/apache/incubator-gluten/issues/11220
+ // https://github.com/apache/gluten/issues/11220
.excludeByPrefix("SPARK-40819")
.excludeByPrefix("SPARK-46056") // TODO: fix in Spark-4.0
.exclude("CANNOT_MERGE_SCHEMAS: Failed merging schemas")
@@ -913,7 +913,7 @@ class VeloxTestSettings extends BackendTestSettings {
.exclude("SPARK-41048: Improve output partitioning and ordering with AQE cache")
// Rewrite this test since it checks the physical operator which is changed in Gluten
.exclude("SPARK-27439: Explain result should match collected result after view change")
- // https://github.com/apache/incubator-gluten/issues/11570
+ // https://github.com/apache/gluten/issues/11570
.exclude("getRows: binary")
enableSuite[GlutenDataFrameTimeWindowingSuite]
enableSuite[GlutenDataFrameTungstenSuite]
diff --git a/gluten-ut/spark41/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala b/gluten-ut/spark41/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
index d49bbd3555eb..b4459df4209b 100644
--- a/gluten-ut/spark41/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
+++ b/gluten-ut/spark41/src/test/scala/org/apache/spark/sql/catalyst/expressions/GlutenMathExpressionsSuite.scala
@@ -253,7 +253,7 @@ class GlutenMathExpressionsSuite extends MathExpressionsSuite with GlutenTestsTr
checkEvaluation(Round(1.12345678901234567, 8), 1.12345679)
checkEvaluation(Round(-0.98765432109876543, 5), -0.98765)
checkEvaluation(Round(12345.67890123456789, 6), 12345.678901)
- // Enable the test after fixing https://github.com/apache/incubator-gluten/issues/6827
+ // Enable the test after fixing https://github.com/apache/gluten/issues/6827
// checkEvaluation(Round(0.5549999999999999, 2), 0.55)
checkEvaluation(BRound(BigDecimal("45.00"), -1), BigDecimal(40))
checkEvaluation(checkDataTypeAndCast(RoundFloor(Literal(2.5), Literal(0))), Decimal(2))
diff --git a/mkdocs.yml b/mkdocs.yml
index 1c03a1ce600a..4ba589908f76 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -15,7 +15,7 @@
site_name: Gluten
repo_name: 'Fork on GitHub '
-repo_url: "https://github.com/apache/incubator-gluten.git"
+repo_url: "https://github.com/apache/gluten.git"
edit_uri: ""
diff --git a/pom.xml b/pom.xml
index df2c3a1a8eb4..4637a9e77781 100644
--- a/pom.xml
+++ b/pom.xml
@@ -29,7 +29,7 @@