From 1f94e9b55fb0147c85225eb4767c3008b7e66e77 Mon Sep 17 00:00:00 2001
From: ZENOTME <st810918843@gmail.com>
Date: Wed, 28 May 2025 00:11:57 +0800
Subject: [PATCH 1/2] add retryable property for Error

---
 crates/catalog/rest/src/catalog.rs |  3 ++-
 crates/iceberg/src/error.rs        | 10 ++++++++++
 2 files changed, 12 insertions(+), 1 deletion(-)

diff --git a/crates/catalog/rest/src/catalog.rs b/crates/catalog/rest/src/catalog.rs
index 851819069..9ba5c6ea3 100644
--- a/crates/catalog/rest/src/catalog.rs
+++ b/crates/catalog/rest/src/catalog.rs
@@ -754,7 +754,8 @@ impl Catalog for RestCatalog {
                 return Err(Error::new(
                     ErrorKind::Unexpected,
                     "CommitFailedException, one or more requirements failed. The client may retry.",
-                ));
+                )
+                .with_retryable(true));
             }
             StatusCode::INTERNAL_SERVER_ERROR => {
                 return Err(Error::new(
diff --git a/crates/iceberg/src/error.rs b/crates/iceberg/src/error.rs
index 37529ee6f..a91d2ed08 100644
--- a/crates/iceberg/src/error.rs
+++ b/crates/iceberg/src/error.rs
@@ -134,6 +134,8 @@ pub struct Error {
     source: Option<anyhow::Error>,
 
     backtrace: Backtrace,
+
+    retryable: bool,
 }
 
 impl Display for Error {
@@ -225,9 +227,17 @@ impl Error {
             // `Backtrace::capture()` will check if backtrace has been enabled
             // internally. It's zero cost if backtrace is disabled.
             backtrace: Backtrace::capture(),
+
+            retryable: false,
         }
     }
 
+    /// Set retryable of the error.
+    pub fn with_retryable(mut self, retryable: bool) -> Self {
+        self.retryable = retryable;
+        self
+    }
+
     /// Add more context in error.
     pub fn with_context(mut self, key: &'static str, value: impl Into<String>) -> Self {
         self.context.push((key, value.into()));

From 2191258b2cf440c13de1d015832e44f3214cc25d Mon Sep 17 00:00:00 2001
From: ZENOTME <st810918843@gmail.com>
Date: Wed, 28 May 2025 00:42:52 +0800
Subject: [PATCH 2/2] fix spark version

---
 crates/integration_tests/testdata/spark/Dockerfile | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/crates/integration_tests/testdata/spark/Dockerfile b/crates/integration_tests/testdata/spark/Dockerfile
index e866a74c9..420edb231 100644
--- a/crates/integration_tests/testdata/spark/Dockerfile
+++ b/crates/integration_tests/testdata/spark/Dockerfile
@@ -27,7 +27,7 @@ ENV PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.9.7-src.zip:$
 
 RUN mkdir -p ${HADOOP_HOME} && mkdir -p ${SPARK_HOME} && mkdir -p /home/iceberg/spark-events
 WORKDIR ${SPARK_HOME}
 
-ENV SPARK_VERSION=3.5.5
+ENV SPARK_VERSION=3.5.6
 ENV ICEBERG_SPARK_RUNTIME_VERSION=3.5_2.12
 ENV ICEBERG_VERSION=1.6.0