diff --git a/crates/catalog/rest/src/catalog.rs b/crates/catalog/rest/src/catalog.rs index 851819069..9ba5c6ea3 100644 --- a/crates/catalog/rest/src/catalog.rs +++ b/crates/catalog/rest/src/catalog.rs @@ -754,7 +754,8 @@ impl Catalog for RestCatalog { return Err(Error::new( ErrorKind::Unexpected, "CommitFailedException, one or more requirements failed. The client may retry.", - )); + ) + .with_retryable(true)); } StatusCode::INTERNAL_SERVER_ERROR => { return Err(Error::new( diff --git a/crates/iceberg/src/error.rs b/crates/iceberg/src/error.rs index 37529ee6f..a91d2ed08 100644 --- a/crates/iceberg/src/error.rs +++ b/crates/iceberg/src/error.rs @@ -134,6 +134,8 @@ pub struct Error { source: Option<anyhow::Error>, backtrace: Backtrace, + + retryable: bool, } impl Display for Error { @@ -225,9 +227,17 @@ impl Error { // `Backtrace::capture()` will check if backtrace has been enabled // internally. It's zero cost if backtrace is disabled. backtrace: Backtrace::capture(), + + retryable: false, } } + /// Set retryable of the error. + pub fn with_retryable(mut self, retryable: bool) -> Self { + self.retryable = retryable; + self + } + /// Add more context in error. pub fn with_context(mut self, key: &'static str, value: impl Into<String>) -> Self { self.context.push((key, value.into())); diff --git a/crates/integration_tests/testdata/spark/Dockerfile b/crates/integration_tests/testdata/spark/Dockerfile index e866a74c9..420edb231 100644 --- a/crates/integration_tests/testdata/spark/Dockerfile +++ b/crates/integration_tests/testdata/spark/Dockerfile @@ -27,7 +27,7 @@ ENV PYTHONPATH=$SPARK_HOME/python:$SPARK_HOME/python/lib/py4j-0.10.9.7-src.zip:$PYTHONPATH RUN mkdir -p ${HADOOP_HOME} && mkdir -p ${SPARK_HOME} && mkdir -p /home/iceberg/spark-events WORKDIR ${SPARK_HOME} -ENV SPARK_VERSION=3.5.5 +ENV SPARK_VERSION=3.5.6 ENV ICEBERG_SPARK_RUNTIME_VERSION=3.5_2.12 ENV ICEBERG_VERSION=1.6.0